Datasets:
Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/Lib/site-packages/pkg_resources/_vendor/__init__.py +0 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/__init__.py +36 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_adapters.py +170 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_common.py +207 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_compat.py +108 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_itertools.py +35 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_legacy.py +120 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/abc.py +170 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/readers.py +120 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/simple.py +106 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__init__.py +0 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/jaraco/context.py +288 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/jaraco/functools.py +556 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/jaraco/text/__init__.py +599 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__init__.py +6 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/more.py +0 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/recipes.py +930 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py +15 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/_elffile.py +108 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/_manylinux.py +240 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/_musllinux.py +80 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/_parser.py +353 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py +61 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/_tokenizer.py +192 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/markers.py +252 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/metadata.py +408 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/requirements.py +95 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py +1008 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/tags.py +546 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/utils.py +141 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/packaging/version.py +564 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/android.py +120 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/typing_extensions.py +2209 -0
- .venv/Lib/site-packages/pkg_resources/_vendor/zipp.py +329 -0
- .venv/Lib/site-packages/pkg_resources/extern/__init__.py +80 -0
- .venv/Lib/site-packages/setuptools/__init__.py +266 -0
- .venv/Lib/site-packages/setuptools/_core_metadata.py +258 -0
- .venv/Lib/site-packages/setuptools/_entry_points.py +88 -0
- .venv/Lib/site-packages/setuptools/_imp.py +88 -0
- .venv/Lib/site-packages/setuptools/_importlib.py +51 -0
- .venv/Lib/site-packages/setuptools/_itertools.py +23 -0
- .venv/Lib/site-packages/setuptools/_normalization.py +125 -0
- .venv/Lib/site-packages/setuptools/_path.py +37 -0
- .venv/Lib/site-packages/setuptools/_reqs.py +33 -0
- .venv/Lib/site-packages/setuptools/cli-32.exe +0 -0
- .venv/Lib/site-packages/setuptools/cli-64.exe +0 -0
- .venv/Lib/site-packages/setuptools/cli-arm64.exe +0 -0
- .venv/Lib/site-packages/setuptools/cli.exe +0 -0
- .venv/Lib/site-packages/setuptools/command/alias.py +78 -0
- .venv/Lib/site-packages/setuptools/command/bdist_egg.py +464 -0
.venv/Lib/site-packages/pkg_resources/_vendor/__init__.py
ADDED
|
File without changes
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/__init__.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Read resources contained within a package."""
|
| 2 |
+
|
| 3 |
+
from ._common import (
|
| 4 |
+
as_file,
|
| 5 |
+
files,
|
| 6 |
+
Package,
|
| 7 |
+
)
|
| 8 |
+
|
| 9 |
+
from ._legacy import (
|
| 10 |
+
contents,
|
| 11 |
+
open_binary,
|
| 12 |
+
read_binary,
|
| 13 |
+
open_text,
|
| 14 |
+
read_text,
|
| 15 |
+
is_resource,
|
| 16 |
+
path,
|
| 17 |
+
Resource,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
from .abc import ResourceReader
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
__all__ = [
|
| 24 |
+
'Package',
|
| 25 |
+
'Resource',
|
| 26 |
+
'ResourceReader',
|
| 27 |
+
'as_file',
|
| 28 |
+
'contents',
|
| 29 |
+
'files',
|
| 30 |
+
'is_resource',
|
| 31 |
+
'open_binary',
|
| 32 |
+
'open_text',
|
| 33 |
+
'path',
|
| 34 |
+
'read_binary',
|
| 35 |
+
'read_text',
|
| 36 |
+
]
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_adapters.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from contextlib import suppress
|
| 2 |
+
from io import TextIOWrapper
|
| 3 |
+
|
| 4 |
+
from . import abc
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class SpecLoaderAdapter:
|
| 8 |
+
"""
|
| 9 |
+
Adapt a package spec to adapt the underlying loader.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
def __init__(self, spec, adapter=lambda spec: spec.loader):
|
| 13 |
+
self.spec = spec
|
| 14 |
+
self.loader = adapter(spec)
|
| 15 |
+
|
| 16 |
+
def __getattr__(self, name):
|
| 17 |
+
return getattr(self.spec, name)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class TraversableResourcesLoader:
|
| 21 |
+
"""
|
| 22 |
+
Adapt a loader to provide TraversableResources.
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
def __init__(self, spec):
|
| 26 |
+
self.spec = spec
|
| 27 |
+
|
| 28 |
+
def get_resource_reader(self, name):
|
| 29 |
+
return CompatibilityFiles(self.spec)._native()
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def _io_wrapper(file, mode='r', *args, **kwargs):
|
| 33 |
+
if mode == 'r':
|
| 34 |
+
return TextIOWrapper(file, *args, **kwargs)
|
| 35 |
+
elif mode == 'rb':
|
| 36 |
+
return file
|
| 37 |
+
raise ValueError(
|
| 38 |
+
"Invalid mode value '{}', only 'r' and 'rb' are supported".format(mode)
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class CompatibilityFiles:
|
| 43 |
+
"""
|
| 44 |
+
Adapter for an existing or non-existent resource reader
|
| 45 |
+
to provide a compatibility .files().
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
class SpecPath(abc.Traversable):
|
| 49 |
+
"""
|
| 50 |
+
Path tied to a module spec.
|
| 51 |
+
Can be read and exposes the resource reader children.
|
| 52 |
+
"""
|
| 53 |
+
|
| 54 |
+
def __init__(self, spec, reader):
|
| 55 |
+
self._spec = spec
|
| 56 |
+
self._reader = reader
|
| 57 |
+
|
| 58 |
+
def iterdir(self):
|
| 59 |
+
if not self._reader:
|
| 60 |
+
return iter(())
|
| 61 |
+
return iter(
|
| 62 |
+
CompatibilityFiles.ChildPath(self._reader, path)
|
| 63 |
+
for path in self._reader.contents()
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
def is_file(self):
|
| 67 |
+
return False
|
| 68 |
+
|
| 69 |
+
is_dir = is_file
|
| 70 |
+
|
| 71 |
+
def joinpath(self, other):
|
| 72 |
+
if not self._reader:
|
| 73 |
+
return CompatibilityFiles.OrphanPath(other)
|
| 74 |
+
return CompatibilityFiles.ChildPath(self._reader, other)
|
| 75 |
+
|
| 76 |
+
@property
|
| 77 |
+
def name(self):
|
| 78 |
+
return self._spec.name
|
| 79 |
+
|
| 80 |
+
def open(self, mode='r', *args, **kwargs):
|
| 81 |
+
return _io_wrapper(self._reader.open_resource(None), mode, *args, **kwargs)
|
| 82 |
+
|
| 83 |
+
class ChildPath(abc.Traversable):
|
| 84 |
+
"""
|
| 85 |
+
Path tied to a resource reader child.
|
| 86 |
+
Can be read but doesn't expose any meaningful children.
|
| 87 |
+
"""
|
| 88 |
+
|
| 89 |
+
def __init__(self, reader, name):
|
| 90 |
+
self._reader = reader
|
| 91 |
+
self._name = name
|
| 92 |
+
|
| 93 |
+
def iterdir(self):
|
| 94 |
+
return iter(())
|
| 95 |
+
|
| 96 |
+
def is_file(self):
|
| 97 |
+
return self._reader.is_resource(self.name)
|
| 98 |
+
|
| 99 |
+
def is_dir(self):
|
| 100 |
+
return not self.is_file()
|
| 101 |
+
|
| 102 |
+
def joinpath(self, other):
|
| 103 |
+
return CompatibilityFiles.OrphanPath(self.name, other)
|
| 104 |
+
|
| 105 |
+
@property
|
| 106 |
+
def name(self):
|
| 107 |
+
return self._name
|
| 108 |
+
|
| 109 |
+
def open(self, mode='r', *args, **kwargs):
|
| 110 |
+
return _io_wrapper(
|
| 111 |
+
self._reader.open_resource(self.name), mode, *args, **kwargs
|
| 112 |
+
)
|
| 113 |
+
|
| 114 |
+
class OrphanPath(abc.Traversable):
|
| 115 |
+
"""
|
| 116 |
+
Orphan path, not tied to a module spec or resource reader.
|
| 117 |
+
Can't be read and doesn't expose any meaningful children.
|
| 118 |
+
"""
|
| 119 |
+
|
| 120 |
+
def __init__(self, *path_parts):
|
| 121 |
+
if len(path_parts) < 1:
|
| 122 |
+
raise ValueError('Need at least one path part to construct a path')
|
| 123 |
+
self._path = path_parts
|
| 124 |
+
|
| 125 |
+
def iterdir(self):
|
| 126 |
+
return iter(())
|
| 127 |
+
|
| 128 |
+
def is_file(self):
|
| 129 |
+
return False
|
| 130 |
+
|
| 131 |
+
is_dir = is_file
|
| 132 |
+
|
| 133 |
+
def joinpath(self, other):
|
| 134 |
+
return CompatibilityFiles.OrphanPath(*self._path, other)
|
| 135 |
+
|
| 136 |
+
@property
|
| 137 |
+
def name(self):
|
| 138 |
+
return self._path[-1]
|
| 139 |
+
|
| 140 |
+
def open(self, mode='r', *args, **kwargs):
|
| 141 |
+
raise FileNotFoundError("Can't open orphan path")
|
| 142 |
+
|
| 143 |
+
def __init__(self, spec):
|
| 144 |
+
self.spec = spec
|
| 145 |
+
|
| 146 |
+
@property
|
| 147 |
+
def _reader(self):
|
| 148 |
+
with suppress(AttributeError):
|
| 149 |
+
return self.spec.loader.get_resource_reader(self.spec.name)
|
| 150 |
+
|
| 151 |
+
def _native(self):
|
| 152 |
+
"""
|
| 153 |
+
Return the native reader if it supports files().
|
| 154 |
+
"""
|
| 155 |
+
reader = self._reader
|
| 156 |
+
return reader if hasattr(reader, 'files') else self
|
| 157 |
+
|
| 158 |
+
def __getattr__(self, attr):
|
| 159 |
+
return getattr(self._reader, attr)
|
| 160 |
+
|
| 161 |
+
def files(self):
|
| 162 |
+
return CompatibilityFiles.SpecPath(self.spec, self._reader)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def wrap_spec(package):
|
| 166 |
+
"""
|
| 167 |
+
Construct a package spec with traversable compatibility
|
| 168 |
+
on the spec/loader/reader.
|
| 169 |
+
"""
|
| 170 |
+
return SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_common.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import pathlib
|
| 3 |
+
import tempfile
|
| 4 |
+
import functools
|
| 5 |
+
import contextlib
|
| 6 |
+
import types
|
| 7 |
+
import importlib
|
| 8 |
+
import inspect
|
| 9 |
+
import warnings
|
| 10 |
+
import itertools
|
| 11 |
+
|
| 12 |
+
from typing import Union, Optional, cast
|
| 13 |
+
from .abc import ResourceReader, Traversable
|
| 14 |
+
|
| 15 |
+
from ._compat import wrap_spec
|
| 16 |
+
|
| 17 |
+
Package = Union[types.ModuleType, str]
|
| 18 |
+
Anchor = Package
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def package_to_anchor(func):
|
| 22 |
+
"""
|
| 23 |
+
Replace 'package' parameter as 'anchor' and warn about the change.
|
| 24 |
+
|
| 25 |
+
Other errors should fall through.
|
| 26 |
+
|
| 27 |
+
>>> files('a', 'b')
|
| 28 |
+
Traceback (most recent call last):
|
| 29 |
+
TypeError: files() takes from 0 to 1 positional arguments but 2 were given
|
| 30 |
+
"""
|
| 31 |
+
undefined = object()
|
| 32 |
+
|
| 33 |
+
@functools.wraps(func)
|
| 34 |
+
def wrapper(anchor=undefined, package=undefined):
|
| 35 |
+
if package is not undefined:
|
| 36 |
+
if anchor is not undefined:
|
| 37 |
+
return func(anchor, package)
|
| 38 |
+
warnings.warn(
|
| 39 |
+
"First parameter to files is renamed to 'anchor'",
|
| 40 |
+
DeprecationWarning,
|
| 41 |
+
stacklevel=2,
|
| 42 |
+
)
|
| 43 |
+
return func(package)
|
| 44 |
+
elif anchor is undefined:
|
| 45 |
+
return func()
|
| 46 |
+
return func(anchor)
|
| 47 |
+
|
| 48 |
+
return wrapper
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@package_to_anchor
|
| 52 |
+
def files(anchor: Optional[Anchor] = None) -> Traversable:
|
| 53 |
+
"""
|
| 54 |
+
Get a Traversable resource for an anchor.
|
| 55 |
+
"""
|
| 56 |
+
return from_package(resolve(anchor))
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def get_resource_reader(package: types.ModuleType) -> Optional[ResourceReader]:
|
| 60 |
+
"""
|
| 61 |
+
Return the package's loader if it's a ResourceReader.
|
| 62 |
+
"""
|
| 63 |
+
# We can't use
|
| 64 |
+
# a issubclass() check here because apparently abc.'s __subclasscheck__()
|
| 65 |
+
# hook wants to create a weak reference to the object, but
|
| 66 |
+
# zipimport.zipimporter does not support weak references, resulting in a
|
| 67 |
+
# TypeError. That seems terrible.
|
| 68 |
+
spec = package.__spec__
|
| 69 |
+
reader = getattr(spec.loader, 'get_resource_reader', None) # type: ignore
|
| 70 |
+
if reader is None:
|
| 71 |
+
return None
|
| 72 |
+
return reader(spec.name) # type: ignore
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
@functools.singledispatch
|
| 76 |
+
def resolve(cand: Optional[Anchor]) -> types.ModuleType:
|
| 77 |
+
return cast(types.ModuleType, cand)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
@resolve.register
|
| 81 |
+
def _(cand: str) -> types.ModuleType:
|
| 82 |
+
return importlib.import_module(cand)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
@resolve.register
|
| 86 |
+
def _(cand: None) -> types.ModuleType:
|
| 87 |
+
return resolve(_infer_caller().f_globals['__name__'])
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
def _infer_caller():
|
| 91 |
+
"""
|
| 92 |
+
Walk the stack and find the frame of the first caller not in this module.
|
| 93 |
+
"""
|
| 94 |
+
|
| 95 |
+
def is_this_file(frame_info):
|
| 96 |
+
return frame_info.filename == __file__
|
| 97 |
+
|
| 98 |
+
def is_wrapper(frame_info):
|
| 99 |
+
return frame_info.function == 'wrapper'
|
| 100 |
+
|
| 101 |
+
not_this_file = itertools.filterfalse(is_this_file, inspect.stack())
|
| 102 |
+
# also exclude 'wrapper' due to singledispatch in the call stack
|
| 103 |
+
callers = itertools.filterfalse(is_wrapper, not_this_file)
|
| 104 |
+
return next(callers).frame
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def from_package(package: types.ModuleType):
|
| 108 |
+
"""
|
| 109 |
+
Return a Traversable object for the given package.
|
| 110 |
+
|
| 111 |
+
"""
|
| 112 |
+
spec = wrap_spec(package)
|
| 113 |
+
reader = spec.loader.get_resource_reader(spec.name)
|
| 114 |
+
return reader.files()
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
@contextlib.contextmanager
|
| 118 |
+
def _tempfile(
|
| 119 |
+
reader,
|
| 120 |
+
suffix='',
|
| 121 |
+
# gh-93353: Keep a reference to call os.remove() in late Python
|
| 122 |
+
# finalization.
|
| 123 |
+
*,
|
| 124 |
+
_os_remove=os.remove,
|
| 125 |
+
):
|
| 126 |
+
# Not using tempfile.NamedTemporaryFile as it leads to deeper 'try'
|
| 127 |
+
# blocks due to the need to close the temporary file to work on Windows
|
| 128 |
+
# properly.
|
| 129 |
+
fd, raw_path = tempfile.mkstemp(suffix=suffix)
|
| 130 |
+
try:
|
| 131 |
+
try:
|
| 132 |
+
os.write(fd, reader())
|
| 133 |
+
finally:
|
| 134 |
+
os.close(fd)
|
| 135 |
+
del reader
|
| 136 |
+
yield pathlib.Path(raw_path)
|
| 137 |
+
finally:
|
| 138 |
+
try:
|
| 139 |
+
_os_remove(raw_path)
|
| 140 |
+
except FileNotFoundError:
|
| 141 |
+
pass
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _temp_file(path):
|
| 145 |
+
return _tempfile(path.read_bytes, suffix=path.name)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def _is_present_dir(path: Traversable) -> bool:
|
| 149 |
+
"""
|
| 150 |
+
Some Traversables implement ``is_dir()`` to raise an
|
| 151 |
+
exception (i.e. ``FileNotFoundError``) when the
|
| 152 |
+
directory doesn't exist. This function wraps that call
|
| 153 |
+
to always return a boolean and only return True
|
| 154 |
+
if there's a dir and it exists.
|
| 155 |
+
"""
|
| 156 |
+
with contextlib.suppress(FileNotFoundError):
|
| 157 |
+
return path.is_dir()
|
| 158 |
+
return False
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
@functools.singledispatch
|
| 162 |
+
def as_file(path):
|
| 163 |
+
"""
|
| 164 |
+
Given a Traversable object, return that object as a
|
| 165 |
+
path on the local file system in a context manager.
|
| 166 |
+
"""
|
| 167 |
+
return _temp_dir(path) if _is_present_dir(path) else _temp_file(path)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
@as_file.register(pathlib.Path)
|
| 171 |
+
@contextlib.contextmanager
|
| 172 |
+
def _(path):
|
| 173 |
+
"""
|
| 174 |
+
Degenerate behavior for pathlib.Path objects.
|
| 175 |
+
"""
|
| 176 |
+
yield path
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
@contextlib.contextmanager
|
| 180 |
+
def _temp_path(dir: tempfile.TemporaryDirectory):
|
| 181 |
+
"""
|
| 182 |
+
Wrap tempfile.TemporyDirectory to return a pathlib object.
|
| 183 |
+
"""
|
| 184 |
+
with dir as result:
|
| 185 |
+
yield pathlib.Path(result)
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
@contextlib.contextmanager
|
| 189 |
+
def _temp_dir(path):
|
| 190 |
+
"""
|
| 191 |
+
Given a traversable dir, recursively replicate the whole tree
|
| 192 |
+
to the file system in a context manager.
|
| 193 |
+
"""
|
| 194 |
+
assert path.is_dir()
|
| 195 |
+
with _temp_path(tempfile.TemporaryDirectory()) as temp_dir:
|
| 196 |
+
yield _write_contents(temp_dir, path)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
def _write_contents(target, source):
|
| 200 |
+
child = target.joinpath(source.name)
|
| 201 |
+
if source.is_dir():
|
| 202 |
+
child.mkdir()
|
| 203 |
+
for item in source.iterdir():
|
| 204 |
+
_write_contents(child, item)
|
| 205 |
+
else:
|
| 206 |
+
child.write_bytes(source.read_bytes())
|
| 207 |
+
return child
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_compat.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# flake8: noqa
|
| 2 |
+
|
| 3 |
+
import abc
|
| 4 |
+
import os
|
| 5 |
+
import sys
|
| 6 |
+
import pathlib
|
| 7 |
+
from contextlib import suppress
|
| 8 |
+
from typing import Union
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
if sys.version_info >= (3, 10):
|
| 12 |
+
from zipfile import Path as ZipPath # type: ignore
|
| 13 |
+
else:
|
| 14 |
+
from ..zipp import Path as ZipPath # type: ignore
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
try:
|
| 18 |
+
from typing import runtime_checkable # type: ignore
|
| 19 |
+
except ImportError:
|
| 20 |
+
|
| 21 |
+
def runtime_checkable(cls): # type: ignore
|
| 22 |
+
return cls
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
try:
|
| 26 |
+
from typing import Protocol # type: ignore
|
| 27 |
+
except ImportError:
|
| 28 |
+
Protocol = abc.ABC # type: ignore
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class TraversableResourcesLoader:
|
| 32 |
+
"""
|
| 33 |
+
Adapt loaders to provide TraversableResources and other
|
| 34 |
+
compatibility.
|
| 35 |
+
|
| 36 |
+
Used primarily for Python 3.9 and earlier where the native
|
| 37 |
+
loaders do not yet implement TraversableResources.
|
| 38 |
+
"""
|
| 39 |
+
|
| 40 |
+
def __init__(self, spec):
|
| 41 |
+
self.spec = spec
|
| 42 |
+
|
| 43 |
+
@property
|
| 44 |
+
def path(self):
|
| 45 |
+
return self.spec.origin
|
| 46 |
+
|
| 47 |
+
def get_resource_reader(self, name):
|
| 48 |
+
from . import readers, _adapters
|
| 49 |
+
|
| 50 |
+
def _zip_reader(spec):
|
| 51 |
+
with suppress(AttributeError):
|
| 52 |
+
return readers.ZipReader(spec.loader, spec.name)
|
| 53 |
+
|
| 54 |
+
def _namespace_reader(spec):
|
| 55 |
+
with suppress(AttributeError, ValueError):
|
| 56 |
+
return readers.NamespaceReader(spec.submodule_search_locations)
|
| 57 |
+
|
| 58 |
+
def _available_reader(spec):
|
| 59 |
+
with suppress(AttributeError):
|
| 60 |
+
return spec.loader.get_resource_reader(spec.name)
|
| 61 |
+
|
| 62 |
+
def _native_reader(spec):
|
| 63 |
+
reader = _available_reader(spec)
|
| 64 |
+
return reader if hasattr(reader, 'files') else None
|
| 65 |
+
|
| 66 |
+
def _file_reader(spec):
|
| 67 |
+
try:
|
| 68 |
+
path = pathlib.Path(self.path)
|
| 69 |
+
except TypeError:
|
| 70 |
+
return None
|
| 71 |
+
if path.exists():
|
| 72 |
+
return readers.FileReader(self)
|
| 73 |
+
|
| 74 |
+
return (
|
| 75 |
+
# native reader if it supplies 'files'
|
| 76 |
+
_native_reader(self.spec)
|
| 77 |
+
or
|
| 78 |
+
# local ZipReader if a zip module
|
| 79 |
+
_zip_reader(self.spec)
|
| 80 |
+
or
|
| 81 |
+
# local NamespaceReader if a namespace module
|
| 82 |
+
_namespace_reader(self.spec)
|
| 83 |
+
or
|
| 84 |
+
# local FileReader
|
| 85 |
+
_file_reader(self.spec)
|
| 86 |
+
# fallback - adapt the spec ResourceReader to TraversableReader
|
| 87 |
+
or _adapters.CompatibilityFiles(self.spec)
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def wrap_spec(package):
|
| 92 |
+
"""
|
| 93 |
+
Construct a package spec with traversable compatibility
|
| 94 |
+
on the spec/loader/reader.
|
| 95 |
+
|
| 96 |
+
Supersedes _adapters.wrap_spec to use TraversableResourcesLoader
|
| 97 |
+
from above for older Python compatibility (<3.10).
|
| 98 |
+
"""
|
| 99 |
+
from . import _adapters
|
| 100 |
+
|
| 101 |
+
return _adapters.SpecLoaderAdapter(package.__spec__, TraversableResourcesLoader)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
if sys.version_info >= (3, 9):
|
| 105 |
+
StrPath = Union[str, os.PathLike[str]]
|
| 106 |
+
else:
|
| 107 |
+
# PathLike is only subscriptable at runtime in 3.9+
|
| 108 |
+
StrPath = Union[str, "os.PathLike[str]"]
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_itertools.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from itertools import filterfalse
|
| 2 |
+
|
| 3 |
+
from typing import (
|
| 4 |
+
Callable,
|
| 5 |
+
Iterable,
|
| 6 |
+
Iterator,
|
| 7 |
+
Optional,
|
| 8 |
+
Set,
|
| 9 |
+
TypeVar,
|
| 10 |
+
Union,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
# Type and type variable definitions
|
| 14 |
+
_T = TypeVar('_T')
|
| 15 |
+
_U = TypeVar('_U')
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def unique_everseen(
|
| 19 |
+
iterable: Iterable[_T], key: Optional[Callable[[_T], _U]] = None
|
| 20 |
+
) -> Iterator[_T]:
|
| 21 |
+
"List unique elements, preserving order. Remember all elements ever seen."
|
| 22 |
+
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
|
| 23 |
+
# unique_everseen('ABBCcAD', str.lower) --> A B C D
|
| 24 |
+
seen: Set[Union[_T, _U]] = set()
|
| 25 |
+
seen_add = seen.add
|
| 26 |
+
if key is None:
|
| 27 |
+
for element in filterfalse(seen.__contains__, iterable):
|
| 28 |
+
seen_add(element)
|
| 29 |
+
yield element
|
| 30 |
+
else:
|
| 31 |
+
for element in iterable:
|
| 32 |
+
k = key(element)
|
| 33 |
+
if k not in seen:
|
| 34 |
+
seen_add(k)
|
| 35 |
+
yield element
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/_legacy.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import os
|
| 3 |
+
import pathlib
|
| 4 |
+
import types
|
| 5 |
+
import warnings
|
| 6 |
+
|
| 7 |
+
from typing import Union, Iterable, ContextManager, BinaryIO, TextIO, Any
|
| 8 |
+
|
| 9 |
+
from . import _common
|
| 10 |
+
|
| 11 |
+
Package = Union[types.ModuleType, str]
|
| 12 |
+
Resource = str
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def deprecated(func):
|
| 16 |
+
@functools.wraps(func)
|
| 17 |
+
def wrapper(*args, **kwargs):
|
| 18 |
+
warnings.warn(
|
| 19 |
+
f"{func.__name__} is deprecated. Use files() instead. "
|
| 20 |
+
"Refer to https://importlib-resources.readthedocs.io"
|
| 21 |
+
"/en/latest/using.html#migrating-from-legacy for migration advice.",
|
| 22 |
+
DeprecationWarning,
|
| 23 |
+
stacklevel=2,
|
| 24 |
+
)
|
| 25 |
+
return func(*args, **kwargs)
|
| 26 |
+
|
| 27 |
+
return wrapper
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def normalize_path(path: Any) -> str:
|
| 31 |
+
"""Normalize a path by ensuring it is a string.
|
| 32 |
+
|
| 33 |
+
If the resulting string contains path separators, an exception is raised.
|
| 34 |
+
"""
|
| 35 |
+
str_path = str(path)
|
| 36 |
+
parent, file_name = os.path.split(str_path)
|
| 37 |
+
if parent:
|
| 38 |
+
raise ValueError(f'{path!r} must be only a file name')
|
| 39 |
+
return file_name
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
@deprecated
|
| 43 |
+
def open_binary(package: Package, resource: Resource) -> BinaryIO:
|
| 44 |
+
"""Return a file-like object opened for binary reading of the resource."""
|
| 45 |
+
return (_common.files(package) / normalize_path(resource)).open('rb')
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
@deprecated
|
| 49 |
+
def read_binary(package: Package, resource: Resource) -> bytes:
|
| 50 |
+
"""Return the binary contents of the resource."""
|
| 51 |
+
return (_common.files(package) / normalize_path(resource)).read_bytes()
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@deprecated
|
| 55 |
+
def open_text(
|
| 56 |
+
package: Package,
|
| 57 |
+
resource: Resource,
|
| 58 |
+
encoding: str = 'utf-8',
|
| 59 |
+
errors: str = 'strict',
|
| 60 |
+
) -> TextIO:
|
| 61 |
+
"""Return a file-like object opened for text reading of the resource."""
|
| 62 |
+
return (_common.files(package) / normalize_path(resource)).open(
|
| 63 |
+
'r', encoding=encoding, errors=errors
|
| 64 |
+
)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
@deprecated
|
| 68 |
+
def read_text(
|
| 69 |
+
package: Package,
|
| 70 |
+
resource: Resource,
|
| 71 |
+
encoding: str = 'utf-8',
|
| 72 |
+
errors: str = 'strict',
|
| 73 |
+
) -> str:
|
| 74 |
+
"""Return the decoded string of the resource.
|
| 75 |
+
|
| 76 |
+
The decoding-related arguments have the same semantics as those of
|
| 77 |
+
bytes.decode().
|
| 78 |
+
"""
|
| 79 |
+
with open_text(package, resource, encoding, errors) as fp:
|
| 80 |
+
return fp.read()
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@deprecated
|
| 84 |
+
def contents(package: Package) -> Iterable[str]:
|
| 85 |
+
"""Return an iterable of entries in `package`.
|
| 86 |
+
|
| 87 |
+
Note that not all entries are resources. Specifically, directories are
|
| 88 |
+
not considered resources. Use `is_resource()` on each entry returned here
|
| 89 |
+
to check if it is a resource or not.
|
| 90 |
+
"""
|
| 91 |
+
return [path.name for path in _common.files(package).iterdir()]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@deprecated
|
| 95 |
+
def is_resource(package: Package, name: str) -> bool:
|
| 96 |
+
"""True if `name` is a resource inside `package`.
|
| 97 |
+
|
| 98 |
+
Directories are *not* resources.
|
| 99 |
+
"""
|
| 100 |
+
resource = normalize_path(name)
|
| 101 |
+
return any(
|
| 102 |
+
traversable.name == resource and traversable.is_file()
|
| 103 |
+
for traversable in _common.files(package).iterdir()
|
| 104 |
+
)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@deprecated
|
| 108 |
+
def path(
|
| 109 |
+
package: Package,
|
| 110 |
+
resource: Resource,
|
| 111 |
+
) -> ContextManager[pathlib.Path]:
|
| 112 |
+
"""A context manager providing a file path object to the resource.
|
| 113 |
+
|
| 114 |
+
If the resource does not already exist on its own on the file system,
|
| 115 |
+
a temporary file will be created. If the file was created, the file
|
| 116 |
+
will be deleted upon exiting the context manager (no exception is
|
| 117 |
+
raised if the file was deleted prior to the context manager
|
| 118 |
+
exiting).
|
| 119 |
+
"""
|
| 120 |
+
return _common.as_file(_common.files(package) / normalize_path(resource))
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/abc.py
ADDED
|
@@ -0,0 +1,170 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import io
|
| 3 |
+
import itertools
|
| 4 |
+
import pathlib
|
| 5 |
+
from typing import Any, BinaryIO, Iterable, Iterator, NoReturn, Text, Optional
|
| 6 |
+
|
| 7 |
+
from ._compat import runtime_checkable, Protocol, StrPath
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
__all__ = ["ResourceReader", "Traversable", "TraversableResources"]
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class ResourceReader(metaclass=abc.ABCMeta):
    """Abstract base class for loaders to provide resource reading support."""

    @abc.abstractmethod
    def open_resource(self, resource: Text) -> BinaryIO:
        """Return an opened, file-like object for binary reading.

        The 'resource' argument is expected to represent only a file name.
        If the resource cannot be found, FileNotFoundError is raised.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def resource_path(self, resource: Text) -> Text:
        """Return the file system path to the specified resource.

        The 'resource' argument is expected to represent only a file name.
        If the resource does not exist on the file system, raise
        FileNotFoundError.
        """
        # This deliberately raises FileNotFoundError instead of
        # NotImplementedError so that if this method is accidentally called,
        # it'll still do the right thing.
        raise FileNotFoundError

    @abc.abstractmethod
    def is_resource(self, path: Text) -> bool:
        """Return True if the named 'path' is a resource.

        Files are resources, directories are not.
        """
        # Same rationale as above: a stray call on the base class behaves
        # like a missing file rather than a programming error.
        raise FileNotFoundError

    @abc.abstractmethod
    def contents(self) -> Iterable[str]:
        """Return an iterable of entries in `package`."""
        # Same rationale as above: a stray call on the base class behaves
        # like a missing file rather than a programming error.
        raise FileNotFoundError
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class TraversalError(Exception):
    # Raised by Traversable.joinpath when a path segment cannot be
    # matched against the entries of the directory being traversed.
    pass
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
@runtime_checkable
class Traversable(Protocol):
    """
    An object with a subset of pathlib.Path methods suitable for
    traversing directories and opening files.

    Any exceptions that occur when accessing the backing resource
    may propagate unaltered.
    """

    @abc.abstractmethod
    def iterdir(self) -> Iterator["Traversable"]:
        """
        Yield Traversable objects in self
        """

    def read_bytes(self) -> bytes:
        """
        Read contents of self as bytes
        """
        with self.open('rb') as strm:
            return strm.read()

    def read_text(self, encoding: Optional[str] = None) -> str:
        """
        Read contents of self as text
        """
        with self.open(encoding=encoding) as strm:
            return strm.read()

    @abc.abstractmethod
    def is_dir(self) -> bool:
        """
        Return True if self is a directory
        """

    @abc.abstractmethod
    def is_file(self) -> bool:
        """
        Return True if self is a file
        """

    def joinpath(self, *descendants: StrPath) -> "Traversable":
        """
        Return Traversable resolved with any descendants applied.

        Each descendant should be a path segment relative to self
        and each may contain multiple levels separated by
        ``posixpath.sep`` (``/``).
        """
        if not descendants:
            return self
        # Flatten every descendant into individual segments so that
        # 'a/b' and ('a', 'b') traverse identically.
        names = itertools.chain.from_iterable(
            path.parts for path in map(pathlib.PurePosixPath, descendants)
        )
        # Consume only the first segment; the rest of the iterator is
        # forwarded to the recursive call below.
        target = next(names)
        matches = (
            traversable for traversable in self.iterdir() if traversable.name == target
        )
        try:
            match = next(matches)
        except StopIteration:
            # list(names) captures the segments that were never reached,
            # which aids debugging the failed traversal.
            raise TraversalError(
                "Target not found during traversal.", target, list(names)
            )
        # Recurse on the matched child with the remaining segments.
        return match.joinpath(*names)

    def __truediv__(self, child: StrPath) -> "Traversable":
        """
        Return Traversable child in self
        """
        return self.joinpath(child)

    @abc.abstractmethod
    def open(self, mode='r', *args, **kwargs):
        """
        mode may be 'r' or 'rb' to open as text or binary. Return a handle
        suitable for reading (same as pathlib.Path.open).

        When opening as text, accepts encoding parameters such as those
        accepted by io.TextIOWrapper.
        """

    @property
    @abc.abstractmethod
    def name(self) -> str:
        """
        The base name of this object without any parent references.
        """
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class TraversableResources(ResourceReader):
    """
    A ResourceReader expressed in terms of a single Traversable root.

    Subclasses supply ``files()``; every other reader operation is
    derived by traversing that object.
    """

    @abc.abstractmethod
    def files(self) -> "Traversable":
        """Return a Traversable object for the loaded package."""

    def open_resource(self, resource: StrPath) -> io.BufferedReader:
        # Resolve the name against the package root, then open binary.
        target = self.files().joinpath(resource)
        return target.open('rb')

    def resource_path(self, resource: Any) -> NoReturn:
        # A Traversable need not exist on the file system, so no concrete
        # path can ever be promised here.
        raise FileNotFoundError(resource)

    def is_resource(self, path: StrPath) -> bool:
        candidate = self.files().joinpath(path)
        return candidate.is_file()

    def contents(self) -> Iterator[str]:
        for item in self.files().iterdir():
            yield item.name
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/readers.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import pathlib
|
| 3 |
+
import operator
|
| 4 |
+
|
| 5 |
+
from . import abc
|
| 6 |
+
|
| 7 |
+
from ._itertools import unique_everseen
|
| 8 |
+
from ._compat import ZipPath
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def remove_duplicates(items):
    """Iterate *items* with later duplicates dropped, preserving order."""
    # dict preserves insertion order (Python 3.7+), so fromkeys keeps
    # only the first occurrence of each item.
    return iter(dict.fromkeys(items))
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FileReader(abc.TraversableResources):
    """TraversableResources for a package living directly on disk."""

    def __init__(self, loader):
        # Anchor on the directory containing the loaded module file.
        self.path = pathlib.Path(loader.path).parent

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path / resource)

    def files(self):
        return self.path
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class ZipReader(abc.TraversableResources):
    """TraversableResources for modules loaded from a zip archive."""

    def __init__(self, loader, module):
        # Only the final component of the dotted module name contributes
        # to the in-archive prefix.
        _, _, name = module.rpartition('.')
        # Zip entry names always use '/'; normalize any Windows separators
        # from the loader's prefix.
        self.prefix = loader.prefix.replace('\\', '/') + name + '/'
        self.archive = loader.archive

    def open_resource(self, resource):
        try:
            return super().open_resource(resource)
        except KeyError as exc:
            # zipfile reports missing members with KeyError; translate to
            # the FileNotFoundError required by the ResourceReader contract.
            raise FileNotFoundError(exc.args[0])

    def is_resource(self, path):
        # workaround for `zipfile.Path.is_file` returning true
        # for non-existent paths.
        target = self.files().joinpath(path)
        return target.is_file() and target.exists()

    def files(self):
        return ZipPath(self.archive, self.prefix)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class MultiplexedPath(abc.Traversable):
    """
    Given a series of Traversable objects, implement a merged
    version of the interface across all objects. Useful for
    namespace packages which may be multihomed at a single
    name.
    """

    def __init__(self, *paths):
        # Deduplicate while preserving order, then require at least one
        # existing directory: a MultiplexedPath always models a directory.
        self._paths = list(map(pathlib.Path, remove_duplicates(paths)))
        if not self._paths:
            message = 'MultiplexedPath must contain at least one path'
            raise FileNotFoundError(message)
        if not all(path.is_dir() for path in self._paths):
            raise NotADirectoryError('MultiplexedPath only supports directories')

    def iterdir(self):
        # Merge children from every home; when the same name appears in
        # several homes, only the first occurrence is yielded.
        files = (file for path in self._paths for file in path.iterdir())
        return unique_everseen(files, key=operator.attrgetter('name'))

    def read_bytes(self):
        raise FileNotFoundError(f'{self} is not a file')

    def read_text(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    def is_dir(self):
        # Always a directory by construction (enforced in __init__).
        return True

    def is_file(self):
        return False

    def joinpath(self, *descendants):
        try:
            # Reuse the generic Traversable traversal over the merged view.
            return super().joinpath(*descendants)
        except abc.TraversalError:
            # One of the paths did not resolve (a directory does not exist).
            # Just return something that will not exist.
            return self._paths[0].joinpath(*descendants)

    def open(self, *args, **kwargs):
        raise FileNotFoundError(f'{self} is not a file')

    @property
    def name(self):
        # Every home shares the package name; the first is representative.
        return self._paths[0].name

    def __repr__(self):
        paths = ', '.join(f"'{path}'" for path in self._paths)
        return f'MultiplexedPath({paths})'
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class NamespaceReader(abc.TraversableResources):
    """TraversableResources over a namespace package's multiple homes."""

    def __init__(self, namespace_path):
        # Cheap heuristic guard: a namespace package's __path__ reprs as
        # a _NamespacePath; anything else is rejected.
        if 'NamespacePath' not in str(namespace_path):
            raise ValueError('Invalid path')
        # Merge all namespace homes into a single Traversable.
        self.path = MultiplexedPath(*list(namespace_path))

    def resource_path(self, resource):
        """
        Return the file system path to prevent
        `resources.path()` from creating a temporary
        copy.
        """
        return str(self.path.joinpath(resource))

    def files(self):
        return self.path
|
.venv/Lib/site-packages/pkg_resources/_vendor/importlib_resources/simple.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Interface adapters for low-level readers.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import abc
|
| 6 |
+
import io
|
| 7 |
+
import itertools
|
| 8 |
+
from typing import BinaryIO, List
|
| 9 |
+
|
| 10 |
+
from .abc import Traversable, TraversableResources
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class SimpleReader(abc.ABC):
    """
    The minimal low-level interface a resource provider must supply.
    """

    @property
    @abc.abstractmethod
    def package(self) -> str:
        """
        Dotted name of the package whose resources this reader serves.
        """

    @abc.abstractmethod
    def children(self) -> List['SimpleReader']:
        """
        Return readers for the available child containers
        (e.g. subdirectories).
        """

    @abc.abstractmethod
    def resources(self) -> List[str]:
        """
        Return the named resources available in this virtual package.
        """

    @abc.abstractmethod
    def open_binary(self, resource: str) -> BinaryIO:
        """
        Return a binary file-like object for the named resource.
        """

    @property
    def name(self):
        """The final dotted component of :attr:`package`."""
        return self.package.rpartition('.')[-1]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class ResourceContainer(Traversable):
    """
    Traversable container for a package's resources via its reader.
    """

    def __init__(self, reader: SimpleReader):
        self.reader = reader

    def is_dir(self):
        # A container always models a directory.
        return True

    def is_file(self):
        return False

    def iterdir(self):
        # NOTE(review): ``self.reader.resources`` is accessed as an
        # attribute, not called, even though SimpleReader declares it as an
        # abstract method — implementations presumably expose it as a
        # property/iterable; confirm against concrete subclasses.
        files = (ResourceHandle(self, name) for name in self.reader.resources)
        dirs = map(ResourceContainer, self.reader.children())
        return itertools.chain(files, dirs)

    def open(self, *args, **kwargs):
        # Directories cannot be opened.
        raise IsADirectoryError()
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class ResourceHandle(Traversable):
    """
    Handle to a named resource in a ResourceReader.
    """

    def __init__(self, parent: ResourceContainer, name: str):
        self.parent = parent
        self.name = name  # type: ignore

    def is_file(self):
        return True

    def is_dir(self):
        return False

    def open(self, mode='r', *args, **kwargs):
        """Open the resource: binary stream for 'rb', text wrapper otherwise.

        Extra args/kwargs are forwarded to io.TextIOWrapper for text mode
        (encoding, errors, newline, ...).
        """
        stream = self.parent.reader.open_binary(self.name)
        if 'b' not in mode:
            # Bug fix: the binary stream must be passed as TextIOWrapper's
            # buffer; previously it was dropped
            # (``io.TextIOWrapper(*args, **kwargs)``), so text-mode opens
            # failed or wrapped nothing.
            stream = io.TextIOWrapper(stream, *args, **kwargs)
        return stream

    def joinpath(self, name):
        # A resource is a leaf node; there is nothing below it.
        raise RuntimeError("Cannot traverse into a resource")
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class TraversableReader(TraversableResources, SimpleReader):
    """
    A TraversableResources based on SimpleReader. Resource providers
    may derive from this class to provide the TraversableResources
    interface by supplying the SimpleReader interface.
    """

    def files(self):
        # The reader itself serves as the root container for traversal.
        return ResourceContainer(self)
|
.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/__init__.py
ADDED
|
File without changes
|
.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/context.py
ADDED
|
@@ -0,0 +1,288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import subprocess
|
| 3 |
+
import contextlib
|
| 4 |
+
import functools
|
| 5 |
+
import tempfile
|
| 6 |
+
import shutil
|
| 7 |
+
import operator
|
| 8 |
+
import warnings
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@contextlib.contextmanager
def pushd(dir):
    """
    Temporarily make *dir* the working directory, restoring the
    previous one on exit.

    >>> tmp_path = getfixture('tmp_path')
    >>> with pushd(tmp_path):
    ...     assert os.getcwd() == os.fspath(tmp_path)
    >>> assert os.getcwd() != os.fspath(tmp_path)
    """
    previous = os.getcwd()
    os.chdir(dir)
    try:
        yield dir
    finally:
        # Restore even if the body raised.
        os.chdir(previous)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
@contextlib.contextmanager
def tarball_context(url, target_dir=None, runner=None, pushd=pushd):
    """
    Get a tarball, extract it, change to that directory, yield, then
    clean up.
    `runner` is the function to invoke commands.
    `pushd` is a context manager for changing the directory.
    """
    if target_dir is None:
        # Derive the directory name from the archive filename.
        target_dir = os.path.basename(url).replace('.tar.gz', '').replace('.tgz', '')
    if runner is None:
        runner = functools.partial(subprocess.check_call, shell=True)
    else:
        warnings.warn("runner parameter is deprecated", DeprecationWarning)
    # In the tar command, use --strip-components=1 to strip the first path and
    # then
    # use -C to cause the files to be extracted to {target_dir}. This ensures
    # that we always know where the files were extracted.
    runner('mkdir {target_dir}'.format(**vars()))
    try:
        # Stream the download straight into tar via a shell pipeline.
        getter = 'wget {url} -O -'
        extract = 'tar x{compression} --strip-components=1 -C {target_dir}'
        cmd = ' | '.join((getter, extract))
        runner(cmd.format(compression=infer_compression(url), **vars()))
        with pushd(target_dir):
            yield target_dir
    finally:
        # Remove the extracted tree even if download/extract/body failed.
        runner('rm -Rf {target_dir}'.format(**vars()))
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
def infer_compression(url):
    """
    Given a URL or filename, infer the compression code for tar.

    >>> infer_compression('http://foo/bar.tar.gz')
    'z'
    >>> infer_compression('http://foo/bar.tgz')
    'z'
    >>> infer_compression('file.bz')
    'j'
    >>> infer_compression('file.xz')
    'J'
    """
    # Only the last two characters are inspected; anything unrecognized
    # is treated as gzip.
    suffix_codes = {'gz': 'z', 'bz': 'j', 'xz': 'J'}
    return suffix_codes.get(url[-2:], 'z')
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
@contextlib.contextmanager
def temp_dir(remover=shutil.rmtree):
    """
    Create a temporary directory context. Pass a custom remover
    to override the removal behavior.

    >>> import pathlib
    >>> with temp_dir() as the_dir:
    ...     assert os.path.isdir(the_dir)
    ...     _ = pathlib.Path(the_dir).joinpath('somefile').write_text('contents')
    >>> assert not os.path.exists(the_dir)
    """
    # Local name differs from the function name to avoid shadowing.
    created = tempfile.mkdtemp()
    try:
        yield created
    finally:
        remover(created)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
@contextlib.contextmanager
def repo_context(url, branch=None, quiet=True, dest_ctx=temp_dir):
    """
    Check out the repo indicated by url.

    If dest_ctx is supplied, it should be a context manager
    to yield the target directory for the check out.
    """
    # Crude VCS detection: any URL mentioning 'git' uses git, otherwise hg.
    exe = 'git' if 'git' in url else 'hg'
    with dest_ctx() as repo_dir:
        cmd = [exe, 'clone', url, repo_dir]
        if branch:
            cmd.extend(['--branch', branch])
        # Fix: use subprocess.DEVNULL instead of open(os.path.devnull, 'w'),
        # which leaked an open file handle (it was never closed).
        stdout = subprocess.DEVNULL if quiet else None
        subprocess.check_call(cmd, stdout=stdout)
        yield repo_dir
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
@contextlib.contextmanager
def null():
    """
    A do-nothing context manager yielding ``None``.

    >>> with null() as value:
    ...     assert value is None
    """
    yield None
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
class ExceptionTrap:
    """
    A context manager that will catch certain exceptions and provide an
    indication they occurred.

    >>> with ExceptionTrap() as trap:
    ...     raise Exception()
    >>> bool(trap)
    True

    >>> with ExceptionTrap() as trap:
    ...     pass
    >>> bool(trap)
    False

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise ValueError("1 + 1 is not 3")
    >>> bool(trap)
    True
    >>> trap.value
    ValueError('1 + 1 is not 3')
    >>> trap.tb
    <traceback object at ...>

    >>> with ExceptionTrap(ValueError) as trap:
    ...     raise Exception()
    Traceback (most recent call last):
    ...
    Exception

    >>> bool(trap)
    False
    """

    # Class-level default: (type, value, traceback) triple, mirroring
    # sys.exc_info(); replaced per-instance when a matching exception fires.
    exc_info = None, None, None

    def __init__(self, exceptions=(Exception,)):
        self.exceptions = exceptions

    def __enter__(self):
        return self

    @property
    def type(self):
        # Exception class of the trapped exception, or None.
        return self.exc_info[0]

    @property
    def value(self):
        # Exception instance, or None.
        return self.exc_info[1]

    @property
    def tb(self):
        # Traceback object, or None.
        return self.exc_info[2]

    def __exit__(self, *exc_info):
        # 'type' intentionally shadows the builtin within this method.
        type = exc_info[0]
        matches = type and issubclass(type, self.exceptions)
        if matches:
            self.exc_info = exc_info
        # Truthy return suppresses the exception; non-matching exceptions
        # propagate because 'matches' is falsy then.
        return matches

    def __bool__(self):
        return bool(self.type)

    def raises(self, func, *, _test=bool):
        """
        Wrap func and replace the result with the truth
        value of the trap (True if an exception occurred).

        First, give the decorator an alias to support Python 3.8
        Syntax.

        >>> raises = ExceptionTrap(ValueError).raises

        Now decorate a function that always fails.

        >>> @raises
        ... def fail():
        ...     raise ValueError('failed')
        >>> fail()
        True
        """

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # A fresh trap per call keeps wrapped functions re-entrant.
            with ExceptionTrap(self.exceptions) as trap:
                func(*args, **kwargs)
            return _test(trap)

        return wrapper

    def passes(self, func):
        """
        Wrap func and replace the result with the truth
        value of the trap (True if no exception).

        First, give the decorator an alias to support Python 3.8
        Syntax.

        >>> passes = ExceptionTrap(ValueError).passes

        Now decorate a function that always fails.

        >>> @passes
        ... def fail():
        ...     raise ValueError('failed')

        >>> fail()
        False
        """
        # 'passes' is simply 'raises' with the truth value inverted.
        return self.raises(func, _test=operator.not_)
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
class suppress(contextlib.suppress, contextlib.ContextDecorator):
    """
    A version of contextlib.suppress with decorator support.

    >>> @suppress(KeyError)
    ... def key_error():
    ...     {}['']
    >>> key_error()
    """
    # Behavior comes entirely from the bases: contextlib.suppress swallows
    # the named exceptions; ContextDecorator adds decorator usage.
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class on_interrupt(contextlib.ContextDecorator):
    """
    Replace a KeyboardInterrupt with SystemExit(1)

    >>> def do_interrupt():
    ...     raise KeyboardInterrupt()
    >>> on_interrupt('error')(do_interrupt)()
    Traceback (most recent call last):
    ...
    SystemExit: 1
    >>> on_interrupt('error', code=255)(do_interrupt)()
    Traceback (most recent call last):
    ...
    SystemExit: 255
    >>> on_interrupt('suppress')(do_interrupt)()
    >>> with __import__('pytest').raises(KeyboardInterrupt):
    ...     on_interrupt('ignore')(do_interrupt)()
    """

    def __init__(
        self,
        # action: 'error' (raise SystemExit), 'suppress' (swallow the
        # interrupt), or 'ignore' (let it propagate unchanged).
        action='error',
        # py3.7 compat
        # /,
        code=1,
    ):
        self.action = action
        self.code = code

    def __enter__(self):
        return self

    def __exit__(self, exctype, excinst, exctb):
        # Returning None lets non-interrupt exceptions (and 'ignore')
        # propagate unchanged.
        if exctype is not KeyboardInterrupt or self.action == 'ignore':
            return
        elif self.action == 'error':
            # Chain the original interrupt as the cause of the exit.
            raise SystemExit(self.code) from excinst
        # 'suppress': a True return swallows the interrupt.
        return self.action == 'suppress'
|
.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/functools.py
ADDED
|
@@ -0,0 +1,556 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import time
|
| 3 |
+
import inspect
|
| 4 |
+
import collections
|
| 5 |
+
import types
|
| 6 |
+
import itertools
|
| 7 |
+
import warnings
|
| 8 |
+
|
| 9 |
+
import pkg_resources.extern.more_itertools
|
| 10 |
+
|
| 11 |
+
from typing import Callable, TypeVar
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
CallableT = TypeVar("CallableT", bound=Callable[..., object])
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def compose(*funcs):
    """
    Compose any number of unary functions into a single unary function.

    >>> import textwrap
    >>> expected = str.strip(textwrap.dedent(compose.__doc__))
    >>> strip_and_dedent = compose(str.strip, textwrap.dedent)
    >>> strip_and_dedent(compose.__doc__) == expected
    True

    Compose also allows the innermost function to take arbitrary arguments.

    >>> round_three = lambda x: round(x, ndigits=3)
    >>> f = compose(round_three, int.__truediv__)
    >>> [f(3*x, x+1) for x in range(1,10)]
    [1.5, 2.0, 2.25, 2.4, 2.5, 2.571, 2.625, 2.667, 2.7]
    """

    def _fuse(outer, inner):
        # outer wraps inner; only the innermost sees *args/**kwargs.
        def fused(*args, **kwargs):
            return outer(inner(*args, **kwargs))

        return fused

    return functools.reduce(_fuse, funcs)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def method_caller(method_name, *args, **kwargs):
    """
    Build a function that invokes the named method on whatever target
    it is given, passing along the captured positional and keyword
    arguments.

    >>> lower = method_caller('lower')
    >>> lower('MyString')
    'mystring'
    """

    def invoke(target):
        bound = getattr(target, method_name)
        return bound(*args, **kwargs)

    return invoke
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def once(func):
    """
    Decorate func so it's only ever called the first time.

    This decorator can ensure that an expensive or non-idempotent function
    will not be expensive on subsequent calls and is idempotent.

    >>> add_three = once(lambda a: a+3)
    >>> add_three(3)
    6
    >>> add_three(9)
    6
    >>> add_three('12')
    6

    To reset the stored value, simply clear the property ``saved_result``.

    >>> del add_three.saved_result
    >>> add_three(9)
    12
    >>> add_three(8)
    12

    Or invoke 'reset()' on it.

    >>> add_three.reset()
    >>> add_three(-3)
    0
    >>> add_three(0)
    0
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        # EAFP: only the very first call pays for ``func``.
        try:
            return wrapper.saved_result
        except AttributeError:
            wrapper.saved_result = func(*args, **kwargs)
            return wrapper.saved_result

    # ``pop`` (like the original __delitem__) raises KeyError when no
    # result has been saved yet.
    wrapper.reset = lambda: vars(wrapper).pop('saved_result')
    return wrapper
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def method_cache(
    method: CallableT,
    cache_wrapper: Callable[
        [CallableT], CallableT
    ] = functools.lru_cache(),  # type: ignore[assignment]
) -> CallableT:
    """
    Wrap lru_cache to support storing the cache data in the object instances.

    Abstracts the common paradigm where the method explicitly saves an
    underscore-prefixed protected property on first call and returns that
    subsequently.

    >>> class MyClass:
    ...     calls = 0
    ...
    ...     @method_cache
    ...     def method(self, value):
    ...         self.calls += 1
    ...         return value

    >>> a = MyClass()
    >>> a.method(3)
    3
    >>> for x in range(75):
    ...     res = a.method(x)
    >>> a.calls
    75

    Note that the apparent behavior will be exactly like that of lru_cache
    except that the cache is stored on each instance, so values in one
    instance will not flush values from another, and when an instance is
    deleted, so are the cached values for that instance.

    >>> b = MyClass()
    >>> for x in range(35):
    ...     res = b.method(x)
    >>> b.calls
    35
    >>> a.method(0)
    0
    >>> a.calls
    75

    Note that if method had been decorated with ``functools.lru_cache()``,
    a.calls would have been 76 (due to the cached value of 0 having been
    flushed by the 'b' instance).

    Clear the cache with ``.cache_clear()``

    >>> a.method.cache_clear()

    Same for a method that hasn't yet been called.

    >>> c = MyClass()
    >>> c.method.cache_clear()

    Another cache wrapper may be supplied:

    >>> cache = functools.lru_cache(maxsize=2)
    >>> MyClass.method2 = method_cache(lambda self: 3, cache_wrapper=cache)
    >>> a = MyClass()
    >>> a.method2()
    3

    Caution - do not subsequently wrap the method with another decorator, such
    as ``@property``, which changes the semantics of the function.

    See also
    http://code.activestate.com/recipes/577452-a-memoize-decorator-for-instance-methods/
    for another implementation and additional justification.
    """

    def wrapper(self: object, *args: object, **kwargs: object) -> object:
        # it's the first call, replace the method with a cached, bound method
        # Installing the cached bound method on the instance shadows the
        # class attribute, so subsequent lookups go straight to the cache
        # and this wrapper never runs again for this instance.
        bound_method: CallableT = types.MethodType(  # type: ignore[assignment]
            method, self
        )
        cached_method = cache_wrapper(bound_method)
        setattr(self, method.__name__, cached_method)
        return cached_method(*args, **kwargs)

    # Support cache clear even before cache has been created.
    wrapper.cache_clear = lambda: None  # type: ignore[attr-defined]

    # Special (dunder) methods are looked up on the type, not the
    # instance, so they need the alternate proxy strategy; for ordinary
    # methods _special_method_cache returns None and ``wrapper`` is used.
    return (  # type: ignore[return-value]
        _special_method_cache(method, cache_wrapper) or wrapper
    )
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def _special_method_cache(method, cache_wrapper):
    """
    Because Python treats special methods differently, it's not
    possible to use instance attributes to implement the cached
    methods.

    Instead, install the wrapper method under a different name
    and return a simple proxy to that wrapper.

    https://github.com/jaraco/jaraco.functools/issues/5
    """
    name = method.__name__
    special_names = '__getattr__', '__getitem__'
    if name not in special_names:
        # Not a handled special method: signal the caller (method_cache)
        # to fall back to its default per-instance wrapper by returning
        # None implicitly.
        return

    # Store the per-instance cache under an alternate attribute name,
    # since dunder lookup bypasses the instance dict.
    wrapper_name = '__cached' + name

    def proxy(self, *args, **kwargs):
        # Lazily create the cached, bound method on first use per instance.
        if wrapper_name not in vars(self):
            bound = types.MethodType(method, self)
            cache = cache_wrapper(bound)
            setattr(self, wrapper_name, cache)
        else:
            cache = getattr(self, wrapper_name)
        return cache(*args, **kwargs)

    return proxy
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def apply(transform):
    """
    Decorate a function with a transform function that is
    invoked on results returned from the decorated function.

    >>> @apply(reversed)
    ... def get_numbers(start):
    ...     "doc for get_numbers"
    ...     return range(start, start+3)
    >>> list(get_numbers(4))
    [6, 5, 4]
    >>> get_numbers.__doc__
    'doc for get_numbers'
    """

    def wrap(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Apply the transform to whatever the wrapped function returns.
            return transform(func(*args, **kwargs))

        return wrapper

    return wrap
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def result_invoke(action):
    r"""
    Decorate a function such that ``action`` is invoked on each result
    (for its side-effect) before that result is returned unchanged.

    >>> @result_invoke(print)
    ... def add_two(a, b):
    ...     return a + b
    >>> x = add_two(2, 3)
    5
    >>> x
    5
    """

    def decorate(func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            outcome = func(*args, **kwargs)
            action(outcome)
            return outcome

        return inner

    return decorate
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def invoke(f, *args, **kwargs):
    """
    Call a function for its side effect after initialization.

    Decorating a function with ``invoke`` makes explicit the author's
    intent for it to run immediately upon definition, rather than an
    immediate call looking incidental. The function itself is returned,
    so it stays bound to its name for later reuse. The call happens
    synchronously, as the definition is processed.

    >>> @invoke
    ... def func(): print("called")
    called
    >>> func()
    called

    Use functools.partial to pass parameters to the initial call

    >>> @functools.partial(invoke, name='bingo')
    ... def func(name): print("called with", name)
    called with bingo
    """
    f(*args, **kwargs)
    return f
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
def call_aside(*args, **kwargs):
    """
    Deprecated name for invoke.

    Retained for backward compatibility; new code should call
    :func:`invoke` directly.
    """
    # stacklevel=2 attributes the DeprecationWarning to the caller's
    # source line rather than to this shim, so users can find and fix
    # the deprecated usage.
    warnings.warn(
        "call_aside is deprecated, use invoke",
        DeprecationWarning,
        stacklevel=2,
    )
    return invoke(*args, **kwargs)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
class Throttler:
    """
    Rate-limit a function (or other callable)
    """

    def __init__(self, func, max_rate=float('Inf')):
        # Unwrap an already-throttled callable rather than stacking limits.
        if isinstance(func, Throttler):
            func = func.func
        self.func = func
        # Maximum calls per second; the default (infinity) imposes no limit.
        self.max_rate = max_rate
        self.reset()

    def reset(self):
        # Epoch 0 means "effectively never called", so the first
        # invocation proceeds without waiting.
        self.last_called = 0

    def __call__(self, *args, **kwargs):
        self._wait()
        return self.func(*args, **kwargs)

    def _wait(self):
        "ensure at least 1/max_rate seconds from last call"
        elapsed = time.time() - self.last_called
        must_wait = 1 / self.max_rate - elapsed
        # Sleep only for the remaining fraction of the interval (if any).
        time.sleep(max(0, must_wait))
        self.last_called = time.time()

    def __get__(self, obj, type=None):
        # Descriptor protocol: when a Throttler decorates a method,
        # return a callable bound to ``obj`` that still waits first.
        return first_invoke(self._wait, functools.partial(self.func, obj))
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def first_invoke(func1, func2):
    """
    Return a function that when invoked will invoke func1 without
    any parameters (for its side-effect) and then invoke func2
    with whatever parameters were passed, returning its result.
    """

    def combined(*args, **kwargs):
        # func1 runs purely for its side effect; its result is discarded.
        func1()
        return func2(*args, **kwargs)

    return combined
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def retry_call(func, cleanup=lambda: None, retries=0, trap=()):
    """
    Given a callable func, trap the indicated exceptions
    for up to 'retries' times, invoking cleanup on the
    exception. On the final attempt, allow any exceptions
    to propagate.
    """
    unlimited = retries == float('inf')
    attempts = itertools.count() if unlimited else range(retries)
    for _ in attempts:
        try:
            return func()
        except trap:
            cleanup()

    # Final attempt: exceptions propagate to the caller.
    return func()
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
def retry(*r_args, **r_kwargs):
    """
    Decorator wrapper for retry_call. Accepts arguments to retry_call
    except func and then returns a decorator for the decorated function.

    Ex:

    >>> @retry(retries=3)
    ... def my_func(a, b):
    ...     "this is my funk"
    ...     print(a, b)
    >>> my_func.__doc__
    'this is my funk'
    """

    def decorate(func):
        @functools.wraps(func)
        def wrapper(*f_args, **f_kwargs):
            # Freeze this call's arguments so retry_call can re-invoke it.
            invocation = functools.partial(func, *f_args, **f_kwargs)
            return retry_call(invocation, *r_args, **r_kwargs)

        return wrapper

    return decorate
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
def print_yielded(func):
    """
    Convert a generator into a function that prints all yielded elements

    >>> @print_yielded
    ... def x():
    ...     yield 3; yield None
    >>> x()
    3
    None
    """
    # ``import pkg_resources.extern.more_itertools`` (top of module) binds
    # only the name ``pkg_resources``, so referencing a bare
    # ``more_itertools`` here raises NameError. Resolve ``consume``
    # through the fully-qualified dotted path instead.
    # NOTE(review): if the extern machinery is meant to bind the short
    # name some other way, that is not visible in this module — confirm.
    consume = pkg_resources.extern.more_itertools.consume
    print_all = functools.partial(map, print)
    print_results = compose(consume, print_all, func)
    return functools.wraps(func)(print_results)
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
def pass_none(func):
    """
    Wrap func so it's not called if its first param is None

    >>> print_text = pass_none(print)
    >>> print_text('text')
    text
    >>> print_text(None)
    """

    @functools.wraps(func)
    def guarded(param, *args, **kwargs):
        # Short-circuit to None (implicitly) when the first argument is None.
        if param is None:
            return None
        return func(param, *args, **kwargs)

    return guarded
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
def assign_params(func, namespace):
    """
    Assign parameters from namespace where func solicits.

    >>> def func(x, y=3):
    ...     print(x, y)
    >>> assigned = assign_params(func, dict(x=2, z=4))
    >>> assigned()
    2 3

    The usual errors are raised if a function doesn't receive
    its required parameters:

    >>> assigned = assign_params(func, dict(y=3, z=4))
    >>> assigned()
    Traceback (most recent call last):
    TypeError: func() ...argument...

    It even works on methods:

    >>> class Handler:
    ...     def meth(self, arg):
    ...         print(arg)
    >>> assign_params(Handler().meth, dict(arg='crystal', foo='clear'))()
    crystal
    """
    solicited = inspect.signature(func).parameters
    # Keep only the namespace entries the function actually accepts.
    binding = {name: namespace[name] for name in solicited if name in namespace}
    return functools.partial(func, **binding)
|
| 471 |
+
|
| 472 |
+
|
| 473 |
+
def save_method_args(method):
    """
    Wrap a method such that when it is called, the args and kwargs are
    saved on the instance as ``_saved_<method name>``.

    >>> class MyClass:
    ...     @save_method_args
    ...     def method(self, a, b):
    ...         print(a, b)
    >>> my_ob = MyClass()
    >>> my_ob.method(1, 2)
    1 2
    >>> my_ob._saved_method.args
    (1, 2)
    >>> my_ob._saved_method.kwargs
    {}
    >>> my_ob.method(a=3, b='foo')
    3 foo
    >>> my_ob._saved_method.args
    ()
    >>> my_ob._saved_method.kwargs == dict(a=3, b='foo')
    True

    The arguments are stored on the instance, allowing for
    different instance to save different args.
    """
    # Lightweight record describing one invocation's arguments.
    args_and_kwargs = collections.namedtuple('args_and_kwargs', 'args kwargs')

    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        # Save before delegating, so the attribute reflects this call
        # even if the method itself raises.
        record = args_and_kwargs(args, kwargs)
        setattr(self, '_saved_' + method.__name__, record)
        return method(self, *args, **kwargs)

    return wrapper
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
def except_(*exceptions, replace=None, use=None):
    """
    Replace the indicated exceptions, if raised, with the indicated
    literal replacement or evaluated expression (if present).

    >>> safe_int = except_(ValueError)(int)
    >>> safe_int('five')
    >>> safe_int('5')
    5

    Specify a literal replacement with ``replace``.

    >>> safe_int_r = except_(ValueError, replace=0)(int)
    >>> safe_int_r('five')
    0

    Provide an expression to ``use`` to pass through particular parameters.

    >>> safe_int_pt = except_(ValueError, use='args[0]')(int)
    >>> safe_int_pt('five')
    'five'

    """

    def decorate(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except exceptions:
                try:
                    # ``use`` is evaluated here, inside the wrapper, so
                    # the expression can reference the failed call's
                    # ``args`` and ``kwargs`` locals. When ``use`` is
                    # None, eval(None) raises TypeError, which falls
                    # through to the literal ``replace`` below.
                    return eval(use)
                except TypeError:
                    return replace

        return wrapper

    return decorate
|
.venv/Lib/site-packages/pkg_resources/_vendor/jaraco/text/__init__.py
ADDED
|
@@ -0,0 +1,599 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
import itertools
|
| 3 |
+
import textwrap
|
| 4 |
+
import functools
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
from importlib.resources import files # type: ignore
|
| 8 |
+
except ImportError: # pragma: nocover
|
| 9 |
+
from pkg_resources.extern.importlib_resources import files # type: ignore
|
| 10 |
+
|
| 11 |
+
from pkg_resources.extern.jaraco.functools import compose, method_cache
|
| 12 |
+
from pkg_resources.extern.jaraco.context import ExceptionTrap
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def substitution(old, new):
    """
    Return a function that will perform a substitution on a string
    """

    def substitute(text):
        return text.replace(old, new)

    return substitute
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def multi_substitution(*substitutions):
    """
    Take a sequence of pairs specifying substitutions, and create
    a function that performs those substitutions.

    >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
    'baz'
    """
    steps = tuple(itertools.starmap(substitution, substitutions))
    # compose applies right-to-left, so reverse to apply the
    # substitutions in the order they were given.
    return compose(*reversed(steps))
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class FoldedCase(str):
    """
    A case insensitive string class; behaves just like str
    except compares equal when the only variation is case.

    >>> s = FoldedCase('hello world')

    >>> s == 'Hello World'
    True

    >>> 'Hello World' == s
    True

    >>> s != 'Hello World'
    False

    >>> s.index('O')
    4

    >>> s.split('O')
    ['hell', ' w', 'rld']

    >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
    ['alpha', 'Beta', 'GAMMA']

    Sequence membership is straightforward.

    >>> "Hello World" in [s]
    True
    >>> s in ["Hello World"]
    True

    You may test for set inclusion, but candidate and elements
    must both be folded.

    >>> FoldedCase("Hello World") in {s}
    True
    >>> s in {FoldedCase("Hello World")}
    True

    String inclusion works as long as the FoldedCase object
    is on the right.

    >>> "hello" in FoldedCase("Hello World")
    True

    But not if the FoldedCase object is on the left:

    >>> FoldedCase('hello') in 'Hello World'
    False

    In that case, use ``in_``:

    >>> FoldedCase('hello').in_('Hello World')
    True

    >>> FoldedCase('hello') > FoldedCase('Hello')
    False
    """

    # All comparisons are delegated to the lowercased form.
    def __lt__(self, other):
        return self.lower() < other.lower()

    def __gt__(self, other):
        return self.lower() > other.lower()

    def __eq__(self, other):
        return self.lower() == other.lower()

    def __ne__(self, other):
        return self.lower() != other.lower()

    def __hash__(self):
        # Hash the folded form so values equal-ignoring-case collide.
        return hash(self.lower())

    def __contains__(self, other):
        # Use the plain str.lower (via super) for self to avoid the
        # cached override below returning a FoldedCase.
        return super().lower().__contains__(other.lower())

    def in_(self, other):
        "Does self appear in other?"
        return self in FoldedCase(other)

    # cache lower since it's likely to be called frequently.
    @method_cache
    def lower(self):
        return super().lower()

    def index(self, sub):
        # Case-insensitive index; raises ValueError when absent,
        # like str.index.
        return self.lower().index(sub.lower())

    def split(self, splitter=' ', maxsplit=0):
        # Case-insensitive split: match the literal splitter with re.I.
        pattern = re.compile(re.escape(splitter), re.I)
        return pattern.split(self, maxsplit)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
# Python 3.8 compatibility
# Module-level trap shared by ``is_decodable`` below; its ``passes``
# decorator reports whether the wrapped call raised UnicodeDecodeError.
_unicode_trap = ExceptionTrap(UnicodeDecodeError)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
# ``passes`` converts the function into a predicate: True when the body
# completes without raising UnicodeDecodeError, False otherwise (see the
# doctests below for the resulting behavior).
@_unicode_trap.passes
def is_decodable(value):
    r"""
    Return True if the supplied value is decodable (using the default
    encoding).

    >>> is_decodable(b'\xff')
    False
    >>> is_decodable(b'\x32')
    True
    """
    # The decoded result is discarded; only whether decoding raises matters.
    value.decode()
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def is_binary(value):
    r"""
    Return True if the value appears to be binary (that is, it's a byte
    string and isn't decodable).

    >>> is_binary(b'\xff')
    True
    >>> is_binary('\xff')
    False
    """
    # Non-bytes values (including str) are never considered binary.
    if not isinstance(value, bytes):
        return False
    return not is_decodable(value)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def trim(s):
    r"""
    Trim something like a docstring to remove the whitespace that
    is common due to indentation and formatting.

    >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
    'foo = bar\n\tbar = baz'
    """
    dedented = textwrap.dedent(s)
    return dedented.strip()
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def wrap(s):
    """
    Wrap lines of text, retaining existing newlines as
    paragraph markers.

    Each input line is treated as its own paragraph, filled to the
    textwrap default width, and the paragraphs are re-joined with a
    blank line between them.
    """
    # textwrap.fill(line) is the documented shorthand for
    # '\n'.join(textwrap.wrap(line)).
    filled = (textwrap.fill(line) for line in s.splitlines())
    return '\n\n'.join(filled)
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
def unwrap(s):
    r"""
    Given a multi-line string, return an unwrapped version.

    Paragraphs (runs of lines separated by one or more blank lines)
    are each collapsed onto a single line, then rejoined with single
    newlines.
    """
    paragraphs = re.split(r'\n\n+', s)
    flattened = [para.replace('\n', ' ') for para in paragraphs]
    return '\n'.join(flattened)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class Splitter(object):
    """object that will split a string with the given arguments for each call

    >>> s = Splitter(',')
    >>> s('hello, world, this is your, master calling')
    ['hello', ' world', ' this is your', ' master calling']
    """

    def __init__(self, *args):
        # Arguments are stored verbatim and forwarded to str.split on
        # each call (so they may include a separator and/or maxsplit).
        self.args = args

    def __call__(self, s):
        return s.split(*self.args)
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def indent(string, prefix=' ' * 4):
    """
    Prepend ``prefix`` (four spaces by default) to ``string``.

    >>> indent('foo')
    '    foo'
    """
    return ''.join((prefix, string))
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
class WordSet(tuple):
    """
    Given an identifier, return the words that identifier represents,
    whether in camel case, underscore-separated, etc.

    >>> WordSet.parse("camelCase")
    ('camel', 'Case')

    >>> WordSet.parse("under_sep")
    ('under', 'sep')

    Acronyms should be retained

    >>> WordSet.parse("firstSNL")
    ('first', 'SNL')

    >>> WordSet.parse("you_and_I")
    ('you', 'and', 'I')

    >>> WordSet.parse("A simple test")
    ('A', 'simple', 'test')

    Multiple caps should not interfere with the first cap of another word.

    >>> WordSet.parse("myABCClass")
    ('my', 'ABC', 'Class')

    The result is a WordSet, so you can get the form you need.

    >>> WordSet.parse("myABCClass").underscore_separated()
    'my_ABC_Class'

    >>> WordSet.parse('a-command').camel_case()
    'ACommand'

    >>> WordSet.parse('someIdentifier').lowered().space_separated()
    'some identifier'

    Slices of the result should return another WordSet.

    >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
    'out_of_context'

    >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
    'word set'

    >>> example = WordSet.parse('figured it out')
    >>> example.headless_camel_case()
    'figuredItOut'
    >>> example.dash_separated()
    'figured-it-out'

    """

    # Matches either a capitalized-or-lowercase word, or a run of capitals
    # that is not immediately followed by a lowercase letter (an acronym).
    _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')

    def capitalized(self):
        """Return a copy with each word capitalized."""
        return WordSet(word.capitalize() for word in self)

    def lowered(self):
        """Return a copy with each word lowercased."""
        return WordSet(word.lower() for word in self)

    def camel_case(self):
        """Render the words as CamelCase."""
        return ''.join(self.capitalized())

    def headless_camel_case(self):
        """Render the words as camelCase (first word lowercased)."""
        remaining = iter(self)
        leader = next(remaining).lower()
        trailer = WordSet(remaining).camel_case()
        return ''.join(itertools.chain((leader,), trailer))

    def underscore_separated(self):
        """Render the words joined by underscores."""
        return '_'.join(self)

    def dash_separated(self):
        """Render the words joined by dashes."""
        return '-'.join(self)

    def space_separated(self):
        """Render the words joined by spaces."""
        return ' '.join(self)

    def trim_right(self, item):
        """
        Remove the item from the end of the set.

        >>> WordSet.parse('foo bar').trim_right('foo')
        ('foo', 'bar')
        >>> WordSet.parse('foo bar').trim_right('bar')
        ('foo',)
        >>> WordSet.parse('').trim_right('bar')
        ()
        """
        if self and self[-1] == item:
            return self[:-1]
        return self

    def trim_left(self, item):
        """
        Remove the item from the beginning of the set.

        >>> WordSet.parse('foo bar').trim_left('foo')
        ('bar',)
        >>> WordSet.parse('foo bar').trim_left('bar')
        ('foo', 'bar')
        >>> WordSet.parse('').trim_left('bar')
        ()
        """
        if self and self[0] == item:
            return self[1:]
        return self

    def trim(self, item):
        """
        Remove the item from both ends of the set.

        >>> WordSet.parse('foo bar').trim('foo')
        ('bar',)
        """
        return self.trim_left(item).trim_right(item)

    def __getitem__(self, item):
        # Slices keep the WordSet type; single-item access stays plain.
        result = super().__getitem__(item)
        return WordSet(result) if isinstance(item, slice) else result

    @classmethod
    def parse(cls, identifier):
        """Split *identifier* into its component words."""
        return WordSet(
            match.group(0) for match in cls._pattern.finditer(identifier)
        )

    @classmethod
    def from_class_name(cls, subject):
        """Parse the name of *subject*'s class into words."""
        return cls.parse(subject.__class__.__name__)


# for backward compatibility
words = WordSet.parse
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def simple_html_strip(s):
    r"""
    Remove HTML from the string `s`.

    >>> str(simple_html_strip(''))
    ''

    >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
    A stormy day in paradise

    >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
    What about
    multiple lines?
    """
    # The alternation matches comments before tags so '<!--' is not
    # consumed as an ordinary tag; only the plain-text group (3) is kept.
    html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
    fragments = (match.group(3) or '' for match in html_stripper.finditer(s))
    return ''.join(fragments)
|
| 405 |
+
|
| 406 |
+
|
| 407 |
+
class SeparatedValues(str):
    """
    A string separated by a separator. Overrides __iter__ for getting
    the values.

    >>> list(SeparatedValues('a,b,c'))
    ['a', 'b', 'c']

    Whitespace is stripped and empty values are discarded.

    >>> list(SeparatedValues(' a, b , c, '))
    ['a', 'b', 'c']
    """

    # Subclasses may override to split on something other than commas.
    separator = ','

    def __iter__(self):
        stripped = (part.strip() for part in self.split(self.separator))
        return filter(None, stripped)
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
class Stripper:
    r"""
    Given a series of lines, find the common prefix and strip it from them.

    >>> lines = [
    ...     'abcdefg\n',
    ...     'abc\n',
    ...     'abcde\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix
    'abc'
    >>> list(res.lines)
    ['defg\n', '\n', 'de\n']

    If no prefix is common, nothing should be stripped.

    >>> lines = [
    ...     'abcd\n',
    ...     '1234\n',
    ... ]
    >>> res = Stripper.strip_prefix(lines)
    >>> res.prefix
    ''
    >>> list(res.lines)
    ['abcd\n', '1234\n']
    """

    # The second doctest previously read ``>>> res.prefix = ''`` -- an
    # assignment, which a doctest runner silently accepts without checking
    # anything.  It is now an expression so the empty prefix is asserted.

    def __init__(self, prefix, lines):
        self.prefix = prefix
        # Lazy: each line is stripped (via __call__) only when consumed.
        self.lines = map(self, lines)

    @classmethod
    def strip_prefix(cls, lines):
        """Compute the common prefix of *lines* and return a Stripper."""
        prefix_lines, lines = itertools.tee(lines)
        prefix = functools.reduce(cls.common_prefix, prefix_lines)
        return cls(prefix, lines)

    def __call__(self, line):
        """Return *line* with the common prefix removed."""
        if not self.prefix:
            return line
        null, prefix, rest = line.partition(self.prefix)
        return rest

    @staticmethod
    def common_prefix(s1, s2):
        """
        Return the common prefix of two lines.
        """
        index = min(len(s1), len(s2))
        while s1[:index] != s2[:index]:
            index -= 1
        return s1[:index]
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
def remove_prefix(text, prefix):
    """
    Remove the prefix from the text if it exists.

    >>> remove_prefix('underwhelming performance', 'underwhelming ')
    'performance'

    >>> remove_prefix('something special', 'sample')
    'something special'
    """
    # Match only at the start of the string.  The previous
    # rpartition-based implementation removed everything up to the *last*
    # occurrence of the prefix anywhere in the text, so e.g.
    # remove_prefix('abcab', 'ab') wrongly returned ''.
    return text[len(prefix):] if text.startswith(prefix) else text
|
| 494 |
+
|
| 495 |
+
|
| 496 |
+
def remove_suffix(text, suffix):
    """
    Remove the suffix from the text if it exists.

    >>> remove_suffix('name.git', '.git')
    'name'

    >>> remove_suffix('something special', 'sample')
    'something special'
    """
    # Match only at the end of the string.  The previous partition-based
    # implementation cut at the *first* occurrence of the suffix, so e.g.
    # remove_suffix('abcabc', 'abc') wrongly returned ''.  The empty-suffix
    # guard avoids slicing with [:-0].
    return text[: len(text) - len(suffix)] if suffix and text.endswith(suffix) else text
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def normalize_newlines(text):
    r"""
    Replace alternate newlines with the canonical newline.

    >>> normalize_newlines('Lorem Ipsum\u2029')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\r\n')
    'Lorem Ipsum\n'
    >>> normalize_newlines('Lorem Ipsum\x85')
    'Lorem Ipsum\n'
    """
    # '\r\n' must precede '\r' and '\n' in the alternation so the pair is
    # consumed as a single newline rather than two.
    alternates = ('\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029')
    return re.sub('|'.join(alternates), '\n', text)
|
| 524 |
+
|
| 525 |
+
|
| 526 |
+
def _nonblank(str):
|
| 527 |
+
return str and not str.startswith('#')
|
| 528 |
+
|
| 529 |
+
|
| 530 |
+
@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.

    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    # Generic case: recurse into each element; strings dispatch to the
    # registered handler below.
    return itertools.chain.from_iterable(yield_lines(item) for item in iterable)
|
| 547 |
+
|
| 548 |
+
|
| 549 |
+
@yield_lines.register(str)
def _(text):
    # Strip each line of the string, then drop blanks and '#' comments.
    stripped = (line.strip() for line in text.splitlines())
    return filter(_nonblank, stripped)
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
def drop_comment(line):
    """
    Drop comments.

    >>> drop_comment('foo # bar')
    'foo'

    A hash without a space may be in a URL.

    >>> drop_comment('http://example.com/foo#bar')
    'http://example.com/foo#bar'
    """
    before, _sep, _comment = line.partition(' #')
    return before
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
def join_continuation(lines):
    r"""
    Join lines continued by a trailing backslash.

    >>> list(join_continuation(['foo \\', 'bar', 'baz']))
    ['foobar', 'baz']
    >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
    ['foobarbaz']

    Quirk: the character preceding the backslash is also elided.

    >>> list(join_continuation(['goo\\', 'dly']))
    ['godly']

    Quirk: if no line is available to continue, the dangling lines are
    suppressed.

    >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
    ['foo']
    """
    lines = iter(lines)
    for current in lines:
        # Keep folding while the (possibly already-joined) line still ends
        # with a continuation backslash.
        while current.endswith('\\'):
            try:
                continuation = next(lines)
            except StopIteration:
                # Dangling continuation at end of input: drop the partial
                # line entirely (historical behavior).
                return
            # [:-2] drops the backslash *and* the character before it --
            # a historical quirk preserved for compatibility.
            current = current[:-2].strip() + continuation
        yield current
|
.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""More routines for operating on iterables, beyond itertools"""
|
| 2 |
+
|
| 3 |
+
from .more import * # noqa
|
| 4 |
+
from .recipes import * # noqa
|
| 5 |
+
|
| 6 |
+
__version__ = '9.1.0'
|
.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/more.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
.venv/Lib/site-packages/pkg_resources/_vendor/more_itertools/recipes.py
ADDED
|
@@ -0,0 +1,930 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Imported from the recipes section of the itertools documentation.
|
| 2 |
+
|
| 3 |
+
All functions taken from the recipes section of the itertools library docs
|
| 4 |
+
[1]_.
|
| 5 |
+
Some backward-compatible usability improvements have been made.
|
| 6 |
+
|
| 7 |
+
.. [1] http://docs.python.org/library/itertools.html#recipes
|
| 8 |
+
|
| 9 |
+
"""
|
| 10 |
+
import math
|
| 11 |
+
import operator
|
| 12 |
+
import warnings
|
| 13 |
+
|
| 14 |
+
from collections import deque
|
| 15 |
+
from collections.abc import Sized
|
| 16 |
+
from functools import reduce
|
| 17 |
+
from itertools import (
|
| 18 |
+
chain,
|
| 19 |
+
combinations,
|
| 20 |
+
compress,
|
| 21 |
+
count,
|
| 22 |
+
cycle,
|
| 23 |
+
groupby,
|
| 24 |
+
islice,
|
| 25 |
+
product,
|
| 26 |
+
repeat,
|
| 27 |
+
starmap,
|
| 28 |
+
tee,
|
| 29 |
+
zip_longest,
|
| 30 |
+
)
|
| 31 |
+
from random import randrange, sample, choice
|
| 32 |
+
from sys import hexversion
|
| 33 |
+
|
| 34 |
+
__all__ = [
|
| 35 |
+
'all_equal',
|
| 36 |
+
'batched',
|
| 37 |
+
'before_and_after',
|
| 38 |
+
'consume',
|
| 39 |
+
'convolve',
|
| 40 |
+
'dotproduct',
|
| 41 |
+
'first_true',
|
| 42 |
+
'factor',
|
| 43 |
+
'flatten',
|
| 44 |
+
'grouper',
|
| 45 |
+
'iter_except',
|
| 46 |
+
'iter_index',
|
| 47 |
+
'matmul',
|
| 48 |
+
'ncycles',
|
| 49 |
+
'nth',
|
| 50 |
+
'nth_combination',
|
| 51 |
+
'padnone',
|
| 52 |
+
'pad_none',
|
| 53 |
+
'pairwise',
|
| 54 |
+
'partition',
|
| 55 |
+
'polynomial_from_roots',
|
| 56 |
+
'powerset',
|
| 57 |
+
'prepend',
|
| 58 |
+
'quantify',
|
| 59 |
+
'random_combination_with_replacement',
|
| 60 |
+
'random_combination',
|
| 61 |
+
'random_permutation',
|
| 62 |
+
'random_product',
|
| 63 |
+
'repeatfunc',
|
| 64 |
+
'roundrobin',
|
| 65 |
+
'sieve',
|
| 66 |
+
'sliding_window',
|
| 67 |
+
'subslices',
|
| 68 |
+
'tabulate',
|
| 69 |
+
'tail',
|
| 70 |
+
'take',
|
| 71 |
+
'transpose',
|
| 72 |
+
'triplewise',
|
| 73 |
+
'unique_everseen',
|
| 74 |
+
'unique_justseen',
|
| 75 |
+
]
|
| 76 |
+
|
| 77 |
+
_marker = object()
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def take(n, iterable):
    """Return first *n* items of the iterable as a list.

    >>> take(3, range(10))
    [0, 1, 2]

    If there are fewer than *n* items in the iterable, all of them are
    returned.

    >>> take(10, range(3))
    [0, 1, 2]

    """
    first_n = islice(iterable, n)
    return list(first_n)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def tabulate(function, start=0):
    """Return an iterator over the results of ``func(start)``,
    ``func(start + 1)``, ``func(start + 2)``...

    *func* should be a function that accepts one integer argument.

    If *start* is not specified it defaults to 0. It will be incremented each
    time the iterator is advanced.

    >>> take(4, tabulate(lambda x: x ** 2, -3))
    [9, 4, 1, 0]

    """
    inputs = count(start)
    return map(function, inputs)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def tail(n, iterable):
    """Return an iterator over the last *n* items of *iterable*.

    >>> t = tail(3, 'ABCDEFG')
    >>> list(t)
    ['E', 'F', 'G']

    """
    # Inputs without a length are funneled through a bounded deque (which,
    # like islice, raises TypeError for non-iterables -- so there is no
    # need for an explicit Iterable check).
    if not isinstance(iterable, Sized):
        yield from iter(deque(iterable, maxlen=n))
        return
    # Sized inputs can be sliced directly from the computed offset.
    yield from islice(iterable, max(0, len(iterable) - n), None)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def consume(iterator, n=None):
    """Advance *iterable* by *n* steps. If *n* is ``None``, consume it
    entirely.

    Efficiently exhausts an iterator without returning values. Defaults to
    consuming the whole iterator, but an optional second argument may be
    provided to limit consumption.

    >>> i = (x for x in range(10))
    >>> next(i)
    0
    >>> consume(i, 3)
    >>> next(i)
    4

    If the iterator has fewer items remaining than the provided limit, the
    whole iterator will be consumed.

    """
    # Both branches consume at C speed rather than a Python-level loop.
    if n is not None:
        # Advance to the empty slice starting at position n.
        next(islice(iterator, n, n), None)
        return
    # Feed the entire iterator into a zero-length deque.
    deque(iterator, maxlen=0)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def nth(iterable, n, default=None):
    """Returns the nth item or a default value.

    >>> nth(range(10), 3)
    3
    >>> nth(range(10), 20, "zebra")
    'zebra'

    """
    remainder = islice(iterable, n, None)
    return next(remainder, default)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def all_equal(iterable):
    """
    Returns ``True`` if all the elements are equal to each other.

    >>> all_equal('aaaa')
    True
    >>> all_equal('aaab')
    False

    """
    # groupby collapses runs of equal values: at most one group total
    # means every element compared equal (vacuously true when empty).
    groups = groupby(iterable)
    return next(groups, True) and not next(groups, False)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
def quantify(iterable, pred=bool):
    """Return how many times the predicate is true.

    >>> quantify([True, False, True])
    2

    """
    truth_values = map(pred, iterable)
    return sum(truth_values)
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def pad_none(iterable):
    """Returns the sequence of elements and then returns ``None`` indefinitely.

    >>> take(5, pad_none(range(3)))
    [0, 1, 2, None, None]

    Useful for emulating the behavior of the built-in :func:`map` function.

    See also :func:`padded`.

    """
    endless_nones = repeat(None)
    return chain(iterable, endless_nones)


padnone = pad_none
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
def ncycles(iterable, n):
    """Returns the sequence elements *n* times

    >>> list(ncycles(["a", "b"], 3))
    ['a', 'b', 'a', 'b', 'a', 'b']

    """
    # Materialize once so the source can be traversed repeatedly.
    saved = tuple(iterable)
    return chain.from_iterable(repeat(saved, n))
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
def dotproduct(vec1, vec2):
    """Returns the dot product of the two iterables.

    >>> dotproduct([10, 10], [20, 20])
    400

    """
    elementwise = map(operator.mul, vec1, vec2)
    return sum(elementwise)
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
def flatten(listOfLists):
    """Return an iterator flattening one level of nesting in a list of lists.

    >>> list(flatten([[0, 1], [2, 3]]))
    [0, 1, 2, 3]

    See also :func:`collapse`, which can flatten multiple levels of nesting.

    """
    # Lazy: nested iterables are consumed one at a time as needed.
    nested = iter(listOfLists)
    return chain.from_iterable(nested)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def repeatfunc(func, times=None, *args):
    """Call *func* with *args* repeatedly, returning an iterable over the
    results.

    If *times* is specified, the iterable will terminate after that many
    repetitions:

    >>> from operator import add
    >>> list(repeatfunc(add, 4, 3, 5))
    [8, 8, 8, 8]

    If *times* is ``None`` the iterable will not terminate.

    """
    repetitions = repeat(args) if times is None else repeat(args, times)
    return starmap(func, repetitions)
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
def _pairwise(iterable):
|
| 286 |
+
"""Returns an iterator of paired items, overlapping, from the original
|
| 287 |
+
|
| 288 |
+
>>> take(4, pairwise(count()))
|
| 289 |
+
[(0, 1), (1, 2), (2, 3), (3, 4)]
|
| 290 |
+
|
| 291 |
+
On Python 3.10 and above, this is an alias for :func:`itertools.pairwise`.
|
| 292 |
+
|
| 293 |
+
"""
|
| 294 |
+
a, b = tee(iterable)
|
| 295 |
+
next(b, None)
|
| 296 |
+
yield from zip(a, b)
|
| 297 |
+
|
| 298 |
+
|
| 299 |
+
# Prefer the C-implemented itertools.pairwise where the running
# interpreter provides it (Python 3.10+); otherwise export the
# pure-Python _pairwise defined above.
try:
    from itertools import pairwise as itertools_pairwise
except ImportError:
    pairwise = _pairwise
else:

    def pairwise(iterable):
        # Thin generator wrapper so the exported name is a generator
        # function (as _pairwise is) rather than the raw C iterator type.
        yield from itertools_pairwise(iterable)

    # Present the documented contract of the pure-Python version.
    pairwise.__doc__ = _pairwise.__doc__
|
| 309 |
+
|
| 310 |
+
|
| 311 |
+
class UnequalIterablesError(ValueError):
    """Raised when iterables that must match in length do not."""

    def __init__(self, details=None):
        msg = 'Iterables have different lengths'
        if details is not None:
            # details is (first length, mismatching index, its length).
            first_len, index, length = details
            msg += ': index 0 has length {}; index {} has length {}'.format(
                first_len, index, length
            )
        super().__init__(msg)
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
def _zip_equal_generator(iterables):
    # _marker flags an exhausted input; seeing it means lengths differ.
    for combo in zip_longest(*iterables, fillvalue=_marker):
        if any(val is _marker for val in combo):
            raise UnequalIterablesError()
        yield combo
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
def _zip_equal(*iterables):
|
| 331 |
+
# Check whether the iterables are all the same size.
|
| 332 |
+
try:
|
| 333 |
+
first_size = len(iterables[0])
|
| 334 |
+
for i, it in enumerate(iterables[1:], 1):
|
| 335 |
+
size = len(it)
|
| 336 |
+
if size != first_size:
|
| 337 |
+
break
|
| 338 |
+
else:
|
| 339 |
+
# If we didn't break out, we can use the built-in zip.
|
| 340 |
+
return zip(*iterables)
|
| 341 |
+
|
| 342 |
+
# If we did break out, there was a mismatch.
|
| 343 |
+
raise UnequalIterablesError(details=(first_size, i, size))
|
| 344 |
+
# If any one of the iterables didn't have a length, start reading
|
| 345 |
+
# them until one runs out.
|
| 346 |
+
except TypeError:
|
| 347 |
+
return _zip_equal_generator(iterables)
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def grouper(iterable, n, incomplete='fill', fillvalue=None):
    """Group elements from *iterable* into fixed-length groups of length *n*.

    >>> list(grouper('ABCDEF', 3))
    [('A', 'B', 'C'), ('D', 'E', 'F')]

    The keyword arguments *incomplete* and *fillvalue* control what happens for
    iterables whose length is not a multiple of *n*.

    When *incomplete* is `'fill'`, the last group will contain instances of
    *fillvalue*.

    >>> list(grouper('ABCDEFG', 3, incomplete='fill', fillvalue='x'))
    [('A', 'B', 'C'), ('D', 'E', 'F'), ('G', 'x', 'x')]

    When *incomplete* is `'ignore'`, the last group will not be emitted.

    >>> list(grouper('ABCDEFG', 3, incomplete='ignore', fillvalue='x'))
    [('A', 'B', 'C'), ('D', 'E', 'F')]

    When *incomplete* is `'strict'`, a subclass of `ValueError` will be raised.

    """
    # n references to a single iterator yield consecutive chunks of n.
    chunks = [iter(iterable)] * n
    if incomplete == 'fill':
        return zip_longest(*chunks, fillvalue=fillvalue)
    if incomplete == 'strict':
        return _zip_equal(*chunks)
    if incomplete == 'ignore':
        return zip(*chunks)
    raise ValueError('Expected fill, strict, or ignore')
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
def roundrobin(*iterables):
    """Yields an item from each iterable, alternating between them.

    >>> list(roundrobin('ABC', 'D', 'EF'))
    ['A', 'D', 'E', 'B', 'F', 'C']

    This function produces the same output as :func:`interleave_longest`, but
    may perform better for some inputs (in particular when the number of
    iterables is small).

    """
    # Recipe credited to George Sakkis.
    # Fix: the original bound its loop variable to the name ``next``,
    # shadowing the builtin inside this function.
    pending = len(iterables)
    advance_fns = cycle(iter(it).__next__ for it in iterables)
    while pending:
        try:
            for advance in advance_fns:
                yield advance()
        except StopIteration:
            # One iterable ran dry: rebuild the cycle without its
            # bound __next__, which islice positions us just past.
            pending -= 1
            advance_fns = cycle(islice(advance_fns, pending))
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
def partition(pred, iterable):
    """
    Returns a 2-tuple of iterables derived from the input iterable.
    The first yields the items that have ``pred(item) == False``.
    The second yields the items that have ``pred(item) == True``.

    >>> is_odd = lambda x: x % 2 != 0
    >>> even_items, odd_items = partition(is_odd, range(10))
    >>> list(even_items), list(odd_items)
    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])

    If *pred* is None, :func:`bool` is used.

    >>> false_items, true_items = partition(None, [0, 1, False, True, '', ' '])
    >>> list(false_items), list(true_items)
    ([0, False, ''], [1, True, ' '])

    """
    if pred is None:
        pred = bool

    # Evaluate the predicate exactly once per item, then fan the
    # (flag, item) stream out to the two result iterators.
    flagged = ((pred(item), item) for item in iterable)
    false_stream, true_stream = tee(flagged)
    falses = (item for (flag, item) in false_stream if not flag)
    trues = (item for (flag, item) in true_stream if flag)
    return falses, trues
|
| 442 |
+
|
| 443 |
+
|
| 444 |
+
def powerset(iterable):
    """Yields all possible subsets of the iterable.

    >>> list(powerset([1, 2, 3]))
    [(), (1,), (2,), (3,), (1, 2), (1, 3), (2, 3), (1, 2, 3)]

    :func:`powerset` will operate on iterables that aren't :class:`set`
    instances, so repeated elements in the input will produce repeated
    elements in the output; apply :func:`unique_everseen` to the input
    first to avoid generating duplicates.

    """
    pool = list(iterable)
    subset_sizes = range(len(pool) + 1)
    return chain.from_iterable(combinations(pool, r) for r in subset_sizes)
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def unique_everseen(iterable, key=None):
    """
    Yield unique elements, preserving order.

    >>> list(unique_everseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D']
    >>> list(unique_everseen('ABBCcAD', str.lower))
    ['A', 'B', 'C', 'D']

    Sequences with a mix of hashable and unhashable items can be used.
    The function will be slower (i.e., `O(n^2)`) for unhashable items.

    Remember that ``list`` objects are unhashable - you can use the *key*
    parameter to transform the list to a tuple (which is hashable) to
    avoid a slowdown.

    >>> iterable = ([1, 2], [2, 3], [1, 2])
    >>> list(unique_everseen(iterable))  # Slow
    [[1, 2], [2, 3]]
    >>> list(unique_everseen(iterable, key=tuple))  # Faster
    [[1, 2], [2, 3]]

    Similarly, you may want to convert unhashable ``set`` objects with
    ``key=frozenset``. For ``dict`` objects,
    ``key=lambda x: frozenset(x.items())`` can be used.

    """
    # Hashable signatures go in a set (O(1) lookups); unhashable ones
    # fall back to a list scan (O(n) per item).
    hashable_seen = set()
    unhashable_seen = []

    for element in iterable:
        signature = element if key is None else key(element)
        try:
            # Membership test raises TypeError for unhashable signatures.
            if signature not in hashable_seen:
                hashable_seen.add(signature)
                yield element
        except TypeError:
            if signature not in unhashable_seen:
                unhashable_seen.append(signature)
                yield element
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
def unique_justseen(iterable, key=None):
    """Yields elements in order, ignoring serial duplicates

    >>> list(unique_justseen('AAAABBBCCDAABBB'))
    ['A', 'B', 'C', 'D', 'A', 'B']
    >>> list(unique_justseen('ABBCcAD', str.lower))
    ['A', 'C', 'A', 'D']  # doctest: +SKIP

    """
    # groupby collapses each run of equal (per *key*) elements into one
    # group; taking next() from each group keeps the run's first element.
    groups = (group for _, group in groupby(iterable, key))
    return map(next, groups)
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
def iter_except(func, exception, first=None):
    """Yields results from a function repeatedly until an exception is raised.

    Converts a call-until-exception interface to an iterator interface.
    Like ``iter(func, sentinel)``, but uses an exception instead of a sentinel
    to end the loop.

    >>> l = [0, 1, 2]
    >>> list(iter_except(l.pop, IndexError))
    [2, 1, 0]

    Multiple exceptions can be specified as a stopping condition:

    >>> l = [1, 2, 3, '...', 4, 5, 6]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    [7, 6, 5]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    [4, 3, 2]
    >>> list(iter_except(lambda: 1 + l.pop(), (IndexError, TypeError)))
    []

    """
    try:
        # An optional priming call, made once before the main loop.
        if first is not None:
            yield first()
        # Loop forever; the expected exception terminates the generator.
        while True:
            yield func()
    except exception:
        pass
|
| 553 |
+
|
| 554 |
+
|
| 555 |
+
def first_true(iterable, default=None, pred=None):
    """
    Returns the first true value in the iterable.

    If no true value is found, returns *default*

    If *pred* is not None, returns the first item for which
    ``pred(item) == True`` .

    >>> first_true(range(10))
    1
    >>> first_true(range(10), pred=lambda x: x > 5)
    6
    >>> first_true(range(10), default='missing', pred=lambda x: x > 9)
    'missing'

    """
    # filter(None, ...) selects truthy items, mirroring pred=None here.
    test = bool if pred is None else pred
    for candidate in iterable:
        if test(candidate):
            return candidate
    return default
|
| 573 |
+
|
| 574 |
+
|
| 575 |
+
def random_product(*args, repeat=1):
    """Draw an item at random from each of the input iterables.

    >>> random_product('abc', range(4), 'XYZ')  # doctest:+SKIP
    ('c', 3, 'Z')

    If *repeat* is provided as a keyword argument, that many items will be
    drawn from each iterable.

    >>> random_product('abcd', range(4), repeat=2)  # doctest:+SKIP
    ('a', 2, 'd', 3)

    This equivalent to taking a random selection from
    ``itertools.product(*args, **kwarg)``.

    """
    # Materialize each iterable once, then repeat the whole sequence.
    pools = [tuple(pool) for pool in args] * repeat
    return tuple(map(choice, pools))
|
| 593 |
+
|
| 594 |
+
|
| 595 |
+
def random_permutation(iterable, r=None):
    """Return a random *r* length permutation of the elements in *iterable*.

    If *r* is not specified or is ``None``, then *r* defaults to the length of
    *iterable*.

    >>> random_permutation(range(5))  # doctest:+SKIP
    (3, 4, 0, 1, 2)

    This equivalent to taking a random selection from
    ``itertools.permutations(iterable, r)``.

    """
    items = tuple(iterable)
    size = len(items) if r is None else r
    # random.sample draws without replacement, i.e. a partial permutation.
    return tuple(sample(items, size))
|
| 611 |
+
|
| 612 |
+
|
| 613 |
+
def random_combination(iterable, r):
    """Return a random *r* length subsequence of the elements in *iterable*.

    >>> random_combination(range(5), 3)  # doctest:+SKIP
    (2, 3, 4)

    This equivalent to taking a random selection from
    ``itertools.combinations(iterable, r)``.

    """
    items = tuple(iterable)
    # Sample distinct positions, then sort so output order matches the
    # input order, as combinations() would produce.
    positions = sorted(sample(range(len(items)), r))
    return tuple(items[i] for i in positions)
|
| 627 |
+
|
| 628 |
+
|
| 629 |
+
def random_combination_with_replacement(iterable, r):
    """Return a random *r* length subsequence of elements in *iterable*,
    allowing individual elements to be repeated.

    >>> random_combination_with_replacement(range(3), 5) # doctest:+SKIP
    (0, 0, 1, 2, 2)

    This equivalent to taking a random selection from
    ``itertools.combinations_with_replacement(iterable, r)``.

    """
    items = tuple(iterable)
    size = len(items)
    # Positions may repeat; sorting matches combinations_with_replacement
    # output order.
    positions = sorted(randrange(size) for _ in range(r))
    return tuple(items[i] for i in positions)
|
| 644 |
+
|
| 645 |
+
|
| 646 |
+
def nth_combination(iterable, r, index):
    """Equivalent to ``list(combinations(iterable, r))[index]``.

    The subsequences of *iterable* that are of length *r* can be ordered
    lexicographically. :func:`nth_combination` computes the subsequence at
    sort position *index* directly, without computing the previous
    subsequences.

    >>> nth_combination(range(5), 3, 5)
    (0, 3, 4)

    ``ValueError`` will be raised If *r* is negative or greater than the length
    of *iterable*.
    ``IndexError`` will be raised if the given *index* is invalid.
    """
    pool = tuple(iterable)
    n = len(pool)
    if (r < 0) or (r > n):
        raise ValueError

    # c = C(n, r): total number of r-combinations, built with the
    # multiplicative formula over the smaller of r and n - r.
    c = 1
    k = min(r, n - r)
    for i in range(1, k + 1):
        c = c * (n - k + i) // i

    # Support negative indexing from the end, list-style.
    if index < 0:
        index += c

    if (index < 0) or (index >= c):
        raise IndexError

    result = []
    while r:
        # Narrow to combinations starting with the current element:
        # there are C(n - 1, r - 1) of those.
        c, n, r = c * r // n, n - 1, r - 1
        # Skip whole blocks of combinations whose first element is
        # earlier in the pool than the one containing *index*.
        while index >= c:
            index -= c
            c, n = c * (n - r) // n, n - 1
        result.append(pool[-1 - n])

    return tuple(result)
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
def prepend(value, iterator):
    """Yield *value*, followed by the elements in *iterator*.

    >>> value = '0'
    >>> iterator = ['1', '2', '3']
    >>> list(prepend(value, iterator))
    ['0', '1', '2', '3']

    To prepend multiple values, see :func:`itertools.chain`
    or :func:`value_chain`.

    """
    # A one-element tuple chained in front of the rest.
    return chain((value,), iterator)
|
| 701 |
+
|
| 702 |
+
|
| 703 |
+
def convolve(signal, kernel):
    """Convolve the iterable *signal* with the iterable *kernel*.

    >>> signal = (1, 2, 3, 4, 5)
    >>> kernel = [3, 2, 1]
    >>> list(convolve(signal, kernel))
    [3, 8, 14, 20, 26, 14, 5]

    Note: the input arguments are not interchangeable, as the *kernel*
    is immediately consumed and stored.

    """
    # Reverse the kernel once so each output is a plain elementwise
    # product against the sliding window.
    taps = tuple(kernel)[::-1]
    size = len(taps)
    window = deque([0] * size, maxlen=size)
    # Pad the signal with size - 1 zeros so the tail is fully emitted.
    for value in chain(signal, repeat(0, size - 1)):
        window.append(value)
        yield sum(tap * sample for tap, sample in zip(taps, window))
|
| 721 |
+
|
| 722 |
+
|
| 723 |
+
def before_and_after(predicate, it):
    """A variant of :func:`takewhile` that allows complete access to the
    remainder of the iterator.

    >>> it = iter('ABCdEfGhI')
    >>> all_upper, remainder = before_and_after(str.isupper, it)
    >>> ''.join(all_upper)
    'ABC'
    >>> ''.join(remainder)  # takewhile() would lose the 'd'
    'dEfGhI'

    Note that the first iterator must be fully consumed before the second
    iterator can generate valid results.
    """
    source = iter(it)
    # Holds the single element that fails the predicate, so the second
    # iterator can re-emit it instead of losing it.
    crossover = []

    def matching_prefix():
        for item in source:
            if not predicate(item):
                crossover.append(item)
                return
            yield item

    # Note: this is different from itertools recipes to allow nesting
    # before_and_after remainders into before_and_after again. See tests
    # for an example.
    return matching_prefix(), chain(crossover, source)
|
| 754 |
+
|
| 755 |
+
|
| 756 |
+
def triplewise(iterable):
    """Return overlapping triplets from *iterable*.

    >>> list(triplewise('ABCDE'))
    [('A', 'B', 'C'), ('B', 'C', 'D'), ('C', 'D', 'E')]

    """
    # Three copies of the iterator, offset by 0, 1, and 2 positions;
    # zip stops as soon as the most-advanced copy runs out.
    t1, t2, t3 = tee(iterable, 3)
    next(t2, None)
    next(t3, None)
    next(t3, None)
    yield from zip(t1, t2, t3)
|
| 765 |
+
|
| 766 |
+
|
| 767 |
+
def sliding_window(iterable, n):
    """Return a sliding window of width *n* over *iterable*.

    >>> list(sliding_window(range(6), 4))
    [(0, 1, 2, 3), (1, 2, 3, 4), (2, 3, 4, 5)]

    If *iterable* has fewer than *n* items, then nothing is yielded:

    >>> list(sliding_window(range(3), 4))
    []

    For a variant with more features, see :func:`windowed`.
    """
    iterator = iter(iterable)
    # Prime the window with the first n items; maxlen makes appends
    # evict the oldest element automatically.
    window = deque(islice(iterator, n), maxlen=n)
    if len(window) == n:
        yield tuple(window)
        for item in iterator:
            window.append(item)
            yield tuple(window)
|
| 787 |
+
|
| 788 |
+
|
| 789 |
+
def subslices(iterable):
    """Return all contiguous non-empty subslices of *iterable*.

    >>> list(subslices('ABC'))
    [['A'], ['A', 'B'], ['A', 'B', 'C'], ['B'], ['B', 'C'], ['C']]

    This is similar to :func:`substrings`, but emits items in a different
    order.
    """
    items = list(iterable)
    # Every (start, stop) pair with start < stop defines one subslice.
    bounds = combinations(range(len(items) + 1), 2)
    return (items[lo:hi] for lo, hi in bounds)
|
| 801 |
+
|
| 802 |
+
|
| 803 |
+
def polynomial_from_roots(roots):
    """Compute a polynomial's coefficients from its roots.

    >>> roots = [5, -4, 3]  # (x - 5) * (x + 4) * (x - 3)
    >>> polynomial_from_roots(roots)  # x^3 - 4 * x^2 - 17 * x + 60
    [1, -4, -17, 60]
    """
    # Use math.prod for Python 3.8+,
    prod = getattr(math, 'prod', lambda xs: reduce(operator.mul, xs, 1))
    # Vieta's formulas: the coefficient of x^(n-k) is the sum of all
    # products of k negated roots.
    negated = [-root for root in roots]
    coefficients = []
    for k in range(len(negated) + 1):
        coefficients.append(sum(map(prod, combinations(negated, k))))
    return coefficients
|
| 816 |
+
|
| 817 |
+
|
| 818 |
+
def iter_index(iterable, value, start=0):
    """Yield the index of each place in *iterable* that *value* occurs,
    beginning with index *start*.

    See :func:`locate` for a more general means of finding the indexes
    associated with particular values.

    >>> list(iter_index('AABCADEAF', 'A'))
    [0, 1, 4, 7]
    """
    seq_index = getattr(iterable, 'index', None)
    if seq_index is None:
        # Slow path for general iterables: scan element by element.
        for i, element in enumerate(islice(iterable, start, None), start):
            if element is value or element == value:
                yield i
    else:
        # Fast path for sequences: let the C-level index() search.
        i = start - 1
        try:
            while True:
                i = seq_index(value, i + 1)
                yield i
        except ValueError:
            # index() raises ValueError when no further match exists.
            pass
|
| 845 |
+
|
| 846 |
+
|
| 847 |
+
def sieve(n):
    """Yield the primes less than n.

    >>> list(sieve(30))
    [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]
    """
    # math.isqrt is only available on 3.8+; fall back to float sqrt.
    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
    # data[i] == 1 marks i as a candidate prime; the (0, 1) pattern
    # pre-marks exactly the odd indices.
    data = bytearray((0, 1)) * (n // 2)
    # 0 and 1 are not prime; 2 is handled separately below.
    data[:3] = 0, 0, 0
    limit = isqrt(n) + 1
    # For each surviving odd p below sqrt(n), clear its odd multiples
    # starting at p*p (step 2p skips the even multiples).
    for p in compress(range(limit), data):
        data[p * p : n : p + p] = bytes(len(range(p * p, n, p + p)))
    data[2] = 1  # 2 is the only even prime.
    # The indices still set to 1 are exactly the primes below n.
    return iter_index(data, 1) if n > 2 else iter([])
|
| 861 |
+
|
| 862 |
+
|
| 863 |
+
def batched(iterable, n):
    """Batch data into lists of length *n*. The last batch may be shorter.

    >>> list(batched('ABCDEFG', 3))
    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]

    This recipe is from the ``itertools`` docs. This library also provides
    :func:`chunked`, which has a different implementation.
    """
    # On 3.12+ the stdlib has itertools.batched; steer callers there.
    if hexversion >= 0x30C00A0:  # Python 3.12.0a0
        warnings.warn(
            (
                'batched will be removed in a future version of '
                'more-itertools. Use the standard library '
                'itertools.batched function instead'
            ),
            DeprecationWarning,
        )

    iterator = iter(iterable)
    while True:
        chunk = list(islice(iterator, n))
        if not chunk:
            return
        yield chunk
|
| 888 |
+
|
| 889 |
+
|
| 890 |
+
def transpose(it):
    """Swap the rows and columns of the input.

    >>> list(transpose([(1, 2, 3), (11, 22, 33)]))
    [(1, 11), (2, 22), (3, 33)]

    The caller should ensure that the dimensions of the input are compatible.
    """
    # TODO: when 3.9 goes end-of-life, add strict=True to this.
    return zip(*it)
|
| 900 |
+
|
| 901 |
+
|
| 902 |
+
def matmul(m1, m2):
    """Multiply two matrices.
    >>> list(matmul([(7, 5), (3, 5)], [(2, 5), (7, 9)]))
    [[49, 80], [41, 60]]

    The caller should ensure that the dimensions of the input matrices are
    compatible with each other.
    """
    # The result has as many columns as m2 does.
    n = len(m2[0])
    # Dot every row of m1 with every column of m2 (product pairs them in
    # row-major order), then regroup the flat stream into rows of length n.
    return batched(starmap(dotproduct, product(m1, transpose(m2))), n)
|
| 912 |
+
|
| 913 |
+
|
| 914 |
+
def factor(n):
    """Yield the prime factors of n.
    >>> list(factor(360))
    [2, 2, 2, 3, 3, 5]
    """
    # math.isqrt is only available on 3.8+; fall back to float sqrt.
    isqrt = getattr(math, 'isqrt', lambda x: int(math.sqrt(x)))
    # Trial division by each prime up to sqrt(n).
    for prime in sieve(isqrt(n) + 1):
        while True:
            quotient, remainder = divmod(n, prime)
            if remainder:
                break
            yield prime
            n = quotient
        if n == 1:
            return
    # Any remainder greater than 1 is itself a prime factor.
    if n >= 2:
        yield n
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

# Distribution metadata for the vendored ``packaging`` library, exposed as
# conventional module-level dunder attributes.
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "23.1"

__author__ = "Donald Stufft and individual contributors"
__email__ = "donald@stufft.io"

__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "2014-2019 %s" % __author__
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/_elffile.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ELF file parser.
|
| 3 |
+
|
| 4 |
+
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
| 5 |
+
interface to ``ZipFile``. Only the read interface is implemented.
|
| 6 |
+
|
| 7 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
| 8 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import enum
|
| 12 |
+
import os
|
| 13 |
+
import struct
|
| 14 |
+
from typing import IO, Optional, Tuple
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ELFInvalid(ValueError):
    """Raised when the input cannot be parsed as a valid ELF file."""

    pass
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class EIClass(enum.IntEnum):
    """``e_ident[EI_CLASS]`` values: whether the file is 32- or 64-bit."""

    C32 = 1
    C64 = 2
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class EIData(enum.IntEnum):
    """``e_ident[EI_DATA]`` values: the file's byte order."""

    Lsb = 1  # Little-endian.
    Msb = 2  # Big-endian.
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class EMachine(enum.IntEnum):
    """``e_machine`` values: the target instruction-set architecture."""

    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183  # NOTE: member name kept as-is; the value is EM_AARCH64.
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class ELFFile:
    """
    Representation of an ELF executable.

    Parses only the fields needed to identify the architecture, ABI flags,
    and dynamic interpreter of the binary read from file object *f*.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        # e_ident: 16 bytes of file identification at the very start.
        try:
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        # Read exactly as many bytes as *fmt* describes and unpack them.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                # Truncated or unreadable entry; skip rather than fail.
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/_manylinux.py
ADDED
|
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import contextlib
|
| 3 |
+
import functools
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import warnings
|
| 8 |
+
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
|
| 9 |
+
|
| 10 |
+
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
| 11 |
+
|
| 12 |
+
# ARM-specific e_flags values used to recognize the EABI v5 hard-float ABI.
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    # Yield an ELFFile for *path*, or None if the file cannot be opened or
    # is not valid ELF (ELFInvalid is a ValueError and is caught here).
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        yield None
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def _is_linux_armhf(executable: str) -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32  # 32-bit.
            and f.encoding == EIData.Lsb  # Little-endian.
            and f.machine == EMachine.Arm
            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _is_linux_i686(executable: str) -> bool:
    # True when *executable* is a 32-bit little-endian x86 ELF binary.
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.I386
        )
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _have_compatible_abi(executable: str, arch: str) -> bool:
    # armv7l and i686 require inspecting the interpreter's ELF header;
    # the remaining architectures are accepted by name alone.
    if arch == "armv7l":
        return _is_linux_armhf(executable)
    if arch == "i686":
        return _is_linux_i686(executable)
    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
# Maps glibc major version -> last known minor version of that series.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class _GLibCVersion(NamedTuple):
    """A (major, minor) glibc version; compares like a plain tuple."""

    major: int
    minor: int
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def _glibc_version_string_confstr() -> Optional[str]:
|
| 75 |
+
"""
|
| 76 |
+
Primary implementation of glibc_version_string using os.confstr.
|
| 77 |
+
"""
|
| 78 |
+
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
| 79 |
+
# to be broken or missing. This strategy is used in the standard library
|
| 80 |
+
# platform module.
|
| 81 |
+
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
| 82 |
+
try:
|
| 83 |
+
# Should be a string like "glibc 2.17".
|
| 84 |
+
version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
|
| 85 |
+
assert version_string is not None
|
| 86 |
+
_, version = version_string.rsplit()
|
| 87 |
+
except (AssertionError, AttributeError, OSError, ValueError):
|
| 88 |
+
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
| 89 |
+
return None
|
| 90 |
+
return version
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    # c_char_p yields bytes on Python 3; decode to str when needed.
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    # Prefer the fast confstr probe; fall back to ctypes on any falsy result.
    version = _glibc_version_string_confstr()
    return version if version else _glibc_version_string_ctypes()
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
| 143 |
+
"""Parse glibc version.
|
| 144 |
+
|
| 145 |
+
We use a regexp instead of str.split because we want to discard any
|
| 146 |
+
random junk that might come after the minor version -- this might happen
|
| 147 |
+
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
| 148 |
+
uses version strings like "2.20-2014.11"). See gh-3588.
|
| 149 |
+
"""
|
| 150 |
+
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
| 151 |
+
if not m:
|
| 152 |
+
warnings.warn(
|
| 153 |
+
f"Expected glibc version with 2 components major.minor,"
|
| 154 |
+
f" got: {version_str}",
|
| 155 |
+
RuntimeWarning,
|
| 156 |
+
)
|
| 157 |
+
return -1, -1
|
| 158 |
+
return int(m.group("major")), int(m.group("minor"))
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    # Cached for the life of the process: the running system's glibc
    # version, or (-1, -1) when glibc is absent or undetectable.
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
# From PEP 513, PEP 600
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
    """Whether a manylinux tag requiring glibc *version* works here.

    The system glibc must be at least *version*, and an optional
    ``_manylinux`` override module may veto (or confirm) compatibility.
    """
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux  # noqa
    except ImportError:
        # No override module installed: glibc version check is sufficient.
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-tag boolean attributes (pre-PEP 600 override style).
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
# glibc (major, minor) -> the pre-PEP 600 manylinux tag it corresponds to.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
def platform_tags(linux: str, arch: str) -> Iterator[str]:
    """Yield all manylinux platform tags compatible with this system.

    Tags are emitted from the newest supported glibc version down to the
    oldest, with the legacy aliases (manylinux1/2010/2014) interleaved at
    the glibc versions they correspond to.

    :param linux: the base platform string, e.g. ``"linux_x86_64"``; the
        ``"linux"`` prefix is replaced with each manylinux tag name.
    :param arch: the architecture suffix, e.g. ``"x86_64"``.
    """
    if not _have_compatible_abi(sys.executable, arch):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    # NOTE: stored as (2, 16) because the range() below stops *before*
    # min_minor, so the oldest minor actually yielded is 17.
    too_old_glibc2 = _GLibCVersion(2, 16)
    if arch in {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for glibc_max in glibc_max_list:
        if glibc_max.major == too_old_glibc2.major:
            min_minor = too_old_glibc2.minor
        else:
            # For other glibc major versions oldest supported is (x, 0).
            min_minor = -1
        for glibc_minor in range(glibc_max.minor, min_minor, -1):
            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
            tag = "manylinux_{}_{}".format(*glibc_version)
            if _is_compatible(tag, arch, glibc_version):
                yield linux.replace("linux", tag)
            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
            if glibc_version in _LEGACY_MANYLINUX_MAP:
                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                if _is_compatible(legacy_tag, arch, glibc_version):
                    yield linux.replace("linux", legacy_tag)
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/_musllinux.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""PEP 656 support.
|
| 2 |
+
|
| 3 |
+
This module implements logic to detect if the currently running Python is
|
| 4 |
+
linked against musl, and what musl version is used.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import functools
|
| 8 |
+
import re
|
| 9 |
+
import subprocess
|
| 10 |
+
import sys
|
| 11 |
+
from typing import Iterator, NamedTuple, Optional
|
| 12 |
+
|
| 13 |
+
from ._elffile import ELFFile
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class _MuslVersion(NamedTuple):
|
| 17 |
+
major: int
|
| 18 |
+
minor: int
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
|
| 22 |
+
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
| 23 |
+
if len(lines) < 2 or lines[0][:4] != "musl":
|
| 24 |
+
return None
|
| 25 |
+
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
| 26 |
+
if not m:
|
| 27 |
+
return None
|
| 28 |
+
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect the musl runtime version used by *executable*, if any.

    Reads the ELF program interpreter recorded in the executable and,
    when it looks like a musl loader, invokes that loader with no
    arguments: musl's loader prints a banner such as::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader

    to stderr, which is then parsed for the version. Returns ``None``
    when the executable is unreadable, not a valid ELF binary, or not
    linked against musl. Cached per executable path.
    """
    try:
        with open(executable, "rb") as f:
            interpreter = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        # Missing/unreadable file, or not parseable as ELF.
        return None
    if interpreter is None or "musl" not in interpreter:
        return None
    proc = subprocess.run([interpreter], stderr=subprocess.PIPE, universal_newlines=True)
    return _parse_musl_version(proc.stderr)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def platform_tags(arch: str) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param arch: Should be the part of platform tag after the ``linux_``
        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
        prerequisite for the current platform to be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:  # Python not dynamically linked against musl.
        return
    # Newer musl minors are backward compatible, so every minor from the
    # detected one down to 0 yields a valid tag, newest first.
    for compat_minor in range(sys_musl.minor, -1, -1):
        yield f"musllinux_{sys_musl.major}_{compat_minor}_{arch}"
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
if __name__ == "__main__": # pragma: no cover
|
| 71 |
+
import sysconfig
|
| 72 |
+
|
| 73 |
+
plat = sysconfig.get_platform()
|
| 74 |
+
assert plat.startswith("linux-"), "not linux"
|
| 75 |
+
|
| 76 |
+
print("plat:", plat)
|
| 77 |
+
print("musl:", _get_musl_version(sys.executable))
|
| 78 |
+
print("tags:", end=" ")
|
| 79 |
+
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
|
| 80 |
+
print(t, end="\n ")
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/_parser.py
ADDED
|
@@ -0,0 +1,353 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Handwritten parser of dependency specifiers.
|
| 2 |
+
|
| 3 |
+
The docstring for each __parse_* function contains ENBF-inspired grammar representing
|
| 4 |
+
the implementation.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import ast
|
| 8 |
+
from typing import Any, List, NamedTuple, Optional, Tuple, Union
|
| 9 |
+
|
| 10 |
+
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Node:
    """Base class for nodes of a parsed marker expression."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return "<{}('{}')>".format(type(self).__name__, self)

    def serialize(self) -> str:
        # Each subclass decides how it renders back to marker syntax.
        raise NotImplementedError


class Variable(Node):
    """An environment-marker variable, e.g. ``python_version``."""

    def serialize(self) -> str:
        return str(self)


class Value(Node):
    """A literal string value; serialized wrapped in double quotes."""

    def serialize(self) -> str:
        return '"{}"'.format(self)


class Op(Node):
    """A comparison or membership operator, e.g. ``>=`` or ``in``."""

    def serialize(self) -> str:
        return str(self)
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
MarkerVar = Union[Variable, Value]
|
| 43 |
+
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
|
| 44 |
+
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
|
| 45 |
+
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
|
| 46 |
+
# mypy does not support recursive type definition
|
| 47 |
+
# https://github.com/python/mypy/issues/731
|
| 48 |
+
MarkerAtom = Any
|
| 49 |
+
MarkerList = List[Any]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class ParsedRequirement(NamedTuple):
|
| 53 |
+
name: str
|
| 54 |
+
url: str
|
| 55 |
+
extras: List[str]
|
| 56 |
+
specifier: str
|
| 57 |
+
marker: Optional[MarkerList]
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# --------------------------------------------------------------------------------------
|
| 61 |
+
# Recursive descent parser for dependency specifier
|
| 62 |
+
# --------------------------------------------------------------------------------------
|
| 63 |
+
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a PEP 508 dependency specifier string into a ParsedRequirement.

    Raises a tokenizer syntax error (ParserSyntaxError) on invalid input.
    """
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
|
| 68 |
+
"""
|
| 69 |
+
requirement = WS? IDENTIFIER WS? extras WS? requirement_details
|
| 70 |
+
"""
|
| 71 |
+
tokenizer.consume("WS")
|
| 72 |
+
|
| 73 |
+
name_token = tokenizer.expect(
|
| 74 |
+
"IDENTIFIER", expected="package name at the start of dependency specifier"
|
| 75 |
+
)
|
| 76 |
+
name = name_token.text
|
| 77 |
+
tokenizer.consume("WS")
|
| 78 |
+
|
| 79 |
+
extras = _parse_extras(tokenizer)
|
| 80 |
+
tokenizer.consume("WS")
|
| 81 |
+
|
| 82 |
+
url, specifier, marker = _parse_requirement_details(tokenizer)
|
| 83 |
+
tokenizer.expect("END", expected="end of dependency specifier")
|
| 84 |
+
|
| 85 |
+
return ParsedRequirement(name, url, extras, specifier, marker)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _parse_requirement_details(
|
| 89 |
+
tokenizer: Tokenizer,
|
| 90 |
+
) -> Tuple[str, str, Optional[MarkerList]]:
|
| 91 |
+
"""
|
| 92 |
+
requirement_details = AT URL (WS requirement_marker?)?
|
| 93 |
+
| specifier WS? (requirement_marker)?
|
| 94 |
+
"""
|
| 95 |
+
|
| 96 |
+
specifier = ""
|
| 97 |
+
url = ""
|
| 98 |
+
marker = None
|
| 99 |
+
|
| 100 |
+
if tokenizer.check("AT"):
|
| 101 |
+
tokenizer.read()
|
| 102 |
+
tokenizer.consume("WS")
|
| 103 |
+
|
| 104 |
+
url_start = tokenizer.position
|
| 105 |
+
url = tokenizer.expect("URL", expected="URL after @").text
|
| 106 |
+
if tokenizer.check("END", peek=True):
|
| 107 |
+
return (url, specifier, marker)
|
| 108 |
+
|
| 109 |
+
tokenizer.expect("WS", expected="whitespace after URL")
|
| 110 |
+
|
| 111 |
+
# The input might end after whitespace.
|
| 112 |
+
if tokenizer.check("END", peek=True):
|
| 113 |
+
return (url, specifier, marker)
|
| 114 |
+
|
| 115 |
+
marker = _parse_requirement_marker(
|
| 116 |
+
tokenizer, span_start=url_start, after="URL and whitespace"
|
| 117 |
+
)
|
| 118 |
+
else:
|
| 119 |
+
specifier_start = tokenizer.position
|
| 120 |
+
specifier = _parse_specifier(tokenizer)
|
| 121 |
+
tokenizer.consume("WS")
|
| 122 |
+
|
| 123 |
+
if tokenizer.check("END", peek=True):
|
| 124 |
+
return (url, specifier, marker)
|
| 125 |
+
|
| 126 |
+
marker = _parse_requirement_marker(
|
| 127 |
+
tokenizer,
|
| 128 |
+
span_start=specifier_start,
|
| 129 |
+
after=(
|
| 130 |
+
"version specifier"
|
| 131 |
+
if specifier
|
| 132 |
+
else "name and no valid version specifier"
|
| 133 |
+
),
|
| 134 |
+
)
|
| 135 |
+
|
| 136 |
+
return (url, specifier, marker)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def _parse_requirement_marker(
|
| 140 |
+
tokenizer: Tokenizer, *, span_start: int, after: str
|
| 141 |
+
) -> MarkerList:
|
| 142 |
+
"""
|
| 143 |
+
requirement_marker = SEMICOLON marker WS?
|
| 144 |
+
"""
|
| 145 |
+
|
| 146 |
+
if not tokenizer.check("SEMICOLON"):
|
| 147 |
+
tokenizer.raise_syntax_error(
|
| 148 |
+
f"Expected end or semicolon (after {after})",
|
| 149 |
+
span_start=span_start,
|
| 150 |
+
)
|
| 151 |
+
tokenizer.read()
|
| 152 |
+
|
| 153 |
+
marker = _parse_marker(tokenizer)
|
| 154 |
+
tokenizer.consume("WS")
|
| 155 |
+
|
| 156 |
+
return marker
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
|
| 160 |
+
"""
|
| 161 |
+
extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
|
| 162 |
+
"""
|
| 163 |
+
if not tokenizer.check("LEFT_BRACKET", peek=True):
|
| 164 |
+
return []
|
| 165 |
+
|
| 166 |
+
with tokenizer.enclosing_tokens(
|
| 167 |
+
"LEFT_BRACKET",
|
| 168 |
+
"RIGHT_BRACKET",
|
| 169 |
+
around="extras",
|
| 170 |
+
):
|
| 171 |
+
tokenizer.consume("WS")
|
| 172 |
+
extras = _parse_extras_list(tokenizer)
|
| 173 |
+
tokenizer.consume("WS")
|
| 174 |
+
|
| 175 |
+
return extras
|
| 176 |
+
|
| 177 |
+
|
| 178 |
+
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    # Empty extras list (e.g. "pkg[]") is allowed: return immediately.
    if not tokenizer.check("IDENTIFIER"):
        return extras

    # check() loaded the token; read() consumes it.
    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        # peek=True: look ahead without loading, so the tokenizer state
        # stays valid for the COMMA check on the next line.
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two identifiers in a row means a missing separator.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        # Consume the comma plus any whitespace after it.
        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
|
| 206 |
+
"""
|
| 207 |
+
specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
|
| 208 |
+
| WS? version_many WS?
|
| 209 |
+
"""
|
| 210 |
+
with tokenizer.enclosing_tokens(
|
| 211 |
+
"LEFT_PARENTHESIS",
|
| 212 |
+
"RIGHT_PARENTHESIS",
|
| 213 |
+
around="version specifier",
|
| 214 |
+
):
|
| 215 |
+
tokenizer.consume("WS")
|
| 216 |
+
parsed_specifiers = _parse_version_many(tokenizer)
|
| 217 |
+
tokenizer.consume("WS")
|
| 218 |
+
|
| 219 |
+
return parsed_specifiers
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
|
| 223 |
+
"""
|
| 224 |
+
version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
|
| 225 |
+
"""
|
| 226 |
+
parsed_specifiers = ""
|
| 227 |
+
while tokenizer.check("SPECIFIER"):
|
| 228 |
+
span_start = tokenizer.position
|
| 229 |
+
parsed_specifiers += tokenizer.read().text
|
| 230 |
+
if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
|
| 231 |
+
tokenizer.raise_syntax_error(
|
| 232 |
+
".* suffix can only be used with `==` or `!=` operators",
|
| 233 |
+
span_start=span_start,
|
| 234 |
+
span_end=tokenizer.position + 1,
|
| 235 |
+
)
|
| 236 |
+
if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
|
| 237 |
+
tokenizer.raise_syntax_error(
|
| 238 |
+
"Local version label can only be used with `==` or `!=` operators",
|
| 239 |
+
span_start=span_start,
|
| 240 |
+
span_end=tokenizer.position,
|
| 241 |
+
)
|
| 242 |
+
tokenizer.consume("WS")
|
| 243 |
+
if not tokenizer.check("COMMA"):
|
| 244 |
+
break
|
| 245 |
+
parsed_specifiers += tokenizer.read().text
|
| 246 |
+
tokenizer.consume("WS")
|
| 247 |
+
|
| 248 |
+
return parsed_specifiers
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
# --------------------------------------------------------------------------------------
|
| 252 |
+
# Recursive descent parser for marker expression
|
| 253 |
+
# --------------------------------------------------------------------------------------
|
| 254 |
+
def parse_marker(source: str) -> MarkerList:
|
| 255 |
+
return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
|
| 259 |
+
"""
|
| 260 |
+
marker = marker_atom (BOOLOP marker_atom)+
|
| 261 |
+
"""
|
| 262 |
+
expression = [_parse_marker_atom(tokenizer)]
|
| 263 |
+
while tokenizer.check("BOOLOP"):
|
| 264 |
+
token = tokenizer.read()
|
| 265 |
+
expr_right = _parse_marker_atom(tokenizer)
|
| 266 |
+
expression.extend((token.text, expr_right))
|
| 267 |
+
return expression
|
| 268 |
+
|
| 269 |
+
|
| 270 |
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
|
| 271 |
+
"""
|
| 272 |
+
marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
|
| 273 |
+
| WS? marker_item WS?
|
| 274 |
+
"""
|
| 275 |
+
|
| 276 |
+
tokenizer.consume("WS")
|
| 277 |
+
if tokenizer.check("LEFT_PARENTHESIS", peek=True):
|
| 278 |
+
with tokenizer.enclosing_tokens(
|
| 279 |
+
"LEFT_PARENTHESIS",
|
| 280 |
+
"RIGHT_PARENTHESIS",
|
| 281 |
+
around="marker expression",
|
| 282 |
+
):
|
| 283 |
+
tokenizer.consume("WS")
|
| 284 |
+
marker: MarkerAtom = _parse_marker(tokenizer)
|
| 285 |
+
tokenizer.consume("WS")
|
| 286 |
+
else:
|
| 287 |
+
marker = _parse_marker_item(tokenizer)
|
| 288 |
+
tokenizer.consume("WS")
|
| 289 |
+
return marker
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
|
| 293 |
+
"""
|
| 294 |
+
marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
|
| 295 |
+
"""
|
| 296 |
+
tokenizer.consume("WS")
|
| 297 |
+
marker_var_left = _parse_marker_var(tokenizer)
|
| 298 |
+
tokenizer.consume("WS")
|
| 299 |
+
marker_op = _parse_marker_op(tokenizer)
|
| 300 |
+
tokenizer.consume("WS")
|
| 301 |
+
marker_var_right = _parse_marker_var(tokenizer)
|
| 302 |
+
tokenizer.consume("WS")
|
| 303 |
+
return (marker_var_left, marker_op, marker_var_right)
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
|
| 307 |
+
"""
|
| 308 |
+
marker_var = VARIABLE | QUOTED_STRING
|
| 309 |
+
"""
|
| 310 |
+
if tokenizer.check("VARIABLE"):
|
| 311 |
+
return process_env_var(tokenizer.read().text.replace(".", "_"))
|
| 312 |
+
elif tokenizer.check("QUOTED_STRING"):
|
| 313 |
+
return process_python_str(tokenizer.read().text)
|
| 314 |
+
else:
|
| 315 |
+
tokenizer.raise_syntax_error(
|
| 316 |
+
message="Expected a marker variable or quoted string"
|
| 317 |
+
)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name as a :class:`Variable`.

    The alias ``python_implementation`` is folded into the canonical
    ``platform_python_implementation`` name.
    """
    aliases = {"platform_python_implementation", "python_implementation"}
    canonical = "platform_python_implementation" if env_var in aliases else env_var
    return Variable(canonical)
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted marker string literal and wrap it as a Value.

    ``ast.literal_eval`` safely interprets the quoted literal without
    executing arbitrary code.
    """
    return Value(str(ast.literal_eval(python_str)))
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
|
| 336 |
+
"""
|
| 337 |
+
marker_op = IN | NOT IN | OP
|
| 338 |
+
"""
|
| 339 |
+
if tokenizer.check("IN"):
|
| 340 |
+
tokenizer.read()
|
| 341 |
+
return Op("in")
|
| 342 |
+
elif tokenizer.check("NOT"):
|
| 343 |
+
tokenizer.read()
|
| 344 |
+
tokenizer.expect("WS", expected="whitespace after 'not'")
|
| 345 |
+
tokenizer.expect("IN", expected="'in' after 'not'")
|
| 346 |
+
return Op("not in")
|
| 347 |
+
elif tokenizer.check("OP"):
|
| 348 |
+
return Op(tokenizer.read().text)
|
| 349 |
+
else:
|
| 350 |
+
return tokenizer.raise_syntax_error(
|
| 351 |
+
"Expected marker operator, one of "
|
| 352 |
+
"<=, <, !=, ==, >=, >, ~=, ===, in, not in"
|
| 353 |
+
)
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class InfinityType:
    """Sentinel that compares greater than every other object.

    Used in version comparisons as a "largest possible" placeholder.
    All instances are interchangeable (equal and share one hash).
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Any two sentinel instances are considered identical.
        return isinstance(other, type(self))

    def __lt__(self, other: object) -> bool:
        # Nothing is greater than Infinity.
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
Infinity = InfinityType()
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class NegativeInfinityType:
|
| 36 |
+
def __repr__(self) -> str:
|
| 37 |
+
return "-Infinity"
|
| 38 |
+
|
| 39 |
+
def __hash__(self) -> int:
|
| 40 |
+
return hash(repr(self))
|
| 41 |
+
|
| 42 |
+
def __lt__(self, other: object) -> bool:
|
| 43 |
+
return True
|
| 44 |
+
|
| 45 |
+
def __le__(self, other: object) -> bool:
|
| 46 |
+
return True
|
| 47 |
+
|
| 48 |
+
def __eq__(self, other: object) -> bool:
|
| 49 |
+
return isinstance(other, self.__class__)
|
| 50 |
+
|
| 51 |
+
def __gt__(self, other: object) -> bool:
|
| 52 |
+
return False
|
| 53 |
+
|
| 54 |
+
def __ge__(self, other: object) -> bool:
|
| 55 |
+
return False
|
| 56 |
+
|
| 57 |
+
def __neg__(self: object) -> InfinityType:
|
| 58 |
+
return Infinity
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
NegativeInfinity = NegativeInfinityType()
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/_tokenizer.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import re
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
|
| 5 |
+
|
| 6 |
+
from .specifiers import Specifier
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@dataclass
|
| 10 |
+
class Token:
|
| 11 |
+
name: str
|
| 12 |
+
text: str
|
| 13 |
+
position: int
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        # Keep the offending source text and the (start, end) span so that
        # __str__ can point at the exact error location.
        self.span = span
        self.message = message
        self.source = source
        super().__init__()

    def __str__(self) -> str:
        start, end = self.span
        # Underline the span with tildes and a caret, indented to align
        # with the echoed source line.
        pointer = " " * start + "~" * (end - start) + "^"
        return "\n    ".join((self.message, self.source, pointer))
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
|
| 38 |
+
"LEFT_PARENTHESIS": r"\(",
|
| 39 |
+
"RIGHT_PARENTHESIS": r"\)",
|
| 40 |
+
"LEFT_BRACKET": r"\[",
|
| 41 |
+
"RIGHT_BRACKET": r"\]",
|
| 42 |
+
"SEMICOLON": r";",
|
| 43 |
+
"COMMA": r",",
|
| 44 |
+
"QUOTED_STRING": re.compile(
|
| 45 |
+
r"""
|
| 46 |
+
(
|
| 47 |
+
('[^']*')
|
| 48 |
+
|
|
| 49 |
+
("[^"]*")
|
| 50 |
+
)
|
| 51 |
+
""",
|
| 52 |
+
re.VERBOSE,
|
| 53 |
+
),
|
| 54 |
+
"OP": r"(===|==|~=|!=|<=|>=|<|>)",
|
| 55 |
+
"BOOLOP": r"\b(or|and)\b",
|
| 56 |
+
"IN": r"\bin\b",
|
| 57 |
+
"NOT": r"\bnot\b",
|
| 58 |
+
"VARIABLE": re.compile(
|
| 59 |
+
r"""
|
| 60 |
+
\b(
|
| 61 |
+
python_version
|
| 62 |
+
|python_full_version
|
| 63 |
+
|os[._]name
|
| 64 |
+
|sys[._]platform
|
| 65 |
+
|platform_(release|system)
|
| 66 |
+
|platform[._](version|machine|python_implementation)
|
| 67 |
+
|python_implementation
|
| 68 |
+
|implementation_(name|version)
|
| 69 |
+
|extra
|
| 70 |
+
)\b
|
| 71 |
+
""",
|
| 72 |
+
re.VERBOSE,
|
| 73 |
+
),
|
| 74 |
+
"SPECIFIER": re.compile(
|
| 75 |
+
Specifier._operator_regex_str + Specifier._version_regex_str,
|
| 76 |
+
re.VERBOSE | re.IGNORECASE,
|
| 77 |
+
),
|
| 78 |
+
"AT": r"\@",
|
| 79 |
+
"URL": r"[^ \t]+",
|
| 80 |
+
"IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
|
| 81 |
+
"VERSION_PREFIX_TRAIL": r"\.\*",
|
| 82 |
+
"VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
|
| 83 |
+
"WS": r"[ \t]+",
|
| 84 |
+
"END": r"$",
|
| 85 |
+
}
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.

    The tokenizer holds at most one "loaded" token (``next_token``): a
    successful ``check()`` loads it, and ``read()`` consumes it and advances
    ``position``. Callers must respect that one-token protocol.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        # Raw text being tokenized; never mutated.
        self.source = source
        # Compile every rule up front; re.compile() on an already-compiled
        # pattern is a no-op, so rules may be strings or patterns.
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # The single look-ahead token loaded by check(); None when empty.
        self.next_token: Optional[Token] = None
        # Current offset into self.source.
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        # Enforce the one-loaded-token protocol: a second check before a
        # read indicates a parser bug, not bad input.
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        # Anchored match at the current position only (re.match with pos).
        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is *not* read.
        """
        if not self.check(name):
            # raise_syntax_error() itself raises (NoReturn); the outer
            # `raise` is never reached but makes the intent explicit.
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        # Advance past the consumed text and clear the look-ahead slot.
        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position.

        When span bounds are omitted they default to the current position,
        producing a zero-width span pointing at the failure site.
        """
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        """Context manager matching an optional open/close token pair.

        If *open_token* is present it is consumed before yielding, and the
        matching *close_token* is required (and consumed) afterwards; a
        missing closer raises a syntax error spanning back to the opener.
        If the opener is absent, the body runs with no closing requirement.
        """
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/markers.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import operator
|
| 6 |
+
import os
|
| 7 |
+
import platform
|
| 8 |
+
import sys
|
| 9 |
+
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
| 10 |
+
|
| 11 |
+
from ._parser import (
|
| 12 |
+
MarkerAtom,
|
| 13 |
+
MarkerList,
|
| 14 |
+
Op,
|
| 15 |
+
Value,
|
| 16 |
+
Variable,
|
| 17 |
+
parse_marker as _parse_marker,
|
| 18 |
+
)
|
| 19 |
+
from ._tokenizer import ParserSyntaxError
|
| 20 |
+
from .specifiers import InvalidSpecifier, Specifier
|
| 21 |
+
from .utils import canonicalize_name
|
| 22 |
+
|
| 23 |
+
__all__ = [
|
| 24 |
+
"InvalidMarker",
|
| 25 |
+
"UndefinedComparison",
|
| 26 |
+
"UndefinedEnvironmentName",
|
| 27 |
+
"Marker",
|
| 28 |
+
"default_environment",
|
| 29 |
+
]
|
| 30 |
+
|
| 31 |
+
Operator = Callable[[str, str], bool]
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class InvalidMarker(ValueError):
|
| 35 |
+
"""
|
| 36 |
+
An invalid marker was found, users should refer to PEP 508.
|
| 37 |
+
"""
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class UndefinedComparison(ValueError):
|
| 41 |
+
"""
|
| 42 |
+
An invalid operation was attempted on a value that doesn't support it.
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class UndefinedEnvironmentName(ValueError):
|
| 47 |
+
"""
|
| 48 |
+
A name was attempted to be used that does not exist inside of the
|
| 49 |
+
environment.
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
def _normalize_extra_values(results: Any) -> Any:
|
| 54 |
+
"""
|
| 55 |
+
Normalize extra values.
|
| 56 |
+
"""
|
| 57 |
+
if isinstance(results[0], tuple):
|
| 58 |
+
lhs, op, rhs = results[0]
|
| 59 |
+
if isinstance(lhs, Variable) and lhs.value == "extra":
|
| 60 |
+
normalized_extra = canonicalize_name(rhs.value)
|
| 61 |
+
rhs = Value(normalized_extra)
|
| 62 |
+
elif isinstance(rhs, Variable) and rhs.value == "extra":
|
| 63 |
+
normalized_extra = canonicalize_name(lhs.value)
|
| 64 |
+
lhs = Value(normalized_extra)
|
| 65 |
+
results[0] = lhs, op, rhs
|
| 66 |
+
return results
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def _format_marker(
|
| 70 |
+
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
|
| 71 |
+
) -> str:
|
| 72 |
+
|
| 73 |
+
assert isinstance(marker, (list, tuple, str))
|
| 74 |
+
|
| 75 |
+
# Sometimes we have a structure like [[...]] which is a single item list
|
| 76 |
+
# where the single item is itself it's own list. In that case we want skip
|
| 77 |
+
# the rest of this function so that we don't get extraneous () on the
|
| 78 |
+
# outside.
|
| 79 |
+
if (
|
| 80 |
+
isinstance(marker, list)
|
| 81 |
+
and len(marker) == 1
|
| 82 |
+
and isinstance(marker[0], (list, tuple))
|
| 83 |
+
):
|
| 84 |
+
return _format_marker(marker[0])
|
| 85 |
+
|
| 86 |
+
if isinstance(marker, list):
|
| 87 |
+
inner = (_format_marker(m, first=False) for m in marker)
|
| 88 |
+
if first:
|
| 89 |
+
return " ".join(inner)
|
| 90 |
+
else:
|
| 91 |
+
return "(" + " ".join(inner) + ")"
|
| 92 |
+
elif isinstance(marker, tuple):
|
| 93 |
+
return " ".join([m.serialize() for m in marker])
|
| 94 |
+
else:
|
| 95 |
+
return marker
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
_operators: Dict[str, Operator] = {
|
| 99 |
+
"in": lambda lhs, rhs: lhs in rhs,
|
| 100 |
+
"not in": lambda lhs, rhs: lhs not in rhs,
|
| 101 |
+
"<": operator.lt,
|
| 102 |
+
"<=": operator.le,
|
| 103 |
+
"==": operator.eq,
|
| 104 |
+
"!=": operator.ne,
|
| 105 |
+
">=": operator.ge,
|
| 106 |
+
">": operator.gt,
|
| 107 |
+
}
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
|
| 111 |
+
try:
|
| 112 |
+
spec = Specifier("".join([op.serialize(), rhs]))
|
| 113 |
+
except InvalidSpecifier:
|
| 114 |
+
pass
|
| 115 |
+
else:
|
| 116 |
+
return spec.contains(lhs, prereleases=True)
|
| 117 |
+
|
| 118 |
+
oper: Optional[Operator] = _operators.get(op.serialize())
|
| 119 |
+
if oper is None:
|
| 120 |
+
raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
|
| 121 |
+
|
| 122 |
+
return oper(lhs, rhs)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
|
| 126 |
+
# PEP 685 – Comparison of extra names for optional distribution dependencies
|
| 127 |
+
# https://peps.python.org/pep-0685/
|
| 128 |
+
# > When comparing extra names, tools MUST normalize the names being
|
| 129 |
+
# > compared using the semantics outlined in PEP 503 for names
|
| 130 |
+
if key == "extra":
|
| 131 |
+
return tuple(canonicalize_name(v) for v in values)
|
| 132 |
+
|
| 133 |
+
# other environment markers don't have such standards
|
| 134 |
+
return values
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
|
| 138 |
+
groups: List[List[bool]] = [[]]
|
| 139 |
+
|
| 140 |
+
for marker in markers:
|
| 141 |
+
assert isinstance(marker, (list, tuple, str))
|
| 142 |
+
|
| 143 |
+
if isinstance(marker, list):
|
| 144 |
+
groups[-1].append(_evaluate_markers(marker, environment))
|
| 145 |
+
elif isinstance(marker, tuple):
|
| 146 |
+
lhs, op, rhs = marker
|
| 147 |
+
|
| 148 |
+
if isinstance(lhs, Variable):
|
| 149 |
+
environment_key = lhs.value
|
| 150 |
+
lhs_value = environment[environment_key]
|
| 151 |
+
rhs_value = rhs.value
|
| 152 |
+
else:
|
| 153 |
+
lhs_value = lhs.value
|
| 154 |
+
environment_key = rhs.value
|
| 155 |
+
rhs_value = environment[environment_key]
|
| 156 |
+
|
| 157 |
+
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
|
| 158 |
+
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
|
| 159 |
+
else:
|
| 160 |
+
assert marker in ["and", "or"]
|
| 161 |
+
if marker == "or":
|
| 162 |
+
groups.append([])
|
| 163 |
+
|
| 164 |
+
return any(all(item) for item in groups)
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def format_full_version(info: "sys._version_info") -> str:
|
| 168 |
+
version = "{0.major}.{0.minor}.{0.micro}".format(info)
|
| 169 |
+
kind = info.releaselevel
|
| 170 |
+
if kind != "final":
|
| 171 |
+
version += kind[0] + str(info.serial)
|
| 172 |
+
return version
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def default_environment() -> Dict[str, str]:
|
| 176 |
+
iver = format_full_version(sys.implementation.version)
|
| 177 |
+
implementation_name = sys.implementation.name
|
| 178 |
+
return {
|
| 179 |
+
"implementation_name": implementation_name,
|
| 180 |
+
"implementation_version": iver,
|
| 181 |
+
"os_name": os.name,
|
| 182 |
+
"platform_machine": platform.machine(),
|
| 183 |
+
"platform_release": platform.release(),
|
| 184 |
+
"platform_system": platform.system(),
|
| 185 |
+
"platform_version": platform.version(),
|
| 186 |
+
"python_full_version": platform.python_version(),
|
| 187 |
+
"platform_python_implementation": platform.python_implementation(),
|
| 188 |
+
"python_version": ".".join(platform.python_version_tuple()[:2]),
|
| 189 |
+
"sys_platform": sys.platform,
|
| 190 |
+
}
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
class Marker:
|
| 194 |
+
def __init__(self, marker: str) -> None:
|
| 195 |
+
# Note: We create a Marker object without calling this constructor in
|
| 196 |
+
# packaging.requirements.Requirement. If any additional logic is
|
| 197 |
+
# added here, make sure to mirror/adapt Requirement.
|
| 198 |
+
try:
|
| 199 |
+
self._markers = _normalize_extra_values(_parse_marker(marker))
|
| 200 |
+
# The attribute `_markers` can be described in terms of a recursive type:
|
| 201 |
+
# MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
|
| 202 |
+
#
|
| 203 |
+
# For example, the following expression:
|
| 204 |
+
# python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
|
| 205 |
+
#
|
| 206 |
+
# is parsed into:
|
| 207 |
+
# [
|
| 208 |
+
# (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
|
| 209 |
+
# 'and',
|
| 210 |
+
# [
|
| 211 |
+
# (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
|
| 212 |
+
# 'or',
|
| 213 |
+
# (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
|
| 214 |
+
# ]
|
| 215 |
+
# ]
|
| 216 |
+
except ParserSyntaxError as e:
|
| 217 |
+
raise InvalidMarker(str(e)) from e
|
| 218 |
+
|
| 219 |
+
def __str__(self) -> str:
|
| 220 |
+
return _format_marker(self._markers)
|
| 221 |
+
|
| 222 |
+
def __repr__(self) -> str:
|
| 223 |
+
return f"<Marker('{self}')>"
|
| 224 |
+
|
| 225 |
+
def __hash__(self) -> int:
|
| 226 |
+
return hash((self.__class__.__name__, str(self)))
|
| 227 |
+
|
| 228 |
+
def __eq__(self, other: Any) -> bool:
|
| 229 |
+
if not isinstance(other, Marker):
|
| 230 |
+
return NotImplemented
|
| 231 |
+
|
| 232 |
+
return str(self) == str(other)
|
| 233 |
+
|
| 234 |
+
def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
|
| 235 |
+
"""Evaluate a marker.
|
| 236 |
+
|
| 237 |
+
Return the boolean from evaluating the given marker against the
|
| 238 |
+
environment. environment is an optional argument to override all or
|
| 239 |
+
part of the determined environment.
|
| 240 |
+
|
| 241 |
+
The environment is determined from the current Python process.
|
| 242 |
+
"""
|
| 243 |
+
current_environment = default_environment()
|
| 244 |
+
current_environment["extra"] = ""
|
| 245 |
+
if environment is not None:
|
| 246 |
+
current_environment.update(environment)
|
| 247 |
+
# The API used to allow setting extra to None. We need to handle this
|
| 248 |
+
# case for backwards compatibility.
|
| 249 |
+
if current_environment["extra"] is None:
|
| 250 |
+
current_environment["extra"] = ""
|
| 251 |
+
|
| 252 |
+
return _evaluate_markers(self._markers, current_environment)
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/metadata.py
ADDED
|
@@ -0,0 +1,408 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import email.feedparser
|
| 2 |
+
import email.header
|
| 3 |
+
import email.message
|
| 4 |
+
import email.parser
|
| 5 |
+
import email.policy
|
| 6 |
+
import sys
|
| 7 |
+
import typing
|
| 8 |
+
from typing import Dict, List, Optional, Tuple, Union, cast
|
| 9 |
+
|
| 10 |
+
if sys.version_info >= (3, 8): # pragma: no cover
|
| 11 |
+
from typing import TypedDict
|
| 12 |
+
else: # pragma: no cover
|
| 13 |
+
if typing.TYPE_CHECKING:
|
| 14 |
+
from typing_extensions import TypedDict
|
| 15 |
+
else:
|
| 16 |
+
try:
|
| 17 |
+
from typing_extensions import TypedDict
|
| 18 |
+
except ImportError:
|
| 19 |
+
|
| 20 |
+
class TypedDict:
|
| 21 |
+
def __init_subclass__(*_args, **_kwargs):
|
| 22 |
+
pass
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
# The RawMetadata class attempts to make as few assumptions about the underlying
|
| 26 |
+
# serialization formats as possible. The idea is that as long as a serialization
|
| 27 |
+
# formats offer some very basic primitives in *some* way then we can support
|
| 28 |
+
# serializing to and from that format.
|
| 29 |
+
class RawMetadata(TypedDict, total=False):
|
| 30 |
+
"""A dictionary of raw core metadata.
|
| 31 |
+
|
| 32 |
+
Each field in core metadata maps to a key of this dictionary (when data is
|
| 33 |
+
provided). The key is lower-case and underscores are used instead of dashes
|
| 34 |
+
compared to the equivalent core metadata field. Any core metadata field that
|
| 35 |
+
can be specified multiple times or can hold multiple values in a single
|
| 36 |
+
field have a key with a plural name.
|
| 37 |
+
|
| 38 |
+
Core metadata fields that can be specified multiple times are stored as a
|
| 39 |
+
list or dict depending on which is appropriate for the field. Any fields
|
| 40 |
+
which hold multiple values in a single field are stored as a list.
|
| 41 |
+
|
| 42 |
+
"""
|
| 43 |
+
|
| 44 |
+
# Metadata 1.0 - PEP 241
|
| 45 |
+
metadata_version: str
|
| 46 |
+
name: str
|
| 47 |
+
version: str
|
| 48 |
+
platforms: List[str]
|
| 49 |
+
summary: str
|
| 50 |
+
description: str
|
| 51 |
+
keywords: List[str]
|
| 52 |
+
home_page: str
|
| 53 |
+
author: str
|
| 54 |
+
author_email: str
|
| 55 |
+
license: str
|
| 56 |
+
|
| 57 |
+
# Metadata 1.1 - PEP 314
|
| 58 |
+
supported_platforms: List[str]
|
| 59 |
+
download_url: str
|
| 60 |
+
classifiers: List[str]
|
| 61 |
+
requires: List[str]
|
| 62 |
+
provides: List[str]
|
| 63 |
+
obsoletes: List[str]
|
| 64 |
+
|
| 65 |
+
# Metadata 1.2 - PEP 345
|
| 66 |
+
maintainer: str
|
| 67 |
+
maintainer_email: str
|
| 68 |
+
requires_dist: List[str]
|
| 69 |
+
provides_dist: List[str]
|
| 70 |
+
obsoletes_dist: List[str]
|
| 71 |
+
requires_python: str
|
| 72 |
+
requires_external: List[str]
|
| 73 |
+
project_urls: Dict[str, str]
|
| 74 |
+
|
| 75 |
+
# Metadata 2.0
|
| 76 |
+
# PEP 426 attempted to completely revamp the metadata format
|
| 77 |
+
# but got stuck without ever being able to build consensus on
|
| 78 |
+
# it and ultimately ended up withdrawn.
|
| 79 |
+
#
|
| 80 |
+
# However, a number of tools had started emiting METADATA with
|
| 81 |
+
# `2.0` Metadata-Version, so for historical reasons, this version
|
| 82 |
+
# was skipped.
|
| 83 |
+
|
| 84 |
+
# Metadata 2.1 - PEP 566
|
| 85 |
+
description_content_type: str
|
| 86 |
+
provides_extra: List[str]
|
| 87 |
+
|
| 88 |
+
# Metadata 2.2 - PEP 643
|
| 89 |
+
dynamic: List[str]
|
| 90 |
+
|
| 91 |
+
# Metadata 2.3 - PEP 685
|
| 92 |
+
# No new fields were added in PEP 685, just some edge case were
|
| 93 |
+
# tightened up to provide better interoptability.
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
_STRING_FIELDS = {
|
| 97 |
+
"author",
|
| 98 |
+
"author_email",
|
| 99 |
+
"description",
|
| 100 |
+
"description_content_type",
|
| 101 |
+
"download_url",
|
| 102 |
+
"home_page",
|
| 103 |
+
"license",
|
| 104 |
+
"maintainer",
|
| 105 |
+
"maintainer_email",
|
| 106 |
+
"metadata_version",
|
| 107 |
+
"name",
|
| 108 |
+
"requires_python",
|
| 109 |
+
"summary",
|
| 110 |
+
"version",
|
| 111 |
+
}
|
| 112 |
+
|
| 113 |
+
_LIST_STRING_FIELDS = {
|
| 114 |
+
"classifiers",
|
| 115 |
+
"dynamic",
|
| 116 |
+
"obsoletes",
|
| 117 |
+
"obsoletes_dist",
|
| 118 |
+
"platforms",
|
| 119 |
+
"provides",
|
| 120 |
+
"provides_dist",
|
| 121 |
+
"provides_extra",
|
| 122 |
+
"requires",
|
| 123 |
+
"requires_dist",
|
| 124 |
+
"requires_external",
|
| 125 |
+
"supported_platforms",
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _parse_keywords(data: str) -> List[str]:
|
| 130 |
+
"""Split a string of comma-separate keyboards into a list of keywords."""
|
| 131 |
+
return [k.strip() for k in data.split(",")]
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def _parse_project_urls(data: List[str]) -> Dict[str, str]:
|
| 135 |
+
"""Parse a list of label/URL string pairings separated by a comma."""
|
| 136 |
+
urls = {}
|
| 137 |
+
for pair in data:
|
| 138 |
+
# Our logic is slightly tricky here as we want to try and do
|
| 139 |
+
# *something* reasonable with malformed data.
|
| 140 |
+
#
|
| 141 |
+
# The main thing that we have to worry about, is data that does
|
| 142 |
+
# not have a ',' at all to split the label from the Value. There
|
| 143 |
+
# isn't a singular right answer here, and we will fail validation
|
| 144 |
+
# later on (if the caller is validating) so it doesn't *really*
|
| 145 |
+
# matter, but since the missing value has to be an empty str
|
| 146 |
+
# and our return value is dict[str, str], if we let the key
|
| 147 |
+
# be the missing value, then they'd have multiple '' values that
|
| 148 |
+
# overwrite each other in a accumulating dict.
|
| 149 |
+
#
|
| 150 |
+
# The other potentional issue is that it's possible to have the
|
| 151 |
+
# same label multiple times in the metadata, with no solid "right"
|
| 152 |
+
# answer with what to do in that case. As such, we'll do the only
|
| 153 |
+
# thing we can, which is treat the field as unparseable and add it
|
| 154 |
+
# to our list of unparsed fields.
|
| 155 |
+
parts = [p.strip() for p in pair.split(",", 1)]
|
| 156 |
+
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
|
| 157 |
+
|
| 158 |
+
# TODO: The spec doesn't say anything about if the keys should be
|
| 159 |
+
# considered case sensitive or not... logically they should
|
| 160 |
+
# be case-preserving and case-insensitive, but doing that
|
| 161 |
+
# would open up more cases where we might have duplicate
|
| 162 |
+
# entries.
|
| 163 |
+
label, url = parts
|
| 164 |
+
if label in urls:
|
| 165 |
+
# The label already exists in our set of urls, so this field
|
| 166 |
+
# is unparseable, and we can just add the whole thing to our
|
| 167 |
+
# unparseable data and stop processing it.
|
| 168 |
+
raise KeyError("duplicate labels in project urls")
|
| 169 |
+
urls[label] = url
|
| 170 |
+
|
| 171 |
+
return urls
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
|
| 175 |
+
"""Get the body of the message."""
|
| 176 |
+
# If our source is a str, then our caller has managed encodings for us,
|
| 177 |
+
# and we don't need to deal with it.
|
| 178 |
+
if isinstance(source, str):
|
| 179 |
+
payload: str = msg.get_payload()
|
| 180 |
+
return payload
|
| 181 |
+
# If our source is a bytes, then we're managing the encoding and we need
|
| 182 |
+
# to deal with it.
|
| 183 |
+
else:
|
| 184 |
+
bpayload: bytes = msg.get_payload(decode=True)
|
| 185 |
+
try:
|
| 186 |
+
return bpayload.decode("utf8", "strict")
|
| 187 |
+
except UnicodeDecodeError:
|
| 188 |
+
raise ValueError("payload in an invalid encoding")
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
# The various parse_FORMAT functions here are intended to be as lenient as
|
| 192 |
+
# possible in their parsing, while still returning a correctly typed
|
| 193 |
+
# RawMetadata.
|
| 194 |
+
#
|
| 195 |
+
# To aid in this, we also generally want to do as little touching of the
|
| 196 |
+
# data as possible, except where there are possibly some historic holdovers
|
| 197 |
+
# that make valid data awkward to work with.
|
| 198 |
+
#
|
| 199 |
+
# While this is a lower level, intermediate format than our ``Metadata``
|
| 200 |
+
# class, some light touch ups can make a massive difference in usability.
|
| 201 |
+
|
| 202 |
+
# Map METADATA fields to RawMetadata.
|
| 203 |
+
_EMAIL_TO_RAW_MAPPING = {
|
| 204 |
+
"author": "author",
|
| 205 |
+
"author-email": "author_email",
|
| 206 |
+
"classifier": "classifiers",
|
| 207 |
+
"description": "description",
|
| 208 |
+
"description-content-type": "description_content_type",
|
| 209 |
+
"download-url": "download_url",
|
| 210 |
+
"dynamic": "dynamic",
|
| 211 |
+
"home-page": "home_page",
|
| 212 |
+
"keywords": "keywords",
|
| 213 |
+
"license": "license",
|
| 214 |
+
"maintainer": "maintainer",
|
| 215 |
+
"maintainer-email": "maintainer_email",
|
| 216 |
+
"metadata-version": "metadata_version",
|
| 217 |
+
"name": "name",
|
| 218 |
+
"obsoletes": "obsoletes",
|
| 219 |
+
"obsoletes-dist": "obsoletes_dist",
|
| 220 |
+
"platform": "platforms",
|
| 221 |
+
"project-url": "project_urls",
|
| 222 |
+
"provides": "provides",
|
| 223 |
+
"provides-dist": "provides_dist",
|
| 224 |
+
"provides-extra": "provides_extra",
|
| 225 |
+
"requires": "requires",
|
| 226 |
+
"requires-dist": "requires_dist",
|
| 227 |
+
"requires-external": "requires_external",
|
| 228 |
+
"requires-python": "requires_python",
|
| 229 |
+
"summary": "summary",
|
| 230 |
+
"supported-platform": "supported_platforms",
|
| 231 |
+
"version": "version",
|
| 232 |
+
}
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
|
| 236 |
+
"""Parse a distribution's metadata.
|
| 237 |
+
|
| 238 |
+
This function returns a two-item tuple of dicts. The first dict is of
|
| 239 |
+
recognized fields from the core metadata specification. Fields that can be
|
| 240 |
+
parsed and translated into Python's built-in types are converted
|
| 241 |
+
appropriately. All other fields are left as-is. Fields that are allowed to
|
| 242 |
+
appear multiple times are stored as lists.
|
| 243 |
+
|
| 244 |
+
The second dict contains all other fields from the metadata. This includes
|
| 245 |
+
any unrecognized fields. It also includes any fields which are expected to
|
| 246 |
+
be parsed into a built-in type but were not formatted appropriately. Finally,
|
| 247 |
+
any fields that are expected to appear only once but are repeated are
|
| 248 |
+
included in this dict.
|
| 249 |
+
|
| 250 |
+
"""
|
| 251 |
+
raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
|
| 252 |
+
unparsed: Dict[str, List[str]] = {}
|
| 253 |
+
|
| 254 |
+
if isinstance(data, str):
|
| 255 |
+
parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
|
| 256 |
+
else:
|
| 257 |
+
parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)
|
| 258 |
+
|
| 259 |
+
# We have to wrap parsed.keys() in a set, because in the case of multiple
|
| 260 |
+
# values for a key (a list), the key will appear multiple times in the
|
| 261 |
+
# list of keys, but we're avoiding that by using get_all().
|
| 262 |
+
for name in frozenset(parsed.keys()):
|
| 263 |
+
# Header names in RFC are case insensitive, so we'll normalize to all
|
| 264 |
+
# lower case to make comparisons easier.
|
| 265 |
+
name = name.lower()
|
| 266 |
+
|
| 267 |
+
# We use get_all() here, even for fields that aren't multiple use,
|
| 268 |
+
# because otherwise someone could have e.g. two Name fields, and we
|
| 269 |
+
# would just silently ignore it rather than doing something about it.
|
| 270 |
+
headers = parsed.get_all(name)
|
| 271 |
+
|
| 272 |
+
# The way the email module works when parsing bytes is that it
|
| 273 |
+
# unconditionally decodes the bytes as ascii using the surrogateescape
|
| 274 |
+
# handler. When you pull that data back out (such as with get_all() ),
|
| 275 |
+
# it looks to see if the str has any surrogate escapes, and if it does
|
| 276 |
+
# it wraps it in a Header object instead of returning the string.
|
| 277 |
+
#
|
| 278 |
+
# As such, we'll look for those Header objects, and fix up the encoding.
|
| 279 |
+
value = []
|
| 280 |
+
# Flag if we have run into any issues processing the headers, thus
|
| 281 |
+
# signalling that the data belongs in 'unparsed'.
|
| 282 |
+
valid_encoding = True
|
| 283 |
+
for h in headers:
|
| 284 |
+
# It's unclear if this can return more types than just a Header or
|
| 285 |
+
# a str, so we'll just assert here to make sure.
|
| 286 |
+
assert isinstance(h, (email.header.Header, str))
|
| 287 |
+
|
| 288 |
+
# If it's a header object, we need to do our little dance to get
|
| 289 |
+
# the real data out of it. In cases where there is invalid data
|
| 290 |
+
# we're going to end up with mojibake, but there's no obvious, good
|
| 291 |
+
# way around that without reimplementing parts of the Header object
|
| 292 |
+
# ourselves.
|
| 293 |
+
#
|
| 294 |
+
# That should be fine since, if mojibacked happens, this key is
|
| 295 |
+
# going into the unparsed dict anyways.
|
| 296 |
+
if isinstance(h, email.header.Header):
|
| 297 |
+
# The Header object stores it's data as chunks, and each chunk
|
| 298 |
+
# can be independently encoded, so we'll need to check each
|
| 299 |
+
# of them.
|
| 300 |
+
chunks: List[Tuple[bytes, Optional[str]]] = []
|
| 301 |
+
for bin, encoding in email.header.decode_header(h):
|
| 302 |
+
try:
|
| 303 |
+
bin.decode("utf8", "strict")
|
| 304 |
+
except UnicodeDecodeError:
|
| 305 |
+
# Enable mojibake.
|
| 306 |
+
encoding = "latin1"
|
| 307 |
+
valid_encoding = False
|
| 308 |
+
else:
|
| 309 |
+
encoding = "utf8"
|
| 310 |
+
chunks.append((bin, encoding))
|
| 311 |
+
|
| 312 |
+
# Turn our chunks back into a Header object, then let that
|
| 313 |
+
# Header object do the right thing to turn them into a
|
| 314 |
+
# string for us.
|
| 315 |
+
value.append(str(email.header.make_header(chunks)))
|
| 316 |
+
# This is already a string, so just add it.
|
| 317 |
+
else:
|
| 318 |
+
value.append(h)
|
| 319 |
+
|
| 320 |
+
# We've processed all of our values to get them into a list of str,
|
| 321 |
+
# but we may have mojibake data, in which case this is an unparsed
|
| 322 |
+
# field.
|
| 323 |
+
if not valid_encoding:
|
| 324 |
+
unparsed[name] = value
|
| 325 |
+
continue
|
| 326 |
+
|
| 327 |
+
raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
|
| 328 |
+
if raw_name is None:
|
| 329 |
+
# This is a bit of a weird situation, we've encountered a key that
|
| 330 |
+
# we don't know what it means, so we don't know whether it's meant
|
| 331 |
+
# to be a list or not.
|
| 332 |
+
#
|
| 333 |
+
# Since we can't really tell one way or another, we'll just leave it
|
| 334 |
+
# as a list, even though it may be a single item list, because that's
|
| 335 |
+
# what makes the most sense for email headers.
|
| 336 |
+
unparsed[name] = value
|
| 337 |
+
continue
|
| 338 |
+
|
| 339 |
+
# If this is one of our string fields, then we'll check to see if our
|
| 340 |
+
# value is a list of a single item. If it is then we'll assume that
|
| 341 |
+
# it was emitted as a single string, and unwrap the str from inside
|
| 342 |
+
# the list.
|
| 343 |
+
#
|
| 344 |
+
# If it's any other kind of data, then we haven't the faintest clue
|
| 345 |
+
# what we should parse it as, and we have to just add it to our list
|
| 346 |
+
# of unparsed stuff.
|
| 347 |
+
if raw_name in _STRING_FIELDS and len(value) == 1:
|
| 348 |
+
raw[raw_name] = value[0]
|
| 349 |
+
# If this is one of our list of string fields, then we can just assign
|
| 350 |
+
# the value, since email *only* has strings, and our get_all() call
|
| 351 |
+
# above ensures that this is a list.
|
| 352 |
+
elif raw_name in _LIST_STRING_FIELDS:
|
| 353 |
+
raw[raw_name] = value
|
| 354 |
+
# Special Case: Keywords
|
| 355 |
+
# The keywords field is implemented in the metadata spec as a str,
|
| 356 |
+
# but it conceptually is a list of strings, and is serialized using
|
| 357 |
+
# ", ".join(keywords), so we'll do some light data massaging to turn
|
| 358 |
+
# this into what it logically is.
|
| 359 |
+
elif raw_name == "keywords" and len(value) == 1:
|
| 360 |
+
raw[raw_name] = _parse_keywords(value[0])
|
| 361 |
+
# Special Case: Project-URL
|
| 362 |
+
# The project urls is implemented in the metadata spec as a list of
|
| 363 |
+
# specially-formatted strings that represent a key and a value, which
|
| 364 |
+
# is fundamentally a mapping, however the email format doesn't support
|
| 365 |
+
# mappings in a sane way, so it was crammed into a list of strings
|
| 366 |
+
# instead.
|
| 367 |
+
#
|
| 368 |
+
# We will do a little light data massaging to turn this into a map as
|
| 369 |
+
# it logically should be.
|
| 370 |
+
elif raw_name == "project_urls":
|
| 371 |
+
try:
|
| 372 |
+
raw[raw_name] = _parse_project_urls(value)
|
| 373 |
+
except KeyError:
|
| 374 |
+
unparsed[name] = value
|
| 375 |
+
# Nothing that we've done has managed to parse this, so it'll just
|
| 376 |
+
# throw it in our unparseable data and move on.
|
| 377 |
+
else:
|
| 378 |
+
unparsed[name] = value
|
| 379 |
+
|
| 380 |
+
# We need to support getting the Description from the message payload in
|
| 381 |
+
# addition to getting it from the the headers. This does mean, though, there
|
| 382 |
+
# is the possibility of it being set both ways, in which case we put both
|
| 383 |
+
# in 'unparsed' since we don't know which is right.
|
| 384 |
+
try:
|
| 385 |
+
payload = _get_payload(parsed, data)
|
| 386 |
+
except ValueError:
|
| 387 |
+
unparsed.setdefault("description", []).append(
|
| 388 |
+
parsed.get_payload(decode=isinstance(data, bytes))
|
| 389 |
+
)
|
| 390 |
+
else:
|
| 391 |
+
if payload:
|
| 392 |
+
# Check to see if we've already got a description, if so then both
|
| 393 |
+
# it, and this body move to unparseable.
|
| 394 |
+
if "description" in raw:
|
| 395 |
+
description_header = cast(str, raw.pop("description"))
|
| 396 |
+
unparsed.setdefault("description", []).extend(
|
| 397 |
+
[description_header, payload]
|
| 398 |
+
)
|
| 399 |
+
elif "description" in unparsed:
|
| 400 |
+
unparsed["description"].append(payload)
|
| 401 |
+
else:
|
| 402 |
+
raw["description"] = payload
|
| 403 |
+
|
| 404 |
+
# We need to cast our `raw` to a metadata, because a TypedDict only support
|
| 405 |
+
# literal key names, but we're computing our key names on purpose, but the
|
| 406 |
+
# way this function is implemented, our `TypedDict` can only have valid key
|
| 407 |
+
# names.
|
| 408 |
+
return cast(RawMetadata, raw), unparsed
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/requirements.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import urllib.parse
|
| 6 |
+
from typing import Any, List, Optional, Set
|
| 7 |
+
|
| 8 |
+
from ._parser import parse_requirement as _parse_requirement
|
| 9 |
+
from ._tokenizer import ParserSyntaxError
|
| 10 |
+
from .markers import Marker, _normalize_extra_values
|
| 11 |
+
from .specifiers import SpecifierSet
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class InvalidRequirement(ValueError):
    """
    An invalid requirement was found, users should refer to PEP 508.

    Raised by :class:`Requirement` when the requirement string cannot be
    parsed, or when it carries a URL that is malformed.
    """
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    # If so how do we do that? Do we need to test against the _name_ of
    # the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        # Delegate the PEP 508 grammar to the shared parser; surface any
        # syntax problem as this package's public exception type.
        try:
            req = _parse_requirement(requirement_string)
        except ParserSyntaxError as exc:
            raise InvalidRequirement(str(exc)) from exc

        self.name: str = req.name

        if not req.url:
            self.url: Optional[str] = None
        else:
            url_parts = urllib.parse.urlparse(req.url)
            if url_parts.scheme == "file":
                # file: URLs must round-trip through urlunparse unchanged.
                if urllib.parse.urlunparse(url_parts) != req.url:
                    raise InvalidRequirement("Invalid URL given")
            elif not (url_parts.scheme and url_parts.netloc) or (
                not url_parts.scheme and not url_parts.netloc
            ):
                # Non-file URLs need both a scheme and a network location.
                raise InvalidRequirement(f"Invalid URL: {req.url}")
            self.url = req.url

        self.extras: Set[str] = set(req.extras if req.extras else [])
        self.specifier: SpecifierSet = SpecifierSet(req.specifier)

        # Build the Marker without re-parsing: __new__ skips Marker.__init__
        # and the already-parsed (and extra-normalized) tree is attached
        # directly.
        self.marker: Optional[Marker] = None
        if req.marker is not None:
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(req.marker)

    def __str__(self) -> str:
        """Render the requirement back into canonical PEP 508 form."""
        pieces: List[str] = [self.name]

        if self.extras:
            pieces.append("[{}]".format(",".join(sorted(self.extras))))

        if self.specifier:
            pieces.append(str(self.specifier))

        if self.url:
            pieces.append(f"@ {self.url}")
            if self.marker:
                # A space is required between the URL and the ";" that
                # introduces the marker.
                pieces.append(" ")

        if self.marker:
            pieces.append(f"; {self.marker}")

        return "".join(pieces)

    def __repr__(self) -> str:
        return "<Requirement('{}')>".format(self)

    def __hash__(self) -> int:
        # Hash on the canonical string form, namespaced by the class name.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            self.name == other.name
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py
ADDED
|
@@ -0,0 +1,1008 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
| 8 |
+
from packaging.version import Version
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import abc
|
| 12 |
+
import itertools
|
| 13 |
+
import re
|
| 14 |
+
from typing import (
|
| 15 |
+
Callable,
|
| 16 |
+
Iterable,
|
| 17 |
+
Iterator,
|
| 18 |
+
List,
|
| 19 |
+
Optional,
|
| 20 |
+
Set,
|
| 21 |
+
Tuple,
|
| 22 |
+
TypeVar,
|
| 23 |
+
Union,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
from .utils import canonicalize_version
|
| 27 |
+
from .version import Version
|
| 28 |
+
|
| 29 |
+
UnparsedVersion = Union[Version, str]
|
| 30 |
+
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
|
| 31 |
+
CallableOperator = Callable[[Version, str], bool]
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return *version* as a :class:`Version`, parsing it when given a str."""
    if isinstance(version, Version):
        return version
    return Version(version)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class InvalidSpecifier(ValueError):
    """
    Raised when attempting to create a :class:`Specifier` with a specifier
    string that is invalid.

    >>> Specifier("lolwat")
    Traceback (most recent call last):
        ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface implemented by Specifier-like objects.

    Concrete implementations (e.g. ``Specifier`` and ``SpecifierSet``) must
    provide string/hash/equality behaviour, a ``prereleases`` policy, and the
    ``contains``/``filter`` matching operations.
    """

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier-like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier-like
        objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> Optional[bool]:
        """Whether or not pre-releases as a whole are allowed.

        This can be set to either ``True`` or ``False`` to explicitly enable or disable
        prereleases or it can be set to ``None`` (the default) to use default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class Specifier(BaseSpecifier):
|
| 108 |
+
"""This class abstracts handling of version specifiers.
|
| 109 |
+
|
| 110 |
+
.. tip::
|
| 111 |
+
|
| 112 |
+
It is generally not required to instantiate this manually. You should instead
|
| 113 |
+
prefer to work with :class:`SpecifierSet` instead, which can parse
|
| 114 |
+
comma-separated version specifiers (which is what package metadata contains).
|
| 115 |
+
"""
|
| 116 |
+
|
| 117 |
+
_operator_regex_str = r"""
|
| 118 |
+
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
| 119 |
+
"""
|
| 120 |
+
_version_regex_str = r"""
|
| 121 |
+
(?P<version>
|
| 122 |
+
(?:
|
| 123 |
+
# The identity operators allow for an escape hatch that will
|
| 124 |
+
# do an exact string match of the version you wish to install.
|
| 125 |
+
# This will not be parsed by PEP 440 and we cannot determine
|
| 126 |
+
# any semantic meaning from it. This operator is discouraged
|
| 127 |
+
# but included entirely as an escape hatch.
|
| 128 |
+
(?<====) # Only match for the identity operator
|
| 129 |
+
\s*
|
| 130 |
+
[^\s;)]* # The arbitrary version can be just about anything,
|
| 131 |
+
# we match everything except for whitespace, a
|
| 132 |
+
# semi-colon for marker support, and a closing paren
|
| 133 |
+
# since versions can be enclosed in them.
|
| 134 |
+
)
|
| 135 |
+
|
|
| 136 |
+
(?:
|
| 137 |
+
# The (non)equality operators allow for wild card and local
|
| 138 |
+
# versions to be specified so we have to define these two
|
| 139 |
+
# operators separately to enable that.
|
| 140 |
+
(?<===|!=) # Only match for equals and not equals
|
| 141 |
+
|
| 142 |
+
\s*
|
| 143 |
+
v?
|
| 144 |
+
(?:[0-9]+!)? # epoch
|
| 145 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 146 |
+
|
| 147 |
+
# You cannot use a wild card and a pre-release, post-release, a dev or
|
| 148 |
+
# local version together so group them with a | and make them optional.
|
| 149 |
+
(?:
|
| 150 |
+
\.\* # Wild card syntax of .*
|
| 151 |
+
|
|
| 152 |
+
(?: # pre release
|
| 153 |
+
[-_\.]?
|
| 154 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 155 |
+
[-_\.]?
|
| 156 |
+
[0-9]*
|
| 157 |
+
)?
|
| 158 |
+
(?: # post release
|
| 159 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 160 |
+
)?
|
| 161 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 162 |
+
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
| 163 |
+
)?
|
| 164 |
+
)
|
| 165 |
+
|
|
| 166 |
+
(?:
|
| 167 |
+
# The compatible operator requires at least two digits in the
|
| 168 |
+
# release segment.
|
| 169 |
+
(?<=~=) # Only match for the compatible operator
|
| 170 |
+
|
| 171 |
+
\s*
|
| 172 |
+
v?
|
| 173 |
+
(?:[0-9]+!)? # epoch
|
| 174 |
+
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
| 175 |
+
(?: # pre release
|
| 176 |
+
[-_\.]?
|
| 177 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 178 |
+
[-_\.]?
|
| 179 |
+
[0-9]*
|
| 180 |
+
)?
|
| 181 |
+
(?: # post release
|
| 182 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 183 |
+
)?
|
| 184 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 185 |
+
)
|
| 186 |
+
|
|
| 187 |
+
(?:
|
| 188 |
+
# All other operators only allow a sub set of what the
|
| 189 |
+
# (non)equality operators do. Specifically they do not allow
|
| 190 |
+
# local versions to be specified nor do they allow the prefix
|
| 191 |
+
# matching wild cards.
|
| 192 |
+
(?<!==|!=|~=) # We have special cases for these
|
| 193 |
+
# operators so we want to make sure they
|
| 194 |
+
# don't match here.
|
| 195 |
+
|
| 196 |
+
\s*
|
| 197 |
+
v?
|
| 198 |
+
(?:[0-9]+!)? # epoch
|
| 199 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 200 |
+
(?: # pre release
|
| 201 |
+
[-_\.]?
|
| 202 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 203 |
+
[-_\.]?
|
| 204 |
+
[0-9]*
|
| 205 |
+
)?
|
| 206 |
+
(?: # post release
|
| 207 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 208 |
+
)?
|
| 209 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 210 |
+
)
|
| 211 |
+
)
|
| 212 |
+
"""
|
| 213 |
+
|
| 214 |
+
_regex = re.compile(
|
| 215 |
+
r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
|
| 216 |
+
re.VERBOSE | re.IGNORECASE,
|
| 217 |
+
)
|
| 218 |
+
|
| 219 |
+
_operators = {
|
| 220 |
+
"~=": "compatible",
|
| 221 |
+
"==": "equal",
|
| 222 |
+
"!=": "not_equal",
|
| 223 |
+
"<=": "less_than_equal",
|
| 224 |
+
">=": "greater_than_equal",
|
| 225 |
+
"<": "less_than",
|
| 226 |
+
">": "greater_than",
|
| 227 |
+
"===": "arbitrary",
|
| 228 |
+
}
|
| 229 |
+
|
| 230 |
+
def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
    """Initialize a Specifier instance.

    :param spec:
        The string representation of a specifier which will be parsed and
        normalized before use.
    :param prereleases:
        This tells the specifier if it should accept prerelease versions if
        applicable or not. The default of ``None`` will autodetect it from the
        given specifiers.
    :raises InvalidSpecifier:
        If the given specifier is invalid (i.e. bad syntax).
    """
    m = self._regex.search(spec)
    if m is None:
        raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

    # Keep only the (operator, version) pair, with surrounding whitespace
    # removed; everything else derives from this tuple.
    self._spec: Tuple[str, str] = (
        m.group("operator").strip(),
        m.group("version").strip(),
    )

    # Explicit prerelease preference; None means "auto-detect" later.
    self._prereleases = prereleases
|
| 254 |
+
|
| 255 |
+
# https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
@property  # type: ignore[override]
def prereleases(self) -> bool:
    """Whether this specifier explicitly or implicitly accepts pre-releases."""
    # An explicit setting always wins.
    if self._prereleases is not None:
        return self._prereleases

    op, ver = self._spec

    # Only inclusive operators can implicitly admit a pre-release, and only
    # when the version they name is itself a pre-release.
    if op not in ["==", ">=", "<=", "~=", "==="]:
        return False

    # "==" may carry a trailing ".*" prefix-match suffix which must be
    # removed before the version can be parsed.
    if op == "==" and ver.endswith(".*"):
        ver = ver[:-2]

    return Version(ver).is_prerelease
|
| 279 |
+
|
| 280 |
+
@prereleases.setter
def prereleases(self, value: bool) -> None:
    # Overrides the auto-detection performed by the getter.
    self._prereleases = value
|
| 283 |
+
|
| 284 |
+
@property
def operator(self) -> str:
    """The operator of this specifier.

    >>> Specifier("==1.2.3").operator
    '=='
    """
    # self._spec is the (operator, version) pair parsed in __init__.
    return self._spec[0]
|
| 292 |
+
|
| 293 |
+
@property
def version(self) -> str:
    """The version of this specifier.

    >>> Specifier("==1.2.3").version
    '1.2.3'
    """
    # self._spec is the (operator, version) pair parsed in __init__.
    return self._spec[1]
|
| 301 |
+
|
| 302 |
+
def __repr__(self) -> str:
    """A representation of the Specifier that shows all internal state.

    >>> Specifier('>=1.0.0')
    <Specifier('>=1.0.0')>
    >>> Specifier('>=1.0.0', prereleases=False)
    <Specifier('>=1.0.0', prereleases=False)>
    >>> Specifier('>=1.0.0', prereleases=True)
    <Specifier('>=1.0.0', prereleases=True)>
    """
    # Only surface the prereleases flag when it was set explicitly.
    if self._prereleases is None:
        pre = ""
    else:
        pre = f", prereleases={self.prereleases!r}"

    return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
|
| 319 |
+
|
| 320 |
+
def __str__(self) -> str:
    """A string representation of the Specifier that can be round-tripped.

    >>> str(Specifier('>=1.0.0'))
    '>=1.0.0'
    >>> str(Specifier('>=1.0.0', prereleases=False))
    '>=1.0.0'
    """
    operator, version = self._spec
    return f"{operator}{version}"
|
| 329 |
+
|
| 330 |
+
@property
def _canonical_spec(self) -> Tuple[str, str]:
    # Normalized (operator, version) pair used by __eq__ and __hash__, so
    # that e.g. "==1.2.3" and "== 1.2.3.0" compare equal. Trailing zeros
    # are preserved for "~=" because they are significant to the
    # compatible-release operator.
    canonical_version = canonicalize_version(
        self._spec[1],
        strip_trailing_zero=(self._spec[0] != "~="),
    )
    return self._spec[0], canonical_version
|
| 337 |
+
|
| 338 |
+
def __hash__(self) -> int:
    # Hash the canonical form so specifiers equal under __eq__ hash equally.
    return hash(self._canonical_spec)
|
| 340 |
+
|
| 341 |
+
def __eq__(self, other: object) -> bool:
    """Whether or not the two Specifier-like objects are equal.

    :param other: The other object to check against.

    The value of :attr:`prereleases` is ignored.

    >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
    True
    >>> (Specifier("==1.2.3", prereleases=False) ==
    ...  Specifier("==1.2.3", prereleases=True))
    True
    >>> Specifier("==1.2.3") == "==1.2.3"
    True
    >>> Specifier("==1.2.3") == Specifier("==1.2.4")
    False
    >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
    False
    """
    if isinstance(other, self.__class__):
        pass
    elif isinstance(other, str):
        # Strings are compared by parsing them into a Specifier first;
        # unparseable strings are simply not comparable.
        try:
            other = self.__class__(str(other))
        except InvalidSpecifier:
            return NotImplemented
    else:
        return NotImplemented

    # Compare canonical forms so formatting differences don't matter.
    return self._canonical_spec == other._canonical_spec
|
| 369 |
+
|
| 370 |
+
def _get_operator(self, op: str) -> CallableOperator:
    # Map the operator token (e.g. "~=") to its _compare_* implementation.
    method_name = f"_compare_{self._operators[op]}"
    operator_callable: CallableOperator = getattr(self, method_name)
    return operator_callable
|
| 375 |
+
|
| 376 |
+
def _compare_compatible(self, prospective: Version, spec: str) -> bool:
    # Compatible releases have an equivalent combination of >= and ==:
    # ~=2.2 means >=2.2,==2.*. Rather than re-implementing the semantics,
    # delegate to those two operators with a constructed prefix spec.

    # Everything but the last component of the version, ignoring suffix
    # segments (pre/post/dev/local), forms the prefix that must match.
    release_parts = list(itertools.takewhile(_is_not_suffix, _version_split(spec)))
    prefix = ".".join(release_parts[:-1]) + ".*"

    ge = self._get_operator(">=")
    eq = self._get_operator("==")
    return ge(prospective, spec) and eq(prospective, prefix)
|
| 396 |
+
|
| 397 |
+
def _compare_equal(self, prospective: Version, spec: str) -> bool:
    # Decide "==": either a prefix match ("1.2.*") or an exact comparison.

    # We need special logic to handle prefix matching
    if spec.endswith(".*"):
        # In the case of prefix matching we want to ignore local segment.
        normalized_prospective = canonicalize_version(
            prospective.public, strip_trailing_zero=False
        )
        # Get the normalized version string ignoring the trailing .*
        normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
        # Split the spec out by dots, and pretend that there is an implicit
        # dot in between a release segment and a pre-release segment.
        split_spec = _version_split(normalized_spec)

        # Split the prospective version out by dots, and pretend that there
        # is an implicit dot in between a release segment and a pre-release
        # segment.
        split_prospective = _version_split(normalized_prospective)

        # 0-pad the prospective version before shortening it to get the correct
        # shortened version.
        padded_prospective, _ = _pad_version(split_prospective, split_spec)

        # Shorten the prospective version to be the same length as the spec
        # so that we can determine if the specifier is a prefix of the
        # prospective version or not.
        shortened_prospective = padded_prospective[: len(split_spec)]

        return shortened_prospective == split_spec
    else:
        # Convert our spec string into a Version
        spec_version = Version(spec)

        # If the specifier does not have a local segment, then we want to
        # act as if the prospective version also does not have a local
        # segment.
        if not spec_version.local:
            prospective = Version(prospective.public)

        return prospective == spec_version
|
| 437 |
+
|
| 438 |
+
def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
    # "!=" is the exact negation of "==" (including prefix matching).
    return not self._compare_equal(prospective, spec)
|
| 440 |
+
|
| 441 |
+
def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
    # Local version identifiers are NOT permitted in the version specifier,
    # so the prospective version's local label is dropped (via .public)
    # before comparing.
    public_prospective = Version(prospective.public)
    return public_prospective <= Version(spec)
|
| 447 |
+
|
| 448 |
+
def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
    # Local version identifiers are NOT permitted in the version specifier,
    # so the prospective version's local label is dropped (via .public)
    # before comparing.
    public_prospective = Version(prospective.public)
    return public_prospective >= Version(spec)
|
| 454 |
+
|
| 455 |
+
def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
    # Work with the spec as a parsed Version.
    spec = Version(spec_str)

    # Not strictly smaller? Then "<" cannot match; bail out early.
    if not prospective < spec:
        return False

    # Unless the specifier itself names a pre-release, "<X" must not match
    # pre-releases of X itself (e.g. <3.1 rejects 3.1.dev0 but accepts
    # 3.0.dev0).
    if (
        prospective.is_prerelease
        and not spec.is_prerelease
        and Version(prospective.base_version) == Version(spec.base_version)
    ):
        return False

    # Strictly less than the spec, and not an excluded pre-release of it.
    return True
|
| 479 |
+
|
| 480 |
+
def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
    # Decide ">" with PEP 440's exclusions for post-releases and local
    # versions of the spec version itself.

    # Convert our spec to a Version instance, since we'll want to work with
    # it as a version.
    spec = Version(spec_str)

    # Check to see if the prospective version is greater than the spec
    # version. If it's not we can short circuit and just return False now
    # instead of doing extra unneeded work.
    if not prospective > spec:
        return False

    # This special case is here so that, unless the specifier itself
    # includes is a post-release version, that we do not accept
    # post-release versions for the version mentioned in the specifier
    # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
    if not spec.is_postrelease and prospective.is_postrelease:
        if Version(prospective.base_version) == Version(spec.base_version):
            return False

    # Ensure that we do not allow a local version of the version mentioned
    # in the specifier, which is technically greater than, to match.
    if prospective.local is not None:
        if Version(prospective.base_version) == Version(spec.base_version):
            return False

    # If we've gotten to here, it means that prospective version is both
    # greater than the spec version *and* it's not a pre-release of the
    # same version in the spec.
    return True
|
| 510 |
+
|
| 511 |
+
def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
    # "===" is a plain case-insensitive string comparison; no PEP 440
    # semantics are applied.
    lhs = str(prospective).lower()
    rhs = str(spec).lower()
    return lhs == rhs
|
| 513 |
+
|
| 514 |
+
def __contains__(self, item: Union[str, Version]) -> bool:
    """Return whether or not the item is contained in this specifier.

    :param item: The item to check for.

    This is used for the ``in`` operator and behaves the same as
    :meth:`contains` with no ``prereleases`` argument passed.

    >>> "1.2.3" in Specifier(">=1.2.3")
    True
    >>> Version("1.2.3") in Specifier(">=1.2.3")
    True
    >>> "1.0.0" in Specifier(">=1.2.3")
    False
    >>> "1.3.0a1" in Specifier(">=1.2.3")
    False
    >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
    True
    """
    # Delegates entirely to contains() with the default prerelease policy.
    return self.contains(item)
|
| 534 |
+
|
| 535 |
+
def contains(
    self, item: UnparsedVersion, prereleases: Optional[bool] = None
) -> bool:
    """Return whether or not the item is contained in this specifier.

    :param item:
        The item to check for, which can be a version string or a
        :class:`Version` instance.
    :param prereleases:
        Whether or not to match prereleases with this Specifier. If set to
        ``None`` (the default), it uses :attr:`prereleases` to determine
        whether or not prereleases are allowed.

    >>> Specifier(">=1.2.3").contains("1.2.3")
    True
    >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
    True
    >>> Specifier(">=1.2.3").contains("1.0.0")
    False
    >>> Specifier(">=1.2.3").contains("1.3.0a1")
    False
    >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
    True
    >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
    True
    """
    # Fall back to this specifier's own prerelease policy when the caller
    # did not express one.
    allow_prereleases = self.prereleases if prereleases is None else prereleases

    # Accept both version strings and Version instances; this also gives a
    # shortcut for expressions like: "2.0" in Specifier(">=2").
    candidate = _coerce_version(item)

    # Pre-release candidates are rejected outright unless pre-releases are
    # allowed, short-circuiting the comparison below.
    if candidate.is_prerelease and not allow_prereleases:
        return False

    # Dispatch to the operator-specific comparison for the real answer.
    compare: CallableOperator = self._get_operator(self.operator)
    return compare(candidate, self.version)
|
| 580 |
+
|
| 581 |
+
def filter(
    self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
) -> Iterator[UnparsedVersionVar]:
    """Filter items in the given iterable, that match the specifier.

    :param iterable:
        An iterable that can contain version strings and :class:`Version` instances.
        The items in the iterable will be filtered according to the specifier.
    :param prereleases:
        Whether or not to allow prereleases in the returned iterator. If set to
        ``None`` (the default), it will intelligently decide whether to allow
        prereleases or not (based on the :attr:`prereleases` attribute, and
        whether the only versions matching are prereleases).

    This method is smarter than just ``filter(Specifier().contains, [...])``
    because it implements the rule from :pep:`440` that a prerelease item
    SHOULD be accepted if no other versions match the given specifier.

    >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
    ['1.3']
    >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
    ['1.2.3', '1.3', <Version('1.4')>]
    >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
    ['1.5a1']
    >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
    ['1.3', '1.5a1']
    >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
    ['1.3', '1.5a1']
    """
    emitted_any = False
    deferred_prereleases = []

    # When the caller expressed no preference, probe containment with
    # prereleases allowed so that prerelease-only inputs can be rescued
    # by the PEP 440 fallback below.
    effective_prereleases = True if prereleases is None else prereleases

    # Walk the iterable, yielding matches immediately and holding back
    # prereleases that were only accepted because of the probe above.
    for candidate in iterable:
        candidate_version = _coerce_version(candidate)

        if not self.contains(candidate_version, prereleases=effective_prereleases):
            continue

        # A prerelease is deferred unless prereleases were allowed either
        # explicitly by the caller or by this specifier itself.
        if candidate_version.is_prerelease and not (
            prereleases or self.prereleases
        ):
            deferred_prereleases.append(candidate)
        else:
            emitted_any = True
            yield candidate

    # PEP 440 fallback: if nothing but prereleases matched, yield the
    # prereleases we held back instead of producing an empty result.
    if not emitted_any:
        yield from deferred_prereleases
|
| 641 |
+
|
| 642 |
+
|
| 643 |
+
# Matches a release segment with a fused pre-release marker, e.g. "1rc2":
# group 1 captures the numeric part ("1"), group 2 the pre-release part
# ("rc2"), so _version_split can treat them as separate segments.
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
| 644 |
+
|
| 645 |
+
|
| 646 |
+
def _version_split(version: str) -> List[str]:
    """Split *version* on ``.`` into comparison segments.

    A segment with a fused pre-release marker such as ``"1rc2"`` is split
    further into ``"1"`` and ``"rc2"`` (via ``_prefix_regex``) so that the
    numeric part can be compared on its own.
    """
    segments: List[str] = []
    for piece in version.split("."):
        fused = _prefix_regex.search(piece)
        # A fused segment contributes both captured groups; anything else
        # is carried over verbatim.
        segments.extend(fused.groups() if fused else (piece,))
    return segments
|
| 655 |
+
|
| 656 |
+
|
| 657 |
+
def _is_not_suffix(segment: str) -> bool:
|
| 658 |
+
return not any(
|
| 659 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
| 660 |
+
)
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
|
| 664 |
+
left_split, right_split = [], []
|
| 665 |
+
|
| 666 |
+
# Get the release segment of our versions
|
| 667 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
| 668 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
| 669 |
+
|
| 670 |
+
# Get the rest of our versions
|
| 671 |
+
left_split.append(left[len(left_split[0]) :])
|
| 672 |
+
right_split.append(right[len(right_split[0]) :])
|
| 673 |
+
|
| 674 |
+
# Insert our padding
|
| 675 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
| 676 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
| 677 |
+
|
| 678 |
+
return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
|
| 679 |
+
|
| 680 |
+
|
| 681 |
+
class SpecifierSet(BaseSpecifier):
    """This class abstracts handling of a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list of
    specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            The string representation of a specifier or a comma-separated list of
            specifiers which will be parsed and normalized before use.
        :param prereleases:
            This tells the SpecifierSet if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.

        :raises InvalidSpecifier:
            If the given ``specifiers`` are not parseable, then this exception will
            be raised.
        """

        # Split on `,` to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier into a Specifier instance;
        # Specifier() raises InvalidSpecifier for anything it cannot parse.
        parsed: Set[Specifier] = set()
        for specifier in split_specifiers:
            parsed.add(Specifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    @property
    def prereleases(self) -> Optional[bool]:
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set that shows all internal state.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        # Only show the prereleases override when one was given explicitly.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the specifier set that can be round-tripped.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
        '!=1.0.1,>=1.0.0'
        """
        # Sorted so the output is deterministic despite the frozenset storage.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Return a SpecifierSet which is a combination of the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Carry over whichever side has an explicit prereleases override;
        # conflicting explicit overrides cannot be merged meaningfully.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        """Whether or not the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
        False
        """
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """
        Returns an iterator over all the underlying :class:`Specifier` instances
        in this specifier set.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: Optional[bool] = None,
        installed: Optional[bool] = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this SpecifierSet. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.
        :param installed:
            If truthy and ``item`` is a prerelease, ``item`` is normalized to
            its base version before matching.

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        # Ensure that our item is a Version instance.
        if not isinstance(item, Version):
            item = Version(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # An installed prerelease is matched by its base version only.
        if installed and item.is_prerelease:
            item = Version(item.base_version)

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifiers in this set.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
        ['1.3', <Version('1.4')>]
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
        []
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet will filter items based on the presence of prerelease
        versions in the set.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases.
        else:
            filtered: List[UnparsedVersionVar] = []
            found_prereleases: List[UnparsedVersionVar] = []

            for item in iterable:
                parsed_version = _coerce_version(item)

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return iter(found_prereleases)

            return iter(filtered)
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/tags.py
ADDED
|
@@ -0,0 +1,546 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import logging
|
| 6 |
+
import platform
|
| 7 |
+
import subprocess
|
| 8 |
+
import sys
|
| 9 |
+
import sysconfig
|
| 10 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 11 |
+
from typing import (
|
| 12 |
+
Dict,
|
| 13 |
+
FrozenSet,
|
| 14 |
+
Iterable,
|
| 15 |
+
Iterator,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Sequence,
|
| 19 |
+
Tuple,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from . import _manylinux, _musllinux
|
| 25 |
+
|
| 26 |
+
# Module-level logger used to report tag-related configuration issues.
logger = logging.getLogger(__name__)

# A Python version as a sequence of ints, e.g. (3, 11); only the first
# two items are used by the functions below.
PythonVersion = Sequence[int]
# A macOS release as (major, minor), e.g. (10, 15).
MacVersion = Tuple[int, int]

# Maps long interpreter names to the short prefixes used in wheel tags.
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when running on an interpreter whose pointer size is <= 32 bits.
_32_BIT_INTERPRETER = sys.maxsize <= 2**32
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags compare case-insensitively, so normalize on construction.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # The __hash__ of every single element in a Set[Tag] is evaluated each
        # time a set calls its `.disjoint()` method, which may happen hundreds
        # of times while scanning a page of links for packages with tags
        # matching that Set[Tag]. Pre-computing the hash once here gives a
        # significant speedup to those downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # The cached hash is a cheap first-pass reject before comparing
        # the individual fields.
        if self._hash != other._hash:
            return False
        return (self._platform, self._abi, self._interpreter) == (
            other._platform,
            other._abi,
            other._interpreter,
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    interpreters, abis, platforms = tag.split("-")
    # Each dash-separated field may itself be a dot-separated list; the
    # full tag set is the cartesian product of the three expansions.
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    """Look up a sysconfig variable, optionally logging when it is unset."""
    value: Union[int, str, None] = sysconfig.get_config_var(name)
    if value is not None:
        return value
    if warn:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _normalize_string(string: str) -> str:
|
| 123 |
+
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _abi3_applies(python_version: PythonVersion) -> bool:
|
| 127 |
+
"""
|
| 128 |
+
Determine if the Python version supports abi3.
|
| 129 |
+
|
| 130 |
+
PEP 384 was first implemented in Python 3.2.
|
| 131 |
+
"""
|
| 132 |
+
return len(python_version) > 1 and tuple(python_version) >= (3, 2)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Return the CPython ABI tags for *py_version*, most specific first.

    The primary tag has the form ``cp{version}{d}{m}{u}`` where the flag
    letters reflect debug builds, pymalloc (< 3.8) and UCS-4 (< 3.3).
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis: List[str] = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        # pymalloc was a build option before 3.8; default to "m" when the
        # config variable is missing.
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            # Wide-unicode builds before 3.3 carried a "u" flag; infer it
            # from sys.maxunicode when the config variable is missing.
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    # The flagged ABI is always the most specific, so it goes first.
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            # Only a major version given: no version-specific ABIs apply.
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    # Most specific tags first: version-specific ABIs, then abi3, then none.
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        # abi3 wheels built for older minors stay compatible, so walk the
        # minor versions down to 3.2 (where PEP 384 support began).
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def _generic_abi() -> List[str]:
    """
    Return the ABI tag derived from sysconfig's ``EXT_SUFFIX``.

    The platform-related pieces of the suffix are discarded and only the
    ABI-related piece is kept. Examples:

      - linux:   '.cpython-310-x86_64-linux-gnu.so'        -> cp310
      - mac:     '.cpython-310-darwin.so'                  -> cp310
      - win:     '.cp310-win_amd64.pyd'                    -> cp310
      - win:     '.pyd'                                    -> cp37 (via _cpython_abis())
      - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so'        -> pypy38_pp73
      - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'  -> graalpy_38_native
    """
    suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(suffix, str) or suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    pieces = suffix.split(".")
    if len(pieces) < 3:
        # CPython 3.7 and earlier use a bare ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = pieces[1]
    fields = soabi.split("-")
    if soabi.startswith("cpython"):
        # Non-Windows CPython.
        abi = "cp" + fields[1]
    elif soabi.startswith("cp"):
        # Windows CPython.
        abi = fields[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(fields[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(fields[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yield the tags for a generic (non-CPython-specific) interpreter.

    Each tag has the shape ``<interpreter>-<abi>-<platform>``. A "none" ABI
    is appended automatically when the caller did not supply one.
    """
    if not interpreter:
        # Fall back to the running interpreter's short name + version.
        interpreter = interpreter_name() + interpreter_version(warn=warn)
    abi_list = _generic_abi() if abis is None else list(abis)
    platform_list = list(platforms or platform_tags())
    if "none" not in abi_list:
        abi_list.append("none")
    yield from (
        Tag(interpreter, abi, plat) for abi in abi_list for plat in platform_list
    )
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yield generic ``py`` interpreter tags, newest first.

    The exact requested version is yielded first (when a minor version is
    known), then the bare major version, then every older minor release of
    the same major version down to ``.0``.
    """
    has_minor = len(py_version) > 1
    if has_minor:
        yield "py" + _version_nodot(py_version[:2])
    yield "py" + str(py_version[0])
    if has_minor:
        minor = py_version[1] - 1
        while minor >= 0:
            yield "py" + _version_nodot((py_version[0], minor))
            minor -= 1
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yield the tags every pure-Python package compatible with *python_version*
    can carry, from most to least specific:

    - py*-none-<platform>
    - <interpreter>-none-any   (only when *interpreter* is given)
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platform_list = list(platforms or platform_tags())
    for py_tag in _py_interpreter_range(python_version):
        yield from (Tag(py_tag, "none", plat) for plat in platform_list)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    yield from (
        Tag(py_tag, "none", "any") for py_tag in _py_interpreter_range(python_version)
    )
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """
    Map *arch* to the architecture a 32-bit interpreter would report.

    64-bit interpreters keep *arch* unchanged; 32-bit interpreters report
    "ppc" for any PowerPC variant and "i386" for everything else.
    """
    if is_32bit:
        return "ppc" if arch.startswith("ppc") else "i386"
    return arch
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """
    Return the binary-format tags usable for *cpu_arch* on macOS *version*,
    most specific first. An empty list means the combination never shipped.
    """
    formats = [cpu_arch]

    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats += ["intel", "fat64", "fat32"]
    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats += ["intel", "fat32", "fat"]
    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if not ((10, 4) <= version <= (10, 5)):
            return []
        formats += ["fat64"]
    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats += ["fat32", "fat"]

    # Multi-architecture ("universal") containers.
    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")
    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    :param version: Two-item tuple with the macOS version to generate tags
        for; defaults to the current system's version.
    :param arch: CPU architecture to generate tags for; defaults to the
        architecture of the running interpreter.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python reports macOS
            # 10.16 instead of the real version; re-query in a subprocess
            # with SYSTEM_VERSION_COMPAT disabled to get the true version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major
        # version number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

        # Mac OS 11 on x86_64 is compatible with binaries from previous
        # releases. Arm64 support was introduced in 11.0, so no Arm binaries
        # from previous releases exist. However, a "universal2" binary may
        # carry a macOS version earlier than 11.0 when its x86_64 half
        # supports that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """
    Yield Linux platform tags: manylinux tags first, then musllinux tags,
    then the plain normalized ``linux_<arch>`` tag.
    """
    linux_tag = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel reports the 64-bit
        # platform; downgrade it to the matching 32-bit tag.
        downgrades = {
            "linux_x86_64": "linux_i686",
            "linux_aarch64": "linux_armv7l",
        }
        linux_tag = downgrades.get(linux_tag, linux_tag)
    _, arch = linux_tag.split("_", 1)
    yield from _manylinux.platform_tags(linux_tag, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux_tag
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
def _generic_platforms() -> Iterator[str]:
    """Yield the normalized sysconfig platform tag (non-macOS, non-Linux systems)."""
    yield _normalize_string(sysconfig.get_platform())
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation, dispatching on the
    operating system reported by :func:`platform.system`.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
def interpreter_name() -> str:
    """
    Return the name of the running interpreter.

    Implementations with a reserved two-letter abbreviation (e.g. "cp" for
    CPython) have that short form returned instead of the full name.
    """
    full_name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(full_name) or full_name
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Return the version of the running interpreter, e.g. ``"311"``.

    Prefers sysconfig's ``py_version_nodot`` and falls back to
    ``sys.version_info`` when that config value is missing or empty.
    """
    configured = _get_config_var("py_version_nodot", warn=warn)
    if configured:
        return str(configured)
    return _version_nodot(sys.version_info[:2])
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
def _version_nodot(version: PythonVersion) -> str:
    """Join the version components with no separator, e.g. ``(3, 11)`` -> ``"311"``."""
    return "".join(str(component) for component in version)
|
| 524 |
+
|
| 525 |
+
|
| 526 |
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Yield the tag triples supported by the running interpreter, ordered
    from most to least specific.
    """
    interp_name = interpreter_name()
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
        interp: Optional[str] = "cp" + interpreter_version(warn=warn)
    else:
        yield from generic_tags()
        interp = "pp3" if interp_name == "pp" else None
    yield from compatible_tags(interpreter=interp)
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/utils.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import re
|
| 6 |
+
from typing import FrozenSet, NewType, Tuple, Union, cast
|
| 7 |
+
|
| 8 |
+
from .tags import Tag, parse_tag
|
| 9 |
+
from .version import InvalidVersion, Version
|
| 10 |
+
|
| 11 |
+
# Build tag from a wheel filename: () when absent, otherwise
# (build_number, build_suffix) per PEP 427.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# A project name canonicalized per PEP 503 (lowercase, "-"-separated).
NormalizedName = NewType("NormalizedName", str)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class InvalidWheelFilename(ValueError):
    """
    Raised for a wheel filename that does not follow PEP 427.
    """
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class InvalidSdistFilename(ValueError):
    """
    Raised for an sdist filename that does not follow the conventions in the
    packaging user guide.
    """
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# PEP 503: runs of "-", "_" and "." collapse to a single "-" when
# canonicalizing a project name.
_canonicalize_regex = re.compile(r"[-_.]+")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def canonicalize_name(name: str) -> NormalizedName:
    """
    Normalize *name* per PEP 503: collapse runs of ``-``, ``_`` and ``.``
    into a single ``-`` and lowercase the result.
    """
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    Render *version* like ``Version.__str__``, except that by default any
    trailing ``.0`` components are stripped from the release segment.

    Strings that are not valid PEP 440 versions are returned unchanged.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    pieces = []

    # Epoch
    if parsed.epoch != 0:
        pieces.append(f"{parsed.epoch}!")

    # Release segment
    release = ".".join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release = re.sub(r"(\.0)+$", "", release)
    pieces.append(release)

    # Pre-release
    if parsed.pre is not None:
        pieces.append("".join(str(x) for x in parsed.pre))

    # Post-release
    if parsed.post is not None:
        pieces.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        pieces.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        pieces.append(f"+{parsed.local}")

    return "".join(pieces)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """
    Split a wheel *filename* into ``(name, version, build_tag, tags)``.

    :raises InvalidWheelFilename: if *filename* violates PEP 427.
    """
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)
    version = Version(parts[1])
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """
    Split an sdist *filename* into ``(name, version)``.

    :raises InvalidSdistFilename: if the extension is not ``.tar.gz`` or
        ``.zip``, or the stem has no name/version separator.
    """
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)
    version = Version(version_part)
    return (name, version)
|
.venv/Lib/site-packages/pkg_resources/_vendor/packaging/version.py
ADDED
|
@@ -0,0 +1,564 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from packaging.version import parse, Version
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import collections
|
| 11 |
+
import itertools
|
| 12 |
+
import re
|
| 13 |
+
from typing import Any, Callable, Optional, SupportsInt, Tuple, Union
|
| 14 |
+
|
| 15 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
| 16 |
+
|
| 17 |
+
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]

# Type aliases describing the components of a parsed version's sort key.
# Infinity/NegativeInfinity act as sentinels so that missing pre/post/dev/
# local segments order correctly against present ones.
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
SubLocalType = Union[InfiniteTypes, int, str]
LocalType = Union[
    NegativeInfinityType,
    Tuple[
        Union[
            SubLocalType,
            Tuple[SubLocalType, str],
            Tuple[NegativeInfinityType, SubLocalType],
        ],
        ...,
    ],
]
# The full comparison key: (epoch, release, pre, post, dev, local).
CmpKey = Tuple[
    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]

# Raw parsed fields of a version string, before being folded into a CmpKey.
_Version = collections.namedtuple(
    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def parse(version: str) -> "Version":
    """Parse *version* into a :class:`Version` object.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    return Version(version)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class InvalidVersion(ValueError):
    """Raised when a string cannot be parsed as a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class _BaseVersion:
|
| 66 |
+
_key: Tuple[Any, ...]
|
| 67 |
+
|
| 68 |
+
def __hash__(self) -> int:
|
| 69 |
+
return hash(self._key)
|
| 70 |
+
|
| 71 |
+
# Please keep the duplicated `isinstance` check
|
| 72 |
+
# in the six comparisons hereunder
|
| 73 |
+
# unless you find a way to avoid adding overhead function calls.
|
| 74 |
+
def __lt__(self, other: "_BaseVersion") -> bool:
|
| 75 |
+
if not isinstance(other, _BaseVersion):
|
| 76 |
+
return NotImplemented
|
| 77 |
+
|
| 78 |
+
return self._key < other._key
|
| 79 |
+
|
| 80 |
+
def __le__(self, other: "_BaseVersion") -> bool:
|
| 81 |
+
if not isinstance(other, _BaseVersion):
|
| 82 |
+
return NotImplemented
|
| 83 |
+
|
| 84 |
+
return self._key <= other._key
|
| 85 |
+
|
| 86 |
+
def __eq__(self, other: object) -> bool:
|
| 87 |
+
if not isinstance(other, _BaseVersion):
|
| 88 |
+
return NotImplemented
|
| 89 |
+
|
| 90 |
+
return self._key == other._key
|
| 91 |
+
|
| 92 |
+
def __ge__(self, other: "_BaseVersion") -> bool:
|
| 93 |
+
if not isinstance(other, _BaseVersion):
|
| 94 |
+
return NotImplemented
|
| 95 |
+
|
| 96 |
+
return self._key >= other._key
|
| 97 |
+
|
| 98 |
+
def __gt__(self, other: "_BaseVersion") -> bool:
|
| 99 |
+
if not isinstance(other, _BaseVersion):
|
| 100 |
+
return NotImplemented
|
| 101 |
+
|
| 102 |
+
return self._key > other._key
|
| 103 |
+
|
| 104 |
+
def __ne__(self, other: object) -> bool:
|
| 105 |
+
if not isinstance(other, _BaseVersion):
|
| 106 |
+
return NotImplemented
|
| 107 |
+
|
| 108 |
+
return self._key != other._key
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class Version(_BaseVersion):
|
| 158 |
+
"""This class abstracts handling of a project's versions.
|
| 159 |
+
|
| 160 |
+
A :class:`Version` instance is comparison aware and can be compared and
|
| 161 |
+
sorted using the standard Python interfaces.
|
| 162 |
+
|
| 163 |
+
>>> v1 = Version("1.0a5")
|
| 164 |
+
>>> v2 = Version("1.0")
|
| 165 |
+
>>> v1
|
| 166 |
+
<Version('1.0a5')>
|
| 167 |
+
>>> v2
|
| 168 |
+
<Version('1.0')>
|
| 169 |
+
>>> v1 < v2
|
| 170 |
+
True
|
| 171 |
+
>>> v1 == v2
|
| 172 |
+
False
|
| 173 |
+
>>> v1 > v2
|
| 174 |
+
False
|
| 175 |
+
>>> v1 >= v2
|
| 176 |
+
False
|
| 177 |
+
>>> v1 <= v2
|
| 178 |
+
True
|
| 179 |
+
"""
|
| 180 |
+
|
| 181 |
+
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
| 182 |
+
_key: CmpKey
|
| 183 |
+
|
| 184 |
+
def __init__(self, version: str) -> None:
|
| 185 |
+
"""Initialize a Version object.
|
| 186 |
+
|
| 187 |
+
:param version:
|
| 188 |
+
The string representation of a version which will be parsed and normalized
|
| 189 |
+
before use.
|
| 190 |
+
:raises InvalidVersion:
|
| 191 |
+
If the ``version`` does not conform to PEP 440 in any way then this
|
| 192 |
+
exception will be raised.
|
| 193 |
+
"""
|
| 194 |
+
|
| 195 |
+
# Validate the version and parse it into pieces
|
| 196 |
+
match = self._regex.search(version)
|
| 197 |
+
if not match:
|
| 198 |
+
raise InvalidVersion(f"Invalid version: '{version}'")
|
| 199 |
+
|
| 200 |
+
# Store the parsed out pieces of the version
|
| 201 |
+
self._version = _Version(
|
| 202 |
+
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
|
| 203 |
+
release=tuple(int(i) for i in match.group("release").split(".")),
|
| 204 |
+
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
|
| 205 |
+
post=_parse_letter_version(
|
| 206 |
+
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
|
| 207 |
+
),
|
| 208 |
+
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
|
| 209 |
+
local=_parse_local_version(match.group("local")),
|
| 210 |
+
)
|
| 211 |
+
|
| 212 |
+
# Generate a key which will be used for sorting
|
| 213 |
+
self._key = _cmpkey(
|
| 214 |
+
self._version.epoch,
|
| 215 |
+
self._version.release,
|
| 216 |
+
self._version.pre,
|
| 217 |
+
self._version.post,
|
| 218 |
+
self._version.dev,
|
| 219 |
+
self._version.local,
|
| 220 |
+
)
|
| 221 |
+
|
| 222 |
+
def __repr__(self) -> str:
|
| 223 |
+
"""A representation of the Version that shows all internal state.
|
| 224 |
+
|
| 225 |
+
>>> Version('1.0.0')
|
| 226 |
+
<Version('1.0.0')>
|
| 227 |
+
"""
|
| 228 |
+
return f"<Version('{self}')>"
|
| 229 |
+
|
| 230 |
+
def __str__(self) -> str:
|
| 231 |
+
"""A string representation of the version that can be rounded-tripped.
|
| 232 |
+
|
| 233 |
+
>>> str(Version("1.0a5"))
|
| 234 |
+
'1.0a5'
|
| 235 |
+
"""
|
| 236 |
+
parts = []
|
| 237 |
+
|
| 238 |
+
# Epoch
|
| 239 |
+
if self.epoch != 0:
|
| 240 |
+
parts.append(f"{self.epoch}!")
|
| 241 |
+
|
| 242 |
+
# Release segment
|
| 243 |
+
parts.append(".".join(str(x) for x in self.release))
|
| 244 |
+
|
| 245 |
+
# Pre-release
|
| 246 |
+
if self.pre is not None:
|
| 247 |
+
parts.append("".join(str(x) for x in self.pre))
|
| 248 |
+
|
| 249 |
+
# Post-release
|
| 250 |
+
if self.post is not None:
|
| 251 |
+
parts.append(f".post{self.post}")
|
| 252 |
+
|
| 253 |
+
# Development release
|
| 254 |
+
if self.dev is not None:
|
| 255 |
+
parts.append(f".dev{self.dev}")
|
| 256 |
+
|
| 257 |
+
# Local version segment
|
| 258 |
+
if self.local is not None:
|
| 259 |
+
parts.append(f"+{self.local}")
|
| 260 |
+
|
| 261 |
+
return "".join(parts)
|
| 262 |
+
|
| 263 |
+
@property
|
| 264 |
+
def epoch(self) -> int:
|
| 265 |
+
"""The epoch of the version.
|
| 266 |
+
|
| 267 |
+
>>> Version("2.0.0").epoch
|
| 268 |
+
0
|
| 269 |
+
>>> Version("1!2.0.0").epoch
|
| 270 |
+
1
|
| 271 |
+
"""
|
| 272 |
+
_epoch: int = self._version.epoch
|
| 273 |
+
return _epoch
|
| 274 |
+
|
| 275 |
+
@property
def release(self) -> Tuple[int, ...]:
    """The numeric components of the "release" segment.

    Trailing zeroes are kept; the epoch and any pre-release / development /
    post-release suffixes are not part of this tuple.

    >>> Version("1.2.3").release
    (1, 2, 3)
    >>> Version("1!2.0.0.post0").release
    (2, 0, 0)
    """
    segment: Tuple[int, ...] = self._version.release
    return segment
|
| 291 |
+
|
| 292 |
+
@property
def pre(self) -> Optional[Tuple[str, int]]:
    """The pre-release segment as a ``(phase, number)`` pair, or ``None``.

    >>> print(Version("1.2.3").pre)
    None
    >>> Version("1.2.3rc1").pre
    ('rc', 1)
    """
    segment: Optional[Tuple[str, int]] = self._version.pre
    return segment
|
| 307 |
+
|
| 308 |
+
@property
def post(self) -> Optional[int]:
    """The post-release number of the version, or ``None`` when absent.

    >>> print(Version("1.2.3").post)
    None
    >>> Version("1.2.3.post1").post
    1
    """
    segment = self._version.post
    if segment:
        return segment[1]
    return None
|
| 318 |
+
|
| 319 |
+
@property
def dev(self) -> Optional[int]:
    """The development-release number of the version, or ``None`` when absent.

    >>> print(Version("1.2.3").dev)
    None
    >>> Version("1.2.3.dev1").dev
    1
    """
    segment = self._version.dev
    if segment:
        return segment[1]
    return None
|
| 329 |
+
|
| 330 |
+
@property
def local(self) -> Optional[str]:
    """The local version segment rendered as a dotted string, or ``None``.

    >>> print(Version("1.2.3").local)
    None
    >>> Version("1.2.3+abc").local
    'abc'
    """
    segment = self._version.local
    if not segment:
        return None
    return ".".join(str(piece) for piece in segment)
|
| 343 |
+
|
| 344 |
+
@property
def public(self) -> str:
    """The public portion of the version: everything before any ``+local``.

    >>> Version("1.2.3").public
    '1.2.3'
    >>> Version("1.2.3+abc.dev1").public
    '1.2.3'
    """
    text, _, _ = str(self).partition("+")
    return text
|
| 356 |
+
|
| 357 |
+
@property
def base_version(self) -> str:
    """The "base version": epoch plus release segment, with no pre, post,
    dev, or local release markers.

    >>> Version("1.2.3+abc").base_version
    '1.2.3'
    >>> Version("1!1.2.3+abc.dev1").base_version
    '1!1.2.3'
    """
    # Epoch prefix is rendered only when non-zero.
    prefix = "{}!".format(self.epoch) if self.epoch != 0 else ""
    return prefix + ".".join(str(component) for component in self.release)
|
| 381 |
+
|
| 382 |
+
@property
def is_prerelease(self) -> bool:
    """Whether this version carries a pre-release or development segment.

    >>> Version("1.2.3").is_prerelease
    False
    >>> Version("1.2.3a1").is_prerelease
    True
    >>> Version("1.2.3dev1").is_prerelease
    True
    """
    # True when either the dev or the pre segment is present.
    return not (self.dev is None and self.pre is None)
|
| 398 |
+
|
| 399 |
+
@property
def is_postrelease(self) -> bool:
    """Whether this version carries a post-release segment.

    >>> Version("1.2.3").is_postrelease
    False
    >>> Version("1.2.3.post1").is_postrelease
    True
    """
    return self.post is not None
|
| 409 |
+
|
| 410 |
+
@property
def is_devrelease(self) -> bool:
    """Whether this version carries a development-release segment.

    >>> Version("1.2.3").is_devrelease
    False
    >>> Version("1.2.3.dev1").is_devrelease
    True
    """
    return self.dev is not None
|
| 420 |
+
|
| 421 |
+
@property
def major(self) -> int:
    """The first item of :attr:`release`, or ``0`` if unavailable.

    >>> Version("1.2.3").major
    1
    """
    components = self.release
    return components[0] if components else 0
|
| 429 |
+
|
| 430 |
+
@property
def minor(self) -> int:
    """The second item of :attr:`release`, or ``0`` if unavailable.

    >>> Version("1.2.3").minor
    2
    >>> Version("1").minor
    0
    """
    components = self.release
    return components[1] if len(components) > 1 else 0
|
| 440 |
+
|
| 441 |
+
@property
def micro(self) -> int:
    """The third item of :attr:`release`, or ``0`` if unavailable.

    >>> Version("1.2.3").micro
    3
    >>> Version("1").micro
    0
    """
    components = self.release
    return components[2] if len(components) > 2 else 0
|
| 451 |
+
|
| 452 |
+
|
| 453 |
+
def _parse_letter_version(
|
| 454 |
+
letter: str, number: Union[str, bytes, SupportsInt]
|
| 455 |
+
) -> Optional[Tuple[str, int]]:
|
| 456 |
+
|
| 457 |
+
if letter:
|
| 458 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
| 459 |
+
# not a numeral associated with it.
|
| 460 |
+
if number is None:
|
| 461 |
+
number = 0
|
| 462 |
+
|
| 463 |
+
# We normalize any letters to their lower case form
|
| 464 |
+
letter = letter.lower()
|
| 465 |
+
|
| 466 |
+
# We consider some words to be alternate spellings of other words and
|
| 467 |
+
# in those cases we want to normalize the spellings to our preferred
|
| 468 |
+
# spelling.
|
| 469 |
+
if letter == "alpha":
|
| 470 |
+
letter = "a"
|
| 471 |
+
elif letter == "beta":
|
| 472 |
+
letter = "b"
|
| 473 |
+
elif letter in ["c", "pre", "preview"]:
|
| 474 |
+
letter = "rc"
|
| 475 |
+
elif letter in ["rev", "r"]:
|
| 476 |
+
letter = "post"
|
| 477 |
+
|
| 478 |
+
return letter, int(number)
|
| 479 |
+
if not letter and number:
|
| 480 |
+
# We assume if we are given a number, but we are not given a letter
|
| 481 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
| 482 |
+
letter = "post"
|
| 483 |
+
|
| 484 |
+
return letter, int(number)
|
| 485 |
+
|
| 486 |
+
return None
|
| 487 |
+
|
| 488 |
+
|
| 489 |
+
_local_version_separators = re.compile(r"[\._-]")
|
| 490 |
+
|
| 491 |
+
|
| 492 |
+
def _parse_local_version(local: str) -> Optional[LocalType]:
|
| 493 |
+
"""
|
| 494 |
+
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
|
| 495 |
+
"""
|
| 496 |
+
if local is not None:
|
| 497 |
+
return tuple(
|
| 498 |
+
part.lower() if not part.isdigit() else int(part)
|
| 499 |
+
for part in _local_version_separators.split(local)
|
| 500 |
+
)
|
| 501 |
+
return None
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[Tuple[SubLocalType]],
) -> CmpKey:
    """Build the tuple used for sorting/comparing parsed versions per PEP 440.

    Absent segments are replaced with +/- infinity sentinels so that tuple
    comparison yields the PEP 440 ordering.
    """

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: PrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: PrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: PrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: LocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|
.venv/Lib/site-packages/pkg_resources/_vendor/platformdirs/android.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
import sys
|
| 6 |
+
from functools import lru_cache
|
| 7 |
+
from typing import cast
|
| 8 |
+
|
| 9 |
+
from .api import PlatformDirsABC
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class Android(PlatformDirsABC):
    """
    Android platform directories, following the guidance
    `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
    `appname <platformdirs.api.PlatformDirsABC.appname>` and
    `version <platformdirs.api.PlatformDirsABC.version>`.
    """

    @property
    def user_data_dir(self) -> str:
        """:return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
        base = cast(str, _android_folder())
        return self._append_app_name_and_version(base, "files")

    @property
    def site_data_dir(self) -> str:
        """:return: data directory shared by users, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_config_dir(self) -> str:
        """
        :return: config directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
        """
        base = cast(str, _android_folder())
        return self._append_app_name_and_version(base, "shared_prefs")

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `user_config_dir`"""
        return self.user_config_dir

    @property
    def user_cache_dir(self) -> str:
        """:return: cache directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>``"""
        base = cast(str, _android_folder())
        return self._append_app_name_and_version(base, "cache")

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
        """
        base = self.user_cache_dir
        return os.path.join(base, "log") if self.opinion else base

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``
        """
        return _android_documents_folder()

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
          e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
        """
        base = self.user_cache_dir
        return os.path.join(base, "tmp") if self.opinion else base
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
@lru_cache(maxsize=1)
|
| 82 |
+
def _android_folder() -> str | None:
|
| 83 |
+
""":return: base folder for the Android OS or None if cannot be found"""
|
| 84 |
+
try:
|
| 85 |
+
# First try to get path to android app via pyjnius
|
| 86 |
+
from jnius import autoclass
|
| 87 |
+
|
| 88 |
+
Context = autoclass("android.content.Context") # noqa: N806
|
| 89 |
+
result: str | None = Context.getFilesDir().getParentFile().getAbsolutePath()
|
| 90 |
+
except Exception:
|
| 91 |
+
# if fails find an android folder looking path on the sys.path
|
| 92 |
+
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
|
| 93 |
+
for path in sys.path:
|
| 94 |
+
if pattern.match(path):
|
| 95 |
+
result = path.split("/files")[0]
|
| 96 |
+
break
|
| 97 |
+
else:
|
| 98 |
+
result = None
|
| 99 |
+
return result
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
@lru_cache(maxsize=1)
|
| 103 |
+
def _android_documents_folder() -> str:
|
| 104 |
+
""":return: documents folder for the Android OS"""
|
| 105 |
+
# Get directories with pyjnius
|
| 106 |
+
try:
|
| 107 |
+
from jnius import autoclass
|
| 108 |
+
|
| 109 |
+
Context = autoclass("android.content.Context") # noqa: N806
|
| 110 |
+
Environment = autoclass("android.os.Environment") # noqa: N806
|
| 111 |
+
documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
|
| 112 |
+
except Exception:
|
| 113 |
+
documents_dir = "/storage/emulated/0/Documents"
|
| 114 |
+
|
| 115 |
+
return documents_dir
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
# Names exported when this module is star-imported.
__all__ = [
    "Android",
]
|
.venv/Lib/site-packages/pkg_resources/_vendor/typing_extensions.py
ADDED
|
@@ -0,0 +1,2209 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import collections
|
| 3 |
+
import collections.abc
|
| 4 |
+
import functools
|
| 5 |
+
import operator
|
| 6 |
+
import sys
|
| 7 |
+
import types as _types
|
| 8 |
+
import typing
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# Public API of this typing backport module; kept in the same groups as
# the stdlib ``typing`` module's own __all__.
__all__ = [
    # Super-special typing primitives.
    'Any',
    'ClassVar',
    'Concatenate',
    'Final',
    'LiteralString',
    'ParamSpec',
    'ParamSpecArgs',
    'ParamSpecKwargs',
    'Self',
    'Type',
    'TypeVar',
    'TypeVarTuple',
    'Unpack',

    # ABCs (from collections.abc).
    'Awaitable',
    'AsyncIterator',
    'AsyncIterable',
    'Coroutine',
    'AsyncGenerator',
    'AsyncContextManager',
    'ChainMap',

    # Concrete collection types.
    'ContextManager',
    'Counter',
    'Deque',
    'DefaultDict',
    'NamedTuple',
    'OrderedDict',
    'TypedDict',

    # Structural checks, a.k.a. protocols.
    'SupportsIndex',

    # One-off things.
    'Annotated',
    'assert_never',
    'assert_type',
    'clear_overloads',
    'dataclass_transform',
    'get_overloads',
    'final',
    'get_args',
    'get_origin',
    'get_type_hints',
    'IntVar',
    'is_typeddict',
    'Literal',
    'NewType',
    'overload',
    'override',
    'Protocol',
    'reveal_type',
    'runtime',
    'runtime_checkable',
    'Text',
    'TypeAlias',
    'TypeGuard',
    'TYPE_CHECKING',
    'Never',
    'NoReturn',
    'Required',
    'NotRequired',
]
|
| 78 |
+
|
| 79 |
+
# for backward compatibility
PEP_560 = True  # PEP 560 support is unconditional on all supported Pythons.
GenericMeta = type  # Legacy typing.GenericMeta alias; plain ``type`` now.

# The functions below are modified copies of typing internal helpers.
# They are needed by _ProtocolMeta and they provide support for PEP 646.

# Sentinel distinguishing "argument omitted" from any explicit value.
_marker = object()
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def _check_generic(cls, parameters, elen=_marker):
|
| 90 |
+
"""Check correct count for parameters of a generic cls (internal helper).
|
| 91 |
+
This gives a nice error message in case of count mismatch.
|
| 92 |
+
"""
|
| 93 |
+
if not elen:
|
| 94 |
+
raise TypeError(f"{cls} is not a generic class")
|
| 95 |
+
if elen is _marker:
|
| 96 |
+
if not hasattr(cls, "__parameters__") or not cls.__parameters__:
|
| 97 |
+
raise TypeError(f"{cls} is not a generic class")
|
| 98 |
+
elen = len(cls.__parameters__)
|
| 99 |
+
alen = len(parameters)
|
| 100 |
+
if alen != elen:
|
| 101 |
+
if hasattr(cls, "__parameters__"):
|
| 102 |
+
parameters = [p for p in cls.__parameters__ if not _is_unpack(p)]
|
| 103 |
+
num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters)
|
| 104 |
+
if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples):
|
| 105 |
+
return
|
| 106 |
+
raise TypeError(f"Too {'many' if alen > elen else 'few'} parameters for {cls};"
|
| 107 |
+
f" actual {alen}, expected {elen}")
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
# _should_collect_from_parameters(t): True if *t* is a kind of generic alias
# whose __parameters__ should be scanned for type variables.
if sys.version_info >= (3, 10):
    def _should_collect_from_parameters(t):
        # 3.10+: X | Y unions (types.UnionType) can also carry type variables.
        return isinstance(
            t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType)
        )
elif sys.version_info >= (3, 9):
    def _should_collect_from_parameters(t):
        # 3.9: builtin generics such as list[int] are types.GenericAlias.
        return isinstance(t, (typing._GenericAlias, _types.GenericAlias))
else:
    def _should_collect_from_parameters(t):
        # <=3.8: skip "special" bare aliases (List, Dict, ...) which carry no
        # parameters of their own.
        return isinstance(t, typing._GenericAlias) and not t._special
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def _collect_type_vars(types, typevar_types=None):
    """Collect all type variable contained in types in order of
    first appearance (lexicographic order). For example::

        _collect_type_vars((T, List[S, T])) == (T, S)
    """
    if typevar_types is None:
        typevar_types = typing.TypeVar
    found = []
    for item in types:
        # A bare type variable is recorded once, in order of first appearance;
        # Unpack[...] markers are handled elsewhere and skipped here.
        is_new_tvar = (
            isinstance(item, typevar_types)
            and item not in found
            and not _is_unpack(item)
        )
        if is_new_tvar:
            found.append(item)
        # Generic aliases contribute the type variables they were built with.
        if _should_collect_from_parameters(item):
            for param in item.__parameters__:
                if param not in found:
                    found.append(param)
    return tuple(found)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
# Plain re-export: typing.NoReturn exists on all supported versions.
NoReturn = typing.NoReturn

# Some unconstrained type variables.  These are used by the container types.
# (These are not for export.)
T = typing.TypeVar('T')  # Any type.
KT = typing.TypeVar('KT')  # Key type.
VT = typing.TypeVar('VT')  # Value type.
T_co = typing.TypeVar('T_co', covariant=True)  # Any type covariant containers.
T_contra = typing.TypeVar('T_contra', contravariant=True)  # Ditto contravariant.
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
if sys.version_info >= (3, 11):
    # 3.11+ typing.Any already supports subclassing; just re-export it.
    from typing import Any
else:

    class _AnyMeta(type):
        def __instancecheck__(self, obj):
            # Only the Any class object itself rejects isinstance();
            # subclasses of Any behave like normal classes.
            if self is Any:
                raise TypeError("typing_extensions.Any cannot be used with isinstance()")
            return super().__instancecheck__(obj)

        def __repr__(self):
            if self is Any:
                return "typing_extensions.Any"
            return super().__repr__()

    class Any(metaclass=_AnyMeta):
        """Special type indicating an unconstrained type.
        - Any is compatible with every type.
        - Any assumed to have all methods.
        - All values assumed to be instances of Any.
        Note that all the above statements are true from the point of view of
        static type checkers. At runtime, Any should not be used with instance
        checks.
        """
        def __new__(cls, *args, **kwargs):
            # Subclassing Any is allowed (mirrors 3.11 typing.Any), but Any
            # itself cannot be instantiated.
            if cls is Any:
                raise TypeError("Any cannot be instantiated")
            return super().__new__(cls, *args, **kwargs)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
# Plain re-export: typing.ClassVar exists on all supported versions.
ClassVar = typing.ClassVar

# On older versions of typing there is an internal class named "Final".
# 3.8+
if hasattr(typing, 'Final') and sys.version_info[:2] >= (3, 7):
    Final = typing.Final
# 3.7
else:
    class _FinalForm(typing._SpecialForm, _root=True):
        # Minimal special form: accepts exactly one type argument and
        # produces a subscripted alias, mirroring typing.Final on 3.8+.

        def __repr__(self):
            return 'typing_extensions.' + self._name

        def __getitem__(self, parameters):
            item = typing._type_check(parameters,
                                      f'{self._name} accepts only a single type.')
            return typing._GenericAlias(self, (item,))

    Final = _FinalForm('Final',
                       doc="""A special typing construct to indicate that a name
                       cannot be re-assigned or overridden in a subclass.
                       For example:

                           MAX_SIZE: Final = 9000
                           MAX_SIZE += 1  # Error reported by type checker

                           class Connection:
                               TIMEOUT: Final[int] = 10
                           class FastConnector(Connection):
                               TIMEOUT = 1  # Error reported by type checker

                       There is no runtime checking of these properties.""")
|
| 217 |
+
|
| 218 |
+
if sys.version_info >= (3, 11):
    final = typing.final
else:
    # @final exists in 3.8+, but we backport it for all versions
    # before 3.11 to keep support for the __final__ attribute.
    # See https://bugs.python.org/issue46342
    def final(f):
        """This decorator can be used to indicate to type checkers that
        the decorated method cannot be overridden, and decorated class
        cannot be subclassed. For example:

            class Base:
                @final
                def done(self) -> None:
                    ...
            class Sub(Base):
                def done(self) -> None:  # Error reported by type checker
                    ...
            @final
            class Leaf:
                ...
            class Other(Leaf):  # Error reported by type checker
                ...

        There is no runtime checking of these properties. The decorator
        sets the ``__final__`` attribute to ``True`` on the decorated object
        to allow runtime introspection.
        """
        try:
            f.__final__ = True
        except (AttributeError, TypeError):
            # Skip the attribute silently if it is not writable.
            # AttributeError happens if the object has __slots__ or a
            # read-only property, TypeError if it's a builtin class.
            pass
        return f
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
def IntVar(name):
    """Legacy helper: return a fresh, unconstrained ``TypeVar`` called *name*."""
    type_var = typing.TypeVar(name)
    return type_var
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
# 3.8+:
if hasattr(typing, 'Literal'):
    Literal = typing.Literal
# 3.7:
else:
    class _LiteralForm(typing._SpecialForm, _root=True):
        # Special form that accepts arbitrary values (not just types) as
        # subscript arguments; no runtime validation is performed.

        def __repr__(self):
            return 'typing_extensions.' + self._name

        def __getitem__(self, parameters):
            return typing._GenericAlias(self, parameters)

    Literal = _LiteralForm('Literal',
                           doc="""A type that can be used to indicate to type checkers
                           that the corresponding value has a value literally equivalent
                           to the provided parameter. For example:

                               var: Literal[4] = 4

                           The type checker understands that 'var' is literally equal to
                           the value 4 and no other value.

                           Literal[...] cannot be subclassed. There is no runtime
                           checking verifying that the parameter is actually a value
                           instead of a type.""")
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
# Shared placeholder returned by every @overload-decorated stub; re-exported
# from typing so both overload implementations below behave identically.
_overload_dummy = typing._overload_dummy  # noqa
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
if hasattr(typing, "get_overloads"):  # 3.11+
    overload = typing.overload
    get_overloads = typing.get_overloads
    clear_overloads = typing.clear_overloads
else:
    # {module: {qualname: {firstlineno: func}}}
    # Keying by first line number lets re-execution of a module replace its
    # overloads instead of accumulating duplicates.
    _overload_registry = collections.defaultdict(
        functools.partial(collections.defaultdict, dict)
    )

    def overload(func):
        """Decorator for overloaded functions/methods.

        In a stub file, place two or more stub definitions for the same
        function in a row, each decorated with @overload. For example:

        @overload
        def utf8(value: None) -> None: ...
        @overload
        def utf8(value: bytes) -> bytes: ...
        @overload
        def utf8(value: str) -> bytes: ...

        In a non-stub file (i.e. a regular .py file), do the same but
        follow it with an implementation. The implementation should *not*
        be decorated with @overload. For example:

        @overload
        def utf8(value: None) -> None: ...
        @overload
        def utf8(value: bytes) -> bytes: ...
        @overload
        def utf8(value: str) -> bytes: ...
        def utf8(value):
            # implementation goes here

        The overloads for a function can be retrieved at runtime using the
        get_overloads() function.
        """
        # classmethod and staticmethod
        f = getattr(func, "__func__", func)
        try:
            _overload_registry[f.__module__][f.__qualname__][
                f.__code__.co_firstlineno
            ] = func
        except AttributeError:
            # Not a normal function; ignore.
            pass
        return _overload_dummy

    def get_overloads(func):
        """Return all defined overloads for *func* as a sequence."""
        # classmethod and staticmethod
        f = getattr(func, "__func__", func)
        if f.__module__ not in _overload_registry:
            return []
        mod_dict = _overload_registry[f.__module__]
        if f.__qualname__ not in mod_dict:
            return []
        return list(mod_dict[f.__qualname__].values())

    def clear_overloads():
        """Clear all overloads in the registry."""
        _overload_registry.clear()
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
# This is not a real generic class.  Don't use outside annotations.
Type = typing.Type

# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.


Awaitable = typing.Awaitable
Coroutine = typing.Coroutine
AsyncIterable = typing.AsyncIterable
AsyncIterator = typing.AsyncIterator
Deque = typing.Deque
ContextManager = typing.ContextManager
AsyncContextManager = typing.AsyncContextManager
DefaultDict = typing.DefaultDict

# 3.7.2+
if hasattr(typing, 'OrderedDict'):
    OrderedDict = typing.OrderedDict
# 3.7.0-3.7.2
else:
    # typing.OrderedDict was only added in 3.7.2; build the alias by hand.
    OrderedDict = typing._alias(collections.OrderedDict, (KT, VT))

Counter = typing.Counter
ChainMap = typing.ChainMap
AsyncGenerator = typing.AsyncGenerator
NewType = typing.NewType
Text = typing.Text
TYPE_CHECKING = typing.TYPE_CHECKING
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
# collections.abc classes that protocol classes are allowed to inherit from
# even though those bases are not protocols themselves.
_PROTO_WHITELIST = ['Callable', 'Awaitable',
                    'Iterable', 'Iterator', 'AsyncIterable', 'AsyncIterator',
                    'Hashable', 'Sized', 'Container', 'Collection', 'Reversible',
                    'ContextManager', 'AsyncContextManager']
|
| 392 |
+
|
| 393 |
+
|
| 394 |
+
def _get_protocol_attrs(cls):
|
| 395 |
+
attrs = set()
|
| 396 |
+
for base in cls.__mro__[:-1]: # without object
|
| 397 |
+
if base.__name__ in ('Protocol', 'Generic'):
|
| 398 |
+
continue
|
| 399 |
+
annotations = getattr(base, '__annotations__', {})
|
| 400 |
+
for attr in list(base.__dict__.keys()) + list(annotations.keys()):
|
| 401 |
+
if (not attr.startswith('_abc_') and attr not in (
|
| 402 |
+
'__abstractmethods__', '__annotations__', '__weakref__',
|
| 403 |
+
'_is_protocol', '_is_runtime_protocol', '__dict__',
|
| 404 |
+
'__args__', '__slots__',
|
| 405 |
+
'__next_in_mro__', '__parameters__', '__origin__',
|
| 406 |
+
'__orig_bases__', '__extra__', '__tree_hash__',
|
| 407 |
+
'__doc__', '__subclasshook__', '__init__', '__new__',
|
| 408 |
+
'__module__', '_MutableMapping__marker', '_gorg')):
|
| 409 |
+
attrs.add(attr)
|
| 410 |
+
return attrs
|
| 411 |
+
|
| 412 |
+
|
| 413 |
+
def _is_callable_members_only(cls):
    """True when every protocol member of *cls* is callable (methods only)."""
    # PEP 544 prohibits using issubclass() with protocols that have
    # non-method members, so callers gate on this predicate.
    for member in _get_protocol_attrs(cls):
        if not callable(getattr(cls, member, None)):
            return False
    return True
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def _maybe_adjust_parameters(cls):
    """Helper function used in Protocol.__init_subclass__ and _TypedDictMeta.__new__.

    The contents of this function are very similar
    to logic found in typing.Generic.__init_subclass__
    on the CPython main branch.
    """
    tvars = []
    if '__orig_bases__' in cls.__dict__:
        tvars = typing._collect_type_vars(cls.__orig_bases__)
        # Look for Generic[T1, ..., Tn] or Protocol[T1, ..., Tn].
        # If found, tvars must be a subset of it.
        # If not found, tvars is it.
        # Also check for and reject plain Generic,
        # and reject multiple Generic[...] and/or Protocol[...].
        gvars = None
        for base in cls.__orig_bases__:
            if (isinstance(base, typing._GenericAlias) and
                    base.__origin__ in (typing.Generic, Protocol)):
                # for error messages
                the_base = base.__origin__.__name__
                if gvars is not None:
                    raise TypeError(
                        "Cannot inherit from Generic[...]"
                        " and/or Protocol[...] multiple types.")
                gvars = base.__parameters__
        if gvars is None:
            # No explicit Generic[...]/Protocol[...] base: the collected
            # tvars (in order of first appearance) become the parameters.
            gvars = tvars
        else:
            # Explicit parameter list: every collected tvar must appear in it.
            tvarset = set(tvars)
            gvarset = set(gvars)
            if not tvarset <= gvarset:
                s_vars = ', '.join(str(t) for t in tvars if t not in gvarset)
                s_args = ', '.join(str(g) for g in gvars)
                raise TypeError(f"Some type variables ({s_vars}) are"
                                f" not listed in {the_base}[{s_args}]")
            tvars = gvars
    cls.__parameters__ = tuple(tvars)
|
| 455 |
+
|
| 456 |
+
|
| 457 |
+
# 3.8+
if hasattr(typing, 'Protocol'):
    Protocol = typing.Protocol
# 3.7
else:

    def _no_init(self, *args, **kwargs):
        # Installed as __init__ on protocol classes so they cannot be
        # instantiated directly; concrete subclasses override it.
        if type(self)._is_protocol:
            raise TypeError('Protocols cannot be instantiated')

    class _ProtocolMeta(abc.ABCMeta):  # noqa: B024
        # This metaclass is a bit unfortunate and exists only because of the lack
        # of __instancehook__.
        def __instancecheck__(cls, instance):
            # We need this method for situations where attributes are
            # assigned in __init__.
            if ((not getattr(cls, '_is_protocol', False) or
                    _is_callable_members_only(cls)) and
                    issubclass(instance.__class__, cls)):
                return True
            if cls._is_protocol:
                # Structural check: each protocol member must exist on the
                # instance; non-method members must also be non-None.
                if all(hasattr(instance, attr) and
                        (not callable(getattr(cls, attr, None)) or
                         getattr(instance, attr) is not None)
                        for attr in _get_protocol_attrs(cls)):
                    return True
            return super().__instancecheck__(instance)

    class Protocol(metaclass=_ProtocolMeta):
        # There is quite a lot of overlapping code with typing.Generic.
        # Unfortunately it is hard to avoid this while these live in two different
        # modules. The duplicated code will be removed when Protocol is moved to typing.
        """Base class for protocol classes. Protocol classes are defined as::

            class Proto(Protocol):
                def meth(self) -> int:
                    ...

        Such classes are primarily used with static type checkers that recognize
        structural subtyping (static duck-typing), for example::

            class C:
                def meth(self) -> int:
                    return 0

            def func(x: Proto) -> int:
                return x.meth()

            func(C())  # Passes static type check

        See PEP 544 for details. Protocol classes decorated with
        @typing_extensions.runtime act as simple-minded runtime protocol that checks
        only the presence of given attributes, ignoring their type signatures.

        Protocol classes can be generic, they are defined as::

            class GenProto(Protocol[T]):
                def meth(self) -> T:
                    ...
        """
        __slots__ = ()
        _is_protocol = True

        def __new__(cls, *args, **kwds):
            if cls is Protocol:
                raise TypeError("Type Protocol cannot be instantiated; "
                                "it can only be used as a base class")
            return super().__new__(cls)

        @typing._tp_cache
        def __class_getitem__(cls, params):
            if not isinstance(params, tuple):
                params = (params,)
            if not params and cls is not typing.Tuple:
                raise TypeError(
                    f"Parameter list to {cls.__qualname__}[...] cannot be empty")
            msg = "Parameters to generic types must be types."
            params = tuple(typing._type_check(p, msg) for p in params)  # noqa
            if cls is Protocol:
                # Generic can only be subscripted with unique type variables.
                if not all(isinstance(p, typing.TypeVar) for p in params):
                    # Locate the first non-TypeVar for the error message.
                    i = 0
                    while isinstance(params[i], typing.TypeVar):
                        i += 1
                    raise TypeError(
                        "Parameters to Protocol[...] must all be type variables."
                        f" Parameter {i + 1} is {params[i]}")
                if len(set(params)) != len(params):
                    raise TypeError(
                        "Parameters to Protocol[...] must all be unique")
            else:
                # Subscripting a regular Generic subclass.
                _check_generic(cls, params, len(cls.__parameters__))
            return typing._GenericAlias(cls, params)

        def __init_subclass__(cls, *args, **kwargs):
            if '__orig_bases__' in cls.__dict__:
                error = typing.Generic in cls.__orig_bases__
            else:
                error = typing.Generic in cls.__bases__
            if error:
                raise TypeError("Cannot inherit from plain Generic")
            _maybe_adjust_parameters(cls)

            # Determine if this is a protocol or a concrete subclass.
            if not cls.__dict__.get('_is_protocol', None):
                cls._is_protocol = any(b is Protocol for b in cls.__bases__)

            # Set (or override) the protocol subclass hook.
            def _proto_hook(other):
                if not cls.__dict__.get('_is_protocol', None):
                    return NotImplemented
                if not getattr(cls, '_is_runtime_protocol', False):
                    # abc/functools probe __subclasshook__ internally; answer
                    # NotImplemented for those callers instead of raising.
                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
                        return NotImplemented
                    raise TypeError("Instance and class checks can only be used with"
                                    " @runtime protocols")
                if not _is_callable_members_only(cls):
                    if sys._getframe(2).f_globals['__name__'] in ['abc', 'functools']:
                        return NotImplemented
                    raise TypeError("Protocols with non-method members"
                                    " don't support issubclass()")
                if not isinstance(other, type):
                    # Same error as for issubclass(1, int)
                    raise TypeError('issubclass() arg 1 must be a class')
                for attr in _get_protocol_attrs(cls):
                    for base in other.__mro__:
                        if attr in base.__dict__:
                            if base.__dict__[attr] is None:
                                return NotImplemented
                            break
                        annotations = getattr(base, '__annotations__', {})
                        if (isinstance(annotations, typing.Mapping) and
                                attr in annotations and
                                isinstance(other, _ProtocolMeta) and
                                other._is_protocol):
                            break
                    else:
                        return NotImplemented
                return True
            if '__subclasshook__' not in cls.__dict__:
                cls.__subclasshook__ = _proto_hook

            # We have nothing more to do for non-protocols.
            if not cls._is_protocol:
                return

            # Check consistency of bases.
            for base in cls.__bases__:
                if not (base in (object, typing.Generic) or
                        base.__module__ == 'collections.abc' and
                        base.__name__ in _PROTO_WHITELIST or
                        isinstance(base, _ProtocolMeta) and base._is_protocol):
                    raise TypeError('Protocols can only inherit from other'
                                    f' protocols, got {repr(base)}')
            cls.__init__ = _no_init
|
| 613 |
+
|
| 614 |
+
|
| 615 |
+
# 3.8+
if hasattr(typing, 'runtime_checkable'):
    runtime_checkable = typing.runtime_checkable
# 3.7
else:
    def runtime_checkable(cls):
        """Mark a protocol class as a runtime protocol, so that it
        can be used with isinstance() and issubclass(). Raise TypeError
        if applied to a non-protocol class.

        This allows a simple-minded structural check very similar to the
        one-offs in collections.abc such as Hashable.
        """
        if not isinstance(cls, _ProtocolMeta) or not cls._is_protocol:
            raise TypeError('@runtime_checkable can be only applied to protocol classes,'
                            f' got {cls!r}')
        cls._is_runtime_protocol = True
        return cls
|
| 633 |
+
|
| 634 |
+
|
| 635 |
+
# Exists for backwards compatibility.
runtime = runtime_checkable
|
| 637 |
+
|
| 638 |
+
|
| 639 |
+
# 3.8+
if hasattr(typing, 'SupportsIndex'):
    SupportsIndex = typing.SupportsIndex
# 3.7
else:
    @runtime_checkable
    class SupportsIndex(Protocol):
        """Protocol for objects convertible to an integer index via __index__."""
        __slots__ = ()

        @abc.abstractmethod
        def __index__(self) -> int:
            pass
|
| 651 |
+
|
| 652 |
+
|
| 653 |
+
if hasattr(typing, "Required"):
    # The standard library TypedDict in Python 3.8 does not store runtime information
    # about which (if any) keys are optional.  See https://bugs.python.org/issue38834
    # The standard library TypedDict in Python 3.9.0/1 does not honour the "total"
    # keyword with old-style TypedDict().  See https://bugs.python.org/issue42059
    # The standard library TypedDict below Python 3.11 does not store runtime
    # information about optional and required keys when using Required or NotRequired.
    # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11.
    TypedDict = typing.TypedDict
    _TypedDictMeta = typing._TypedDictMeta
    is_typeddict = typing.is_typeddict
else:
    def _check_fails(cls, other):
        # TypedDict classes reject isinstance()/issubclass() except when the
        # check originates inside abc/functools/typing machinery.
        try:
            if sys._getframe(1).f_globals['__name__'] not in ['abc',
                                                              'functools',
                                                              'typing']:
                # Typed dicts are only for static structural subtyping.
                raise TypeError('TypedDict does not support instance and class checks')
        except (AttributeError, ValueError):
            pass
        return False

    def _dict_new(*args, **kwargs):
        # __new__ used by TypedDict *subclasses*: instances are plain dicts.
        if not args:
            raise TypeError('TypedDict.__new__(): not enough arguments')
        _, args = args[0], args[1:]  # allow the "cls" keyword be passed
        return dict(*args, **kwargs)

    _dict_new.__text_signature__ = '($cls, _typename, _fields=None, /, **kwargs)'

    def _typeddict_new(*args, total=True, **kwargs):
        # __new__ used when calling TypedDict('Name', {...}) directly;
        # builds and returns a brand-new TypedDict class via _TypedDictMeta.
        if not args:
            raise TypeError('TypedDict.__new__(): not enough arguments')
        _, args = args[0], args[1:]  # allow the "cls" keyword be passed
        if args:
            typename, args = args[0], args[1:]  # allow the "_typename" keyword be passed
        elif '_typename' in kwargs:
            typename = kwargs.pop('_typename')
            import warnings
            warnings.warn("Passing '_typename' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            raise TypeError("TypedDict.__new__() missing 1 required positional "
                            "argument: '_typename'")
        if args:
            try:
                fields, = args  # allow the "_fields" keyword be passed
            except ValueError:
                raise TypeError('TypedDict.__new__() takes from 2 to 3 '
                                f'positional arguments but {len(args) + 2} '
                                'were given')
        elif '_fields' in kwargs and len(kwargs) == 1:
            fields = kwargs.pop('_fields')
            import warnings
            warnings.warn("Passing '_fields' as keyword argument is deprecated",
                          DeprecationWarning, stacklevel=2)
        else:
            fields = None

        if fields is None:
            fields = kwargs
        elif kwargs:
            raise TypeError("TypedDict takes either a dict or keyword arguments,"
                            " but not both")

        ns = {'__annotations__': dict(fields)}
        try:
            # Setting correct module is necessary to make typed dict classes pickleable.
            ns['__module__'] = sys._getframe(1).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):
            pass

        return _TypedDictMeta(typename, (), ns, total=total)

    _typeddict_new.__text_signature__ = ('($cls, _typename, _fields=None,'
                                         ' /, *, total=True, **kwargs)')

    class _TypedDictMeta(type):
        def __init__(cls, name, bases, ns, total=True):
            # Swallow the extra "total" keyword that type.__init__ would reject.
            super().__init__(name, bases, ns)

        def __new__(cls, name, bases, ns, total=True):
            # Create new typed dict class object.
            # This method is called directly when TypedDict is subclassed,
            # or via _typeddict_new when TypedDict is instantiated. This way
            # TypedDict supports all three syntaxes described in its docstring.
            # Subclasses and instances of TypedDict return actual dictionaries
            # via _dict_new.
            ns['__new__'] = _typeddict_new if name == 'TypedDict' else _dict_new
            # Don't insert typing.Generic into __bases__ here,
            # or Generic.__init_subclass__ will raise TypeError
            # in the super().__new__() call.
            # Instead, monkey-patch __bases__ onto the class after it's been created.
            tp_dict = super().__new__(cls, name, (dict,), ns)

            if any(issubclass(base, typing.Generic) for base in bases):
                tp_dict.__bases__ = (typing.Generic, dict)
                _maybe_adjust_parameters(tp_dict)

            annotations = {}
            own_annotations = ns.get('__annotations__', {})
            msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type"
            own_annotations = {
                n: typing._type_check(tp, msg) for n, tp in own_annotations.items()
            }
            required_keys = set()
            optional_keys = set()

            # Inherit annotations and required/optional key sets from bases.
            for base in bases:
                annotations.update(base.__dict__.get('__annotations__', {}))
                required_keys.update(base.__dict__.get('__required_keys__', ()))
                optional_keys.update(base.__dict__.get('__optional_keys__', ()))

            annotations.update(own_annotations)
            for annotation_key, annotation_type in own_annotations.items():
                annotation_origin = get_origin(annotation_type)
                if annotation_origin is Annotated:
                    # Required/NotRequired may be wrapped inside Annotated[...];
                    # unwrap one level before classifying the key.
                    annotation_args = get_args(annotation_type)
                    if annotation_args:
                        annotation_type = annotation_args[0]
                        annotation_origin = get_origin(annotation_type)

                if annotation_origin is Required:
                    required_keys.add(annotation_key)
                elif annotation_origin is NotRequired:
                    optional_keys.add(annotation_key)
                elif total:
                    required_keys.add(annotation_key)
                else:
                    optional_keys.add(annotation_key)

            tp_dict.__annotations__ = annotations
            tp_dict.__required_keys__ = frozenset(required_keys)
            tp_dict.__optional_keys__ = frozenset(optional_keys)
            if not hasattr(tp_dict, '__total__'):
                tp_dict.__total__ = total
            return tp_dict

        __instancecheck__ = __subclasscheck__ = _check_fails

    TypedDict = _TypedDictMeta('TypedDict', (dict,), {})
    TypedDict.__module__ = __name__
    TypedDict.__doc__ = \
        """A simple typed name space. At runtime it is equivalent to a plain dict.

        TypedDict creates a dictionary type that expects all of its
        instances to have a certain set of keys, with each key
        associated with a value of a consistent type. This expectation
        is not checked at runtime but is only enforced by type checkers.
        Usage::

            class Point2D(TypedDict):
                x: int
                y: int
                label: str

            a: Point2D = {'x': 1, 'y': 2, 'label': 'good'}  # OK
            b: Point2D = {'z': 3, 'label': 'bad'}           # Fails type check

            assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first')

        The type info can be accessed via the Point2D.__annotations__ dict, and
        the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets.
        TypedDict supports two additional equivalent forms::

            Point2D = TypedDict('Point2D', x=int, y=int, label=str)
            Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str})

        The class syntax is only supported in Python 3.6+, while two other
        syntax forms work for Python 2.7 and 3.2+
        """

    if hasattr(typing, "_TypedDictMeta"):
        _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta)
    else:
        _TYPEDDICT_TYPES = (_TypedDictMeta,)

    def is_typeddict(tp):
        """Check if an annotation is a TypedDict class

        For example::
            class Film(TypedDict):
                title: str
                year: int

            is_typeddict(Film)  # => True
            is_typeddict(Union[list, str])  # => False
        """
        return isinstance(tp, tuple(_TYPEDDICT_TYPES))
|
| 843 |
+
|
| 844 |
+
|
| 845 |
+
try:
    # Python 3.11+ provides assert_type natively.
    assert_type = typing.assert_type
except AttributeError:
    def assert_type(__val, __typ):
        """Assert (to the type checker) that the value is of the given type.

        When the type checker encounters a call to assert_type(), it
        emits an error if the value is not of the specified type::

            def greet(name: str) -> None:
                assert_type(name, str)  # ok
                assert_type(name, int)  # type checker error

        At runtime this returns the first argument unchanged and otherwise
        does nothing.
        """
        return __val
|
| 863 |
+
|
| 864 |
+
|
| 865 |
+
if hasattr(typing, "Required"):
|
| 866 |
+
get_type_hints = typing.get_type_hints
|
| 867 |
+
else:
|
| 868 |
+
import functools
|
| 869 |
+
import types
|
| 870 |
+
|
| 871 |
+
# replaces _strip_annotations()
|
| 872 |
+
def _strip_extras(t):
|
| 873 |
+
"""Strips Annotated, Required and NotRequired from a given type."""
|
| 874 |
+
if isinstance(t, _AnnotatedAlias):
|
| 875 |
+
return _strip_extras(t.__origin__)
|
| 876 |
+
if hasattr(t, "__origin__") and t.__origin__ in (Required, NotRequired):
|
| 877 |
+
return _strip_extras(t.__args__[0])
|
| 878 |
+
if isinstance(t, typing._GenericAlias):
|
| 879 |
+
stripped_args = tuple(_strip_extras(a) for a in t.__args__)
|
| 880 |
+
if stripped_args == t.__args__:
|
| 881 |
+
return t
|
| 882 |
+
return t.copy_with(stripped_args)
|
| 883 |
+
if hasattr(types, "GenericAlias") and isinstance(t, types.GenericAlias):
|
| 884 |
+
stripped_args = tuple(_strip_extras(a) for a in t.__args__)
|
| 885 |
+
if stripped_args == t.__args__:
|
| 886 |
+
return t
|
| 887 |
+
return types.GenericAlias(t.__origin__, stripped_args)
|
| 888 |
+
if hasattr(types, "UnionType") and isinstance(t, types.UnionType):
|
| 889 |
+
stripped_args = tuple(_strip_extras(a) for a in t.__args__)
|
| 890 |
+
if stripped_args == t.__args__:
|
| 891 |
+
return t
|
| 892 |
+
return functools.reduce(operator.or_, stripped_args)
|
| 893 |
+
|
| 894 |
+
return t
|
| 895 |
+
|
| 896 |
+
def get_type_hints(obj, globalns=None, localns=None, include_extras=False):
|
| 897 |
+
"""Return type hints for an object.
|
| 898 |
+
|
| 899 |
+
This is often the same as obj.__annotations__, but it handles
|
| 900 |
+
forward references encoded as string literals, adds Optional[t] if a
|
| 901 |
+
default value equal to None is set and recursively replaces all
|
| 902 |
+
'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T'
|
| 903 |
+
(unless 'include_extras=True').
|
| 904 |
+
|
| 905 |
+
The argument may be a module, class, method, or function. The annotations
|
| 906 |
+
are returned as a dictionary. For classes, annotations include also
|
| 907 |
+
inherited members.
|
| 908 |
+
|
| 909 |
+
TypeError is raised if the argument is not of a type that can contain
|
| 910 |
+
annotations, and an empty dictionary is returned if no annotations are
|
| 911 |
+
present.
|
| 912 |
+
|
| 913 |
+
BEWARE -- the behavior of globalns and localns is counterintuitive
|
| 914 |
+
(unless you are familiar with how eval() and exec() work). The
|
| 915 |
+
search order is locals first, then globals.
|
| 916 |
+
|
| 917 |
+
- If no dict arguments are passed, an attempt is made to use the
|
| 918 |
+
globals from obj (or the respective module's globals for classes),
|
| 919 |
+
and these are also used as the locals. If the object does not appear
|
| 920 |
+
to have globals, an empty dictionary is used.
|
| 921 |
+
|
| 922 |
+
- If one dict argument is passed, it is used for both globals and
|
| 923 |
+
locals.
|
| 924 |
+
|
| 925 |
+
- If two dict arguments are passed, they specify globals and
|
| 926 |
+
locals, respectively.
|
| 927 |
+
"""
|
| 928 |
+
if hasattr(typing, "Annotated"):
|
| 929 |
+
hint = typing.get_type_hints(
|
| 930 |
+
obj, globalns=globalns, localns=localns, include_extras=True
|
| 931 |
+
)
|
| 932 |
+
else:
|
| 933 |
+
hint = typing.get_type_hints(obj, globalns=globalns, localns=localns)
|
| 934 |
+
if include_extras:
|
| 935 |
+
return hint
|
| 936 |
+
return {k: _strip_extras(t) for k, t in hint.items()}
|
| 937 |
+
|
| 938 |
+
|
| 939 |
+
# Python 3.9+ has PEP 593 (Annotated)
|
| 940 |
+
if hasattr(typing, 'Annotated'):
|
| 941 |
+
Annotated = typing.Annotated
|
| 942 |
+
# Not exported and not a public API, but needed for get_origin() and get_args()
|
| 943 |
+
# to work.
|
| 944 |
+
_AnnotatedAlias = typing._AnnotatedAlias
|
| 945 |
+
# 3.7-3.8
|
| 946 |
+
else:
|
| 947 |
+
class _AnnotatedAlias(typing._GenericAlias, _root=True):
|
| 948 |
+
"""Runtime representation of an annotated type.
|
| 949 |
+
|
| 950 |
+
At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't'
|
| 951 |
+
with extra annotations. The alias behaves like a normal typing alias,
|
| 952 |
+
instantiating is the same as instantiating the underlying type, binding
|
| 953 |
+
it to types is also the same.
|
| 954 |
+
"""
|
| 955 |
+
def __init__(self, origin, metadata):
|
| 956 |
+
if isinstance(origin, _AnnotatedAlias):
|
| 957 |
+
metadata = origin.__metadata__ + metadata
|
| 958 |
+
origin = origin.__origin__
|
| 959 |
+
super().__init__(origin, origin)
|
| 960 |
+
self.__metadata__ = metadata
|
| 961 |
+
|
| 962 |
+
def copy_with(self, params):
|
| 963 |
+
assert len(params) == 1
|
| 964 |
+
new_type = params[0]
|
| 965 |
+
return _AnnotatedAlias(new_type, self.__metadata__)
|
| 966 |
+
|
| 967 |
+
def __repr__(self):
|
| 968 |
+
return (f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, "
|
| 969 |
+
f"{', '.join(repr(a) for a in self.__metadata__)}]")
|
| 970 |
+
|
| 971 |
+
def __reduce__(self):
|
| 972 |
+
return operator.getitem, (
|
| 973 |
+
Annotated, (self.__origin__,) + self.__metadata__
|
| 974 |
+
)
|
| 975 |
+
|
| 976 |
+
def __eq__(self, other):
|
| 977 |
+
if not isinstance(other, _AnnotatedAlias):
|
| 978 |
+
return NotImplemented
|
| 979 |
+
if self.__origin__ != other.__origin__:
|
| 980 |
+
return False
|
| 981 |
+
return self.__metadata__ == other.__metadata__
|
| 982 |
+
|
| 983 |
+
def __hash__(self):
|
| 984 |
+
return hash((self.__origin__, self.__metadata__))
|
| 985 |
+
|
| 986 |
+
class Annotated:
|
| 987 |
+
"""Add context specific metadata to a type.
|
| 988 |
+
|
| 989 |
+
Example: Annotated[int, runtime_check.Unsigned] indicates to the
|
| 990 |
+
hypothetical runtime_check module that this type is an unsigned int.
|
| 991 |
+
Every other consumer of this type can ignore this metadata and treat
|
| 992 |
+
this type as int.
|
| 993 |
+
|
| 994 |
+
The first argument to Annotated must be a valid type (and will be in
|
| 995 |
+
the __origin__ field), the remaining arguments are kept as a tuple in
|
| 996 |
+
the __extra__ field.
|
| 997 |
+
|
| 998 |
+
Details:
|
| 999 |
+
|
| 1000 |
+
- It's an error to call `Annotated` with less than two arguments.
|
| 1001 |
+
- Nested Annotated are flattened::
|
| 1002 |
+
|
| 1003 |
+
Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3]
|
| 1004 |
+
|
| 1005 |
+
- Instantiating an annotated type is equivalent to instantiating the
|
| 1006 |
+
underlying type::
|
| 1007 |
+
|
| 1008 |
+
Annotated[C, Ann1](5) == C(5)
|
| 1009 |
+
|
| 1010 |
+
- Annotated can be used as a generic type alias::
|
| 1011 |
+
|
| 1012 |
+
Optimized = Annotated[T, runtime.Optimize()]
|
| 1013 |
+
Optimized[int] == Annotated[int, runtime.Optimize()]
|
| 1014 |
+
|
| 1015 |
+
OptimizedList = Annotated[List[T], runtime.Optimize()]
|
| 1016 |
+
OptimizedList[int] == Annotated[List[int], runtime.Optimize()]
|
| 1017 |
+
"""
|
| 1018 |
+
|
| 1019 |
+
__slots__ = ()
|
| 1020 |
+
|
| 1021 |
+
def __new__(cls, *args, **kwargs):
|
| 1022 |
+
raise TypeError("Type Annotated cannot be instantiated.")
|
| 1023 |
+
|
| 1024 |
+
@typing._tp_cache
|
| 1025 |
+
def __class_getitem__(cls, params):
|
| 1026 |
+
if not isinstance(params, tuple) or len(params) < 2:
|
| 1027 |
+
raise TypeError("Annotated[...] should be used "
|
| 1028 |
+
"with at least two arguments (a type and an "
|
| 1029 |
+
"annotation).")
|
| 1030 |
+
allowed_special_forms = (ClassVar, Final)
|
| 1031 |
+
if get_origin(params[0]) in allowed_special_forms:
|
| 1032 |
+
origin = params[0]
|
| 1033 |
+
else:
|
| 1034 |
+
msg = "Annotated[t, ...]: t must be a type."
|
| 1035 |
+
origin = typing._type_check(params[0], msg)
|
| 1036 |
+
metadata = tuple(params[1:])
|
| 1037 |
+
return _AnnotatedAlias(origin, metadata)
|
| 1038 |
+
|
| 1039 |
+
def __init_subclass__(cls, *args, **kwargs):
|
| 1040 |
+
raise TypeError(
|
| 1041 |
+
f"Cannot subclass {cls.__module__}.Annotated"
|
| 1042 |
+
)
|
| 1043 |
+
|
| 1044 |
+
# Python 3.8 has get_origin() and get_args() but those implementations aren't
|
| 1045 |
+
# Annotated-aware, so we can't use those. Python 3.9's versions don't support
|
| 1046 |
+
# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do.
|
| 1047 |
+
if sys.version_info[:2] >= (3, 10):
|
| 1048 |
+
get_origin = typing.get_origin
|
| 1049 |
+
get_args = typing.get_args
|
| 1050 |
+
# 3.7-3.9
|
| 1051 |
+
else:
|
| 1052 |
+
try:
|
| 1053 |
+
# 3.9+
|
| 1054 |
+
from typing import _BaseGenericAlias
|
| 1055 |
+
except ImportError:
|
| 1056 |
+
_BaseGenericAlias = typing._GenericAlias
|
| 1057 |
+
try:
|
| 1058 |
+
# 3.9+
|
| 1059 |
+
from typing import GenericAlias as _typing_GenericAlias
|
| 1060 |
+
except ImportError:
|
| 1061 |
+
_typing_GenericAlias = typing._GenericAlias
|
| 1062 |
+
|
| 1063 |
+
def get_origin(tp):
|
| 1064 |
+
"""Get the unsubscripted version of a type.
|
| 1065 |
+
|
| 1066 |
+
This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar
|
| 1067 |
+
and Annotated. Return None for unsupported types. Examples::
|
| 1068 |
+
|
| 1069 |
+
get_origin(Literal[42]) is Literal
|
| 1070 |
+
get_origin(int) is None
|
| 1071 |
+
get_origin(ClassVar[int]) is ClassVar
|
| 1072 |
+
get_origin(Generic) is Generic
|
| 1073 |
+
get_origin(Generic[T]) is Generic
|
| 1074 |
+
get_origin(Union[T, int]) is Union
|
| 1075 |
+
get_origin(List[Tuple[T, T]][int]) == list
|
| 1076 |
+
get_origin(P.args) is P
|
| 1077 |
+
"""
|
| 1078 |
+
if isinstance(tp, _AnnotatedAlias):
|
| 1079 |
+
return Annotated
|
| 1080 |
+
if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias, _BaseGenericAlias,
|
| 1081 |
+
ParamSpecArgs, ParamSpecKwargs)):
|
| 1082 |
+
return tp.__origin__
|
| 1083 |
+
if tp is typing.Generic:
|
| 1084 |
+
return typing.Generic
|
| 1085 |
+
return None
|
| 1086 |
+
|
| 1087 |
+
def get_args(tp):
|
| 1088 |
+
"""Get type arguments with all substitutions performed.
|
| 1089 |
+
|
| 1090 |
+
For unions, basic simplifications used by Union constructor are performed.
|
| 1091 |
+
Examples::
|
| 1092 |
+
get_args(Dict[str, int]) == (str, int)
|
| 1093 |
+
get_args(int) == ()
|
| 1094 |
+
get_args(Union[int, Union[T, int], str][int]) == (int, str)
|
| 1095 |
+
get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int])
|
| 1096 |
+
get_args(Callable[[], T][int]) == ([], int)
|
| 1097 |
+
"""
|
| 1098 |
+
if isinstance(tp, _AnnotatedAlias):
|
| 1099 |
+
return (tp.__origin__,) + tp.__metadata__
|
| 1100 |
+
if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)):
|
| 1101 |
+
if getattr(tp, "_special", False):
|
| 1102 |
+
return ()
|
| 1103 |
+
res = tp.__args__
|
| 1104 |
+
if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis:
|
| 1105 |
+
res = (list(res[:-1]), res[-1])
|
| 1106 |
+
return res
|
| 1107 |
+
return ()
|
| 1108 |
+
|
| 1109 |
+
|
| 1110 |
+
# 3.10+
|
| 1111 |
+
if hasattr(typing, 'TypeAlias'):
|
| 1112 |
+
TypeAlias = typing.TypeAlias
|
| 1113 |
+
# 3.9
|
| 1114 |
+
elif sys.version_info[:2] >= (3, 9):
|
| 1115 |
+
class _TypeAliasForm(typing._SpecialForm, _root=True):
|
| 1116 |
+
def __repr__(self):
|
| 1117 |
+
return 'typing_extensions.' + self._name
|
| 1118 |
+
|
| 1119 |
+
@_TypeAliasForm
|
| 1120 |
+
def TypeAlias(self, parameters):
|
| 1121 |
+
"""Special marker indicating that an assignment should
|
| 1122 |
+
be recognized as a proper type alias definition by type
|
| 1123 |
+
checkers.
|
| 1124 |
+
|
| 1125 |
+
For example::
|
| 1126 |
+
|
| 1127 |
+
Predicate: TypeAlias = Callable[..., bool]
|
| 1128 |
+
|
| 1129 |
+
It's invalid when used anywhere except as in the example above.
|
| 1130 |
+
"""
|
| 1131 |
+
raise TypeError(f"{self} is not subscriptable")
|
| 1132 |
+
# 3.7-3.8
|
| 1133 |
+
else:
|
| 1134 |
+
class _TypeAliasForm(typing._SpecialForm, _root=True):
|
| 1135 |
+
def __repr__(self):
|
| 1136 |
+
return 'typing_extensions.' + self._name
|
| 1137 |
+
|
| 1138 |
+
TypeAlias = _TypeAliasForm('TypeAlias',
|
| 1139 |
+
doc="""Special marker indicating that an assignment should
|
| 1140 |
+
be recognized as a proper type alias definition by type
|
| 1141 |
+
checkers.
|
| 1142 |
+
|
| 1143 |
+
For example::
|
| 1144 |
+
|
| 1145 |
+
Predicate: TypeAlias = Callable[..., bool]
|
| 1146 |
+
|
| 1147 |
+
It's invalid when used anywhere except as in the example
|
| 1148 |
+
above.""")
|
| 1149 |
+
|
| 1150 |
+
|
| 1151 |
+
class _DefaultMixin:
|
| 1152 |
+
"""Mixin for TypeVarLike defaults."""
|
| 1153 |
+
|
| 1154 |
+
__slots__ = ()
|
| 1155 |
+
|
| 1156 |
+
def __init__(self, default):
|
| 1157 |
+
if isinstance(default, (tuple, list)):
|
| 1158 |
+
self.__default__ = tuple((typing._type_check(d, "Default must be a type")
|
| 1159 |
+
for d in default))
|
| 1160 |
+
elif default:
|
| 1161 |
+
self.__default__ = typing._type_check(default, "Default must be a type")
|
| 1162 |
+
else:
|
| 1163 |
+
self.__default__ = None
|
| 1164 |
+
|
| 1165 |
+
|
| 1166 |
+
# Add default and infer_variance parameters from PEP 696 and 695
|
| 1167 |
+
class TypeVar(typing.TypeVar, _DefaultMixin, _root=True):
|
| 1168 |
+
"""Type variable."""
|
| 1169 |
+
|
| 1170 |
+
__module__ = 'typing'
|
| 1171 |
+
|
| 1172 |
+
def __init__(self, name, *constraints, bound=None,
|
| 1173 |
+
covariant=False, contravariant=False,
|
| 1174 |
+
default=None, infer_variance=False):
|
| 1175 |
+
super().__init__(name, *constraints, bound=bound, covariant=covariant,
|
| 1176 |
+
contravariant=contravariant)
|
| 1177 |
+
_DefaultMixin.__init__(self, default)
|
| 1178 |
+
self.__infer_variance__ = infer_variance
|
| 1179 |
+
|
| 1180 |
+
# for pickling:
|
| 1181 |
+
try:
|
| 1182 |
+
def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
|
| 1183 |
+
except (AttributeError, ValueError):
|
| 1184 |
+
def_mod = None
|
| 1185 |
+
if def_mod != 'typing_extensions':
|
| 1186 |
+
self.__module__ = def_mod
|
| 1187 |
+
|
| 1188 |
+
|
| 1189 |
+
# Python 3.10+ has PEP 612
|
| 1190 |
+
if hasattr(typing, 'ParamSpecArgs'):
|
| 1191 |
+
ParamSpecArgs = typing.ParamSpecArgs
|
| 1192 |
+
ParamSpecKwargs = typing.ParamSpecKwargs
|
| 1193 |
+
# 3.7-3.9
|
| 1194 |
+
else:
|
| 1195 |
+
class _Immutable:
|
| 1196 |
+
"""Mixin to indicate that object should not be copied."""
|
| 1197 |
+
__slots__ = ()
|
| 1198 |
+
|
| 1199 |
+
def __copy__(self):
|
| 1200 |
+
return self
|
| 1201 |
+
|
| 1202 |
+
def __deepcopy__(self, memo):
|
| 1203 |
+
return self
|
| 1204 |
+
|
| 1205 |
+
class ParamSpecArgs(_Immutable):
|
| 1206 |
+
"""The args for a ParamSpec object.
|
| 1207 |
+
|
| 1208 |
+
Given a ParamSpec object P, P.args is an instance of ParamSpecArgs.
|
| 1209 |
+
|
| 1210 |
+
ParamSpecArgs objects have a reference back to their ParamSpec:
|
| 1211 |
+
|
| 1212 |
+
P.args.__origin__ is P
|
| 1213 |
+
|
| 1214 |
+
This type is meant for runtime introspection and has no special meaning to
|
| 1215 |
+
static type checkers.
|
| 1216 |
+
"""
|
| 1217 |
+
def __init__(self, origin):
|
| 1218 |
+
self.__origin__ = origin
|
| 1219 |
+
|
| 1220 |
+
def __repr__(self):
|
| 1221 |
+
return f"{self.__origin__.__name__}.args"
|
| 1222 |
+
|
| 1223 |
+
def __eq__(self, other):
|
| 1224 |
+
if not isinstance(other, ParamSpecArgs):
|
| 1225 |
+
return NotImplemented
|
| 1226 |
+
return self.__origin__ == other.__origin__
|
| 1227 |
+
|
| 1228 |
+
class ParamSpecKwargs(_Immutable):
|
| 1229 |
+
"""The kwargs for a ParamSpec object.
|
| 1230 |
+
|
| 1231 |
+
Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
|
| 1232 |
+
|
| 1233 |
+
ParamSpecKwargs objects have a reference back to their ParamSpec:
|
| 1234 |
+
|
| 1235 |
+
P.kwargs.__origin__ is P
|
| 1236 |
+
|
| 1237 |
+
This type is meant for runtime introspection and has no special meaning to
|
| 1238 |
+
static type checkers.
|
| 1239 |
+
"""
|
| 1240 |
+
def __init__(self, origin):
|
| 1241 |
+
self.__origin__ = origin
|
| 1242 |
+
|
| 1243 |
+
def __repr__(self):
|
| 1244 |
+
return f"{self.__origin__.__name__}.kwargs"
|
| 1245 |
+
|
| 1246 |
+
def __eq__(self, other):
|
| 1247 |
+
if not isinstance(other, ParamSpecKwargs):
|
| 1248 |
+
return NotImplemented
|
| 1249 |
+
return self.__origin__ == other.__origin__
|
| 1250 |
+
|
| 1251 |
+
# 3.10+
|
| 1252 |
+
if hasattr(typing, 'ParamSpec'):
|
| 1253 |
+
|
| 1254 |
+
# Add default Parameter - PEP 696
|
| 1255 |
+
class ParamSpec(typing.ParamSpec, _DefaultMixin, _root=True):
|
| 1256 |
+
"""Parameter specification variable."""
|
| 1257 |
+
|
| 1258 |
+
__module__ = 'typing'
|
| 1259 |
+
|
| 1260 |
+
def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
|
| 1261 |
+
default=None):
|
| 1262 |
+
super().__init__(name, bound=bound, covariant=covariant,
|
| 1263 |
+
contravariant=contravariant)
|
| 1264 |
+
_DefaultMixin.__init__(self, default)
|
| 1265 |
+
|
| 1266 |
+
# for pickling:
|
| 1267 |
+
try:
|
| 1268 |
+
def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
|
| 1269 |
+
except (AttributeError, ValueError):
|
| 1270 |
+
def_mod = None
|
| 1271 |
+
if def_mod != 'typing_extensions':
|
| 1272 |
+
self.__module__ = def_mod
|
| 1273 |
+
|
| 1274 |
+
# 3.7-3.9
|
| 1275 |
+
else:
|
| 1276 |
+
|
| 1277 |
+
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
|
| 1278 |
+
class ParamSpec(list, _DefaultMixin):
|
| 1279 |
+
"""Parameter specification variable.
|
| 1280 |
+
|
| 1281 |
+
Usage::
|
| 1282 |
+
|
| 1283 |
+
P = ParamSpec('P')
|
| 1284 |
+
|
| 1285 |
+
Parameter specification variables exist primarily for the benefit of static
|
| 1286 |
+
type checkers. They are used to forward the parameter types of one
|
| 1287 |
+
callable to another callable, a pattern commonly found in higher order
|
| 1288 |
+
functions and decorators. They are only valid when used in ``Concatenate``,
|
| 1289 |
+
or s the first argument to ``Callable``. In Python 3.10 and higher,
|
| 1290 |
+
they are also supported in user-defined Generics at runtime.
|
| 1291 |
+
See class Generic for more information on generic types. An
|
| 1292 |
+
example for annotating a decorator::
|
| 1293 |
+
|
| 1294 |
+
T = TypeVar('T')
|
| 1295 |
+
P = ParamSpec('P')
|
| 1296 |
+
|
| 1297 |
+
def add_logging(f: Callable[P, T]) -> Callable[P, T]:
|
| 1298 |
+
'''A type-safe decorator to add logging to a function.'''
|
| 1299 |
+
def inner(*args: P.args, **kwargs: P.kwargs) -> T:
|
| 1300 |
+
logging.info(f'{f.__name__} was called')
|
| 1301 |
+
return f(*args, **kwargs)
|
| 1302 |
+
return inner
|
| 1303 |
+
|
| 1304 |
+
@add_logging
|
| 1305 |
+
def add_two(x: float, y: float) -> float:
|
| 1306 |
+
'''Add two numbers together.'''
|
| 1307 |
+
return x + y
|
| 1308 |
+
|
| 1309 |
+
Parameter specification variables defined with covariant=True or
|
| 1310 |
+
contravariant=True can be used to declare covariant or contravariant
|
| 1311 |
+
generic types. These keyword arguments are valid, but their actual semantics
|
| 1312 |
+
are yet to be decided. See PEP 612 for details.
|
| 1313 |
+
|
| 1314 |
+
Parameter specification variables can be introspected. e.g.:
|
| 1315 |
+
|
| 1316 |
+
P.__name__ == 'T'
|
| 1317 |
+
P.__bound__ == None
|
| 1318 |
+
P.__covariant__ == False
|
| 1319 |
+
P.__contravariant__ == False
|
| 1320 |
+
|
| 1321 |
+
Note that only parameter specification variables defined in global scope can
|
| 1322 |
+
be pickled.
|
| 1323 |
+
"""
|
| 1324 |
+
|
| 1325 |
+
# Trick Generic __parameters__.
|
| 1326 |
+
__class__ = typing.TypeVar
|
| 1327 |
+
|
| 1328 |
+
@property
|
| 1329 |
+
def args(self):
|
| 1330 |
+
return ParamSpecArgs(self)
|
| 1331 |
+
|
| 1332 |
+
@property
|
| 1333 |
+
def kwargs(self):
|
| 1334 |
+
return ParamSpecKwargs(self)
|
| 1335 |
+
|
| 1336 |
+
def __init__(self, name, *, bound=None, covariant=False, contravariant=False,
|
| 1337 |
+
default=None):
|
| 1338 |
+
super().__init__([self])
|
| 1339 |
+
self.__name__ = name
|
| 1340 |
+
self.__covariant__ = bool(covariant)
|
| 1341 |
+
self.__contravariant__ = bool(contravariant)
|
| 1342 |
+
if bound:
|
| 1343 |
+
self.__bound__ = typing._type_check(bound, 'Bound must be a type.')
|
| 1344 |
+
else:
|
| 1345 |
+
self.__bound__ = None
|
| 1346 |
+
_DefaultMixin.__init__(self, default)
|
| 1347 |
+
|
| 1348 |
+
# for pickling:
|
| 1349 |
+
try:
|
| 1350 |
+
def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
|
| 1351 |
+
except (AttributeError, ValueError):
|
| 1352 |
+
def_mod = None
|
| 1353 |
+
if def_mod != 'typing_extensions':
|
| 1354 |
+
self.__module__ = def_mod
|
| 1355 |
+
|
| 1356 |
+
def __repr__(self):
|
| 1357 |
+
if self.__covariant__:
|
| 1358 |
+
prefix = '+'
|
| 1359 |
+
elif self.__contravariant__:
|
| 1360 |
+
prefix = '-'
|
| 1361 |
+
else:
|
| 1362 |
+
prefix = '~'
|
| 1363 |
+
return prefix + self.__name__
|
| 1364 |
+
|
| 1365 |
+
def __hash__(self):
|
| 1366 |
+
return object.__hash__(self)
|
| 1367 |
+
|
| 1368 |
+
def __eq__(self, other):
|
| 1369 |
+
return self is other
|
| 1370 |
+
|
| 1371 |
+
def __reduce__(self):
|
| 1372 |
+
return self.__name__
|
| 1373 |
+
|
| 1374 |
+
# Hack to get typing._type_check to pass.
|
| 1375 |
+
def __call__(self, *args, **kwargs):
|
| 1376 |
+
pass
|
| 1377 |
+
|
| 1378 |
+
|
| 1379 |
+
# 3.7-3.9
|
| 1380 |
+
if not hasattr(typing, 'Concatenate'):
|
| 1381 |
+
# Inherits from list as a workaround for Callable checks in Python < 3.9.2.
|
| 1382 |
+
class _ConcatenateGenericAlias(list):
|
| 1383 |
+
|
| 1384 |
+
# Trick Generic into looking into this for __parameters__.
|
| 1385 |
+
__class__ = typing._GenericAlias
|
| 1386 |
+
|
| 1387 |
+
# Flag in 3.8.
|
| 1388 |
+
_special = False
|
| 1389 |
+
|
| 1390 |
+
def __init__(self, origin, args):
|
| 1391 |
+
super().__init__(args)
|
| 1392 |
+
self.__origin__ = origin
|
| 1393 |
+
self.__args__ = args
|
| 1394 |
+
|
| 1395 |
+
def __repr__(self):
|
| 1396 |
+
_type_repr = typing._type_repr
|
| 1397 |
+
return (f'{_type_repr(self.__origin__)}'
|
| 1398 |
+
f'[{", ".join(_type_repr(arg) for arg in self.__args__)}]')
|
| 1399 |
+
|
| 1400 |
+
def __hash__(self):
|
| 1401 |
+
return hash((self.__origin__, self.__args__))
|
| 1402 |
+
|
| 1403 |
+
# Hack to get typing._type_check to pass in Generic.
|
| 1404 |
+
def __call__(self, *args, **kwargs):
|
| 1405 |
+
pass
|
| 1406 |
+
|
| 1407 |
+
@property
|
| 1408 |
+
def __parameters__(self):
|
| 1409 |
+
return tuple(
|
| 1410 |
+
tp for tp in self.__args__ if isinstance(tp, (typing.TypeVar, ParamSpec))
|
| 1411 |
+
)
|
| 1412 |
+
|
| 1413 |
+
|
| 1414 |
+
# 3.7-3.9
|
| 1415 |
+
@typing._tp_cache
|
| 1416 |
+
def _concatenate_getitem(self, parameters):
|
| 1417 |
+
if parameters == ():
|
| 1418 |
+
raise TypeError("Cannot take a Concatenate of no types.")
|
| 1419 |
+
if not isinstance(parameters, tuple):
|
| 1420 |
+
parameters = (parameters,)
|
| 1421 |
+
if not isinstance(parameters[-1], ParamSpec):
|
| 1422 |
+
raise TypeError("The last parameter to Concatenate should be a "
|
| 1423 |
+
"ParamSpec variable.")
|
| 1424 |
+
msg = "Concatenate[arg, ...]: each arg must be a type."
|
| 1425 |
+
parameters = tuple(typing._type_check(p, msg) for p in parameters)
|
| 1426 |
+
return _ConcatenateGenericAlias(self, parameters)
|
| 1427 |
+
|
| 1428 |
+
|
| 1429 |
+
# 3.10+
|
| 1430 |
+
if hasattr(typing, 'Concatenate'):
|
| 1431 |
+
Concatenate = typing.Concatenate
|
| 1432 |
+
_ConcatenateGenericAlias = typing._ConcatenateGenericAlias # noqa
|
| 1433 |
+
# 3.9
|
| 1434 |
+
elif sys.version_info[:2] >= (3, 9):
|
| 1435 |
+
@_TypeAliasForm
|
| 1436 |
+
def Concatenate(self, parameters):
|
| 1437 |
+
"""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
|
| 1438 |
+
higher order function which adds, removes or transforms parameters of a
|
| 1439 |
+
callable.
|
| 1440 |
+
|
| 1441 |
+
For example::
|
| 1442 |
+
|
| 1443 |
+
Callable[Concatenate[int, P], int]
|
| 1444 |
+
|
| 1445 |
+
See PEP 612 for detailed information.
|
| 1446 |
+
"""
|
| 1447 |
+
return _concatenate_getitem(self, parameters)
|
| 1448 |
+
# 3.7-8
|
| 1449 |
+
else:
|
| 1450 |
+
class _ConcatenateForm(typing._SpecialForm, _root=True):
|
| 1451 |
+
def __repr__(self):
|
| 1452 |
+
return 'typing_extensions.' + self._name
|
| 1453 |
+
|
| 1454 |
+
def __getitem__(self, parameters):
|
| 1455 |
+
return _concatenate_getitem(self, parameters)
|
| 1456 |
+
|
| 1457 |
+
Concatenate = _ConcatenateForm(
|
| 1458 |
+
'Concatenate',
|
| 1459 |
+
doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a
|
| 1460 |
+
higher order function which adds, removes or transforms parameters of a
|
| 1461 |
+
callable.
|
| 1462 |
+
|
| 1463 |
+
For example::
|
| 1464 |
+
|
| 1465 |
+
Callable[Concatenate[int, P], int]
|
| 1466 |
+
|
| 1467 |
+
See PEP 612 for detailed information.
|
| 1468 |
+
""")
|
| 1469 |
+
|
| 1470 |
+
# 3.10+
|
| 1471 |
+
if hasattr(typing, 'TypeGuard'):
|
| 1472 |
+
TypeGuard = typing.TypeGuard
|
| 1473 |
+
# 3.9
|
| 1474 |
+
elif sys.version_info[:2] >= (3, 9):
|
| 1475 |
+
class _TypeGuardForm(typing._SpecialForm, _root=True):
|
| 1476 |
+
def __repr__(self):
|
| 1477 |
+
return 'typing_extensions.' + self._name
|
| 1478 |
+
|
| 1479 |
+
@_TypeGuardForm
|
| 1480 |
+
def TypeGuard(self, parameters):
|
| 1481 |
+
"""Special typing form used to annotate the return type of a user-defined
|
| 1482 |
+
type guard function. ``TypeGuard`` only accepts a single type argument.
|
| 1483 |
+
At runtime, functions marked this way should return a boolean.
|
| 1484 |
+
|
| 1485 |
+
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
|
| 1486 |
+
type checkers to determine a more precise type of an expression within a
|
| 1487 |
+
program's code flow. Usually type narrowing is done by analyzing
|
| 1488 |
+
conditional code flow and applying the narrowing to a block of code. The
|
| 1489 |
+
conditional expression here is sometimes referred to as a "type guard".
|
| 1490 |
+
|
| 1491 |
+
Sometimes it would be convenient to use a user-defined boolean function
|
| 1492 |
+
as a type guard. Such a function should use ``TypeGuard[...]`` as its
|
| 1493 |
+
return type to alert static type checkers to this intention.
|
| 1494 |
+
|
| 1495 |
+
Using ``-> TypeGuard`` tells the static type checker that for a given
|
| 1496 |
+
function:
|
| 1497 |
+
|
| 1498 |
+
1. The return value is a boolean.
|
| 1499 |
+
2. If the return value is ``True``, the type of its argument
|
| 1500 |
+
is the type inside ``TypeGuard``.
|
| 1501 |
+
|
| 1502 |
+
For example::
|
| 1503 |
+
|
| 1504 |
+
def is_str(val: Union[str, float]):
|
| 1505 |
+
# "isinstance" type guard
|
| 1506 |
+
if isinstance(val, str):
|
| 1507 |
+
# Type of ``val`` is narrowed to ``str``
|
| 1508 |
+
...
|
| 1509 |
+
else:
|
| 1510 |
+
# Else, type of ``val`` is narrowed to ``float``.
|
| 1511 |
+
...
|
| 1512 |
+
|
| 1513 |
+
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
|
| 1514 |
+
form of ``TypeA`` (it can even be a wider form) and this may lead to
|
| 1515 |
+
type-unsafe results. The main reason is to allow for things like
|
| 1516 |
+
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
|
| 1517 |
+
a subtype of the former, since ``List`` is invariant. The responsibility of
|
| 1518 |
+
writing type-safe type guards is left to the user.
|
| 1519 |
+
|
| 1520 |
+
``TypeGuard`` also works with type variables. For more information, see
|
| 1521 |
+
PEP 647 (User-Defined Type Guards).
|
| 1522 |
+
"""
|
| 1523 |
+
item = typing._type_check(parameters, f'{self} accepts only a single type.')
|
| 1524 |
+
return typing._GenericAlias(self, (item,))
|
| 1525 |
+
# 3.7-3.8
|
| 1526 |
+
else:
|
| 1527 |
+
class _TypeGuardForm(typing._SpecialForm, _root=True):
|
| 1528 |
+
|
| 1529 |
+
def __repr__(self):
|
| 1530 |
+
return 'typing_extensions.' + self._name
|
| 1531 |
+
|
| 1532 |
+
def __getitem__(self, parameters):
|
| 1533 |
+
item = typing._type_check(parameters,
|
| 1534 |
+
f'{self._name} accepts only a single type')
|
| 1535 |
+
return typing._GenericAlias(self, (item,))
|
| 1536 |
+
|
| 1537 |
+
TypeGuard = _TypeGuardForm(
|
| 1538 |
+
'TypeGuard',
|
| 1539 |
+
doc="""Special typing form used to annotate the return type of a user-defined
|
| 1540 |
+
type guard function. ``TypeGuard`` only accepts a single type argument.
|
| 1541 |
+
At runtime, functions marked this way should return a boolean.
|
| 1542 |
+
|
| 1543 |
+
``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static
|
| 1544 |
+
type checkers to determine a more precise type of an expression within a
|
| 1545 |
+
program's code flow. Usually type narrowing is done by analyzing
|
| 1546 |
+
conditional code flow and applying the narrowing to a block of code. The
|
| 1547 |
+
conditional expression here is sometimes referred to as a "type guard".
|
| 1548 |
+
|
| 1549 |
+
Sometimes it would be convenient to use a user-defined boolean function
|
| 1550 |
+
as a type guard. Such a function should use ``TypeGuard[...]`` as its
|
| 1551 |
+
return type to alert static type checkers to this intention.
|
| 1552 |
+
|
| 1553 |
+
Using ``-> TypeGuard`` tells the static type checker that for a given
|
| 1554 |
+
function:
|
| 1555 |
+
|
| 1556 |
+
1. The return value is a boolean.
|
| 1557 |
+
2. If the return value is ``True``, the type of its argument
|
| 1558 |
+
is the type inside ``TypeGuard``.
|
| 1559 |
+
|
| 1560 |
+
For example::
|
| 1561 |
+
|
| 1562 |
+
def is_str(val: Union[str, float]):
|
| 1563 |
+
# "isinstance" type guard
|
| 1564 |
+
if isinstance(val, str):
|
| 1565 |
+
# Type of ``val`` is narrowed to ``str``
|
| 1566 |
+
...
|
| 1567 |
+
else:
|
| 1568 |
+
# Else, type of ``val`` is narrowed to ``float``.
|
| 1569 |
+
...
|
| 1570 |
+
|
| 1571 |
+
Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower
|
| 1572 |
+
form of ``TypeA`` (it can even be a wider form) and this may lead to
|
| 1573 |
+
type-unsafe results. The main reason is to allow for things like
|
| 1574 |
+
narrowing ``List[object]`` to ``List[str]`` even though the latter is not
|
| 1575 |
+
a subtype of the former, since ``List`` is invariant. The responsibility of
|
| 1576 |
+
writing type-safe type guards is left to the user.
|
| 1577 |
+
|
| 1578 |
+
``TypeGuard`` also works with type variables. For more information, see
|
| 1579 |
+
PEP 647 (User-Defined Type Guards).
|
| 1580 |
+
""")
|
| 1581 |
+
|
| 1582 |
+
|
| 1583 |
+
# Vendored from cpython typing._SpecialFrom
class _SpecialForm(typing._Final, _root=True):
    # NOTE: no class docstring on purpose -- '__doc__' appears in __slots__,
    # and a class docstring would conflict with that slot at class creation.
    __slots__ = ('_name', '__doc__', '_getitem')

    def __init__(self, getitem):
        # ``getitem`` is the decorated module-level function; its name and
        # docstring become the special form's public identity.
        self._getitem = getitem
        self._name = getitem.__name__
        self.__doc__ = getitem.__doc__

    def __getattr__(self, item):
        # Surface the decorated function's name under the usual attributes.
        if item in {'__name__', '__qualname__'}:
            return self._name

        raise AttributeError(item)

    def __mro_entries__(self, bases):
        # Special forms may not be used as base classes.
        raise TypeError(f"Cannot subclass {self!r}")

    def __repr__(self):
        return f'typing_extensions.{self._name}'

    def __reduce__(self):
        # Pickle by name: unpickling resolves the module-level attribute.
        return self._name

    def __call__(self, *args, **kwds):
        raise TypeError(f"Cannot instantiate {self!r}")

    def __or__(self, other):
        # Support ``Form | X`` union syntax.
        return typing.Union[self, other]

    def __ror__(self, other):
        return typing.Union[other, self]

    def __instancecheck__(self, obj):
        raise TypeError(f"{self} cannot be used with isinstance()")

    def __subclasscheck__(self, cls):
        raise TypeError(f"{self} cannot be used with issubclass()")

    @typing._tp_cache
    def __getitem__(self, parameters):
        # Delegate subscription to the decorated function (results cached).
        return self._getitem(self, parameters)
|
| 1625 |
+
|
| 1626 |
+
|
| 1627 |
+
# Use the stdlib LiteralString on 3.11+, otherwise define a stub form that
# exists only for static type checkers (it rejects subscription at runtime).
if hasattr(typing, "LiteralString"):
    LiteralString = typing.LiteralString
else:
    @_SpecialForm
    def LiteralString(self, params):
        """Represents an arbitrary literal string.

        Example::

          from typing_extensions import LiteralString

          def query(sql: LiteralString) -> ...:
              ...

          query("SELECT * FROM table")  # ok
          query(f"SELECT * FROM {input()}")  # not ok

        See PEP 675 for details.

        """
        raise TypeError(f"{self} is not subscriptable")
|
| 1648 |
+
|
| 1649 |
+
|
| 1650 |
+
# Use the stdlib Self on 3.11+, otherwise define a stub form that exists
# only for static type checkers (it rejects subscription at runtime).
if hasattr(typing, "Self"):
    Self = typing.Self
else:
    @_SpecialForm
    def Self(self, params):
        """Used to spell the type of "self" in classes.

        Example::

          from typing import Self

          class ReturnsSelf:
              def parse(self, data: bytes) -> Self:
                  ...
                  return self

        """

        raise TypeError(f"{self} is not subscriptable")
|
| 1669 |
+
|
| 1670 |
+
|
| 1671 |
+
# Use the stdlib Never on 3.11+, otherwise define a stub form that exists
# only for static type checkers (it rejects subscription at runtime).
if hasattr(typing, "Never"):
    Never = typing.Never
else:
    @_SpecialForm
    def Never(self, params):
        """The bottom type, a type that has no members.

        This can be used to define a function that should never be
        called, or a function that never returns::

            from typing_extensions import Never

            def never_call_me(arg: Never) -> None:
                pass

            def int_or_str(arg: int | str) -> None:
                never_call_me(arg)  # type checker error
                match arg:
                    case int():
                        print("It's an int")
                    case str():
                        print("It's a str")
                    case _:
                        never_call_me(arg)  # ok, arg is of type Never

        """

        raise TypeError(f"{self} is not subscriptable")
|
| 1699 |
+
|
| 1700 |
+
|
| 1701 |
+
# Required / NotRequired (PEP 655): stdlib on 3.11+; on 3.9-3.10 build them
# on typing._SpecialForm's decorator protocol; on older versions fall back
# to a subscriptable _SpecialForm subclass constructed with an explicit doc.
if hasattr(typing, 'Required'):
    Required = typing.Required
    NotRequired = typing.NotRequired
elif sys.version_info[:2] >= (3, 9):
    class _ExtensionsSpecialForm(typing._SpecialForm, _root=True):
        def __repr__(self):
            # Report this module, not ``typing``, as the origin.
            return 'typing_extensions.' + self._name

    @_ExtensionsSpecialForm
    def Required(self, parameters):
        """A special typing construct to mark a key of a total=False TypedDict
        as required. For example:

            class Movie(TypedDict, total=False):
                title: Required[str]
                year: int

            m = Movie(
                title='The Matrix',  # typechecker error if key is omitted
                year=1999,
            )

        There is no runtime checking that a required key is actually provided
        when instantiating a related TypedDict.
        """
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return typing._GenericAlias(self, (item,))

    @_ExtensionsSpecialForm
    def NotRequired(self, parameters):
        """A special typing construct to mark a key of a TypedDict as
        potentially missing. For example:

            class Movie(TypedDict):
                title: str
                year: NotRequired[int]

            m = Movie(
                title='The Matrix',  # typechecker error if key is omitted
                year=1999,
            )
        """
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return typing._GenericAlias(self, (item,))

else:
    class _RequiredForm(typing._SpecialForm, _root=True):
        def __repr__(self):
            # Report this module, not ``typing``, as the origin.
            return 'typing_extensions.' + self._name

        def __getitem__(self, parameters):
            # Both forms wrap exactly one type argument.
            item = typing._type_check(parameters,
                                      f'{self._name} accepts only a single type.')
            return typing._GenericAlias(self, (item,))

    Required = _RequiredForm(
        'Required',
        doc="""A special typing construct to mark a key of a total=False TypedDict
        as required. For example:

            class Movie(TypedDict, total=False):
                title: Required[str]
                year: int

            m = Movie(
                title='The Matrix',  # typechecker error if key is omitted
                year=1999,
            )

        There is no runtime checking that a required key is actually provided
        when instantiating a related TypedDict.
        """)
    NotRequired = _RequiredForm(
        'NotRequired',
        doc="""A special typing construct to mark a key of a TypedDict as
        potentially missing. For example:

            class Movie(TypedDict):
                title: str
                year: NotRequired[int]

            m = Movie(
                title='The Matrix',  # typechecker error if key is omitted
                year=1999,
            )
        """)
|
| 1787 |
+
|
| 1788 |
+
|
| 1789 |
+
# Unpack (PEP 646): stdlib on 3.11+; otherwise backported.  The backported
# alias pretends to be a TypeVar (via ``__class__``) so that
# ``Generic[Unpack[Ts]]`` is accepted by the runtime.
if hasattr(typing, "Unpack"):  # 3.11+
    Unpack = typing.Unpack
elif sys.version_info[:2] >= (3, 9):
    class _UnpackSpecialForm(typing._SpecialForm, _root=True):
        def __repr__(self):
            # Report this module, not ``typing``, as the origin.
            return 'typing_extensions.' + self._name

    class _UnpackAlias(typing._GenericAlias, _root=True):
        # Masquerade as a TypeVar so Generic[...] accepts the alias.
        __class__ = typing.TypeVar

    @_UnpackSpecialForm
    def Unpack(self, parameters):
        """A special typing construct to unpack a variadic type. For example:

            Shape = TypeVarTuple('Shape')
            Batch = NewType('Batch', int)

            def add_batch_axis(
                x: Array[Unpack[Shape]]
            ) -> Array[Batch, Unpack[Shape]]: ...

        """
        item = typing._type_check(parameters, f'{self._name} accepts only a single type.')
        return _UnpackAlias(self, (item,))

    def _is_unpack(obj):
        # Internal helper used by the generic-arity monkeypatches below.
        return isinstance(obj, _UnpackAlias)

else:
    class _UnpackAlias(typing._GenericAlias, _root=True):
        # Masquerade as a TypeVar so Generic[...] accepts the alias.
        __class__ = typing.TypeVar

    class _UnpackForm(typing._SpecialForm, _root=True):
        def __repr__(self):
            # Report this module, not ``typing``, as the origin.
            return 'typing_extensions.' + self._name

        def __getitem__(self, parameters):
            item = typing._type_check(parameters,
                                      f'{self._name} accepts only a single type.')
            return _UnpackAlias(self, (item,))

    Unpack = _UnpackForm(
        'Unpack',
        doc="""A special typing construct to unpack a variadic type. For example:

            Shape = TypeVarTuple('Shape')
            Batch = NewType('Batch', int)

            def add_batch_axis(
                x: Array[Unpack[Shape]]
            ) -> Array[Batch, Unpack[Shape]]: ...

        """)

    def _is_unpack(obj):
        # Internal helper used by the generic-arity monkeypatches below.
        return isinstance(obj, _UnpackAlias)
|
| 1845 |
+
|
| 1846 |
+
|
| 1847 |
+
# TypeVarTuple (PEP 646): on 3.11+ wrap the stdlib class just to add the
# PEP 696 ``default=`` parameter; otherwise provide a full backport that
# masquerades as a TypeVar so Generic[] machinery accepts it.
if hasattr(typing, "TypeVarTuple"):  # 3.11+

    # Add default Parameter - PEP 696
    class TypeVarTuple(typing.TypeVarTuple, _DefaultMixin, _root=True):
        """Type variable tuple."""

        def __init__(self, name, *, default=None):
            super().__init__(name)
            _DefaultMixin.__init__(self, default)

            # for pickling:
            try:
                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
            except (AttributeError, ValueError):
                def_mod = None
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod

else:
    class TypeVarTuple(_DefaultMixin):
        """Type variable tuple.

        Usage::

            Ts = TypeVarTuple('Ts')

        In the same way that a normal type variable is a stand-in for a single
        type such as ``int``, a type variable *tuple* is a stand-in for a *tuple*
        type such as ``Tuple[int, str]``.

        Type variable tuples can be used in ``Generic`` declarations.
        Consider the following example::

            class Array(Generic[*Ts]): ...

        The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``,
        where ``T1`` and ``T2`` are type variables. To use these type variables
        as type parameters of ``Array``, we must *unpack* the type variable tuple using
        the star operator: ``*Ts``. The signature of ``Array`` then behaves
        as if we had simply written ``class Array(Generic[T1, T2]): ...``.
        In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows
        us to parameterise the class with an *arbitrary* number of type parameters.

        Type variable tuples can be used anywhere a normal ``TypeVar`` can.
        This includes class definitions, as shown above, as well as function
        signatures and variable annotations::

            class Array(Generic[*Ts]):

                def __init__(self, shape: Tuple[*Ts]):
                    self._shape: Tuple[*Ts] = shape

                def get_shape(self) -> Tuple[*Ts]:
                    return self._shape

            shape = (Height(480), Width(640))
            x: Array[Height, Width] = Array(shape)
            y = abs(x)  # Inferred type is Array[Height, Width]
            z = x + x   #  ...  is Array[Height, Width]
            x.get_shape()  #  ...  is tuple[Height, Width]

        """

        # Trick Generic __parameters__.
        __class__ = typing.TypeVar

        def __iter__(self):
            # Iterating yields the single Unpack[self] alias, which is what
            # ``*Ts`` expands to inside a subscription.
            yield self.__unpacked__

        def __init__(self, name, *, default=None):
            self.__name__ = name
            _DefaultMixin.__init__(self, default)

            # for pickling:
            try:
                def_mod = sys._getframe(1).f_globals.get('__name__', '__main__')
            except (AttributeError, ValueError):
                def_mod = None
            if def_mod != 'typing_extensions':
                self.__module__ = def_mod

            self.__unpacked__ = Unpack[self]

        def __repr__(self):
            return self.__name__

        def __hash__(self):
            # Identity-based hashing/equality: each TypeVarTuple is unique.
            return object.__hash__(self)

        def __eq__(self, other):
            return self is other

        def __reduce__(self):
            # Pickle by name, like regular TypeVars.
            return self.__name__

        def __init_subclass__(self, *args, **kwds):
            if '_root' not in kwds:
                raise TypeError("Cannot subclass special typing classes")
|
| 1945 |
+
|
| 1946 |
+
|
| 1947 |
+
if hasattr(typing, "reveal_type"):
|
| 1948 |
+
reveal_type = typing.reveal_type
|
| 1949 |
+
else:
|
| 1950 |
+
def reveal_type(__obj: T) -> T:
|
| 1951 |
+
"""Reveal the inferred type of a variable.
|
| 1952 |
+
|
| 1953 |
+
When a static type checker encounters a call to ``reveal_type()``,
|
| 1954 |
+
it will emit the inferred type of the argument::
|
| 1955 |
+
|
| 1956 |
+
x: int = 1
|
| 1957 |
+
reveal_type(x)
|
| 1958 |
+
|
| 1959 |
+
Running a static type checker (e.g., ``mypy``) on this example
|
| 1960 |
+
will produce output similar to 'Revealed type is "builtins.int"'.
|
| 1961 |
+
|
| 1962 |
+
At runtime, the function prints the runtime type of the
|
| 1963 |
+
argument and returns it unchanged.
|
| 1964 |
+
|
| 1965 |
+
"""
|
| 1966 |
+
print(f"Runtime type is {type(__obj).__name__!r}", file=sys.stderr)
|
| 1967 |
+
return __obj
|
| 1968 |
+
|
| 1969 |
+
|
| 1970 |
+
# Use the stdlib assert_never on 3.11+, otherwise provide the runtime shim.
if hasattr(typing, "assert_never"):
    assert_never = typing.assert_never
else:
    def assert_never(__arg: Never) -> Never:
        """Assert to the type checker that a line of code is unreachable.

        Example::

            def int_or_str(arg: int | str) -> None:
                match arg:
                    case int():
                        print("It's an int")
                    case str():
                        print("It's a str")
                    case _:
                        assert_never(arg)

        If a type checker finds that a call to assert_never() is
        reachable, it will emit an error.

        At runtime, this throws an exception when called.

        """
        raise AssertionError("Expected code to be unreachable")
|
| 1994 |
+
|
| 1995 |
+
|
| 1996 |
+
if hasattr(typing, 'dataclass_transform'):
|
| 1997 |
+
dataclass_transform = typing.dataclass_transform
|
| 1998 |
+
else:
|
| 1999 |
+
def dataclass_transform(
|
| 2000 |
+
*,
|
| 2001 |
+
eq_default: bool = True,
|
| 2002 |
+
order_default: bool = False,
|
| 2003 |
+
kw_only_default: bool = False,
|
| 2004 |
+
field_specifiers: typing.Tuple[
|
| 2005 |
+
typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]],
|
| 2006 |
+
...
|
| 2007 |
+
] = (),
|
| 2008 |
+
**kwargs: typing.Any,
|
| 2009 |
+
) -> typing.Callable[[T], T]:
|
| 2010 |
+
"""Decorator that marks a function, class, or metaclass as providing
|
| 2011 |
+
dataclass-like behavior.
|
| 2012 |
+
|
| 2013 |
+
Example:
|
| 2014 |
+
|
| 2015 |
+
from typing_extensions import dataclass_transform
|
| 2016 |
+
|
| 2017 |
+
_T = TypeVar("_T")
|
| 2018 |
+
|
| 2019 |
+
# Used on a decorator function
|
| 2020 |
+
@dataclass_transform()
|
| 2021 |
+
def create_model(cls: type[_T]) -> type[_T]:
|
| 2022 |
+
...
|
| 2023 |
+
return cls
|
| 2024 |
+
|
| 2025 |
+
@create_model
|
| 2026 |
+
class CustomerModel:
|
| 2027 |
+
id: int
|
| 2028 |
+
name: str
|
| 2029 |
+
|
| 2030 |
+
# Used on a base class
|
| 2031 |
+
@dataclass_transform()
|
| 2032 |
+
class ModelBase: ...
|
| 2033 |
+
|
| 2034 |
+
class CustomerModel(ModelBase):
|
| 2035 |
+
id: int
|
| 2036 |
+
name: str
|
| 2037 |
+
|
| 2038 |
+
# Used on a metaclass
|
| 2039 |
+
@dataclass_transform()
|
| 2040 |
+
class ModelMeta(type): ...
|
| 2041 |
+
|
| 2042 |
+
class ModelBase(metaclass=ModelMeta): ...
|
| 2043 |
+
|
| 2044 |
+
class CustomerModel(ModelBase):
|
| 2045 |
+
id: int
|
| 2046 |
+
name: str
|
| 2047 |
+
|
| 2048 |
+
Each of the ``CustomerModel`` classes defined in this example will now
|
| 2049 |
+
behave similarly to a dataclass created with the ``@dataclasses.dataclass``
|
| 2050 |
+
decorator. For example, the type checker will synthesize an ``__init__``
|
| 2051 |
+
method.
|
| 2052 |
+
|
| 2053 |
+
The arguments to this decorator can be used to customize this behavior:
|
| 2054 |
+
- ``eq_default`` indicates whether the ``eq`` parameter is assumed to be
|
| 2055 |
+
True or False if it is omitted by the caller.
|
| 2056 |
+
- ``order_default`` indicates whether the ``order`` parameter is
|
| 2057 |
+
assumed to be True or False if it is omitted by the caller.
|
| 2058 |
+
- ``kw_only_default`` indicates whether the ``kw_only`` parameter is
|
| 2059 |
+
assumed to be True or False if it is omitted by the caller.
|
| 2060 |
+
- ``field_specifiers`` specifies a static list of supported classes
|
| 2061 |
+
or functions that describe fields, similar to ``dataclasses.field()``.
|
| 2062 |
+
|
| 2063 |
+
At runtime, this decorator records its arguments in the
|
| 2064 |
+
``__dataclass_transform__`` attribute on the decorated object.
|
| 2065 |
+
|
| 2066 |
+
See PEP 681 for details.
|
| 2067 |
+
|
| 2068 |
+
"""
|
| 2069 |
+
def decorator(cls_or_fn):
|
| 2070 |
+
cls_or_fn.__dataclass_transform__ = {
|
| 2071 |
+
"eq_default": eq_default,
|
| 2072 |
+
"order_default": order_default,
|
| 2073 |
+
"kw_only_default": kw_only_default,
|
| 2074 |
+
"field_specifiers": field_specifiers,
|
| 2075 |
+
"kwargs": kwargs,
|
| 2076 |
+
}
|
| 2077 |
+
return cls_or_fn
|
| 2078 |
+
return decorator
|
| 2079 |
+
|
| 2080 |
+
|
| 2081 |
+
# Use the stdlib override where available, otherwise provide the shim.
if hasattr(typing, "override"):
    override = typing.override
else:
    _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any])

    def override(__arg: _F) -> _F:
        """Mark a method as overriding a method of a base class (PEP 698).

        Usage:

            class Base:
                def method(self) -> None: ...
                    pass

            class Child(Base):
                @override
                def method(self) -> None:
                    super().method()

        A type checker verifies that the decorated method really overrides a
        method of the same name on some base class.  This catches the bug
        where a base-class method is renamed or removed without an
        equivalent change to its subclasses.

        At runtime this is an identity decorator: the argument is returned
        unchanged.

        See PEP 698 for details.
        """
        return __arg
|
| 2109 |
+
|
| 2110 |
+
|
| 2111 |
+
# We have to do some monkey patching to deal with the dual nature of
# Unpack/TypeVarTuple:
# - We want Unpack to be a kind of TypeVar so it gets accepted in
#   Generic[Unpack[Ts]]
# - We want it to *not* be treated as a TypeVar for the purposes of
#   counting generic parameters, so that when we subscript a generic,
#   the runtime doesn't try to substitute the Unpack with the subscripted type.
if not hasattr(typing, "TypeVarTuple"):
    # Only patch interpreters without native TypeVarTuple support; the
    # replacement helpers are defined earlier in this module.
    typing._collect_type_vars = _collect_type_vars
    typing._check_generic = _check_generic
|
| 2121 |
+
|
| 2122 |
+
|
| 2123 |
+
# Backport typing.NamedTuple as it exists in Python 3.11.
# In 3.11, the ability to define generic `NamedTuple`s was supported.
# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8.
if sys.version_info >= (3, 11):
    NamedTuple = typing.NamedTuple
else:
    def _caller():
        # Return the module name of the code that called our caller, used to
        # set __module__ on generated namedtuples for pickling.
        try:
            return sys._getframe(2).f_globals.get('__name__', '__main__')
        except (AttributeError, ValueError):  # For platforms without _getframe()
            return None

    def _make_nmtuple(name, types, module, defaults=()):
        # Build a collections.namedtuple and graft typing metadata onto it.
        fields = [n for n, t in types]
        annotations = {n: typing._type_check(t, f"field {n} annotation must be a type")
                       for n, t in types}
        nm_tpl = collections.namedtuple(name, fields,
                                        defaults=defaults, module=module)
        nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations
        # The `_field_types` attribute was removed in 3.9;
        # in earlier versions, it is the same as the `__annotations__` attribute
        if sys.version_info < (3, 9):
            nm_tpl._field_types = annotations
        return nm_tpl

    # Namespace keys users may not define on a NamedTuple subclass.
    _prohibited_namedtuple_fields = typing._prohibited
    # Namespace keys that are managed for the user and silently skipped.
    _special_namedtuple_fields = frozenset({'__module__', '__name__', '__annotations__'})

    class _NamedTupleMeta(type):
        def __new__(cls, typename, bases, ns):
            assert _NamedTuple in bases
            for base in bases:
                if base is not _NamedTuple and base is not typing.Generic:
                    raise TypeError(
                        'can only inherit from a NamedTuple type and Generic')
            # Substitute the real ``tuple`` base for the _NamedTuple marker.
            bases = tuple(tuple if base is _NamedTuple else base for base in bases)
            types = ns.get('__annotations__', {})
            default_names = []
            for field_name in types:
                if field_name in ns:
                    default_names.append(field_name)
                elif default_names:
                    # Defaults must form a contiguous suffix of the fields.
                    raise TypeError(f"Non-default namedtuple field {field_name} "
                                    f"cannot follow default field"
                                    f"{'s' if len(default_names) > 1 else ''} "
                                    f"{', '.join(default_names)}")
            nm_tpl = _make_nmtuple(
                typename, types.items(),
                defaults=[ns[n] for n in default_names],
                module=ns['__module__']
            )
            nm_tpl.__bases__ = bases
            if typing.Generic in bases:
                # Borrow Generic's __class_getitem__ so the result subscripts.
                class_getitem = typing.Generic.__class_getitem__.__func__
                nm_tpl.__class_getitem__ = classmethod(class_getitem)
            # update from user namespace without overriding special namedtuple attributes
            for key in ns:
                if key in _prohibited_namedtuple_fields:
                    raise AttributeError("Cannot overwrite NamedTuple attribute " + key)
                elif key not in _special_namedtuple_fields and key not in nm_tpl._fields:
                    setattr(nm_tpl, key, ns[key])
            if typing.Generic in bases:
                nm_tpl.__init_subclass__()
            return nm_tpl

    def NamedTuple(__typename, __fields=None, **kwargs):
        # Functional form: fields may be given as a list of pairs or kwargs,
        # but not both.
        if __fields is None:
            __fields = kwargs.items()
        elif kwargs:
            raise TypeError("Either list of fields or keywords"
                            " can be provided to NamedTuple, not both")
        return _make_nmtuple(__typename, __fields, module=_caller())

    NamedTuple.__doc__ = typing.NamedTuple.__doc__
    # Sentinel base class; __mro_entries__ below swaps it in for NamedTuple.
    _NamedTuple = type.__new__(_NamedTupleMeta, 'NamedTuple', (), {})

    # On 3.8+, alter the signature so that it matches typing.NamedTuple.
    # The signature of typing.NamedTuple on >=3.8 is invalid syntax in Python 3.7,
    # so just leave the signature as it is on 3.7.
    if sys.version_info >= (3, 8):
        NamedTuple.__text_signature__ = '(typename, fields=None, /, **kwargs)'

    def _namedtuple_mro_entries(bases):
        # Allow ``class X(NamedTuple):`` even though NamedTuple is a function.
        assert NamedTuple in bases
        return (_NamedTuple,)

    NamedTuple.__mro_entries__ = _namedtuple_mro_entries
|
.venv/Lib/site-packages/pkg_resources/_vendor/zipp.py
ADDED
|
@@ -0,0 +1,329 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import posixpath
|
| 3 |
+
import zipfile
|
| 4 |
+
import itertools
|
| 5 |
+
import contextlib
|
| 6 |
+
import sys
|
| 7 |
+
import pathlib
|
| 8 |
+
|
| 9 |
+
# ``dict`` preserves insertion order from Python 3.7 onward; fall back to
# ``collections.OrderedDict`` on older interpreters so ``_dedupe`` below
# remains order-preserving everywhere.
if sys.version_info < (3, 7):
    from collections import OrderedDict
else:
    OrderedDict = dict


# Only ``Path`` is public API of this module.
__all__ = ['Path']
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def _parents(path):
    """
    Given a path with elements separated by
    posixpath.sep, generate all parents of that path.

    >>> list(_parents('b/d'))
    ['b']
    >>> list(_parents('/b/d/'))
    ['/b']
    >>> list(_parents('b/d/f/'))
    ['b/d', 'b']
    >>> list(_parents('b'))
    []
    >>> list(_parents(''))
    []
    """
    ancestors = _ancestry(path)
    # The first element of the ancestry is the path itself; skip it so
    # only strict parents are produced.
    next(ancestors, None)
    yield from ancestors
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _ancestry(path):
|
| 38 |
+
"""
|
| 39 |
+
Given a path with elements separated by
|
| 40 |
+
posixpath.sep, generate all elements of that path
|
| 41 |
+
|
| 42 |
+
>>> list(_ancestry('b/d'))
|
| 43 |
+
['b/d', 'b']
|
| 44 |
+
>>> list(_ancestry('/b/d/'))
|
| 45 |
+
['/b/d', '/b']
|
| 46 |
+
>>> list(_ancestry('b/d/f/'))
|
| 47 |
+
['b/d/f', 'b/d', 'b']
|
| 48 |
+
>>> list(_ancestry('b'))
|
| 49 |
+
['b']
|
| 50 |
+
>>> list(_ancestry(''))
|
| 51 |
+
[]
|
| 52 |
+
"""
|
| 53 |
+
path = path.rstrip(posixpath.sep)
|
| 54 |
+
while path and path != posixpath.sep:
|
| 55 |
+
yield path
|
| 56 |
+
path, tail = posixpath.split(path)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# ``OrderedDict.fromkeys`` keeps only the first occurrence of each key, so
# it doubles as an order-preserving de-duplicator over any iterable.
_dedupe = OrderedDict.fromkeys
"""Deduplicate an iterable in original order"""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def _difference(minuend, subtrahend):
|
| 64 |
+
"""
|
| 65 |
+
Return items in minuend not in subtrahend, retaining order
|
| 66 |
+
with O(1) lookup.
|
| 67 |
+
"""
|
| 68 |
+
return itertools.filterfalse(set(subtrahend).__contains__, minuend)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class CompleteDirs(zipfile.ZipFile):
    """
    A ZipFile subclass that ensures that implied directories
    are always included in the namelist.
    """

    @staticmethod
    def _implied_dirs(names):
        # Directory entries are often absent from archives; derive them
        # from file entries (e.g. 'b/c.txt' implies 'b/') and keep only
        # those not already present, deduplicated in order.
        parents = itertools.chain.from_iterable(map(_parents, names))
        as_dirs = (p + posixpath.sep for p in parents)
        return _dedupe(_difference(as_dirs, names))

    def namelist(self):
        names = super(CompleteDirs, self).namelist()
        return names + list(self._implied_dirs(names))

    def _name_set(self):
        # Set form of namelist() for O(1) membership tests.
        return set(self.namelist())

    def resolve_dir(self, name):
        """
        If the name represents a directory, return that name
        as a directory (with the trailing slash).
        """
        names = self._name_set()
        dirname = name + '/'
        dir_match = name not in names and dirname in names
        return dirname if dir_match else name

    @classmethod
    def make(cls, source):
        """
        Given a source (filename or zipfile), return an
        appropriate CompleteDirs subclass.
        """
        if isinstance(source, CompleteDirs):
            return source

        if not isinstance(source, zipfile.ZipFile):
            # Not a ZipFile: treat the source as a filename/path-like.
            return cls(_pathlib_compat(source))

        # Only allow for FastLookup when supplied zipfile is read-only
        if 'r' not in source.mode:
            cls = CompleteDirs

        # NOTE: mutates the supplied ZipFile's class in place rather than
        # copying it.
        source.__class__ = cls
        return source
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class FastLookup(CompleteDirs):
    """
    ZipFile subclass to ensure implicit
    dirs exist and are resolved rapidly.
    """

    def namelist(self):
        # Cache the computed name list on first use; CompleteDirs.make only
        # applies this class to read-only archives, so the cache cannot go
        # stale.  AttributeError means the cache is not yet populated.
        with contextlib.suppress(AttributeError):
            return self.__names
        self.__names = super(FastLookup, self).namelist()
        return self.__names

    def _name_set(self):
        # Same lazy-caching strategy as namelist(), for the set form.
        with contextlib.suppress(AttributeError):
            return self.__lookup
        self.__lookup = super(FastLookup, self)._name_set()
        return self.__lookup
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def _pathlib_compat(path):
|
| 140 |
+
"""
|
| 141 |
+
For path-like objects, convert to a filename for compatibility
|
| 142 |
+
on Python 3.6.1 and earlier.
|
| 143 |
+
"""
|
| 144 |
+
try:
|
| 145 |
+
return path.__fspath__()
|
| 146 |
+
except AttributeError:
|
| 147 |
+
return str(path)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class Path:
|
| 151 |
+
"""
|
| 152 |
+
A pathlib-compatible interface for zip files.
|
| 153 |
+
|
| 154 |
+
Consider a zip file with this structure::
|
| 155 |
+
|
| 156 |
+
.
|
| 157 |
+
├── a.txt
|
| 158 |
+
└── b
|
| 159 |
+
├── c.txt
|
| 160 |
+
└── d
|
| 161 |
+
└── e.txt
|
| 162 |
+
|
| 163 |
+
>>> data = io.BytesIO()
|
| 164 |
+
>>> zf = zipfile.ZipFile(data, 'w')
|
| 165 |
+
>>> zf.writestr('a.txt', 'content of a')
|
| 166 |
+
>>> zf.writestr('b/c.txt', 'content of c')
|
| 167 |
+
>>> zf.writestr('b/d/e.txt', 'content of e')
|
| 168 |
+
>>> zf.filename = 'mem/abcde.zip'
|
| 169 |
+
|
| 170 |
+
Path accepts the zipfile object itself or a filename
|
| 171 |
+
|
| 172 |
+
>>> root = Path(zf)
|
| 173 |
+
|
| 174 |
+
From there, several path operations are available.
|
| 175 |
+
|
| 176 |
+
Directory iteration (including the zip file itself):
|
| 177 |
+
|
| 178 |
+
>>> a, b = root.iterdir()
|
| 179 |
+
>>> a
|
| 180 |
+
Path('mem/abcde.zip', 'a.txt')
|
| 181 |
+
>>> b
|
| 182 |
+
Path('mem/abcde.zip', 'b/')
|
| 183 |
+
|
| 184 |
+
name property:
|
| 185 |
+
|
| 186 |
+
>>> b.name
|
| 187 |
+
'b'
|
| 188 |
+
|
| 189 |
+
join with divide operator:
|
| 190 |
+
|
| 191 |
+
>>> c = b / 'c.txt'
|
| 192 |
+
>>> c
|
| 193 |
+
Path('mem/abcde.zip', 'b/c.txt')
|
| 194 |
+
>>> c.name
|
| 195 |
+
'c.txt'
|
| 196 |
+
|
| 197 |
+
Read text:
|
| 198 |
+
|
| 199 |
+
>>> c.read_text()
|
| 200 |
+
'content of c'
|
| 201 |
+
|
| 202 |
+
existence:
|
| 203 |
+
|
| 204 |
+
>>> c.exists()
|
| 205 |
+
True
|
| 206 |
+
>>> (b / 'missing.txt').exists()
|
| 207 |
+
False
|
| 208 |
+
|
| 209 |
+
Coercion to string:
|
| 210 |
+
|
| 211 |
+
>>> import os
|
| 212 |
+
>>> str(c).replace(os.sep, posixpath.sep)
|
| 213 |
+
'mem/abcde.zip/b/c.txt'
|
| 214 |
+
|
| 215 |
+
At the root, ``name``, ``filename``, and ``parent``
|
| 216 |
+
resolve to the zipfile. Note these attributes are not
|
| 217 |
+
valid and will raise a ``ValueError`` if the zipfile
|
| 218 |
+
has no filename.
|
| 219 |
+
|
| 220 |
+
>>> root.name
|
| 221 |
+
'abcde.zip'
|
| 222 |
+
>>> str(root.filename).replace(os.sep, posixpath.sep)
|
| 223 |
+
'mem/abcde.zip'
|
| 224 |
+
>>> str(root.parent)
|
| 225 |
+
'mem'
|
| 226 |
+
"""
|
| 227 |
+
|
| 228 |
+
__repr = "{self.__class__.__name__}({self.root.filename!r}, {self.at!r})"
|
| 229 |
+
|
| 230 |
+
    def __init__(self, root, at=""):
        """
        Construct a Path from a ZipFile or filename.

        Note: When the source is an existing ZipFile object,
        its type (__class__) will be mutated to a
        specialized type. If the caller wishes to retain the
        original type, the caller should either create a
        separate ZipFile object or pass a filename.
        """
        # Wrap the archive with a lookup-caching subclass (FastLookup).
        self.root = FastLookup.make(root)
        # Posix-style path of this entry within the archive ("" means root).
        self.at = at
|
| 242 |
+
|
| 243 |
+
    def open(self, mode='r', *args, pwd=None, **kwargs):
        """
        Open this entry as text or binary following the semantics
        of ``pathlib.Path.open()`` by passing arguments through
        to io.TextIOWrapper().
        """
        if self.is_dir():
            raise IsADirectoryError(self)
        # ZipFile.open only understands a bare 'r'/'w'; strip 'b'/'t'.
        zip_mode = mode[0]
        if not self.exists() and zip_mode == 'r':
            raise FileNotFoundError(self)
        stream = self.root.open(self.at, zip_mode, pwd=pwd)
        if 'b' in mode:
            # Binary mode: any remaining args would be text-encoding options.
            if args or kwargs:
                raise ValueError("encoding args invalid for binary operation")
            return stream
        # Text mode: wrap the raw stream, forwarding encoding/newline args.
        return io.TextIOWrapper(stream, *args, **kwargs)
|
| 260 |
+
|
| 261 |
+
    @property
    def name(self):
        # Final component of the inner path; at the archive root (empty
        # ``at``) fall back to the archive file's own name.
        return pathlib.Path(self.at).name or self.filename.name
|
| 264 |
+
|
| 265 |
+
    @property
    def suffix(self):
        # Suffix of the inner path, or of the archive file at the root.
        return pathlib.Path(self.at).suffix or self.filename.suffix
|
| 268 |
+
|
| 269 |
+
    @property
    def suffixes(self):
        # All suffixes of the inner path, or of the archive file at the root.
        return pathlib.Path(self.at).suffixes or self.filename.suffixes
|
| 272 |
+
|
| 273 |
+
    @property
    def stem(self):
        # Stem of the inner path, or of the archive file at the root.
        return pathlib.Path(self.at).stem or self.filename.stem
|
| 276 |
+
|
| 277 |
+
    @property
    def filename(self):
        # Filesystem path of the archive joined with the inner path.
        # NOTE(review): requires ``root.filename`` to be set — confirm the
        # error behavior for in-memory archives with no filename.
        return pathlib.Path(self.root.filename).joinpath(self.at)
|
| 280 |
+
|
| 281 |
+
    def read_text(self, *args, **kwargs):
        # Read and return the whole entry decoded as text; extra args are
        # forwarded to open() (and thus to io.TextIOWrapper).
        with self.open('r', *args, **kwargs) as strm:
            return strm.read()
|
| 284 |
+
|
| 285 |
+
    def read_bytes(self):
        # Read and return the whole entry as raw bytes.
        with self.open('rb') as strm:
            return strm.read()
|
| 288 |
+
|
| 289 |
+
    def _is_child(self, path):
        # True when ``path`` is a direct child of this directory: its
        # posix parent (trailing '/' stripped) equals our own ``at``.
        return posixpath.dirname(path.at.rstrip("/")) == self.at.rstrip("/")
|
| 291 |
+
|
| 292 |
+
    def _next(self, at):
        # Build another Path at ``at`` sharing the same open archive.
        return self.__class__(self.root, at)
|
| 294 |
+
|
| 295 |
+
    def is_dir(self):
        # Directories are the root ("") or any name ending with '/'.
        return not self.at or self.at.endswith("/")
|
| 297 |
+
|
| 298 |
+
    def is_file(self):
        # A file is an existing entry that is not a directory.
        return self.exists() and not self.is_dir()
|
| 300 |
+
|
| 301 |
+
    def exists(self):
        # Membership test against the archive's cached name set.
        # NOTE(review): ``_name_set`` comes from FastLookup (not visible here).
        return self.at in self.root._name_set()
|
| 303 |
+
|
| 304 |
+
    def iterdir(self):
        # Yield the direct children of this directory, lazily.
        if not self.is_dir():
            raise ValueError("Can't listdir a file")
        subs = map(self._next, self.root.namelist())
        return filter(self._is_child, subs)
|
| 309 |
+
|
| 310 |
+
    def __str__(self):
        # Full path: archive filename joined with the inner path.
        return posixpath.join(self.root.filename, self.at)
|
| 312 |
+
|
| 313 |
+
    def __repr__(self):
        # Render via the class-level ``__repr`` format template.
        return self.__repr.format(self=self)
|
| 315 |
+
|
| 316 |
+
    def joinpath(self, *other):
        # Join posix-style, then normalize against the archive's entries.
        # NOTE(review): ``resolve_dir`` and ``_pathlib_compat`` are defined
        # elsewhere in this file — presumably resolve_dir appends '/' for
        # names that denote directories; confirm.
        next = posixpath.join(self.at, *map(_pathlib_compat, other))
        return self._next(self.root.resolve_dir(next))

    # Support the ``/`` operator as in pathlib.
    __truediv__ = joinpath
|
| 321 |
+
|
| 322 |
+
    @property
    def parent(self):
        if not self.at:
            # At the archive root: defer to the filesystem parent.
            return self.filename.parent
        parent_at = posixpath.dirname(self.at.rstrip('/'))
        if parent_at:
            # Keep the trailing '/' convention for directory entries.
            parent_at += '/'
        return self._next(parent_at)
|
.venv/Lib/site-packages/pkg_resources/extern/__init__.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib.util
|
| 2 |
+
import sys
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class VendorImporter:
    """
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    """

    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
        # root_name: the package that owns this importer (e.g. 'pkg_resources.extern').
        self.root_name = root_name
        # Top-level names eligible to be served from the vendor package.
        self.vendored_names = set(vendored_names)
        # By convention, vendored copies live in a sibling '_vendor' package.
        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')

    @property
    def search_path(self):
        """
        Search first the vendor package then as a natural package.
        """
        yield self.vendor_pkg + '.'
        yield ''

    def _module_matches_namespace(self, fullname):
        """Figure out if the target module is vendored."""
        # partition: root == '' only when fullname starts with '<root_name>.'.
        root, base, target = fullname.partition(self.root_name + '.')
        return not root and any(map(target.startswith, self.vendored_names))

    def load_module(self, fullname):
        """
        Iterate over the search path to locate and load fullname.
        """
        root, base, target = fullname.partition(self.root_name + '.')
        for prefix in self.search_path:
            try:
                extant = prefix + target
                __import__(extant)
                mod = sys.modules[extant]
                # Alias the found module under the requested (extern) name.
                sys.modules[fullname] = mod
                return mod
            except ImportError:
                pass
        else:
            # Reached only when every prefix failed to import.
            raise ImportError(
                "The '{target}' package is required; "
                "normally this is bundled with this package so if you get "
                "this warning, consult the packager of your "
                "distribution.".format(**locals())
            )

    def create_module(self, spec):
        # PEP 451 loader hook; the module comes back fully initialized.
        return self.load_module(spec.name)

    def exec_module(self, module):
        # Nothing to execute: create_module already returned a ready module.
        pass

    def find_spec(self, fullname, path=None, target=None):
        """Return a module spec for vendored names."""
        return (
            importlib.util.spec_from_loader(fullname, self)
            if self._module_matches_namespace(fullname)
            else None
        )

    def install(self):
        """
        Install this importer into sys.meta_path if not already present.
        """
        if self not in sys.meta_path:
            sys.meta_path.append(self)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
# Top-level distributions that may be served from the vendored copies.
names = (
    'packaging',
    'platformdirs',
    'jaraco',
    'importlib_resources',
    'more_itertools',
)
# Register the importer for this package at import time.
VendorImporter(__name__, names).install()
|
.venv/Lib/site-packages/setuptools/__init__.py
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Extensions to the 'distutils' for large or complex distributions"""
|
| 2 |
+
|
| 3 |
+
import functools
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
|
| 7 |
+
import _distutils_hack.override # noqa: F401
|
| 8 |
+
import distutils.core
|
| 9 |
+
from distutils.errors import DistutilsOptionError
|
| 10 |
+
from distutils.util import convert_path as _convert_path
|
| 11 |
+
|
| 12 |
+
from . import logging, monkey
|
| 13 |
+
from . import version as _version_module
|
| 14 |
+
from .depends import Require
|
| 15 |
+
from .discovery import PackageFinder, PEP420PackageFinder
|
| 16 |
+
from .dist import Distribution
|
| 17 |
+
from .extension import Extension
|
| 18 |
+
from .warnings import SetuptoolsDeprecationWarning
|
| 19 |
+
|
| 20 |
+
# Public API of the ``setuptools`` package.
__all__ = [
    'setup',
    'Distribution',
    'Command',
    'Extension',
    'Require',
    'SetuptoolsDeprecationWarning',
    'find_packages',
    'find_namespace_packages',
]

# Version string re-exported from the internal version module.
__version__ = _version_module.__version__

# NOTE(review): consumed elsewhere in setuptools; kept for compatibility.
bootstrap_install_from = None


# Convenience aliases for the package-discovery entry points.
find_packages = PackageFinder.find
find_namespace_packages = PEP420PackageFinder.find
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _install_setup_requires(attrs):
    """Fetch build eggs for ``setup_requires`` before the real setup runs.

    Only the attributes relevant to egg fetching are honored, via a
    stripped-down Distribution subclass.
    """

    # Note: do not use `setuptools.Distribution` directly, as
    # our PEP 517 backend patch `distutils.core.Distribution`.
    class MinimalDistribution(distutils.core.Distribution):
        """
        A minimal version of a distribution for supporting the
        fetch_build_eggs interface.
        """

        def __init__(self, attrs):
            # Keep only the attributes needed for egg fetching.
            _incl = 'dependency_links', 'setup_requires'
            filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
            super().__init__(filtered)
            # Prevent accidentally triggering discovery with incomplete set of attrs
            self.set_defaults._disable()

        def _get_project_config_files(self, filenames=None):
            """Ignore ``pyproject.toml``, they are not related to setup_requires"""
            try:
                cfg, toml = super()._split_standard_project_metadata(filenames)
                return cfg, ()
            except Exception:
                # Best-effort: fall back to the raw filenames on any failure.
                return filenames, ()

        def finalize_options(self):
            """
            Disable finalize_options to avoid building the working set.
            Ref #2158.
            """

    dist = MinimalDistribution(attrs)

    # Honor setup.cfg's options.
    dist.parse_config_files(ignore_option_errors=True)
    if dist.setup_requires:
        _fetch_build_eggs(dist)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def _fetch_build_eggs(dist):
    """Fetch ``dist.setup_requires`` eggs, annotating InvalidVersion errors."""
    try:
        dist.fetch_build_eggs(dist.setup_requires)
    except Exception as ex:
        msg = """
        It is possible a package already installed in your system
        contains an version that is invalid according to PEP 440.
        You can try `pip install --use-pep517` as a workaround for this problem,
        or rely on a new virtual environment.

        If the problem refers to a package that is not installed yet,
        please contact that package's maintainers or distributors.
        """
        # Only decorate version-parsing failures; matched by class name
        # because the exception type lives in a vendored package.
        if "InvalidVersion" in ex.__class__.__name__:
            if hasattr(ex, "add_note"):
                ex.add_note(msg)  # PEP 678
            else:
                dist.announce(f"\n{msg}\n")
        # Always re-raise: this helper never swallows the failure.
        raise
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def setup(**attrs):
    # Make sure we have any requirements needed to interpret 'attrs'.
    logging.configure()
    _install_setup_requires(attrs)
    # Delegate to distutils (possibly monkeypatched) for the actual run.
    return distutils.core.setup(**attrs)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
# Mirror distutils' documentation on the setuptools wrapper.
setup.__doc__ = distutils.core.setup.__doc__


# The pristine (un-monkeypatched) distutils Command class.
_Command = monkey.get_unpatched(distutils.core.Command)
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class Command(_Command):
    """
    Setuptools internal actions are organized using a *command design pattern*.
    This means that each action (or group of closely related actions) executed during
    the build should be implemented as a ``Command`` subclass.

    These commands are abstractions and do not necessarily correspond to a command that
    can (or should) be executed via a terminal, in a CLI fashion (although historically
    they would).

    When creating a new command from scratch, custom defined classes **SHOULD** inherit
    from ``setuptools.Command`` and implement a few mandatory methods.
    Between these mandatory methods, are listed:

    .. method:: initialize_options(self)

        Set or (reset) all options/attributes/caches used by the command
        to their default values. Note that these values may be overwritten during
        the build.

    .. method:: finalize_options(self)

        Set final values for all options/attributes used by the command.
        Most of the time, each option/attribute/cache should only be set if it does not
        have any value yet (e.g. ``if self.attr is None: self.attr = val``).

    .. method:: run(self)

        Execute the actions intended by the command.
        (Side effects **SHOULD** only take place when ``run`` is executed,
        for example, creating new files or writing to the terminal output).

    A useful analogy for command classes is to think of them as subroutines with local
    variables called "options". The options are "declared" in ``initialize_options()``
    and "defined" (given their final values, aka "finalized") in ``finalize_options()``,
    both of which must be defined by every command class. The "body" of the subroutine,
    (where it does all the work) is the ``run()`` method.
    Between ``initialize_options()`` and ``finalize_options()``, ``setuptools`` may set
    the values for options/attributes based on user's input (or circumstance),
    which means that the implementation should be careful to not overwrite values in
    ``finalize_options`` unless necessary.

    Please note that other commands (or other parts of setuptools) may also overwrite
    the values of the command's options/attributes multiple times during the build
    process.
    Therefore it is important to consistently implement ``initialize_options()`` and
    ``finalize_options()``. For example, all derived attributes (or attributes that
    depend on the value of other attributes) **SHOULD** be recomputed in
    ``finalize_options``.

    When overwriting existing commands, custom defined classes **MUST** abide by the
    same APIs implemented by the original class. They also **SHOULD** inherit from the
    original class.
    """

    # When True, the command absorbs the remaining command-line arguments.
    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        super().__init__(dist)
        vars(self).update(kw)

    def _ensure_stringlike(self, option, what, default=None):
        # Validate a single option attribute: fill in the default when
        # unset, otherwise require (and return) a str value.
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, str):
            raise DistutilsOptionError(
                "'%s' must be a %s (got `%s`)" % (option, what, val)
            )
        return val

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings. If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
        ["foo", "bar", "baz"].

        ..
           TODO: This method seems to be similar to the one in ``distutils.cmd``
           Probably it is just here for backward compatibility with old Python versions?

        :meta private:
        """
        val = getattr(self, option)
        if val is None:
            return
        elif isinstance(val, str):
            # Split on commas (with optional whitespace) or bare whitespace.
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                ok = all(isinstance(v, str) for v in val)
            else:
                ok = False
            if not ok:
                raise DistutilsOptionError(
                    "'%s' must be a list of strings (got %r)" % (option, val)
                )

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        # Re-initialize through the pristine distutils implementation,
        # then apply any keyword overrides onto the fresh command object.
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd
|
| 219 |
+
|
| 220 |
+
|
| 221 |
+
def _find_all_simple(path):
|
| 222 |
+
"""
|
| 223 |
+
Find all files under 'path'
|
| 224 |
+
"""
|
| 225 |
+
results = (
|
| 226 |
+
os.path.join(base, file)
|
| 227 |
+
for base, dirs, files in os.walk(path, followlinks=True)
|
| 228 |
+
for file in files
|
| 229 |
+
)
|
| 230 |
+
return filter(os.path.isfile, results)
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def findall(dir=os.curdir):
    """
    Find all files under 'dir' and return the list of full filenames.
    Unless dir is '.', return full filenames with dir prepended.
    """
    hits = _find_all_simple(dir)
    if dir == os.curdir:
        strip_base = functools.partial(os.path.relpath, start=dir)
        hits = map(strip_base, hits)
    return list(hits)
|
| 243 |
+
|
| 244 |
+
|
| 245 |
+
# Deprecated re-export of ``distutils.util.convert_path``: warns, then delegates.
@functools.wraps(_convert_path)
def convert_path(pathname):
    SetuptoolsDeprecationWarning.emit(
        "Access to implementation detail",
        """
        The function `convert_path` is not provided by setuptools itself,
        and therefore not part of the public API.

        Its direct usage by 3rd-party packages is considered improper and the function
        may be removed in the future.
        """,
        due_date=(2023, 12, 13),  # initial deprecation 2022-03-25, see #3201
    )
    return _convert_path(pathname)
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
class sic(str):
    """Treat this string as-is (https://en.wikipedia.org/wiki/Sic)"""
|
| 263 |
+
|
| 264 |
+
|
| 265 |
+
# Apply setuptools' monkey patches to distutils at import time.
monkey.patch_all()
|
.venv/Lib/site-packages/setuptools/_core_metadata.py
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Handling of Core Metadata for Python packages (including reading and writing).
|
| 3 |
+
|
| 4 |
+
See: https://packaging.python.org/en/latest/specifications/core-metadata/
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
import stat
|
| 8 |
+
import textwrap
|
| 9 |
+
from email import message_from_file
|
| 10 |
+
from email.message import Message
|
| 11 |
+
from tempfile import NamedTemporaryFile
|
| 12 |
+
from typing import Optional, List
|
| 13 |
+
|
| 14 |
+
from distutils.util import rfc822_escape
|
| 15 |
+
|
| 16 |
+
from . import _normalization
|
| 17 |
+
from .extern.packaging.markers import Marker
|
| 18 |
+
from .extern.packaging.requirements import Requirement
|
| 19 |
+
from .extern.packaging.version import Version
|
| 20 |
+
from .warnings import SetuptoolsDeprecationWarning
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def get_metadata_version(self):
    """Return the cached metadata version, defaulting (and caching) 2.1."""
    current = getattr(self, 'metadata_version', None)
    if current is not None:
        return current
    current = Version('2.1')
    self.metadata_version = current
    return current
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def rfc822_unescape(content: str) -> str:
    """Reverse RFC-822 escaping by removing leading whitespaces from content."""
    lines = content.splitlines()
    if len(lines) == 1:
        return lines[0].lstrip()
    head = lines[0].lstrip()
    tail = textwrap.dedent('\n'.join(lines[1:]))
    return f"{head}\n{tail}"
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _read_field_from_msg(msg: Message, field: str) -> Optional[str]:
|
| 40 |
+
"""Read Message header field."""
|
| 41 |
+
value = msg[field]
|
| 42 |
+
if value == 'UNKNOWN':
|
| 43 |
+
return None
|
| 44 |
+
return value
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def _read_field_unescaped_from_msg(msg: Message, field: str) -> Optional[str]:
    """Read Message header field and apply rfc822_unescape."""
    raw = _read_field_from_msg(msg, field)
    return raw if raw is None else rfc822_unescape(raw)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
def _read_list_from_msg(msg: Message, field: str) -> Optional[List[str]]:
|
| 56 |
+
"""Read Message header field and return all results as list."""
|
| 57 |
+
values = msg.get_all(field, None)
|
| 58 |
+
if values == []:
|
| 59 |
+
return None
|
| 60 |
+
return values
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def _read_payload_from_msg(msg: Message) -> Optional[str]:
|
| 64 |
+
value = msg.get_payload().strip()
|
| 65 |
+
if value == 'UNKNOWN' or not value:
|
| 66 |
+
return None
|
| 67 |
+
return value
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def read_pkg_file(self, file):
    """Reads the metadata values from a file object."""
    msg = message_from_file(file)

    self.metadata_version = Version(msg['metadata-version'])
    self.name = _read_field_from_msg(msg, 'name')
    self.version = _read_field_from_msg(msg, 'version')
    self.description = _read_field_from_msg(msg, 'summary')
    # we are filling author only.
    self.author = _read_field_from_msg(msg, 'author')
    self.maintainer = None
    self.author_email = _read_field_from_msg(msg, 'author-email')
    self.maintainer_email = None
    self.url = _read_field_from_msg(msg, 'home-page')
    self.download_url = _read_field_from_msg(msg, 'download-url')
    self.license = _read_field_unescaped_from_msg(msg, 'license')

    self.long_description = _read_field_unescaped_from_msg(msg, 'description')
    if self.long_description is None and self.metadata_version >= Version('2.1'):
        # Metadata 2.1+ may carry the long description in the message body.
        self.long_description = _read_payload_from_msg(msg)
    self.description = _read_field_from_msg(msg, 'summary')

    if 'keywords' in msg:
        self.keywords = _read_field_from_msg(msg, 'keywords').split(',')

    self.platforms = _read_list_from_msg(msg, 'platform')
    self.classifiers = _read_list_from_msg(msg, 'classifier')

    # PEP 314 - these fields only exist in 1.1
    if self.metadata_version == Version('1.1'):
        self.requires = _read_list_from_msg(msg, 'requires')
        self.provides = _read_list_from_msg(msg, 'provides')
        self.obsoletes = _read_list_from_msg(msg, 'obsoletes')
    else:
        self.requires = None
        self.provides = None
        self.obsoletes = None

    self.license_files = _read_list_from_msg(msg, 'license-file')
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def single_line(val):
    """
    Quick and dirty validation for Summary pypa/setuptools#1390.
    """
    if '\n' in val:
        # TODO: Replace with `raise ValueError("newlines not allowed")`
        # after reviewing #2893.
        msg = "newlines are not allowed in `summary` and will break in the future"
        SetuptoolsDeprecationWarning.emit("Invalid config.", msg)
        # due_date is undefined. Controversial change, there was a lot of push back.
        # For now, keep only the first line of the summary.
        val = val.strip().split('\n')[0]
    return val
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def write_pkg_info(self, base_dir):
    """Write the PKG-INFO file into the release tree."""
    temp = ""
    final = os.path.join(base_dir, 'PKG-INFO')
    try:
        # Use a temporary file while writing to avoid race conditions
        # (e.g. `importlib.metadata` reading `.egg-info/PKG-INFO`):
        with NamedTemporaryFile("w", encoding="utf-8", dir=base_dir, delete=False) as f:
            temp = f.name
            self.write_pkg_file(f)
        # Make the result world-readable before publishing it.
        permissions = stat.S_IMODE(os.lstat(temp).st_mode)
        os.chmod(temp, permissions | stat.S_IRGRP | stat.S_IROTH)
        os.replace(temp, final)  # atomic operation.
    finally:
        # Clean up the temp file when the replace did not happen.
        if temp and os.path.exists(temp):
            os.remove(temp)
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
# Based on Python 3.5 version
|
| 144 |
+
def write_pkg_file(self, file): # noqa: C901 # is too complex (14) # FIXME
|
| 145 |
+
"""Write the PKG-INFO format data to a file object."""
|
| 146 |
+
version = self.get_metadata_version()
|
| 147 |
+
|
| 148 |
+
def write_field(key, value):
|
| 149 |
+
file.write("%s: %s\n" % (key, value))
|
| 150 |
+
|
| 151 |
+
write_field('Metadata-Version', str(version))
|
| 152 |
+
write_field('Name', self.get_name())
|
| 153 |
+
write_field('Version', self.get_version())
|
| 154 |
+
|
| 155 |
+
summary = self.get_description()
|
| 156 |
+
if summary:
|
| 157 |
+
write_field('Summary', single_line(summary))
|
| 158 |
+
|
| 159 |
+
optional_fields = (
|
| 160 |
+
('Home-page', 'url'),
|
| 161 |
+
('Download-URL', 'download_url'),
|
| 162 |
+
('Author', 'author'),
|
| 163 |
+
('Author-email', 'author_email'),
|
| 164 |
+
('Maintainer', 'maintainer'),
|
| 165 |
+
('Maintainer-email', 'maintainer_email'),
|
| 166 |
+
)
|
| 167 |
+
|
| 168 |
+
for field, attr in optional_fields:
|
| 169 |
+
attr_val = getattr(self, attr, None)
|
| 170 |
+
if attr_val is not None:
|
| 171 |
+
write_field(field, attr_val)
|
| 172 |
+
|
| 173 |
+
license = self.get_license()
|
| 174 |
+
if license:
|
| 175 |
+
write_field('License', rfc822_escape(license))
|
| 176 |
+
|
| 177 |
+
for project_url in self.project_urls.items():
|
| 178 |
+
write_field('Project-URL', '%s, %s' % project_url)
|
| 179 |
+
|
| 180 |
+
keywords = ','.join(self.get_keywords())
|
| 181 |
+
if keywords:
|
| 182 |
+
write_field('Keywords', keywords)
|
| 183 |
+
|
| 184 |
+
platforms = self.get_platforms() or []
|
| 185 |
+
for platform in platforms:
|
| 186 |
+
write_field('Platform', platform)
|
| 187 |
+
|
| 188 |
+
self._write_list(file, 'Classifier', self.get_classifiers())
|
| 189 |
+
|
| 190 |
+
# PEP 314
|
| 191 |
+
self._write_list(file, 'Requires', self.get_requires())
|
| 192 |
+
self._write_list(file, 'Provides', self.get_provides())
|
| 193 |
+
self._write_list(file, 'Obsoletes', self.get_obsoletes())
|
| 194 |
+
|
| 195 |
+
# Setuptools specific for PEP 345
|
| 196 |
+
if hasattr(self, 'python_requires'):
|
| 197 |
+
write_field('Requires-Python', self.python_requires)
|
| 198 |
+
|
| 199 |
+
# PEP 566
|
| 200 |
+
if self.long_description_content_type:
|
| 201 |
+
write_field('Description-Content-Type', self.long_description_content_type)
|
| 202 |
+
|
| 203 |
+
self._write_list(file, 'License-File', self.license_files or [])
|
| 204 |
+
_write_requirements(self, file)
|
| 205 |
+
|
| 206 |
+
long_description = self.get_long_description()
|
| 207 |
+
if long_description:
|
| 208 |
+
file.write("\n%s" % long_description)
|
| 209 |
+
if not long_description.endswith("\n"):
|
| 210 |
+
file.write("\n")
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def _write_requirements(self, file):
    """Emit Requires-Dist / Provides-Extra headers for the distribution."""
    for req in self._normalized_install_requires:
        file.write(f"Requires-Dist: {req}\n")

    processed_extras = {}
    for augmented_extra, reqs in self._normalized_extras_require.items():
        # Historically, setuptools allows "augmented extras": `<extra>:<condition>`
        unsafe_extra, _, condition = augmented_extra.partition(":")
        unsafe_extra = unsafe_extra.strip()
        extra = _normalization.safe_extra(unsafe_extra)

        if extra:
            _write_provides_extra(file, processed_extras, extra, unsafe_extra)
        for req in reqs:
            # Fold the extra marker and any condition into each requirement.
            r = _include_extra(req, extra, condition.strip())
            file.write(f"Requires-Dist: {r}\n")

    return processed_extras
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _include_extra(req: str, extra: str, condition: str) -> Requirement:
    """Return *req* with ``extra`` and ``condition`` merged into its marker."""
    r = Requirement(req)
    # Combine (in order): the requirement's own marker, the augmented-extra
    # condition, and the `extra == ...` clause; skip empty parts.
    parts = (
        f"({r.marker})" if r.marker else None,
        f"({condition})" if condition else None,
        f"extra == {extra!r}" if extra else None,
    )
    r.marker = Marker(" and ".join(x for x in parts if x))
    return r
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def _write_provides_extra(file, processed_extras, safe, unsafe):
    """Emit a Provides-Extra header, warning on normalization collisions."""
    previous = processed_extras.get(safe)
    if previous == unsafe:
        SetuptoolsDeprecationWarning.emit(
            'Ambiguity during "extra" normalization for dependencies.',
            f"""
            {previous!r} and {unsafe!r} normalize to the same value:\n
            {safe!r}\n
            In future versions, setuptools might halt the build process.
            """,
            see_url="https://peps.python.org/pep-0685/",
        )
    else:
        # First (or differing) sighting: record it and emit the header.
        processed_extras[safe] = unsafe
        file.write(f"Provides-Extra: {safe}\n")
|
.venv/Lib/site-packages/setuptools/_entry_points.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import functools
|
| 2 |
+
import operator
|
| 3 |
+
import itertools
|
| 4 |
+
|
| 5 |
+
from .errors import OptionError
|
| 6 |
+
from .extern.jaraco.text import yield_lines
|
| 7 |
+
from .extern.jaraco.functools import pass_none
|
| 8 |
+
from ._importlib import metadata
|
| 9 |
+
from ._itertools import ensure_unique
|
| 10 |
+
from .extern.more_itertools import consume
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def ensure_valid(ep):
    """Force validation of *ep* by touching a lazily-parsed property.

    Accessing ``extras`` triggers the entry-point pattern match; a malformed
    definition surfaces as ``AttributeError`` and is re-raised as
    ``OptionError`` with a pointer to the spec.
    """
    try:
        ep.extras
    except AttributeError as ex:
        raise OptionError(
            f"Problems to parse {ep}.\nPlease ensure entry-point follows the spec: "
            "https://packaging.python.org/en/latest/specifications/entry-points/"
        ) from ex
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def load_group(value, group):
    """
    Given a value of an entry point or series of entry points,
    return each as an EntryPoint.
    """
    # Render the value as one INI-style section so the metadata parser
    # can consume it in a single pass.
    body = '\n'.join(yield_lines(value))
    return metadata.EntryPoints._from_text(f'[{group}]\n' + body)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def by_group_and_name(ep):
    """Uniqueness/sort key: the (group, name) pair of an entry point."""
    return (ep.group, ep.name)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def validate(eps: metadata.EntryPoints):
    """
    Ensure entry points are unique by group and name and validate each.

    Returns *eps* unchanged so the call can be chained.
    """
    deduplicated = ensure_unique(eps, key=by_group_and_name)
    consume(map(ensure_valid, deduplicated))
    return eps
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@functools.singledispatch
def load(eps):
    """
    Given a Distribution.entry_points, produce EntryPoints.

    Default dispatch handles a mapping of group -> entry-point value(s).
    """
    per_group = (load_group(value, group) for group, value in eps.items())
    flattened = itertools.chain.from_iterable(per_group)
    return validate(metadata.EntryPoints(flattened))
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
@load.register(str)
def _(eps):
    r"""
    Parse an INI-style entry-points string directly into EntryPoints.

    >>> ep, = load('[console_scripts]\nfoo=bar')
    >>> ep.group
    'console_scripts'
    >>> ep.name
    'foo'
    >>> ep.value
    'bar'
    """
    return validate(metadata.EntryPoints(metadata.EntryPoints._from_text(eps)))
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
# ``None`` (no entry points declared) passes through load() unchanged.
load.register(type(None), lambda x: x)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
@pass_none
def render(eps: metadata.EntryPoints):
    """Render *eps* back into INI-style entry-points text (None passes through)."""
    key = operator.attrgetter('group')
    grouped = itertools.groupby(sorted(eps, key=key), key)
    sections = (f'[{group}]\n{render_items(items)}\n' for group, items in grouped)
    return '\n'.join(sections)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def render_items(eps):
    """Render each entry point as a sorted ``name = value`` line."""
    lines = [f'{ep.name} = {ep.value}' for ep in sorted(eps)]
    return '\n'.join(lines)
|
.venv/Lib/site-packages/setuptools/_imp.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Re-implementation of find_module and get_frozen_object
|
| 3 |
+
from the deprecated imp module.
|
| 4 |
+
"""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import importlib.util
|
| 8 |
+
import importlib.machinery
|
| 9 |
+
|
| 10 |
+
from importlib.util import module_from_spec
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
# Module-kind codes mirroring the constants of the removed stdlib ``imp``
# module (returned in the third element of find_module's result tuple).
PY_SOURCE = 1
PY_COMPILED = 2
C_EXTENSION = 3
C_BUILTIN = 6
PY_FROZEN = 7
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def find_spec(module, paths):
    """Locate *module*'s import spec.

    When *paths* is a list it is used as an explicit search path via the
    path-based finder; otherwise the regular import machinery is consulted.
    """
    if isinstance(paths, list):
        return importlib.machinery.PathFinder().find_spec(module, paths)
    return importlib.util.find_spec(module, paths)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support.

    Returns ``(file, path, (suffix, mode, kind))`` where *kind* is one of the
    imp-style constants above.  For source/compiled modules, *file* is an
    open handle that the caller is responsible for closing; for builtin,
    frozen, and extension modules it is ``None``.
    """
    spec = find_spec(module, paths)
    if spec is None:
        raise ImportError("Can't find %s" % module)
    if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
        # Package without a concrete file location: synthesize a spec
        # pointing at its __init__ via the same loader.
        spec = importlib.util.spec_from_loader('__init__.py', spec.loader)

    kind = -1
    file = None
    # Loaders may be given as a class rather than an instance; issubclass
    # below is only valid in that case.
    static = isinstance(spec.loader, type)
    if (
        spec.origin == 'frozen'
        or static
        and issubclass(spec.loader, importlib.machinery.FrozenImporter)
    ):
        kind = PY_FROZEN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif (
        spec.origin == 'built-in'
        or static
        and issubclass(spec.loader, importlib.machinery.BuiltinImporter)
    ):
        kind = C_BUILTIN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.has_location:
        path = spec.origin
        suffix = os.path.splitext(path)[1]
        # Source files are opened in text mode, everything else as binary.
        mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'

        if suffix in importlib.machinery.SOURCE_SUFFIXES:
            kind = PY_SOURCE
        elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
            kind = PY_COMPILED
        elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
            kind = C_EXTENSION

        if kind in {PY_SOURCE, PY_COMPILED}:
            # imp.find_module returned an open file for these kinds.
            file = open(path, mode)
    else:
        path = None
        suffix = mode = ''

    return file, path, (suffix, mode, kind)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
def get_frozen_object(module, paths=None):
    """Return *module*'s code object (replacement for imp.get_frozen_object)."""
    spec = find_spec(module, paths)
    if not spec:
        raise ImportError("Can't find %s" % module)
    loader = spec.loader
    return loader.get_code(module)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def get_module(module, paths, info):
    """Return a fresh (unexecuted) module object for *module*.

    *info* is accepted only for imp-style call compatibility and ignored.
    """
    spec = find_spec(module, paths)
    if not spec:
        raise ImportError("Can't find %s" % module)
    return module_from_spec(spec)
|
.venv/Lib/site-packages/setuptools/_importlib.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
def disable_importlib_metadata_finder(metadata):
    """
    Ensure importlib_metadata doesn't provide older, incompatible
    Distributions.

    Workaround for #3102.

    ``metadata`` is the module setuptools itself uses; if a *different*
    third-party ``importlib_metadata`` is installed, its finder is removed
    from ``sys.meta_path`` so it cannot shadow the one we rely on.
    """
    try:
        import importlib_metadata
    except ImportError:
        # No third-party importlib_metadata installed: nothing to disable.
        return
    except AttributeError:
        # The installed version is too old/broken to even import cleanly.
        from .warnings import SetuptoolsWarning

        SetuptoolsWarning.emit(
            "Incompatibility problem.",
            """
            `importlib-metadata` version is incompatible with `setuptools`.
            This problem is likely to be solved by installing an updated version of
            `importlib-metadata`.
            """,
            see_url="https://github.com/python/importlib_metadata/issues/396",
        )  # Ensure a descriptive message is shown.
        raise  # This exception can be suppressed by _distutils_hack

    if importlib_metadata is metadata:
        # The third-party package *is* the module we use; leave it alone.
        return
    # Collect first, then remove: avoid mutating sys.meta_path mid-iteration.
    to_remove = [
        ob
        for ob in sys.meta_path
        if isinstance(ob, importlib_metadata.MetadataPathFinder)
    ]
    for item in to_remove:
        sys.meta_path.remove(item)
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
# Prefer stdlib implementations where they are new enough; otherwise fall
# back to the vendored backports under setuptools.extern.
if sys.version_info < (3, 10):
    from setuptools.extern import importlib_metadata as metadata

    # Keep any separately-installed (possibly older) importlib_metadata
    # from shadowing the vendored copy (see #3102).
    disable_importlib_metadata_finder(metadata)
else:
    import importlib.metadata as metadata  # noqa: F401


if sys.version_info < (3, 9):
    from setuptools.extern import importlib_resources as resources
else:
    import importlib.resources as resources  # noqa: F401
|
.venv/Lib/site-packages/setuptools/_itertools.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from setuptools.extern.more_itertools import consume # noqa: F401
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
# copied from jaraco.itertools 6.1
|
| 5 |
+
def ensure_unique(iterable, key=lambda x: x):
    """
    Wrap an iterable to raise a ValueError if non-unique values are encountered.

    >>> list(ensure_unique('abc'))
    ['a', 'b', 'c']
    >>> consume(ensure_unique('abca'))
    Traceback (most recent call last):
    ...
    ValueError: Duplicate element 'a' encountered.
    """
    observed = set()
    for item in iterable:
        marker = key(item)
        if marker in observed:
            raise ValueError(f"Duplicate element {item!r} encountered.")
        observed.add(marker)
        yield item
|
.venv/Lib/site-packages/setuptools/_normalization.py
ADDED
|
@@ -0,0 +1,125 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Helpers for normalization as expected in wheel/sdist/module file names
|
| 3 |
+
and core metadata
|
| 4 |
+
"""
|
| 5 |
+
import re
|
| 6 |
+
from pathlib import Path
|
| 7 |
+
from typing import Union
|
| 8 |
+
|
| 9 |
+
from .extern import packaging
|
| 10 |
+
from .warnings import SetuptoolsDeprecationWarning
|
| 11 |
+
|
| 12 |
+
# Filesystem path, accepted as plain str or pathlib.Path.
_Path = Union[str, Path]

# https://packaging.python.org/en/latest/specifications/core-metadata/#name
_VALID_NAME = re.compile(r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.I)
# Runs of characters not allowed in a project name (collapsed to "-").
_UNSAFE_NAME_CHARS = re.compile(r"[^A-Z0-9.]+", re.I)
# Runs of non-alphanumeric characters (used for extra-name normalization).
_NON_ALPHANUMERIC = re.compile(r"[^A-Z0-9]+", re.I)
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def safe_identifier(name: str) -> str:
    """Make a string safe to be used as Python identifier.
    >>> safe_identifier("12abc")
    '_12abc'
    >>> safe_identifier("__editable__.myns.pkg-78.9.3_local")
    '__editable___myns_pkg_78_9_3_local'
    """
    # Replace every non-word character — and guard a leading digit — with "_".
    candidate = re.sub(r'\W|^(?=\d)', '_', name)
    assert candidate.isidentifier()
    return candidate
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def safe_name(component: str) -> str:
    """Escape a component used as a project name according to Core Metadata.
    >>> safe_name("hello world")
    'hello-world'
    >>> safe_name("hello?world")
    'hello-world'
    """
    # See pkg_resources.safe_name; each run of unsafe chars becomes one dash.
    escaped = _UNSAFE_NAME_CHARS.sub("-", component)
    return escaped
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def safe_version(version: str) -> str:
    """Convert an arbitrary string into a valid version string.
    >>> safe_version("1988 12 25")
    '1988.12.25'
    >>> safe_version("v0.2.1")
    '0.2.1'
    >>> safe_version("v0.2?beta")
    '0.2b0'
    >>> safe_version("v0.2 beta")
    '0.2b0'

    Raises ``packaging.version.InvalidVersion`` when no valid version can be
    recovered (e.g. ``"ubuntu lts"``).
    """
    candidate = version.replace(' ', '.')
    try:
        return str(packaging.version.Version(candidate))
    except packaging.version.InvalidVersion:
        # Collapse remaining unsafe characters and let packaging re-validate;
        # this second attempt may still raise for hopeless inputs.
        fallback = _UNSAFE_NAME_CHARS.sub("-", candidate)
        return str(packaging.version.Version(fallback))
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def best_effort_version(version: str) -> str:
    """Convert an arbitrary string into a version-like string.

    Unlike ``safe_version`` this never raises: invalid versions fall back to
    a cosmetic cleanup (after emitting a deprecation warning).

    >>> best_effort_version("v0.2 beta")
    '0.2b0'

    >>> import warnings
    >>> warnings.simplefilter("ignore", category=SetuptoolsDeprecationWarning)
    >>> best_effort_version("ubuntu lts")
    'ubuntu.lts'
    """
    # See pkg_resources.safe_version
    try:
        return safe_version(version)
    except packaging.version.InvalidVersion:
        SetuptoolsDeprecationWarning.emit(
            f"Invalid version: {version!r}.",
            f"""
            Version {version!r} is not valid according to PEP 440.

            Please make sure to specify a valid version for your package.
            Also note that future releases of setuptools may halt the build process
            if an invalid version is given.
            """,
            see_url="https://peps.python.org/pep-0440/",
            due_date=(2023, 9, 26),  # See setuptools/dist _validate_version
        )
        # Best effort: normalize spaces and strip remaining unsafe characters.
        v = version.replace(' ', '.')
        return safe_name(v)
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def safe_extra(extra: str) -> str:
    """Normalize extra name according to PEP 685
    >>> safe_extra("_FrIeNdLy-._.-bArD")
    'friendly-bard'
    >>> safe_extra("FrIeNdLy-._.-bArD__._-")
    'friendly-bard'
    """
    # Collapse non-alphanumeric runs to "-", trim the edges, lowercase.
    collapsed = _NON_ALPHANUMERIC.sub("-", extra)
    return collapsed.strip("-").lower()
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def filename_component(value: str) -> str:
    """Normalize each component of a filename (e.g. distribution/version part of wheel)
    Note: ``value`` needs to be already normalized.
    >>> filename_component("my-pkg")
    'my_pkg'
    """
    underscored = value.replace("-", "_")
    return underscored.strip("_")
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def safer_name(value: str) -> str:
    """Like ``safe_name`` but can be used as filename component for wheel"""
    # See bdist_wheel.safer_name
    escaped = safe_name(value)
    return filename_component(escaped)
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def safer_best_effort_version(value: str) -> str:
    """Like ``best_effort_version`` but can be used as filename component for wheel"""
    # See bdist_wheel.safer_version
    # TODO: Replace with only safe_version in the future (no need for best effort)
    cleaned = best_effort_version(value)
    return filename_component(cleaned)
|
.venv/Lib/site-packages/setuptools/_path.py
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import sys
|
| 3 |
+
from typing import Union
|
| 4 |
+
|
| 5 |
+
_Path = Union[str, os.PathLike]
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    os.makedirs(parent, exist_ok=True)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
def same_path(p1: _Path, p2: _Path) -> bool:
    """Differs from os.path.samefile because it does not require paths to exist.
    Purely string based (no comparison between i-nodes).

    Both arguments are normalized via ``normpath`` below, so relative
    segments, case (on case-insensitive systems) and symlinks are folded
    before comparison.

    >>> same_path("a/b", "./a/b")
    True
    >>> same_path("a/b", "a/./b")
    True
    >>> same_path("a/b", "././a/b")
    True
    >>> same_path("a/b", "./a/b/c/..")
    True
    >>> same_path("a/b", "../a/b/c")
    False
    >>> same_path("a", "a/b")
    False
    """
    return normpath(p1) == normpath(p2)
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def normpath(filename: _Path) -> str:
    """Normalize a file/dir name for comparison purposes."""
    # See pkg_resources.normalize_path for notes about cygwin
    if sys.platform == 'cygwin':
        filename = os.path.abspath(filename)
    return os.path.normcase(os.path.realpath(os.path.normpath(filename)))
|
.venv/Lib/site-packages/setuptools/_reqs.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Callable, Iterable, Iterator, TypeVar, Union, overload
|
| 2 |
+
|
| 3 |
+
import setuptools.extern.jaraco.text as text
|
| 4 |
+
from setuptools.extern.packaging.requirements import Requirement
|
| 5 |
+
|
| 6 |
+
_T = TypeVar("_T")
|
| 7 |
+
_StrOrIter = Union[str, Iterable[str]]
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def parse_strings(strs: _StrOrIter) -> Iterator[str]:
    """
    Yield requirement strings for each specification in `strs`.

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    lines = text.yield_lines(strs)
    uncommented = map(text.drop_comment, lines)
    return text.join_continuation(uncommented)
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
@overload
def parse(strs: _StrOrIter) -> Iterator[Requirement]:
    ...


@overload
def parse(strs: _StrOrIter, parser: Callable[[str], _T]) -> Iterator[_T]:
    ...


def parse(strs, parser=Requirement):
    """
    Replacement for ``pkg_resources.parse_requirements`` that uses ``packaging``.

    Each yielded item is ``parser`` applied to one logical requirement line
    (comments stripped and continuations joined by ``parse_strings``).
    """
    return map(parser, parse_strings(strs))
|
.venv/Lib/site-packages/setuptools/cli-32.exe
ADDED
|
Binary file (11.8 kB). View file
|
|
|
.venv/Lib/site-packages/setuptools/cli-64.exe
ADDED
|
Binary file (14.3 kB). View file
|
|
|
.venv/Lib/site-packages/setuptools/cli-arm64.exe
ADDED
|
Binary file (13.8 kB). View file
|
|
|
.venv/Lib/site-packages/setuptools/cli.exe
ADDED
|
Binary file (11.8 kB). View file
|
|
|
.venv/Lib/site-packages/setuptools/command/alias.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from distutils.errors import DistutilsOptionError
|
| 2 |
+
|
| 3 |
+
from setuptools.command.setopt import edit_config, option_base, config_file
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def shquote(arg):
    """Quote an argument for later parsing by shlex.split()"""
    # repr() is used whenever the argument contains quoting/escape/comment
    # characters or any whitespace; otherwise it passes through untouched.
    has_special = any(c in arg for c in ('"', "'", "\\", "#"))
    if has_special or arg.split() != [arg]:
        return repr(arg)
    return arg
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class alias(option_base):
    """Define a shortcut that invokes one or more commands"""

    description = "define a shortcut to invoke one or more commands"
    # Everything after the alias name is captured verbatim into self.args.
    command_consumes_arguments = True

    user_options = [
        ('remove', 'r', 'remove (unset) the alias'),
    ] + option_base.user_options

    boolean_options = option_base.boolean_options + ['remove']

    def initialize_options(self):
        option_base.initialize_options(self)
        self.args = None  # positional arguments: [name, *command words]
        self.remove = None  # truthy when --remove/-r was passed

    def finalize_options(self):
        option_base.finalize_options(self)
        # --remove takes exactly one argument: the alias name to delete.
        if self.remove and len(self.args) != 1:
            raise DistutilsOptionError(
                "Must specify exactly one argument (the alias name) when "
                "using --remove"
            )

    def run(self):
        aliases = self.distribution.get_option_dict('aliases')

        if not self.args:
            # No arguments: list every configured alias and stop.
            print("Command Aliases")
            print("---------------")
            for alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
            return

        elif len(self.args) == 1:
            # One argument: either remove the alias or show its definition.
            (alias,) = self.args
            if self.remove:
                command = None  # writing None removes the config entry
            elif alias in aliases:
                print("setup.py alias", format_alias(alias, aliases))
                return
            else:
                print("No alias definition found for %r" % alias)
                return
        else:
            # Two or more arguments: define `name` as the remaining words.
            alias = self.args[0]
            command = ' '.join(map(shquote, self.args[1:]))

        edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run)
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def format_alias(name, aliases):
    """Render one alias as the command line that would (re)create it."""
    source, command = aliases[name]
    if source == config_file('global'):
        prefix = '--global-config '
    elif source == config_file('user'):
        prefix = '--user-config '
    elif source == config_file('local'):
        prefix = ''
    else:
        prefix = '--filename=%r' % source
    return prefix + name + ' ' + command
|
.venv/Lib/site-packages/setuptools/command/bdist_egg.py
ADDED
|
@@ -0,0 +1,464 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""setuptools.command.bdist_egg
|
| 2 |
+
|
| 3 |
+
Build .egg distributions"""
|
| 4 |
+
|
| 5 |
+
from distutils.dir_util import remove_tree, mkpath
|
| 6 |
+
from distutils import log
|
| 7 |
+
from types import CodeType
|
| 8 |
+
import sys
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import textwrap
|
| 12 |
+
import marshal
|
| 13 |
+
|
| 14 |
+
from setuptools.extension import Library
|
| 15 |
+
from setuptools import Command
|
| 16 |
+
from .._path import ensure_directory
|
| 17 |
+
|
| 18 |
+
from sysconfig import get_path, get_python_version
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _get_purelib():
|
| 22 |
+
return get_path("purelib")
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def strip_module(filename):
    """Reduce an extension filename to its bare module name.

    Drops any extension suffix, then a trailing ``module`` marker.
    """
    if '.' in filename:
        filename = os.path.splitext(filename)[0]
    return filename[:-6] if filename.endswith('module') else filename
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def sorted_walk(dir):
    """Deterministic ``os.walk``: yield entries in sorted order,
    independent of indeterministic filesystem readdir order.
    """
    for root, subdirs, filenames in os.walk(dir):
        # Sorting subdirs in place also fixes the recursion order of os.walk.
        subdirs.sort()
        filenames.sort()
        yield root, subdirs, filenames
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def write_stub(resource, pyfile):
    """Write a stub loader module at *pyfile* that, when imported, locates
    and executes the real extension *resource* via pkg_resources.
    """
    template = textwrap.dedent(
        """
        def __bootstrap__():
            global __bootstrap__, __loader__, __file__
            import sys, pkg_resources, importlib.util
            __file__ = pkg_resources.resource_filename(__name__, %r)
            __loader__ = None; del __bootstrap__, __loader__
            spec = importlib.util.spec_from_file_location(__name__,__file__)
            mod = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(mod)
        __bootstrap__()
        """
    ).lstrip()
    with open(pyfile, 'w') as stub:
        stub.write(template % resource)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class bdist_egg(Command):
|
| 62 |
+
description = "create an \"egg\" distribution"
|
| 63 |
+
|
| 64 |
+
user_options = [
|
| 65 |
+
('bdist-dir=', 'b', "temporary directory for creating the distribution"),
|
| 66 |
+
(
|
| 67 |
+
'plat-name=',
|
| 68 |
+
'p',
|
| 69 |
+
"platform name to embed in generated filenames "
|
| 70 |
+
"(by default uses `pkg_resources.get_build_platform()`)",
|
| 71 |
+
),
|
| 72 |
+
('exclude-source-files', None, "remove all .py files from the generated egg"),
|
| 73 |
+
(
|
| 74 |
+
'keep-temp',
|
| 75 |
+
'k',
|
| 76 |
+
"keep the pseudo-installation tree around after "
|
| 77 |
+
+ "creating the distribution archive",
|
| 78 |
+
),
|
| 79 |
+
('dist-dir=', 'd', "directory to put final built distributions in"),
|
| 80 |
+
('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
|
| 81 |
+
]
|
| 82 |
+
|
| 83 |
+
boolean_options = ['keep-temp', 'skip-build', 'exclude-source-files']
|
| 84 |
+
|
| 85 |
+
def initialize_options(self):
|
| 86 |
+
self.bdist_dir = None
|
| 87 |
+
self.plat_name = None
|
| 88 |
+
self.keep_temp = 0
|
| 89 |
+
self.dist_dir = None
|
| 90 |
+
self.skip_build = 0
|
| 91 |
+
self.egg_output = None
|
| 92 |
+
self.exclude_source_files = None
|
| 93 |
+
|
| 94 |
+
def finalize_options(self):
|
| 95 |
+
ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
|
| 96 |
+
self.egg_info = ei_cmd.egg_info
|
| 97 |
+
|
| 98 |
+
if self.bdist_dir is None:
|
| 99 |
+
bdist_base = self.get_finalized_command('bdist').bdist_base
|
| 100 |
+
self.bdist_dir = os.path.join(bdist_base, 'egg')
|
| 101 |
+
|
| 102 |
+
if self.plat_name is None:
|
| 103 |
+
from pkg_resources import get_build_platform
|
| 104 |
+
|
| 105 |
+
self.plat_name = get_build_platform()
|
| 106 |
+
|
| 107 |
+
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
|
| 108 |
+
|
| 109 |
+
if self.egg_output is None:
|
| 110 |
+
# Compute filename of the output egg
|
| 111 |
+
basename = ei_cmd._get_egg_basename(
|
| 112 |
+
py_version=get_python_version(),
|
| 113 |
+
platform=self.distribution.has_ext_modules() and self.plat_name,
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
self.egg_output = os.path.join(self.dist_dir, basename + '.egg')
|
| 117 |
+
|
| 118 |
+
def do_install_data(self):
|
| 119 |
+
# Hack for packages that install data to install's --install-lib
|
| 120 |
+
self.get_finalized_command('install').install_lib = self.bdist_dir
|
| 121 |
+
|
| 122 |
+
site_packages = os.path.normcase(os.path.realpath(_get_purelib()))
|
| 123 |
+
old, self.distribution.data_files = self.distribution.data_files, []
|
| 124 |
+
|
| 125 |
+
for item in old:
|
| 126 |
+
if isinstance(item, tuple) and len(item) == 2:
|
| 127 |
+
if os.path.isabs(item[0]):
|
| 128 |
+
realpath = os.path.realpath(item[0])
|
| 129 |
+
normalized = os.path.normcase(realpath)
|
| 130 |
+
if normalized == site_packages or normalized.startswith(
|
| 131 |
+
site_packages + os.sep
|
| 132 |
+
):
|
| 133 |
+
item = realpath[len(site_packages) + 1 :], item[1]
|
| 134 |
+
# XXX else: raise ???
|
| 135 |
+
self.distribution.data_files.append(item)
|
| 136 |
+
|
| 137 |
+
try:
|
| 138 |
+
log.info("installing package data to %s", self.bdist_dir)
|
| 139 |
+
self.call_command('install_data', force=0, root=None)
|
| 140 |
+
finally:
|
| 141 |
+
self.distribution.data_files = old
|
| 142 |
+
|
| 143 |
+
def get_outputs(self):
|
| 144 |
+
return [self.egg_output]
|
| 145 |
+
|
| 146 |
+
def call_command(self, cmdname, **kw):
|
| 147 |
+
"""Invoke reinitialized command `cmdname` with keyword args"""
|
| 148 |
+
for dirname in INSTALL_DIRECTORY_ATTRS:
|
| 149 |
+
kw.setdefault(dirname, self.bdist_dir)
|
| 150 |
+
kw.setdefault('skip_build', self.skip_build)
|
| 151 |
+
kw.setdefault('dry_run', self.dry_run)
|
| 152 |
+
cmd = self.reinitialize_command(cmdname, **kw)
|
| 153 |
+
self.run_command(cmdname)
|
| 154 |
+
return cmd
|
| 155 |
+
|
| 156 |
+
def run(self):  # noqa: C901 # is too complex (14) # FIXME
    """Build the egg: metadata, library code, data files, EGG-INFO, archive.

    Orchestrates ``egg_info``, ``install_lib``, optional ``install_data``
    and ``install_scripts``, writes native-lib / zip-safety metadata, then
    zips the staging tree into ``self.egg_output``.
    """
    # Generate metadata first
    self.run_command("egg_info")
    # We run install_lib before install_data, because some data hacks
    # pull their data path from the install_lib command.
    log.info("installing library code to %s", self.bdist_dir)
    instcmd = self.get_finalized_command('install')
    old_root = instcmd.root
    instcmd.root = None
    if self.distribution.has_c_libraries() and not self.skip_build:
        self.run_command('build_clib')
    cmd = self.call_command('install_lib', warn_dir=0)
    instcmd.root = old_root

    all_outputs, ext_outputs = self.get_ext_outputs()
    self.stubs = []
    to_compile = []
    for p, ext_name in enumerate(ext_outputs):
        filename, ext = os.path.splitext(ext_name)
        pyfile = os.path.join(self.bdist_dir, strip_module(filename) + '.py')
        self.stubs.append(pyfile)
        log.info("creating stub loader for %s", ext_name)
        if not self.dry_run:
            write_stub(os.path.basename(ext_name), pyfile)
        to_compile.append(pyfile)
        ext_outputs[p] = ext_name.replace(os.sep, '/')

    if to_compile:
        cmd.byte_compile(to_compile)
    if self.distribution.data_files:
        self.do_install_data()

    # Make the EGG-INFO directory
    archive_root = self.bdist_dir
    egg_info = os.path.join(archive_root, 'EGG-INFO')
    self.mkpath(egg_info)
    if self.distribution.scripts:
        script_dir = os.path.join(egg_info, 'scripts')
        log.info("installing scripts to %s", script_dir)
        self.call_command('install_scripts', install_dir=script_dir, no_ep=1)

    self.copy_metadata_to(egg_info)
    native_libs = os.path.join(egg_info, "native_libs.txt")
    if all_outputs:
        log.info("writing %s", native_libs)
        if not self.dry_run:
            ensure_directory(native_libs)
            # Context manager guarantees the file is closed even if a
            # write fails (the previous open/close pair leaked on error).
            with open(native_libs, 'wt') as libs_file:
                libs_file.write('\n'.join(all_outputs))
                libs_file.write('\n')
    elif os.path.isfile(native_libs):
        log.info("removing %s", native_libs)
        if not self.dry_run:
            os.unlink(native_libs)

    write_safety_flag(os.path.join(archive_root, 'EGG-INFO'), self.zip_safe())

    if os.path.exists(os.path.join(self.egg_info, 'depends.txt')):
        log.warn(
            "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
            "Use the install_requires/extras_require setup() args instead."
        )

    if self.exclude_source_files:
        self.zap_pyfiles()

    # Make the archive
    make_zipfile(
        self.egg_output,
        archive_root,
        verbose=self.verbose,
        dry_run=self.dry_run,
        mode=self.gen_header(),
    )
    if not self.keep_temp:
        remove_tree(self.bdist_dir, dry_run=self.dry_run)

    # Add to 'Distribution.dist_files' so that the "upload" command works
    getattr(self.distribution, 'dist_files', []).append(
        ('bdist_egg', get_python_version(), self.egg_output)
    )
|
| 238 |
+
|
| 239 |
+
def zap_pyfiles(self):
    """Delete .py source files from the build tree, keeping bytecode.

    After deleting sources, any file inside a ``__pycache__`` directory is
    renamed up one level with the interpreter-magic tag stripped
    (``mod.cpython-311.pyc`` -> ``mod.pyc``) so the bytecode stays usable
    without its source.
    """
    log.info("Removing .py files from temporary directory")
    for base, dirs, files in walk_egg(self.bdist_dir):
        for name in files:
            path = os.path.join(base, name)

            if name.endswith('.py'):
                log.debug("Deleting %s", path)
                os.unlink(path)

            # NOTE: this runs for every file in a __pycache__ dir, not just
            # the ones ending in .pyc.
            if base.endswith('__pycache__'):
                path_old = path

                pattern = r'(?P<name>.+)\.(?P<magic>[^.]+)\.pyc'
                # NOTE(review): m is None for names not matching the tagged
                # .pyc pattern, which would raise AttributeError below —
                # assumes __pycache__ only contains tagged .pyc files.
                m = re.match(pattern, name)
                path_new = os.path.join(base, os.pardir, m.group('name') + '.pyc')
                log.info("Renaming file from [%s] to [%s]" % (path_old, path_new))
                try:
                    # Remove any stale destination first; os.rename fails on
                    # Windows if the target already exists.
                    os.remove(path_new)
                except OSError:
                    pass
                os.rename(path_old, path_new)
|
| 261 |
+
|
| 262 |
+
def zip_safe(self):
    """Return the distribution's zip_safe flag, analyzing contents if unset."""
    declared = getattr(self.distribution, 'zip_safe', None)
    if declared is None:
        log.warn("zip_safe flag not set; analyzing archive contents...")
        return analyze_egg(self.bdist_dir, self.stubs)
    return declared
|
| 268 |
+
|
| 269 |
+
def gen_header(self):
    """Return the mode used to open the egg zipfile ('w' = plain write)."""
    return 'w'
|
| 271 |
+
|
| 272 |
+
def copy_metadata_to(self, target_dir):
    "Copy metadata (egg info) to the target_dir"
    # Normalize the path so that a forward-slash in egg_info will still
    # match using startswith below.
    prefix = os.path.join(os.path.normpath(self.egg_info), '')
    metadata_files = (
        p for p in self.ei_cmd.filelist.files if p.startswith(prefix)
    )
    for source in metadata_files:
        destination = os.path.join(target_dir, source[len(prefix) :])
        ensure_directory(destination)
        self.copy_file(source, destination)
|
| 283 |
+
|
| 284 |
+
def get_ext_outputs(self):
    """Get a list of relative paths to C extensions in the output distro"""
    all_outputs = []
    ext_outputs = []

    # Map each visited directory to its '/'-terminated relative prefix.
    prefixes = {self.bdist_dir: ''}
    for base, dirs, files in sorted_walk(self.bdist_dir):
        rel = prefixes[base]
        all_outputs.extend(
            rel + fname
            for fname in files
            if os.path.splitext(fname)[1].lower() in NATIVE_EXTENSIONS
        )
        for sub in dirs:
            prefixes[os.path.join(base, sub)] = rel + sub + '/'

    if self.distribution.has_ext_modules():
        build_cmd = self.get_finalized_command('build_ext')
        for ext in build_cmd.extensions:
            if isinstance(ext, Library):
                # Shared libraries are not importable extensions.
                continue
            fullname = build_cmd.get_ext_fullname(ext.name)
            filename = build_cmd.get_ext_filename(fullname)
            if not os.path.basename(filename).startswith('dl-') and os.path.exists(
                os.path.join(self.bdist_dir, filename)
            ):
                ext_outputs.append(filename)

    return all_outputs, ext_outputs
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
# File extensions treated as native code when scanning the built egg tree
# (dict used purely for fast membership tests; values are irrelevant).
NATIVE_EXTENSIONS = {ext: None for ext in ('.dll', '.so', '.dylib', '.pyd')}
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def walk_egg(egg_dir):
    """Walk an unpacked egg's contents, skipping the metadata directory"""
    walker = sorted_walk(egg_dir)
    base, dirs, files = next(walker)
    # Pruning dirs in place makes os.walk skip the EGG-INFO subtree.
    if 'EGG-INFO' in dirs:
        dirs.remove('EGG-INFO')
    yield base, dirs, files
    yield from walker
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
def analyze_egg(egg_dir, stubs):
    """Determine zip-safety of an unpacked egg via flag files or a scan."""
    # An explicit flag file in EGG-INFO overrides any analysis.
    for verdict, marker in safety_flags.items():
        if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', marker)):
            return verdict
    if not can_scan():
        return False
    safe = True
    for base, dirs, files in walk_egg(egg_dir):
        for name in files:
            if name.endswith(('.py', '.pyw')):
                continue
            if name.endswith(('.pyc', '.pyo')):
                # always scan, even if we already know we're not safe
                safe = scan_module(egg_dir, base, name, stubs) and safe
    return safe
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
def write_safety_flag(egg_dir, safe):
    """Write or remove zip safety flag file(s) in `egg_dir`.

    `safe` may be True, False, or None (unknown). Exactly the flag file
    matching the verdict is left in place; any other flag file is removed.
    """
    for flag, fn in safety_flags.items():
        fn = os.path.join(egg_dir, fn)
        if os.path.exists(fn):
            if safe is None or bool(safe) != flag:
                os.unlink(fn)
        elif safe is not None and bool(safe) == flag:
            # Context manager closes the handle even if the write fails
            # (the previous open/write/close sequence leaked on error).
            with open(fn, 'wt') as f:
                f.write('\n')
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
# Map zip-safety verdict to the EGG-INFO marker filename that records it.
safety_flags = {True: 'zip-safe', False: 'not-zip-safe'}
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
def scan_module(egg_dir, base, name, stubs):
    """Check whether module possibly uses unsafe-for-zipfile stuff

    Loads the marshalled code object from the .pyc/.pyo file `name` under
    `base` and reports False if it references ``__file__``/``__path__`` or
    filesystem-dependent ``inspect`` functions.
    """
    filename = os.path.join(base, name)
    if filename[:-1] in stubs:
        return True  # Extension module
    pkg = base[len(egg_dir) + 1 :].replace(os.sep, '.')
    module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0]
    if sys.version_info < (3, 7):
        skip = 12  # skip magic & date & file size
    else:
        skip = 16  # skip magic & reserved? & date & file size
    # Read the code object that follows the pyc header; the context
    # manager closes the file even if unmarshalling raises.
    with open(filename, 'rb') as f:
        f.read(skip)
        code = marshal.load(f)
    safe = True
    symbols = dict.fromkeys(iter_symbols(code))
    for bad in ['__file__', '__path__']:
        if bad in symbols:
            log.warn("%s: module references %s", module, bad)
            safe = False
    if 'inspect' in symbols:
        for bad in [
            'getsource',
            'getabsfile',
            'getsourcefile',
            # BUG FIX: a missing comma previously concatenated these two
            # entries into 'getfilegetsourcelines', so neither function
            # was ever detected.
            'getfile',
            'getsourcelines',
            'findsource',
            'getcomments',
            'getframeinfo',
            'getinnerframes',
            'getouterframes',
            'stack',
            'trace',
        ]:
            if bad in symbols:
                log.warn("%s: module MAY be using inspect.%s", module, bad)
                safe = False
    return safe
|
| 403 |
+
|
| 404 |
+
|
| 405 |
+
def iter_symbols(code):
    """Yield names and strings used by `code` and its nested code objects"""
    yield from code.co_names
    for const in code.co_consts:
        if isinstance(const, str):
            yield const
        elif isinstance(const, CodeType):
            # Recurse into nested functions/classes compiled as constants.
            yield from iter_symbols(const)
|
| 415 |
+
|
| 416 |
+
|
| 417 |
+
def can_scan():
    """Report whether compiled bytecode can be analyzed on this platform."""
    if sys.platform.startswith('java') or sys.platform == 'cli':
        # Jython / IronPython don't use CPython marshalled bytecode.
        log.warn("Unable to analyze compiled code on this platform.")
        log.warn(
            "Please ask the author to include a 'zip_safe'"
            " setting (either True or False) in the package's setup.py"
        )
        return None
    # CPython, PyPy, etc.
    return True
|
| 426 |
+
|
| 427 |
+
|
| 428 |
+
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
INSTALL_DIRECTORY_ATTRS = [
    'install_lib',
    'install_dir',
    'install_data',
    'install_base',
]
|
| 432 |
+
|
| 433 |
+
|
| 434 |
+
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, mode='w'):
    """Create a zip file from all the files under 'base_dir'. The output
    zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
    Python module (if available) or the InfoZIP "zip" utility (if installed
    and found on the default search path). If neither tool is available,
    raises DistutilsExecError. Returns the name of the output zip file.
    """
    import zipfile

    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)

    def visit(z, dirname, names):
        # Add each regular file under `dirname` to archive `z`, stored
        # under its path relative to base_dir.
        for name in names:
            path = os.path.normpath(os.path.join(dirname, name))
            if os.path.isfile(path):
                p = path[len(base_dir) + 1 :]
                if not dry_run:
                    z.write(path, p)
                log.debug("adding '%s'", p)

    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if not dry_run:
        # Context manager guarantees the archive is closed (and its central
        # directory flushed) even if a write fails partway through; the
        # previous explicit close() leaked the handle on error.
        with zipfile.ZipFile(zip_filename, mode, compression=compression) as z:
            for dirname, dirs, files in sorted_walk(base_dir):
                visit(z, dirname, files)
    else:
        for dirname, dirs, files in sorted_walk(base_dir):
            visit(None, dirname, files)
    return zip_filename
|