Datasets:
Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .venv/Lib/site-packages/__pycache__/_virtualenv.cpython-310.pyc +0 -0
- .venv/Lib/site-packages/_distutils_hack/__init__.py +227 -0
- .venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc +0 -0
- .venv/Lib/site-packages/_distutils_hack/override.py +1 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py +172 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/check.py +151 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/clean.py +75 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/config.py +376 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/install.py +813 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/install_data.py +83 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py +92 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py +44 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py +237 -0
- .venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py +60 -0
- .venv/Lib/site-packages/wheel-0.41.2.dist-info/INSTALLER +1 -0
- .venv/Lib/site-packages/wheel-0.41.2.dist-info/LICENSE.txt +21 -0
- .venv/Lib/site-packages/wheel-0.41.2.dist-info/METADATA +61 -0
- .venv/Lib/site-packages/wheel-0.41.2.dist-info/RECORD +71 -0
- .venv/Lib/site-packages/wheel-0.41.2.dist-info/WHEEL +4 -0
- .venv/Lib/site-packages/wheel-0.41.2.dist-info/entry_points.txt +6 -0
- .venv/Lib/site-packages/wheel/__init__.py +3 -0
- .venv/Lib/site-packages/wheel/__main__.py +23 -0
- .venv/Lib/site-packages/wheel/_setuptools_logging.py +26 -0
- .venv/Lib/site-packages/wheel/bdist_wheel.py +593 -0
- .venv/Lib/site-packages/wheel/cli/__init__.py +155 -0
- .venv/Lib/site-packages/wheel/cli/convert.py +273 -0
- .venv/Lib/site-packages/wheel/cli/pack.py +124 -0
- .venv/Lib/site-packages/wheel/cli/unpack.py +30 -0
- .venv/Lib/site-packages/wheel/macosx_libfile.py +471 -0
- .venv/Lib/site-packages/wheel/metadata.py +179 -0
- .venv/Lib/site-packages/wheel/util.py +26 -0
- .venv/Lib/site-packages/wheel/vendored/__init__.py +0 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/__init__.py +0 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/_elffile.py +108 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/_manylinux.py +238 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/_musllinux.py +80 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/_parser.py +328 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/_structures.py +61 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/_tokenizer.py +188 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/markers.py +245 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/requirements.py +95 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/specifiers.py +1006 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/tags.py +546 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/utils.py +141 -0
- .venv/Lib/site-packages/wheel/vendored/packaging/version.py +563 -0
- .venv/Lib/site-packages/wheel/vendored/vendor.txt +1 -0
- .venv/Lib/site-packages/wheel/wheelfile.py +196 -0
- .venv/Scripts/activate +87 -0
- .venv/Scripts/activate.bat +38 -0
- .venv/Scripts/activate.fish +103 -0
.venv/Lib/site-packages/__pycache__/_virtualenv.cpython-310.pyc
ADDED
|
Binary file (2.79 kB). View file
|
|
|
.venv/Lib/site-packages/_distutils_hack/__init__.py
ADDED
|
@@ -0,0 +1,227 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# don't import any costly modules
|
| 2 |
+
import sys
|
| 3 |
+
import os
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
is_pypy = '__pypy__' in sys.builtin_module_names
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def warn_distutils_present():
|
| 10 |
+
if 'distutils' not in sys.modules:
|
| 11 |
+
return
|
| 12 |
+
if is_pypy and sys.version_info < (3, 7):
|
| 13 |
+
# PyPy for 3.6 unconditionally imports distutils, so bypass the warning
|
| 14 |
+
# https://foss.heptapod.net/pypy/pypy/-/blob/be829135bc0d758997b3566062999ee8b23872b4/lib-python/3/site.py#L250
|
| 15 |
+
return
|
| 16 |
+
import warnings
|
| 17 |
+
|
| 18 |
+
warnings.warn(
|
| 19 |
+
"Distutils was imported before Setuptools, but importing Setuptools "
|
| 20 |
+
"also replaces the `distutils` module in `sys.modules`. This may lead "
|
| 21 |
+
"to undesirable behaviors or errors. To avoid these issues, avoid "
|
| 22 |
+
"using distutils directly, ensure that setuptools is installed in the "
|
| 23 |
+
"traditional way (e.g. not an editable install), and/or make sure "
|
| 24 |
+
"that setuptools is always imported before distutils."
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def clear_distutils():
|
| 29 |
+
if 'distutils' not in sys.modules:
|
| 30 |
+
return
|
| 31 |
+
import warnings
|
| 32 |
+
|
| 33 |
+
warnings.warn("Setuptools is replacing distutils.")
|
| 34 |
+
mods = [
|
| 35 |
+
name
|
| 36 |
+
for name in sys.modules
|
| 37 |
+
if name == "distutils" or name.startswith("distutils.")
|
| 38 |
+
]
|
| 39 |
+
for name in mods:
|
| 40 |
+
del sys.modules[name]
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def enabled():
|
| 44 |
+
"""
|
| 45 |
+
Allow selection of distutils by environment variable.
|
| 46 |
+
"""
|
| 47 |
+
which = os.environ.get('SETUPTOOLS_USE_DISTUTILS', 'local')
|
| 48 |
+
return which == 'local'
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def ensure_local_distutils():
|
| 52 |
+
import importlib
|
| 53 |
+
|
| 54 |
+
clear_distutils()
|
| 55 |
+
|
| 56 |
+
# With the DistutilsMetaFinder in place,
|
| 57 |
+
# perform an import to cause distutils to be
|
| 58 |
+
# loaded from setuptools._distutils. Ref #2906.
|
| 59 |
+
with shim():
|
| 60 |
+
importlib.import_module('distutils')
|
| 61 |
+
|
| 62 |
+
# check that submodules load as expected
|
| 63 |
+
core = importlib.import_module('distutils.core')
|
| 64 |
+
assert '_distutils' in core.__file__, core.__file__
|
| 65 |
+
assert 'setuptools._distutils.log' not in sys.modules
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def do_override():
|
| 69 |
+
"""
|
| 70 |
+
Ensure that the local copy of distutils is preferred over stdlib.
|
| 71 |
+
|
| 72 |
+
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
|
| 73 |
+
for more motivation.
|
| 74 |
+
"""
|
| 75 |
+
if enabled():
|
| 76 |
+
warn_distutils_present()
|
| 77 |
+
ensure_local_distutils()
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class _TrivialRe:
|
| 81 |
+
def __init__(self, *patterns):
|
| 82 |
+
self._patterns = patterns
|
| 83 |
+
|
| 84 |
+
def match(self, string):
|
| 85 |
+
return all(pat in string for pat in self._patterns)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class DistutilsMetaFinder:
|
| 89 |
+
def find_spec(self, fullname, path, target=None):
|
| 90 |
+
# optimization: only consider top level modules and those
|
| 91 |
+
# found in the CPython test suite.
|
| 92 |
+
if path is not None and not fullname.startswith('test.'):
|
| 93 |
+
return
|
| 94 |
+
|
| 95 |
+
method_name = 'spec_for_{fullname}'.format(**locals())
|
| 96 |
+
method = getattr(self, method_name, lambda: None)
|
| 97 |
+
return method()
|
| 98 |
+
|
| 99 |
+
def spec_for_distutils(self):
|
| 100 |
+
if self.is_cpython():
|
| 101 |
+
return
|
| 102 |
+
|
| 103 |
+
import importlib
|
| 104 |
+
import importlib.abc
|
| 105 |
+
import importlib.util
|
| 106 |
+
|
| 107 |
+
try:
|
| 108 |
+
mod = importlib.import_module('setuptools._distutils')
|
| 109 |
+
except Exception:
|
| 110 |
+
# There are a couple of cases where setuptools._distutils
|
| 111 |
+
# may not be present:
|
| 112 |
+
# - An older Setuptools without a local distutils is
|
| 113 |
+
# taking precedence. Ref #2957.
|
| 114 |
+
# - Path manipulation during sitecustomize removes
|
| 115 |
+
# setuptools from the path but only after the hook
|
| 116 |
+
# has been loaded. Ref #2980.
|
| 117 |
+
# In either case, fall back to stdlib behavior.
|
| 118 |
+
return
|
| 119 |
+
|
| 120 |
+
class DistutilsLoader(importlib.abc.Loader):
|
| 121 |
+
def create_module(self, spec):
|
| 122 |
+
mod.__name__ = 'distutils'
|
| 123 |
+
return mod
|
| 124 |
+
|
| 125 |
+
def exec_module(self, module):
|
| 126 |
+
pass
|
| 127 |
+
|
| 128 |
+
return importlib.util.spec_from_loader(
|
| 129 |
+
'distutils', DistutilsLoader(), origin=mod.__file__
|
| 130 |
+
)
|
| 131 |
+
|
| 132 |
+
@staticmethod
|
| 133 |
+
def is_cpython():
|
| 134 |
+
"""
|
| 135 |
+
Suppress supplying distutils for CPython (build and tests).
|
| 136 |
+
Ref #2965 and #3007.
|
| 137 |
+
"""
|
| 138 |
+
return os.path.isfile('pybuilddir.txt')
|
| 139 |
+
|
| 140 |
+
def spec_for_pip(self):
|
| 141 |
+
"""
|
| 142 |
+
Ensure stdlib distutils when running under pip.
|
| 143 |
+
See pypa/pip#8761 for rationale.
|
| 144 |
+
"""
|
| 145 |
+
if sys.version_info >= (3, 12) or self.pip_imported_during_build():
|
| 146 |
+
return
|
| 147 |
+
clear_distutils()
|
| 148 |
+
self.spec_for_distutils = lambda: None
|
| 149 |
+
|
| 150 |
+
@classmethod
|
| 151 |
+
def pip_imported_during_build(cls):
|
| 152 |
+
"""
|
| 153 |
+
Detect if pip is being imported in a build script. Ref #2355.
|
| 154 |
+
"""
|
| 155 |
+
import traceback
|
| 156 |
+
|
| 157 |
+
return any(
|
| 158 |
+
cls.frame_file_is_setup(frame) for frame, line in traceback.walk_stack(None)
|
| 159 |
+
)
|
| 160 |
+
|
| 161 |
+
@staticmethod
|
| 162 |
+
def frame_file_is_setup(frame):
|
| 163 |
+
"""
|
| 164 |
+
Return True if the indicated frame suggests a setup.py file.
|
| 165 |
+
"""
|
| 166 |
+
# some frames may not have __file__ (#2940)
|
| 167 |
+
return frame.f_globals.get('__file__', '').endswith('setup.py')
|
| 168 |
+
|
| 169 |
+
def spec_for_sensitive_tests(self):
|
| 170 |
+
"""
|
| 171 |
+
Ensure stdlib distutils when running select tests under CPython.
|
| 172 |
+
|
| 173 |
+
python/cpython#91169
|
| 174 |
+
"""
|
| 175 |
+
clear_distutils()
|
| 176 |
+
self.spec_for_distutils = lambda: None
|
| 177 |
+
|
| 178 |
+
sensitive_tests = (
|
| 179 |
+
[
|
| 180 |
+
'test.test_distutils',
|
| 181 |
+
'test.test_peg_generator',
|
| 182 |
+
'test.test_importlib',
|
| 183 |
+
]
|
| 184 |
+
if sys.version_info < (3, 10)
|
| 185 |
+
else [
|
| 186 |
+
'test.test_distutils',
|
| 187 |
+
]
|
| 188 |
+
)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
for name in DistutilsMetaFinder.sensitive_tests:
|
| 192 |
+
setattr(
|
| 193 |
+
DistutilsMetaFinder,
|
| 194 |
+
f'spec_for_{name}',
|
| 195 |
+
DistutilsMetaFinder.spec_for_sensitive_tests,
|
| 196 |
+
)
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
DISTUTILS_FINDER = DistutilsMetaFinder()
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
def add_shim():
|
| 203 |
+
DISTUTILS_FINDER in sys.meta_path or insert_shim()
|
| 204 |
+
|
| 205 |
+
|
| 206 |
+
class shim:
|
| 207 |
+
def __enter__(self):
|
| 208 |
+
insert_shim()
|
| 209 |
+
|
| 210 |
+
def __exit__(self, exc, value, tb):
|
| 211 |
+
_remove_shim()
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def insert_shim():
|
| 215 |
+
sys.meta_path.insert(0, DISTUTILS_FINDER)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
def _remove_shim():
|
| 219 |
+
try:
|
| 220 |
+
sys.meta_path.remove(DISTUTILS_FINDER)
|
| 221 |
+
except ValueError:
|
| 222 |
+
pass
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
if sys.version_info < (3, 12):
|
| 226 |
+
# DistutilsMetaFinder can only be disabled in Python < 3.12 (PEP 632)
|
| 227 |
+
remove_shim = _remove_shim
|
.venv/Lib/site-packages/_distutils_hack/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (7.65 kB). View file
|
|
|
.venv/Lib/site-packages/_distutils_hack/override.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
__import__('_distutils_hack').do_override()
|
.venv/Lib/site-packages/setuptools/_distutils/command/build_scripts.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.build_scripts
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'build_scripts' command."""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
from stat import ST_MODE
|
| 8 |
+
from distutils import sysconfig
|
| 9 |
+
from ..core import Command
|
| 10 |
+
from ..dep_util import newer
|
| 11 |
+
from ..util import convert_path
|
| 12 |
+
from distutils._log import log
|
| 13 |
+
import tokenize
|
| 14 |
+
|
| 15 |
+
shebang_pattern = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
|
| 16 |
+
"""
|
| 17 |
+
Pattern matching a Python interpreter indicated in first line of a script.
|
| 18 |
+
"""
|
| 19 |
+
|
| 20 |
+
# for Setuptools compatibility
|
| 21 |
+
first_line_re = shebang_pattern
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class build_scripts(Command):
|
| 25 |
+
description = "\"build\" scripts (copy and fixup #! line)"
|
| 26 |
+
|
| 27 |
+
user_options = [
|
| 28 |
+
('build-dir=', 'd', "directory to \"build\" (copy) to"),
|
| 29 |
+
('force', 'f', "forcibly build everything (ignore file timestamps"),
|
| 30 |
+
('executable=', 'e', "specify final destination interpreter path"),
|
| 31 |
+
]
|
| 32 |
+
|
| 33 |
+
boolean_options = ['force']
|
| 34 |
+
|
| 35 |
+
def initialize_options(self):
|
| 36 |
+
self.build_dir = None
|
| 37 |
+
self.scripts = None
|
| 38 |
+
self.force = None
|
| 39 |
+
self.executable = None
|
| 40 |
+
|
| 41 |
+
def finalize_options(self):
|
| 42 |
+
self.set_undefined_options(
|
| 43 |
+
'build',
|
| 44 |
+
('build_scripts', 'build_dir'),
|
| 45 |
+
('force', 'force'),
|
| 46 |
+
('executable', 'executable'),
|
| 47 |
+
)
|
| 48 |
+
self.scripts = self.distribution.scripts
|
| 49 |
+
|
| 50 |
+
def get_source_files(self):
|
| 51 |
+
return self.scripts
|
| 52 |
+
|
| 53 |
+
def run(self):
|
| 54 |
+
if not self.scripts:
|
| 55 |
+
return
|
| 56 |
+
self.copy_scripts()
|
| 57 |
+
|
| 58 |
+
def copy_scripts(self):
|
| 59 |
+
"""
|
| 60 |
+
Copy each script listed in ``self.scripts``.
|
| 61 |
+
|
| 62 |
+
If a script is marked as a Python script (first line matches
|
| 63 |
+
'shebang_pattern', i.e. starts with ``#!`` and contains
|
| 64 |
+
"python"), then adjust in the copy the first line to refer to
|
| 65 |
+
the current Python interpreter.
|
| 66 |
+
"""
|
| 67 |
+
self.mkpath(self.build_dir)
|
| 68 |
+
outfiles = []
|
| 69 |
+
updated_files = []
|
| 70 |
+
for script in self.scripts:
|
| 71 |
+
self._copy_script(script, outfiles, updated_files)
|
| 72 |
+
|
| 73 |
+
self._change_modes(outfiles)
|
| 74 |
+
|
| 75 |
+
return outfiles, updated_files
|
| 76 |
+
|
| 77 |
+
def _copy_script(self, script, outfiles, updated_files): # noqa: C901
|
| 78 |
+
shebang_match = None
|
| 79 |
+
script = convert_path(script)
|
| 80 |
+
outfile = os.path.join(self.build_dir, os.path.basename(script))
|
| 81 |
+
outfiles.append(outfile)
|
| 82 |
+
|
| 83 |
+
if not self.force and not newer(script, outfile):
|
| 84 |
+
log.debug("not copying %s (up-to-date)", script)
|
| 85 |
+
return
|
| 86 |
+
|
| 87 |
+
# Always open the file, but ignore failures in dry-run mode
|
| 88 |
+
# in order to attempt to copy directly.
|
| 89 |
+
try:
|
| 90 |
+
f = tokenize.open(script)
|
| 91 |
+
except OSError:
|
| 92 |
+
if not self.dry_run:
|
| 93 |
+
raise
|
| 94 |
+
f = None
|
| 95 |
+
else:
|
| 96 |
+
first_line = f.readline()
|
| 97 |
+
if not first_line:
|
| 98 |
+
self.warn("%s is an empty file (skipping)" % script)
|
| 99 |
+
return
|
| 100 |
+
|
| 101 |
+
shebang_match = shebang_pattern.match(first_line)
|
| 102 |
+
|
| 103 |
+
updated_files.append(outfile)
|
| 104 |
+
if shebang_match:
|
| 105 |
+
log.info("copying and adjusting %s -> %s", script, self.build_dir)
|
| 106 |
+
if not self.dry_run:
|
| 107 |
+
if not sysconfig.python_build:
|
| 108 |
+
executable = self.executable
|
| 109 |
+
else:
|
| 110 |
+
executable = os.path.join(
|
| 111 |
+
sysconfig.get_config_var("BINDIR"),
|
| 112 |
+
"python%s%s"
|
| 113 |
+
% (
|
| 114 |
+
sysconfig.get_config_var("VERSION"),
|
| 115 |
+
sysconfig.get_config_var("EXE"),
|
| 116 |
+
),
|
| 117 |
+
)
|
| 118 |
+
post_interp = shebang_match.group(1) or ''
|
| 119 |
+
shebang = "#!" + executable + post_interp + "\n"
|
| 120 |
+
self._validate_shebang(shebang, f.encoding)
|
| 121 |
+
with open(outfile, "w", encoding=f.encoding) as outf:
|
| 122 |
+
outf.write(shebang)
|
| 123 |
+
outf.writelines(f.readlines())
|
| 124 |
+
if f:
|
| 125 |
+
f.close()
|
| 126 |
+
else:
|
| 127 |
+
if f:
|
| 128 |
+
f.close()
|
| 129 |
+
self.copy_file(script, outfile)
|
| 130 |
+
|
| 131 |
+
def _change_modes(self, outfiles):
|
| 132 |
+
if os.name != 'posix':
|
| 133 |
+
return
|
| 134 |
+
|
| 135 |
+
for file in outfiles:
|
| 136 |
+
self._change_mode(file)
|
| 137 |
+
|
| 138 |
+
def _change_mode(self, file):
|
| 139 |
+
if self.dry_run:
|
| 140 |
+
log.info("changing mode of %s", file)
|
| 141 |
+
return
|
| 142 |
+
|
| 143 |
+
oldmode = os.stat(file)[ST_MODE] & 0o7777
|
| 144 |
+
newmode = (oldmode | 0o555) & 0o7777
|
| 145 |
+
if newmode != oldmode:
|
| 146 |
+
log.info("changing mode of %s from %o to %o", file, oldmode, newmode)
|
| 147 |
+
os.chmod(file, newmode)
|
| 148 |
+
|
| 149 |
+
@staticmethod
|
| 150 |
+
def _validate_shebang(shebang, encoding):
|
| 151 |
+
# Python parser starts to read a script using UTF-8 until
|
| 152 |
+
# it gets a #coding:xxx cookie. The shebang has to be the
|
| 153 |
+
# first line of a file, the #coding:xxx cookie cannot be
|
| 154 |
+
# written before. So the shebang has to be encodable to
|
| 155 |
+
# UTF-8.
|
| 156 |
+
try:
|
| 157 |
+
shebang.encode('utf-8')
|
| 158 |
+
except UnicodeEncodeError:
|
| 159 |
+
raise ValueError(
|
| 160 |
+
"The shebang ({!r}) is not encodable " "to utf-8".format(shebang)
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
# If the script is encoded to a custom encoding (use a
|
| 164 |
+
# #coding:xxx cookie), the shebang has to be encodable to
|
| 165 |
+
# the script encoding too.
|
| 166 |
+
try:
|
| 167 |
+
shebang.encode(encoding)
|
| 168 |
+
except UnicodeEncodeError:
|
| 169 |
+
raise ValueError(
|
| 170 |
+
"The shebang ({!r}) is not encodable "
|
| 171 |
+
"to the script encoding ({})".format(shebang, encoding)
|
| 172 |
+
)
|
.venv/Lib/site-packages/setuptools/_distutils/command/check.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.check
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'check' command.
|
| 4 |
+
"""
|
| 5 |
+
import contextlib
|
| 6 |
+
|
| 7 |
+
from ..core import Command
|
| 8 |
+
from ..errors import DistutilsSetupError
|
| 9 |
+
|
| 10 |
+
with contextlib.suppress(ImportError):
|
| 11 |
+
import docutils.utils
|
| 12 |
+
import docutils.parsers.rst
|
| 13 |
+
import docutils.frontend
|
| 14 |
+
import docutils.nodes
|
| 15 |
+
|
| 16 |
+
class SilentReporter(docutils.utils.Reporter):
|
| 17 |
+
def __init__(
|
| 18 |
+
self,
|
| 19 |
+
source,
|
| 20 |
+
report_level,
|
| 21 |
+
halt_level,
|
| 22 |
+
stream=None,
|
| 23 |
+
debug=0,
|
| 24 |
+
encoding='ascii',
|
| 25 |
+
error_handler='replace',
|
| 26 |
+
):
|
| 27 |
+
self.messages = []
|
| 28 |
+
super().__init__(
|
| 29 |
+
source, report_level, halt_level, stream, debug, encoding, error_handler
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
def system_message(self, level, message, *children, **kwargs):
|
| 33 |
+
self.messages.append((level, message, children, kwargs))
|
| 34 |
+
return docutils.nodes.system_message(
|
| 35 |
+
message, level=level, type=self.levels[level], *children, **kwargs
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class check(Command):
|
| 40 |
+
"""This command checks the meta-data of the package."""
|
| 41 |
+
|
| 42 |
+
description = "perform some checks on the package"
|
| 43 |
+
user_options = [
|
| 44 |
+
('metadata', 'm', 'Verify meta-data'),
|
| 45 |
+
(
|
| 46 |
+
'restructuredtext',
|
| 47 |
+
'r',
|
| 48 |
+
(
|
| 49 |
+
'Checks if long string meta-data syntax '
|
| 50 |
+
'are reStructuredText-compliant'
|
| 51 |
+
),
|
| 52 |
+
),
|
| 53 |
+
('strict', 's', 'Will exit with an error if a check fails'),
|
| 54 |
+
]
|
| 55 |
+
|
| 56 |
+
boolean_options = ['metadata', 'restructuredtext', 'strict']
|
| 57 |
+
|
| 58 |
+
def initialize_options(self):
|
| 59 |
+
"""Sets default values for options."""
|
| 60 |
+
self.restructuredtext = 0
|
| 61 |
+
self.metadata = 1
|
| 62 |
+
self.strict = 0
|
| 63 |
+
self._warnings = 0
|
| 64 |
+
|
| 65 |
+
def finalize_options(self):
|
| 66 |
+
pass
|
| 67 |
+
|
| 68 |
+
def warn(self, msg):
|
| 69 |
+
"""Counts the number of warnings that occurs."""
|
| 70 |
+
self._warnings += 1
|
| 71 |
+
return Command.warn(self, msg)
|
| 72 |
+
|
| 73 |
+
def run(self):
|
| 74 |
+
"""Runs the command."""
|
| 75 |
+
# perform the various tests
|
| 76 |
+
if self.metadata:
|
| 77 |
+
self.check_metadata()
|
| 78 |
+
if self.restructuredtext:
|
| 79 |
+
if 'docutils' in globals():
|
| 80 |
+
try:
|
| 81 |
+
self.check_restructuredtext()
|
| 82 |
+
except TypeError as exc:
|
| 83 |
+
raise DistutilsSetupError(str(exc))
|
| 84 |
+
elif self.strict:
|
| 85 |
+
raise DistutilsSetupError('The docutils package is needed.')
|
| 86 |
+
|
| 87 |
+
# let's raise an error in strict mode, if we have at least
|
| 88 |
+
# one warning
|
| 89 |
+
if self.strict and self._warnings > 0:
|
| 90 |
+
raise DistutilsSetupError('Please correct your package.')
|
| 91 |
+
|
| 92 |
+
def check_metadata(self):
|
| 93 |
+
"""Ensures that all required elements of meta-data are supplied.
|
| 94 |
+
|
| 95 |
+
Required fields:
|
| 96 |
+
name, version
|
| 97 |
+
|
| 98 |
+
Warns if any are missing.
|
| 99 |
+
"""
|
| 100 |
+
metadata = self.distribution.metadata
|
| 101 |
+
|
| 102 |
+
missing = []
|
| 103 |
+
for attr in 'name', 'version':
|
| 104 |
+
if not getattr(metadata, attr, None):
|
| 105 |
+
missing.append(attr)
|
| 106 |
+
|
| 107 |
+
if missing:
|
| 108 |
+
self.warn("missing required meta-data: %s" % ', '.join(missing))
|
| 109 |
+
|
| 110 |
+
def check_restructuredtext(self):
|
| 111 |
+
"""Checks if the long string fields are reST-compliant."""
|
| 112 |
+
data = self.distribution.get_long_description()
|
| 113 |
+
for warning in self._check_rst_data(data):
|
| 114 |
+
line = warning[-1].get('line')
|
| 115 |
+
if line is None:
|
| 116 |
+
warning = warning[1]
|
| 117 |
+
else:
|
| 118 |
+
warning = '{} (line {})'.format(warning[1], line)
|
| 119 |
+
self.warn(warning)
|
| 120 |
+
|
| 121 |
+
def _check_rst_data(self, data):
|
| 122 |
+
"""Returns warnings when the provided data doesn't compile."""
|
| 123 |
+
# the include and csv_table directives need this to be a path
|
| 124 |
+
source_path = self.distribution.script_name or 'setup.py'
|
| 125 |
+
parser = docutils.parsers.rst.Parser()
|
| 126 |
+
settings = docutils.frontend.OptionParser(
|
| 127 |
+
components=(docutils.parsers.rst.Parser,)
|
| 128 |
+
).get_default_values()
|
| 129 |
+
settings.tab_width = 4
|
| 130 |
+
settings.pep_references = None
|
| 131 |
+
settings.rfc_references = None
|
| 132 |
+
reporter = SilentReporter(
|
| 133 |
+
source_path,
|
| 134 |
+
settings.report_level,
|
| 135 |
+
settings.halt_level,
|
| 136 |
+
stream=settings.warning_stream,
|
| 137 |
+
debug=settings.debug,
|
| 138 |
+
encoding=settings.error_encoding,
|
| 139 |
+
error_handler=settings.error_encoding_error_handler,
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
document = docutils.nodes.document(settings, reporter, source=source_path)
|
| 143 |
+
document.note_source(source_path, -1)
|
| 144 |
+
try:
|
| 145 |
+
parser.parse(data, document)
|
| 146 |
+
except AttributeError as e:
|
| 147 |
+
reporter.messages.append(
|
| 148 |
+
(-1, 'Could not finish the parsing: %s.' % e, '', {})
|
| 149 |
+
)
|
| 150 |
+
|
| 151 |
+
return reporter.messages
|
.venv/Lib/site-packages/setuptools/_distutils/command/clean.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.clean
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'clean' command."""
|
| 4 |
+
|
| 5 |
+
# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
|
| 6 |
+
|
| 7 |
+
import os
|
| 8 |
+
from ..core import Command
|
| 9 |
+
from ..dir_util import remove_tree
|
| 10 |
+
from distutils._log import log
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class clean(Command):
|
| 14 |
+
description = "clean up temporary files from 'build' command"
|
| 15 |
+
user_options = [
|
| 16 |
+
('build-base=', 'b', "base build directory (default: 'build.build-base')"),
|
| 17 |
+
(
|
| 18 |
+
'build-lib=',
|
| 19 |
+
None,
|
| 20 |
+
"build directory for all modules (default: 'build.build-lib')",
|
| 21 |
+
),
|
| 22 |
+
('build-temp=', 't', "temporary build directory (default: 'build.build-temp')"),
|
| 23 |
+
(
|
| 24 |
+
'build-scripts=',
|
| 25 |
+
None,
|
| 26 |
+
"build directory for scripts (default: 'build.build-scripts')",
|
| 27 |
+
),
|
| 28 |
+
('bdist-base=', None, "temporary directory for built distributions"),
|
| 29 |
+
('all', 'a', "remove all build output, not just temporary by-products"),
|
| 30 |
+
]
|
| 31 |
+
|
| 32 |
+
boolean_options = ['all']
|
| 33 |
+
|
| 34 |
+
def initialize_options(self):
|
| 35 |
+
self.build_base = None
|
| 36 |
+
self.build_lib = None
|
| 37 |
+
self.build_temp = None
|
| 38 |
+
self.build_scripts = None
|
| 39 |
+
self.bdist_base = None
|
| 40 |
+
self.all = None
|
| 41 |
+
|
| 42 |
+
def finalize_options(self):
|
| 43 |
+
self.set_undefined_options(
|
| 44 |
+
'build',
|
| 45 |
+
('build_base', 'build_base'),
|
| 46 |
+
('build_lib', 'build_lib'),
|
| 47 |
+
('build_scripts', 'build_scripts'),
|
| 48 |
+
('build_temp', 'build_temp'),
|
| 49 |
+
)
|
| 50 |
+
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
|
| 51 |
+
|
| 52 |
+
def run(self):
|
| 53 |
+
# remove the build/temp.<plat> directory (unless it's already
|
| 54 |
+
# gone)
|
| 55 |
+
if os.path.exists(self.build_temp):
|
| 56 |
+
remove_tree(self.build_temp, dry_run=self.dry_run)
|
| 57 |
+
else:
|
| 58 |
+
log.debug("'%s' does not exist -- can't clean it", self.build_temp)
|
| 59 |
+
|
| 60 |
+
if self.all:
|
| 61 |
+
# remove build directories
|
| 62 |
+
for directory in (self.build_lib, self.bdist_base, self.build_scripts):
|
| 63 |
+
if os.path.exists(directory):
|
| 64 |
+
remove_tree(directory, dry_run=self.dry_run)
|
| 65 |
+
else:
|
| 66 |
+
log.warning("'%s' does not exist -- can't clean it", directory)
|
| 67 |
+
|
| 68 |
+
# just for the heck of it, try to remove the base build directory:
|
| 69 |
+
# we might have emptied it right now, but if not we don't care
|
| 70 |
+
if not self.dry_run:
|
| 71 |
+
try:
|
| 72 |
+
os.rmdir(self.build_base)
|
| 73 |
+
log.info("removing '%s'", self.build_base)
|
| 74 |
+
except OSError:
|
| 75 |
+
pass
|
.venv/Lib/site-packages/setuptools/_distutils/command/config.py
ADDED
|
@@ -0,0 +1,376 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.config
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'config' command, a (mostly) empty command class
|
| 4 |
+
that exists mainly to be sub-classed by specific module distributions and
|
| 5 |
+
applications. The idea is that while every "config" command is different,
|
| 6 |
+
at least they're all named the same, and users always see "config" in the
|
| 7 |
+
list of standard commands. Also, this is a good place to put common
|
| 8 |
+
configure-like tasks: "try to compile this C code", or "figure out where
|
| 9 |
+
this header file lives".
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import os
|
| 13 |
+
import re
|
| 14 |
+
|
| 15 |
+
from ..core import Command
|
| 16 |
+
from ..errors import DistutilsExecError
|
| 17 |
+
from ..sysconfig import customize_compiler
|
| 18 |
+
from distutils._log import log
|
| 19 |
+
|
| 20 |
+
LANG_EXT = {"c": ".c", "c++": ".cxx"}
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class config(Command):
    """Prepare-to-build command.

    A (mostly) empty command class that exists mainly to be subclassed by
    specific module distributions.  It is a good place for common
    Autoconf-style configure tasks: "try to compile this C code", or
    "figure out where this header file lives".
    """

    description = "prepare to build"

    user_options = [
        ('compiler=', None, "specify the compiler type"),
        ('cc=', None, "specify the compiler executable"),
        ('include-dirs=', 'I', "list of directories to search for header files"),
        ('define=', 'D', "C preprocessor macros to define"),
        ('undef=', 'U', "C preprocessor macros to undefine"),
        ('libraries=', 'l', "external C libraries to link with"),
        ('library-dirs=', 'L', "directories to search for external C libraries"),
        ('noisy', None, "show every action (compile, link, run, ...) taken"),
        (
            'dump-source',
            None,
            "dump generated source files before attempting to compile them",
        ),
    ]

    # The three standard command methods: since the "config" command
    # does nothing by default, these are empty.

    def initialize_options(self):
        self.compiler = None
        self.cc = None
        self.include_dirs = None
        self.libraries = None
        self.library_dirs = None

        # maximal output for now
        self.noisy = 1
        self.dump_source = 1

        # list of temporary files generated along-the-way that we have
        # to clean at some point
        self.temp_files = []

    def finalize_options(self):
        # Path-like options accept either a real list or an
        # os.pathsep-delimited string (as given on the command line).
        if self.include_dirs is None:
            self.include_dirs = self.distribution.include_dirs or []
        elif isinstance(self.include_dirs, str):
            self.include_dirs = self.include_dirs.split(os.pathsep)

        if self.libraries is None:
            self.libraries = []
        elif isinstance(self.libraries, str):
            self.libraries = [self.libraries]

        if self.library_dirs is None:
            self.library_dirs = []
        elif isinstance(self.library_dirs, str):
            self.library_dirs = self.library_dirs.split(os.pathsep)

    def run(self):
        pass

    # Utility methods for actual "config" commands.  The interfaces are
    # loosely based on Autoconf macros of similar names.  Sub-classes
    # may use these freely.

    def _check_compiler(self):
        """Check that 'self.compiler' really is a CCompiler object;
        if not, make it one.
        """
        # We do this late, and only on-demand, because this is an expensive
        # import.
        from ..ccompiler import CCompiler, new_compiler

        if not isinstance(self.compiler, CCompiler):
            self.compiler = new_compiler(
                compiler=self.compiler, dry_run=self.dry_run, force=1
            )
            customize_compiler(self.compiler)
            if self.include_dirs:
                self.compiler.set_include_dirs(self.include_dirs)
            if self.libraries:
                self.compiler.set_libraries(self.libraries)
            if self.library_dirs:
                self.compiler.set_library_dirs(self.library_dirs)

    def _gen_temp_sourcefile(self, body, headers, lang):
        """Write a throwaway source file for 'lang' containing optional
        '#include's for 'headers' followed by 'body'; return its filename.

        'body' may be None or empty (an empty program is emitted) --
        search_cpp() documents body=None as a supported usage, so don't
        crash on it here.
        """
        filename = "_configtest" + LANG_EXT[lang]
        with open(filename, "w") as file:
            if headers:
                for header in headers:
                    file.write("#include <%s>\n" % header)
                file.write("\n")
            if body:
                file.write(body)
                # Make sure the file ends with a newline so picky
                # compilers don't warn or error on it.
                if not body.endswith("\n"):
                    file.write("\n")
        return filename

    def _preprocess(self, body, headers, include_dirs, lang):
        """Generate a source file and run it through the preprocessor.
        Return the (source, output) filename pair; both are registered
        for later cleanup.
        """
        src = self._gen_temp_sourcefile(body, headers, lang)
        out = "_configtest.i"
        self.temp_files.extend([src, out])
        self.compiler.preprocess(src, out, include_dirs=include_dirs)
        return (src, out)

    def _compile(self, body, headers, include_dirs, lang):
        """Generate a source file and compile it.  Return the
        (source, object) filename pair; both are registered for cleanup.
        """
        src = self._gen_temp_sourcefile(body, headers, lang)
        if self.dump_source:
            dump_file(src, "compiling '%s':" % src)
        (obj,) = self.compiler.object_filenames([src])
        self.temp_files.extend([src, obj])
        self.compiler.compile([src], include_dirs=include_dirs)
        return (src, obj)

    def _link(self, body, headers, include_dirs, libraries, library_dirs, lang):
        """Compile a generated source file and link it into an executable.
        Return the (source, object, program) filename triple; all three
        are registered for cleanup.
        """
        (src, obj) = self._compile(body, headers, include_dirs, lang)
        prog = os.path.splitext(os.path.basename(src))[0]
        self.compiler.link_executable(
            [obj],
            prog,
            libraries=libraries,
            library_dirs=library_dirs,
            target_lang=lang,
        )

        # On platforms where executables get a suffix (e.g. '.exe'),
        # record the real on-disk name so cleanup finds it.
        if self.compiler.exe_extension is not None:
            prog = prog + self.compiler.exe_extension
        self.temp_files.append(prog)

        return (src, obj, prog)

    def _clean(self, *filenames):
        """Remove the named files, or all recorded temp files when called
        with no arguments.  Missing files are silently ignored.
        """
        if not filenames:
            filenames = self.temp_files
            self.temp_files = []
        log.info("removing: %s", ' '.join(filenames))
        for filename in filenames:
            try:
                os.remove(filename)
            except OSError:
                # best-effort cleanup: the file may never have been created
                pass

    # XXX these ignore the dry-run flag: what to do, what to do? even if
    # you want a dry-run build, you still need some sort of configuration
    # info.  My inclination is to make it up to the real config command to
    # consult 'dry_run', and assume a default (minimal) configuration if
    # true.  The problem with trying to do it here is that you'd have to
    # return either true or false from all the 'try' methods, neither of
    # which is correct.

    # XXX need access to the header search path and maybe default macros.

    def try_cpp(self, body=None, headers=None, include_dirs=None, lang="c"):
        """Construct a source file from 'body' (a string containing lines
        of C/C++ code) and 'headers' (a list of header files to include)
        and run it through the preprocessor.  Return true if the
        preprocessor succeeded, false if there were any errors.
        ('body' probably isn't of much use, but what the heck.)
        """
        from ..ccompiler import CompileError

        self._check_compiler()
        ok = True
        try:
            self._preprocess(body, headers, include_dirs, lang)
        except CompileError:
            ok = False

        self._clean()
        return ok

    def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang="c"):
        """Construct a source file (just like 'try_cpp()'), run it through
        the preprocessor, and return true if any line of the output matches
        'pattern'.  'pattern' should either be a compiled regex object or a
        string containing a regex.  If both 'body' and 'headers' are None,
        preprocesses an empty file -- which can be useful to determine the
        symbols the preprocessor and compiler set by default.
        """
        self._check_compiler()
        src, out = self._preprocess(body, headers, include_dirs, lang)

        if isinstance(pattern, str):
            pattern = re.compile(pattern)

        with open(out) as file:
            match = any(pattern.search(line) for line in file)

        self._clean()
        return match

    def try_compile(self, body, headers=None, include_dirs=None, lang="c"):
        """Try to compile a source file built from 'body' and 'headers'.
        Return true on success, false otherwise.
        """
        from ..ccompiler import CompileError

        self._check_compiler()
        try:
            self._compile(body, headers, include_dirs, lang)
            ok = True
        except CompileError:
            ok = False

        log.info("success!" if ok else "failure.")
        self._clean()
        return ok

    def try_link(
        self,
        body,
        headers=None,
        include_dirs=None,
        libraries=None,
        library_dirs=None,
        lang="c",
    ):
        """Try to compile and link a source file, built from 'body' and
        'headers', to executable form.  Return true on success, false
        otherwise.
        """
        from ..ccompiler import CompileError, LinkError

        self._check_compiler()
        try:
            self._link(body, headers, include_dirs, libraries, library_dirs, lang)
            ok = True
        except (CompileError, LinkError):
            ok = False

        log.info("success!" if ok else "failure.")
        self._clean()
        return ok

    def try_run(
        self,
        body,
        headers=None,
        include_dirs=None,
        libraries=None,
        library_dirs=None,
        lang="c",
    ):
        """Try to compile, link to an executable, and run a program
        built from 'body' and 'headers'.  Return true on success, false
        otherwise.
        """
        from ..ccompiler import CompileError, LinkError

        self._check_compiler()
        try:
            src, obj, exe = self._link(
                body, headers, include_dirs, libraries, library_dirs, lang
            )
            self.spawn([exe])
            ok = True
        except (CompileError, LinkError, DistutilsExecError):
            ok = False

        log.info("success!" if ok else "failure.")
        self._clean()
        return ok

    # -- High-level methods --------------------------------------------
    # (these are the ones that are actually likely to be useful
    # when implementing a real-world config command!)

    def check_func(
        self,
        func,
        headers=None,
        include_dirs=None,
        libraries=None,
        library_dirs=None,
        decl=0,
        call=0,
    ):
        """Determine if function 'func' is available by constructing a
        source file that refers to 'func', and compiles and links it.
        If everything succeeds, returns true; otherwise returns false.

        The constructed source file starts out by including the header
        files listed in 'headers'.  If 'decl' is true, it then declares
        'func' (as "int func()"); you probably shouldn't supply 'headers'
        and set 'decl' true in the same call, or you might get errors about
        conflicting declarations for 'func'.  Finally, the constructed
        'main()' function either references 'func' or (if 'call' is true)
        calls it.  'libraries' and 'library_dirs' are used when
        linking.
        """
        self._check_compiler()
        body = []
        if decl:
            body.append("int %s ();" % func)
        body.append("int main () {")
        if call:
            body.append("  %s();" % func)
        else:
            body.append("  %s;" % func)
        body.append("}")
        body = "\n".join(body) + "\n"

        return self.try_link(body, headers, include_dirs, libraries, library_dirs)

    def check_lib(
        self,
        library,
        library_dirs=None,
        headers=None,
        include_dirs=None,
        other_libraries=[],
    ):
        """Determine if 'library' is available to be linked against,
        without actually checking that any particular symbols are provided
        by it.  'headers' will be used in constructing the source file to
        be compiled, but the only effect of this is to check if all the
        header files listed are available.  Any libraries listed in
        'other_libraries' will be included in the link, in case 'library'
        has symbols that depend on other libraries.
        """
        self._check_compiler()
        return self.try_link(
            "int main (void) { }",
            headers,
            include_dirs,
            [library] + other_libraries,
            library_dirs,
        )

    def check_header(self, header, include_dirs=None, library_dirs=None, lang="c"):
        """Determine if the system header file named by 'header_file'
        exists and can be found by the preprocessor; return true if so,
        false otherwise.
        """
        return self.try_cpp(
            body="/* No body */", headers=[header], include_dirs=include_dirs
        )
| 361 |
+
|
| 362 |
+
|
| 363 |
+
def dump_file(filename, head=None):
    """Dump a file's content into log.info.

    If 'head' is not None, it is logged before the file content.
    """
    if head is None:
        log.info('%s', filename)
    else:
        log.info(head)
    # Context manager replaces the manual try/finally close.
    with open(filename) as file:
        log.info(file.read())
.venv/Lib/site-packages/setuptools/_distutils/command/install.py
ADDED
|
@@ -0,0 +1,813 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install' command."""
|
| 4 |
+
|
| 5 |
+
import sys
|
| 6 |
+
import os
|
| 7 |
+
import contextlib
|
| 8 |
+
import sysconfig
|
| 9 |
+
import itertools
|
| 10 |
+
|
| 11 |
+
from distutils._log import log
|
| 12 |
+
from ..core import Command
|
| 13 |
+
from ..debug import DEBUG
|
| 14 |
+
from ..sysconfig import get_config_vars
|
| 15 |
+
from ..file_util import write_file
|
| 16 |
+
from ..util import convert_path, subst_vars, change_root
|
| 17 |
+
from ..util import get_platform
|
| 18 |
+
from ..errors import DistutilsOptionError, DistutilsPlatformError
|
| 19 |
+
from . import _framework_compat as fw
|
| 20 |
+
from .. import _collections
|
| 21 |
+
|
| 22 |
+
from site import USER_BASE
|
| 23 |
+
from site import USER_SITE
|
| 24 |
+
|
| 25 |
+
# Always true in this fork; kept as a guard flag for the user-site schemes
# defined below.
HAS_USER_SITE = True

# Classic flat Windows layout: everything lives under a single base dir.
WINDOWS_SCHEME = {
    'purelib': '{base}/Lib/site-packages',
    'platlib': '{base}/Lib/site-packages',
    'headers': '{base}/Include/{dist_name}',
    'scripts': '{base}/Scripts',
    'data': '{base}',
}

# Static install schemes keyed by scheme name.  The '{...}' placeholders
# are substituted later, when the install command expands config vars.
INSTALL_SCHEMES = {
    'posix_prefix': {
        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
        'platlib': '{platbase}/{platlibdir}/{implementation_lower}'
        '{py_version_short}/site-packages',
        'headers': '{base}/include/{implementation_lower}'
        '{py_version_short}{abiflags}/{dist_name}',
        'scripts': '{base}/bin',
        'data': '{base}',
    },
    'posix_home': {
        'purelib': '{base}/lib/{implementation_lower}',
        'platlib': '{base}/{platlibdir}/{implementation_lower}',
        'headers': '{base}/include/{implementation_lower}/{dist_name}',
        'scripts': '{base}/bin',
        'data': '{base}',
    },
    'nt': WINDOWS_SCHEME,
    # Legacy flat layouts used by old PyPy releases (see _pypy_hack below).
    'pypy': {
        'purelib': '{base}/site-packages',
        'platlib': '{base}/site-packages',
        'headers': '{base}/include/{dist_name}',
        'scripts': '{base}/bin',
        'data': '{base}',
    },
    'pypy_nt': {
        'purelib': '{base}/site-packages',
        'platlib': '{base}/site-packages',
        'headers': '{base}/include/{dist_name}',
        'scripts': '{base}/Scripts',
        'data': '{base}',
    },
}

# user site schemes
if HAS_USER_SITE:
    INSTALL_SCHEMES['nt_user'] = {
        'purelib': '{usersite}',
        'platlib': '{usersite}',
        'headers': '{userbase}/{implementation}{py_version_nodot_plat}'
        '/Include/{dist_name}',
        'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts',
        'data': '{userbase}',
    }

    INSTALL_SCHEMES['posix_user'] = {
        'purelib': '{usersite}',
        'platlib': '{usersite}',
        'headers': '{userbase}/include/{implementation_lower}'
        '{py_version_short}{abiflags}/{dist_name}',
        'scripts': '{userbase}/bin',
        'data': '{userbase}',
    }


# macOS framework schemes (from _framework_compat) override the table above.
INSTALL_SCHEMES.update(fw.schemes)


# The keys to an installation scheme; if any new types of files are to be
# installed, be sure to add an entry to every installation scheme above,
# and to SCHEME_KEYS here.
SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def _load_sysconfig_schemes():
|
| 100 |
+
with contextlib.suppress(AttributeError):
|
| 101 |
+
return {
|
| 102 |
+
scheme: sysconfig.get_paths(scheme, expand=False)
|
| 103 |
+
for scheme in sysconfig.get_scheme_names()
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def _load_schemes():
    """
    Extend default schemes with schemes from sysconfig.

    For any scheme present in both sources, the sysconfig entries win
    key-by-key over the static INSTALL_SCHEMES defaults.
    """
    from_sysconfig = _load_sysconfig_schemes() or {}

    merged = {}
    for scheme in set(itertools.chain(INSTALL_SCHEMES, from_sysconfig)):
        entry = dict(INSTALL_SCHEMES.get(scheme, {}))
        entry.update(from_sysconfig.get(scheme, {}))
        merged[scheme] = entry
    return merged
| 121 |
+
|
| 122 |
+
|
| 123 |
+
def _get_implementation():
|
| 124 |
+
if hasattr(sys, 'pypy_version_info'):
|
| 125 |
+
return 'PyPy'
|
| 126 |
+
else:
|
| 127 |
+
return 'Python'
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def _select_scheme(ob, name):
    """Fill in ob's unset install_* attributes from the scheme for 'name'."""
    resolved_name = _resolve_scheme(name)
    scheme = _inject_headers(name, _load_scheme(resolved_name))
    # Only attributes the user left as None are overwritten.
    pending = _remove_set(ob, _scheme_attrs(scheme))
    vars(ob).update(pending)
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def _remove_set(ob, attrs):
|
| 136 |
+
"""
|
| 137 |
+
Include only attrs that are None in ob.
|
| 138 |
+
"""
|
| 139 |
+
return {key: value for key, value in attrs.items() if getattr(ob, key) is None}
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def _resolve_scheme(name):
    """Map a distutils scheme name to the name sysconfig prefers.

    Falls back to the framework/PyPy-adjusted name when sysconfig cannot
    resolve it (e.g. interpreters without get_preferred_scheme).
    """
    _os_name, _sep, key = name.partition('_')
    try:
        return sysconfig.get_preferred_scheme(key)
    except Exception:
        return fw.scheme(_pypy_hack(name))
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def _load_scheme(name):
    """Return the merged install scheme registered under 'name'."""
    all_schemes = _load_schemes()
    return all_schemes[name]
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def _inject_headers(name, scheme):
    """
    Given a scheme name and the resolved scheme,
    if the scheme does not include headers, resolve
    the fallback scheme for the name and use headers
    from it. pypa/distutils#88

    Mutates and returns 'scheme'.
    """
    # Bypass the preferred scheme, which may not have defined headers.
    # The fallback is loaded unconditionally, matching the original
    # eager-evaluation behavior.
    fallback_headers = _load_scheme(_pypy_hack(name))['headers']
    if 'headers' not in scheme:
        scheme['headers'] = fallback_headers
    return scheme
| 167 |
+
|
| 168 |
+
|
| 169 |
+
def _scheme_attrs(scheme):
    """Resolve install directories by applying the install schemes."""
    attrs = {}
    for key in SCHEME_KEYS:
        attrs['install_' + key] = scheme[key]
    return attrs
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def _pypy_hack(name):
|
| 175 |
+
PY37 = sys.version_info < (3, 8)
|
| 176 |
+
old_pypy = hasattr(sys, 'pypy_version_info') and PY37
|
| 177 |
+
prefix = not name.endswith(('_user', '_home'))
|
| 178 |
+
pypy_name = 'pypy' + '_nt' * (os.name == 'nt')
|
| 179 |
+
return pypy_name if old_pypy and prefix else name
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
class install(Command):
|
| 183 |
+
    description = "install everything from build directory"

    # Command-line interface: (long name, short name, help text) triples.
    user_options = [
        # Select installation scheme and set base director(y|ies)
        ('prefix=', None, "installation prefix"),
        ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"),
        ('home=', None, "(Unix only) home directory to install under"),
        # Or, just set the base director(y|ies)
        (
            'install-base=',
            None,
            "base installation directory (instead of --prefix or --home)",
        ),
        (
            'install-platbase=',
            None,
            "base installation directory for platform-specific files "
            + "(instead of --exec-prefix or --home)",
        ),
        ('root=', None, "install everything relative to this alternate root directory"),
        # Or, explicitly set the installation scheme
        (
            'install-purelib=',
            None,
            "installation directory for pure Python module distributions",
        ),
        (
            'install-platlib=',
            None,
            "installation directory for non-pure module distributions",
        ),
        (
            'install-lib=',
            None,
            "installation directory for all module distributions "
            + "(overrides --install-purelib and --install-platlib)",
        ),
        ('install-headers=', None, "installation directory for C/C++ headers"),
        ('install-scripts=', None, "installation directory for Python scripts"),
        ('install-data=', None, "installation directory for data files"),
        # Byte-compilation options -- see install_lib.py for details, as
        # these are duplicated from there (but only install_lib does
        # anything with them).
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        # Miscellaneous control options
        ('force', 'f', "force installation (overwrite any existing files)"),
        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
        # Where to install documentation (eventually!)
        # ('doc-format=', None, "format of documentation to generate"),
        # ('install-man=', None, "directory for Unix man pages"),
        # ('install-html=', None, "directory for HTML documentation"),
        # ('install-info=', None, "directory for GNU info files"),
        ('record=', None, "filename in which to record list of installed files"),
    ]

    # Options that take no argument and act as flags.
    boolean_options = ['compile', 'force', 'skip-build']

    # The --user option only makes sense when user site-packages exist.
    if HAS_USER_SITE:
        user_options.append(
            ('user', None, "install in user site-package '%s'" % USER_SITE)
        )
        boolean_options.append('user')

    # --no-compile is the negation of --compile.
    negative_opt = {'no-compile': 'compile'}
| 254 |
+
|
| 255 |
+
    def initialize_options(self):
        """Set every option attribute to its pre-parse default (mostly None)."""
        # High-level options: these select both an installation base
        # and scheme.
        self.prefix = None
        self.exec_prefix = None
        self.home = None
        self.user = 0

        # These select only the installation base; it's up to the user to
        # specify the installation scheme (currently, that means supplying
        # the --install-{platlib,purelib,scripts,data} options).
        self.install_base = None
        self.install_platbase = None
        self.root = None

        # These options are the actual installation directories; if not
        # supplied by the user, they are filled in using the installation
        # scheme implied by prefix/exec-prefix/home and the contents of
        # that installation scheme.
        self.install_purelib = None  # for pure module distributions
        self.install_platlib = None  # non-pure (dists w/ extensions)
        self.install_headers = None  # for C/C++ headers
        self.install_lib = None  # set to either purelib or platlib
        self.install_scripts = None
        self.install_data = None
        self.install_userbase = USER_BASE
        self.install_usersite = USER_SITE

        # Byte-compilation settings forwarded to install_lib.
        self.compile = None
        self.optimize = None

        # Deprecated
        # These two are for putting non-packagized distributions into their
        # own directory and creating a .pth file if it makes sense.
        # 'extra_path' comes from the setup file; 'install_path_file' can
        # be turned off if it makes no sense to install a .pth file.  (But
        # better to install it uselessly than to guess wrong and not
        # install it when it's necessary and would be used!)  Currently,
        # 'install_path_file' is always true unless some outsider meddles
        # with it.
        self.extra_path = None
        self.install_path_file = 1

        # 'force' forces installation, even if target files are not
        # out-of-date.  'skip_build' skips running the "build" command,
        # handy if you know it's not necessary.  'warn_dir' (which is *not*
        # a user option, it's just there so the bdist_* commands can turn
        # it off) determines whether we warn about installing to a
        # directory not in sys.path.
        self.force = 0
        self.skip_build = 0
        self.warn_dir = 1

        # These are only here as a conduit from the 'build' command to the
        # 'install_*' commands that do the real work.  ('build_base' isn't
        # actually used anywhere, but it might be useful in future.)  They
        # are not user options, because if the user told the install
        # command where the build directory is, that wouldn't affect the
        # build command.
        self.build_base = None
        self.build_lib = None

        # Not defined yet because we don't know anything about
        # documentation yet.
        # self.install_man = None
        # self.install_html = None
        # self.install_info = None

        self.record = None
| 325 |
+
|
| 326 |
+
# -- Option finalizing methods -------------------------------------
|
| 327 |
+
# (This is rather more involved than for most commands,
|
| 328 |
+
# because this is where the policy for installing third-
|
| 329 |
+
# party Python modules on various platforms given a wide
|
| 330 |
+
# array of user input is decided. Yes, it's quite complex!)
|
| 331 |
+
|
| 332 |
+
def finalize_options(self):  # noqa: C901
    """Finalize the installation directory options.

    This method (and its helpers, like 'finalize_unix()',
    'finalize_other()', and 'select_scheme()') is where the default
    installation directories for modules, extension modules, and
    anything else we care to install from a Python module
    distribution are decided.  Thus, this code makes a pretty important
    policy statement about how third-party stuff is added to a Python
    installation!  Note that the actual work of installation is done
    by the relatively simple 'install_*' commands; they just take
    their orders from the installation directory options determined
    here.
    """
    # Check for errors/inconsistencies in the options; first, stuff
    # that's wrong on any platform.

    # prefix/exec-prefix/home and install-base/install-platbase are two
    # mutually exclusive ways of naming the installation roots.
    if (self.prefix or self.exec_prefix or self.home) and (
        self.install_base or self.install_platbase
    ):
        raise DistutilsOptionError(
            "must supply either prefix/exec-prefix/home or "
            + "install-base/install-platbase -- not both"
        )

    if self.home and (self.prefix or self.exec_prefix):
        raise DistutilsOptionError(
            "must supply either home or prefix/exec-prefix -- not both"
        )

    # --user is exclusive with every other way of picking a location.
    if self.user and (
        self.prefix
        or self.exec_prefix
        or self.home
        or self.install_base
        or self.install_platbase
    ):
        raise DistutilsOptionError(
            "can't combine user with prefix, "
            "exec_prefix/home, or install_(plat)base"
        )

    # Next, stuff that's wrong (or dubious) only on certain platforms.
    if os.name != "posix":
        if self.exec_prefix:
            self.warn("exec-prefix option ignored on this platform")
            self.exec_prefix = None

    # Now the interesting logic -- so interesting that we farm it out
    # to other methods.  The goal of these methods is to set the final
    # values for the install_{lib,scripts,data,...} options, using as
    # input a heady brew of prefix, exec_prefix, home, install_base,
    # install_platbase, user-supplied versions of
    # install_{purelib,platlib,lib,scripts,data,...}, and the
    # install schemes.  Phew!

    self.dump_dirs("pre-finalize_{unix,other}")

    if os.name == 'posix':
        self.finalize_unix()
    else:
        self.finalize_other()

    self.dump_dirs("post-finalize_{unix,other}()")

    # Expand configuration variables, tilde, etc. in self.install_base
    # and self.install_platbase -- that way, we can use $base or
    # $platbase in the other installation directories and not worry
    # about needing recursive variable expansion (shudder).

    py_version = sys.version.split()[0]
    (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
    try:
        abiflags = sys.abiflags
    except AttributeError:
        # sys.abiflags may not be defined on all platforms.
        abiflags = ''
    # Substitution variables available as $name in the install dirs.
    local_vars = {
        'dist_name': self.distribution.get_name(),
        'dist_version': self.distribution.get_version(),
        'dist_fullname': self.distribution.get_fullname(),
        'py_version': py_version,
        'py_version_short': '%d.%d' % sys.version_info[:2],
        'py_version_nodot': '%d%d' % sys.version_info[:2],
        'sys_prefix': prefix,
        'prefix': prefix,
        'sys_exec_prefix': exec_prefix,
        'exec_prefix': exec_prefix,
        'abiflags': abiflags,
        'platlibdir': getattr(sys, 'platlibdir', 'lib'),
        'implementation_lower': _get_implementation().lower(),
        'implementation': _get_implementation(),
    }

    # vars for compatibility on older Pythons
    compat_vars = dict(
        # Python 3.9 and earlier
        py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''),
    )

    if HAS_USER_SITE:
        local_vars['userbase'] = self.install_userbase
        local_vars['usersite'] = self.install_usersite

    # Layered lookup: later mappings here are shadowed by earlier ones.
    self.config_vars = _collections.DictStack(
        [fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars]
    )

    self.expand_basedirs()

    self.dump_dirs("post-expand_basedirs()")

    # Now define config vars for the base directories so we can expand
    # everything else.
    local_vars['base'] = self.install_base
    local_vars['platbase'] = self.install_platbase

    if DEBUG:
        from pprint import pprint

        print("config vars:")
        pprint(dict(self.config_vars))

    # Expand "~" and configuration variables in the installation
    # directories.
    self.expand_dirs()

    self.dump_dirs("post-expand_dirs()")

    # Create directories in the home dir:
    if self.user:
        self.create_home_path()

    # Pick the actual directory to install all modules to: either
    # install_purelib or install_platlib, depending on whether this
    # module distribution is pure or not.  Of course, if the user
    # already specified install_lib, use their selection.
    if self.install_lib is None:
        if self.distribution.has_ext_modules():  # has extensions: non-pure
            self.install_lib = self.install_platlib
        else:
            self.install_lib = self.install_purelib

    # Convert directories from Unix /-separated syntax to the local
    # convention.
    self.convert_paths(
        'lib',
        'purelib',
        'platlib',
        'scripts',
        'data',
        'headers',
        'userbase',
        'usersite',
    )

    # Deprecated
    # Well, we're not actually fully completely finalized yet: we still
    # have to deal with 'extra_path', which is the hack for allowing
    # non-packagized module distributions (hello, Numerical Python!) to
    # get their own directories.
    self.handle_extra_path()
    self.install_libbase = self.install_lib  # needed for .pth file
    self.install_lib = os.path.join(self.install_lib, self.extra_dirs)

    # If a new root directory was supplied, make all the installation
    # dirs relative to it.
    if self.root is not None:
        self.change_roots(
            'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers'
        )

    self.dump_dirs("after prepending root")

    # Find out the build directories, ie. where to install from.
    self.set_undefined_options(
        'build', ('build_base', 'build_base'), ('build_lib', 'build_lib')
    )

    # Punt on doc directories for now -- after all, we're punting on
    # documentation completely!
def dump_dirs(self, msg):
    """Dump the current value of every user option to the debug log.

    No-op unless distutils DEBUG mode is enabled.  *msg* is a label
    printed before the option listing.
    """
    if not DEBUG:
        return
    from ..fancy_getopt import longopt_xlate

    log.debug(msg + ":")
    for opt in self.user_options:
        opt_name = opt[0]
        # Strip the trailing '=' that marks options taking an argument.
        if opt_name[-1] == "=":
            opt_name = opt_name[0:-1]
        if opt_name in self.negative_opt:
            # Negative options are stored under their positive alias,
            # so report the inverted value of that attribute.
            opt_name = self.negative_opt[opt_name]
            opt_name = opt_name.translate(longopt_xlate)
            val = not getattr(self, opt_name)
        else:
            # longopt_xlate maps '-' to '_' (option name -> attribute name).
            opt_name = opt_name.translate(longopt_xlate)
            val = getattr(self, opt_name)
        log.debug("  %s: %s", opt_name, val)
def finalize_unix(self):
    """Finalizes options for posix platforms."""
    # If the user supplied explicit base dirs, they must also have
    # supplied a complete installation scheme; nothing to compute.
    if self.install_base is not None or self.install_platbase is not None:
        incomplete_scheme = (
            (
                self.install_lib is None
                and self.install_purelib is None
                and self.install_platlib is None
            )
            or self.install_headers is None
            or self.install_scripts is None
            or self.install_data is None
        )
        if incomplete_scheme:
            raise DistutilsOptionError(
                "install-base or install-platbase supplied, but "
                "installation scheme is incomplete"
            )
        return

    if self.user:
        # --user install: everything goes under the user base directory.
        if self.install_userbase is None:
            raise DistutilsPlatformError("User base directory is not specified")
        self.install_base = self.install_platbase = self.install_userbase
        self.select_scheme("posix_user")
    elif self.home is not None:
        # --home install: both bases are the home directory.
        self.install_base = self.install_platbase = self.home
        self.select_scheme("posix_home")
    else:
        if self.prefix is None:
            # exec-prefix alone makes no sense without a prefix.
            if self.exec_prefix is not None:
                raise DistutilsOptionError(
                    "must not supply exec-prefix without prefix"
                )

            # Allow Fedora to add components to the prefix
            _prefix_addition = getattr(sysconfig, '_prefix_addition', "")

            self.prefix = os.path.normpath(sys.prefix) + _prefix_addition
            self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition

        else:
            # Explicit prefix; exec-prefix defaults to it.
            if self.exec_prefix is None:
                self.exec_prefix = self.prefix

        self.install_base = self.prefix
        self.install_platbase = self.exec_prefix
        self.select_scheme("posix_prefix")
def finalize_other(self):
    """Finalizes options for non-posix platforms"""
    if self.user:
        # --user install: everything under the platform's user base.
        if self.install_userbase is None:
            raise DistutilsPlatformError("User base directory is not specified")
        self.install_base = self.install_platbase = self.install_userbase
        # e.g. "nt_user" on Windows.
        self.select_scheme(os.name + "_user")
    elif self.home is not None:
        # Home installs use the posix_home scheme on every platform.
        self.install_base = self.install_platbase = self.home
        self.select_scheme("posix_home")
    else:
        if self.prefix is None:
            self.prefix = os.path.normpath(sys.prefix)

        self.install_base = self.install_platbase = self.prefix
        try:
            self.select_scheme(os.name)
        except KeyError:
            # No install scheme registered for this OS.
            raise DistutilsPlatformError(
                "I don't know how to install stuff on '%s'" % os.name
            )
def select_scheme(self, name):
    """Fill in unset install_* attributes from the named install scheme
    (delegates to the module-level ``_select_scheme`` helper)."""
    _select_scheme(self, name)
def _expand_attrs(self, attrs):
    """Expand '~' and $config variables in each named attribute, in place.

    Attributes whose current value is None are left untouched.
    """
    # '~' expansion only makes sense on platforms with home directories.
    expand_user = os.name in ('posix', 'nt')
    for name in attrs:
        value = getattr(self, name)
        if value is None:
            continue
        if expand_user:
            value = os.path.expanduser(value)
        setattr(self, name, subst_vars(value, self.config_vars))
def expand_basedirs(self):
    """Expand '~' and config variables in the base installation
    directories (install_base, install_platbase and root)."""
    base_attrs = ['install_base', 'install_platbase', 'root']
    self._expand_attrs(base_attrs)
def expand_dirs(self):
    """Expand '~' and config variables in the individual install_*
    directory options."""
    dir_attrs = [
        'install_purelib',
        'install_platlib',
        'install_lib',
        'install_headers',
        'install_scripts',
        'install_data',
    ]
    self._expand_attrs(dir_attrs)
def convert_paths(self, *names):
    """Rewrite each 'install_<name>' attribute from Unix '/'-separated
    form to the local path convention, in place."""
    for suffix in names:
        attr_name = "install_" + suffix
        converted = convert_path(getattr(self, attr_name))
        setattr(self, attr_name, converted)
def handle_extra_path(self):
    """Set `path_file` and `extra_dirs` using `extra_path`.

    'extra_path' is the deprecated hook that let non-packagized
    distributions install into their own subdirectory via a .pth file.
    """
    if self.extra_path is None:
        self.extra_path = self.distribution.extra_path

    if self.extra_path is not None:
        log.warning(
            "Distribution option extra_path is deprecated. "
            "See issue27919 for details."
        )
        # Accept "pthname,subdir" or just "name" as a string.
        if isinstance(self.extra_path, str):
            self.extra_path = self.extra_path.split(',')

        if len(self.extra_path) == 1:
            # One element: .pth file name and subdirectory are the same.
            path_file = extra_dirs = self.extra_path[0]
        elif len(self.extra_path) == 2:
            path_file, extra_dirs = self.extra_path
        else:
            raise DistutilsOptionError(
                "'extra_path' option must be a list, tuple, or "
                "comma-separated string with 1 or 2 elements"
            )

        # convert to local form in case Unix notation used (as it
        # should be in setup scripts)
        extra_dirs = convert_path(extra_dirs)
    else:
        path_file = None
        extra_dirs = ''

    # XXX should we warn if path_file and not extra_dirs? (in which
    # case the path file would be harmless but pointless)
    self.path_file = path_file
    self.extra_dirs = extra_dirs
def change_roots(self, *names):
    """Re-root each 'install_<name>' directory under self.root, in place."""
    for suffix in names:
        attr_name = "install_" + suffix
        rerooted = change_root(self.root, getattr(self, attr_name))
        setattr(self, attr_name, rerooted)
def create_home_path(self):
    """Create directories under ~.

    Only runs for --user installs; makes any config-var directory that
    lives under the user's home and doesn't exist yet (mode 0o700).
    """
    if not self.user:
        return
    home = convert_path(os.path.expanduser("~"))
    for name, path in self.config_vars.items():
        # str(path): config vars may hold non-string values.
        if str(path).startswith(home) and not os.path.isdir(path):
            self.debug_print("os.makedirs('%s', 0o700)" % path)
            os.makedirs(path, 0o700)
def run(self):
    """Runs the command."""
    # Obviously have to build before we can install
    if not self.skip_build:
        self.run_command('build')
        # If we built for any other platform, we can't install.
        build_plat = self.distribution.get_command_obj('build').plat_name
        # check warn_dir - it is a clue that the 'install' is happening
        # internally, and not to sys.path, so we don't check the platform
        # matches what we are running.
        if self.warn_dir and build_plat != get_platform():
            raise DistutilsPlatformError("Can't install when " "cross-compiling")

    # Run all sub-commands (at least those that need to be run)
    for cmd_name in self.get_sub_commands():
        self.run_command(cmd_name)

    if self.path_file:
        self.create_path_file()

    # write list of installed files, if requested.
    if self.record:
        outputs = self.get_outputs()
        if self.root:  # strip any package prefix
            root_len = len(self.root)
            for counter in range(len(outputs)):
                outputs[counter] = outputs[counter][root_len:]
        self.execute(
            write_file,
            (self.record, outputs),
            "writing list of installed files to '%s'" % self.record,
        )

    # Warn (at debug level) when installing somewhere Python won't look.
    # Normalize both sides so the membership test is path-insensitive.
    sys_path = map(os.path.normpath, sys.path)
    sys_path = map(os.path.normcase, sys_path)
    install_lib = os.path.normcase(os.path.normpath(self.install_lib))
    if (
        self.warn_dir
        and not (self.path_file and self.install_path_file)
        and install_lib not in sys_path
    ):
        log.debug(
            (
                "modules installed to '%s', which is not in "
                "Python's module search path (sys.path) -- "
                "you'll have to change the search path yourself"
            ),
            self.install_lib,
        )
def create_path_file(self):
    """Creates the .pth file

    Writes [self.extra_dirs] into '<path_file>.pth' under
    install_libbase, unless 'install_path_file' was turned off, in
    which case only a warning is emitted.
    """
    filename = os.path.join(self.install_libbase, self.path_file + ".pth")
    if self.install_path_file:
        self.execute(
            write_file, (filename, [self.extra_dirs]), "creating %s" % filename
        )
    else:
        self.warn("path file '%s' not created" % filename)
def get_outputs(self):
    """Assemble and return the outputs of all the sub-commands,
    plus the .pth file when one is being written."""
    seen = []
    for sub_name in self.get_sub_commands():
        sub_cmd = self.get_finalized_command(sub_name)
        # Merge while preserving order and dropping duplicates.
        for path in sub_cmd.get_outputs():
            if path not in seen:
                seen.append(path)

    if self.path_file and self.install_path_file:
        pth_path = os.path.join(self.install_libbase, self.path_file + ".pth")
        seen.append(pth_path)

    return seen
def get_inputs(self):
    """Return the combined inputs of all the sub-commands."""
    # XXX gee, this looks familiar ;-(
    collected = []
    for sub_name in self.get_sub_commands():
        collected += self.get_finalized_command(sub_name).get_inputs()
    return collected
def has_lib(self):
    """Return true if the distribution has any Python modules
    (pure or extension) to install."""
    dist = self.distribution
    return dist.has_pure_modules() or dist.has_ext_modules()
def has_headers(self):
    """Returns true if the current distribution has any headers to
    install."""
    return self.distribution.has_headers()
def has_scripts(self):
    """Returns true if the current distribution has any scripts to
    install."""
    return self.distribution.has_scripts()
def has_data(self):
    """Returns true if the current distribution has any data files to
    install."""
    return self.distribution.has_data_files()
# 'sub_commands': a list of commands this command might have to run to
# get its work done.  See cmd.py for more info.  Each entry pairs a
# command name with a predicate deciding whether it needs to run.
sub_commands = [
    ('install_lib', has_lib),
    ('install_headers', has_headers),
    ('install_scripts', has_scripts),
    ('install_data', has_data),
    # Metadata is always written, regardless of distribution contents.
    ('install_egg_info', lambda self: True),
]
.venv/Lib/site-packages/setuptools/_distutils/command/install_data.py
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_data
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_data' command, for installing
|
| 4 |
+
platform-independent data files."""
|
| 5 |
+
|
| 6 |
+
# contributed by Bastian Kleineidam
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
from ..core import Command
|
| 10 |
+
from ..util import change_root, convert_path
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class install_data(Command):
    """Install the platform-independent files listed in the
    distribution's 'data_files' option."""

    description = "install data files"

    user_options = [
        (
            'install-dir=',
            'd',
            "base directory for installing data files "
            "(default: installation base dir)",
        ),
        ('root=', None, "install everything relative to this alternate root directory"),
        ('force', 'f', "force installation (overwrite existing files)"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        self.install_dir = None
        self.outfiles = []  # every file/dir actually installed
        self.root = None
        self.force = 0
        self.data_files = self.distribution.data_files
        self.warn_dir = 1

    def finalize_options(self):
        # Inherit unset options from the parent 'install' command.
        self.set_undefined_options(
            'install',
            ('install_data', 'install_dir'),
            ('root', 'root'),
            ('force', 'force'),
        )

    def run(self):
        """Copy each entry of 'data_files' into place.

        Entries are either bare filenames (installed directly into
        install_dir, with a warning) or (directory, [files]) tuples.
        """
        self.mkpath(self.install_dir)
        for f in self.data_files:
            if isinstance(f, str):
                # it's a simple file, so copy it
                f = convert_path(f)
                if self.warn_dir:
                    self.warn(
                        "setup script did not provide a directory for "
                        "'%s' -- installing right in '%s'" % (f, self.install_dir)
                    )
                (out, _) = self.copy_file(f, self.install_dir)
                self.outfiles.append(out)
            else:
                # it's a tuple with path to install to and a list of files
                dir = convert_path(f[0])
                if not os.path.isabs(dir):
                    # Relative target: resolve under install_dir.
                    dir = os.path.join(self.install_dir, dir)
                elif self.root:
                    # Absolute target: re-root it under --root if given.
                    dir = change_root(self.root, dir)
                self.mkpath(dir)

                if f[1] == []:
                    # If there are no files listed, the user must be
                    # trying to create an empty directory, so add the
                    # directory to the list of output files.
                    self.outfiles.append(dir)
                else:
                    # Copy files, adding them to the list of output files.
                    for data in f[1]:
                        data = convert_path(data)
                        (out, _) = self.copy_file(data, dir)
                        self.outfiles.append(out)

    def get_inputs(self):
        # Inputs are exactly the declared data_files.
        return self.data_files or []

    def get_outputs(self):
        # Outputs are whatever run() recorded.
        return self.outfiles
.venv/Lib/site-packages/setuptools/_distutils/command/install_egg_info.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
distutils.command.install_egg_info
|
| 3 |
+
|
| 4 |
+
Implements the Distutils 'install_egg_info' command, for installing
|
| 5 |
+
a package's PKG-INFO metadata.
|
| 6 |
+
"""
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
import re
|
| 11 |
+
|
| 12 |
+
from ..cmd import Command
|
| 13 |
+
from .. import dir_util
|
| 14 |
+
from .._log import log
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class install_egg_info(Command):
    """Install an .egg-info file for the package"""

    description = "Install package's PKG-INFO metadata as an .egg-info file"
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None

    @property
    def basename(self):
        """
        Filename of the .egg-info file, e.g. 'my_pkg-1.0-py3.11.egg-info'.

        Allow basename to be overridden by child class.
        Ref pypa/distutils#2.
        """
        return "%s-%s-py%d.%d.egg-info" % (
            to_filename(safe_name(self.distribution.get_name())),
            to_filename(safe_version(self.distribution.get_version())),
            *sys.version_info[:2],
        )

    def finalize_options(self):
        # Install next to the modules, under install_lib's directory.
        self.set_undefined_options('install_lib', ('install_dir', 'install_dir'))
        self.target = os.path.join(self.install_dir, self.basename)
        self.outputs = [self.target]

    def run(self):
        """Write PKG-INFO metadata to the target, replacing any
        pre-existing file or directory of the same name."""
        target = self.target
        if os.path.isdir(target) and not os.path.islink(target):
            # A stale .egg-info *directory* from an earlier install.
            dir_util.remove_tree(target, dry_run=self.dry_run)
        elif os.path.exists(target):
            self.execute(os.unlink, (self.target,), "Removing " + target)
        elif not os.path.isdir(self.install_dir):
            self.execute(
                os.makedirs, (self.install_dir,), "Creating " + self.install_dir
            )
        log.info("Writing %s", target)
        if not self.dry_run:
            with open(target, 'w', encoding='UTF-8') as f:
                self.distribution.metadata.write_pkg_file(f)

    def get_outputs(self):
        return self.outputs
| 64 |
+
# The following routines are taken from setuptools' pkg_resources module and
|
| 65 |
+
# can be replaced by importing them from pkg_resources once it is included
|
| 66 |
+
# in the stdlib.
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any runs of non-alphanumeric/. characters are replaced with a
    single '-'.
    """
    illegal_run = re.compile(r'[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)
+
|
| 77 |
+
def safe_version(version):
    """Convert an arbitrary string to a standard version string.

    Spaces become dots, and all other non-alphanumeric characters
    become dashes, with runs of multiple dashes condensed to a single
    dash.
    """
    dotted = version.replace(' ', '.')
    return re.sub(r'[^A-Za-z0-9.]+', '-', dotted)
+
|
| 87 |
+
def to_filename(name):
    """Convert a project or version name to its filename-escaped form.

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))
.venv/Lib/site-packages/setuptools/_distutils/command/install_headers.py
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_headers
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_headers' command, to install C/C++ header
|
| 4 |
+
files to the Python include directory."""
|
| 5 |
+
|
| 6 |
+
from ..core import Command
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
# XXX force is never used
|
| 10 |
+
# XXX force is never used
class install_headers(Command):
    """Install the C/C++ header files declared in the distribution's
    'headers' option into the Python include directory."""

    description = "install C/C++ header files"

    user_options = [
        ('install-dir=', 'd', "directory to install header files to"),
        ('force', 'f', "force installation (overwrite existing files)"),
    ]

    boolean_options = ['force']

    def initialize_options(self):
        self.install_dir = None
        self.force = 0
        self.outfiles = []  # headers actually copied by run()

    def finalize_options(self):
        # Inherit unset options from the parent 'install' command.
        self.set_undefined_options(
            'install', ('install_headers', 'install_dir'), ('force', 'force')
        )

    def run(self):
        headers = self.distribution.headers
        if not headers:
            # Nothing declared -- nothing to do.
            return

        self.mkpath(self.install_dir)
        for header in headers:
            (out, _) = self.copy_file(header, self.install_dir)
            self.outfiles.append(out)

    def get_inputs(self):
        return self.distribution.headers or []

    def get_outputs(self):
        return self.outfiles
.venv/Lib/site-packages/setuptools/_distutils/command/install_lib.py
ADDED
|
@@ -0,0 +1,237 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_lib
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_lib' command
|
| 4 |
+
(install all Python modules)."""
|
| 5 |
+
|
| 6 |
+
import os
|
| 7 |
+
import importlib.util
|
| 8 |
+
import sys
|
| 9 |
+
|
| 10 |
+
from ..core import Command
|
| 11 |
+
from ..errors import DistutilsOptionError
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
# Extension for Python source files.
|
| 15 |
+
PYTHON_SOURCE_EXTENSION = ".py"
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class install_lib(Command):
    """Install all Python modules (pure and extension) from the build tree.

    The entire ``build_lib`` directory is copied into the ``install_lib``
    target directory; installed ``.py`` files may then be byte-compiled
    according to the ``--compile``/``--optimize`` options.
    """

    description = "install all Python modules (extensions and pure Python)"

    # The byte-compilation options are a tad confusing. Here are the
    # possible scenarios:
    # 1) no compilation at all (--no-compile --no-optimize)
    # 2) compile .pyc only (--compile --no-optimize; default)
    # 3) compile .pyc and "opt-1" .pyc (--compile --optimize)
    # 4) compile "opt-1" .pyc only (--no-compile --optimize)
    # 5) compile .pyc and "opt-2" .pyc (--compile --optimize-more)
    # 6) compile "opt-2" .pyc only (--no-compile --optimize-more)
    #
    # The UI for this is two options, 'compile' and 'optimize'.
    # 'compile' is strictly boolean, and only decides whether to
    # generate .pyc files. 'optimize' is three-way (0, 1, or 2), and
    # decides both whether to generate .pyc files and what level of
    # optimization to use.

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
        ('build-dir=', 'b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('compile', 'c', "compile .py to .pyc [default]"),
        ('no-compile', None, "don't compile .py files"),
        (
            'optimize=',
            'O',
            "also compile with optimization: -O1 for \"python -O\", "
            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
        ),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'compile', 'skip-build']
    # '--no-compile' is spelled as the negation of '--compile'.
    negative_opt = {'no-compile': 'compile'}

    def initialize_options(self):
        """Set option defaults; real values come from the 'install' command."""
        # let the 'install' command dictate our installation directory
        self.install_dir = None
        self.build_dir = None
        self.force = 0
        self.compile = None
        self.optimize = None
        self.skip_build = None

    def finalize_options(self):
        """Inherit unset options from 'install' and validate 'optimize'.

        Raises DistutilsOptionError if 'optimize' is not 0, 1 or 2.
        """
        # Get all the information we need to install pure Python modules
        # from the umbrella 'install' command -- build (source) directory,
        # install (target) directory, and whether to compile .py files.
        self.set_undefined_options(
            'install',
            ('build_lib', 'build_dir'),
            ('install_lib', 'install_dir'),
            ('force', 'force'),
            ('compile', 'compile'),
            ('optimize', 'optimize'),
            ('skip_build', 'skip_build'),
        )

        if self.compile is None:
            self.compile = True
        if self.optimize is None:
            self.optimize = False

        # 'optimize' may arrive as a command-line string; coerce and validate.
        if not isinstance(self.optimize, int):
            try:
                self.optimize = int(self.optimize)
                if self.optimize not in (0, 1, 2):
                    raise AssertionError
            except (ValueError, AssertionError):
                raise DistutilsOptionError("optimize must be 0, 1, or 2")

    def run(self):
        """Build (unless skipped), copy the build tree, then byte-compile."""
        # Make sure we have built everything we need first
        self.build()

        # Install everything: simply dump the entire contents of the build
        # directory to the installation directory (that's the beauty of
        # having a build directory!)
        outfiles = self.install()

        # (Optionally) compile .py to .pyc
        if outfiles is not None and self.distribution.has_pure_modules():
            self.byte_compile(outfiles)

    # -- Top-level worker functions ------------------------------------
    # (called from 'run()')

    def build(self):
        """Run the relevant build sub-commands unless --skip-build was given."""
        if not self.skip_build:
            if self.distribution.has_pure_modules():
                self.run_command('build_py')
            if self.distribution.has_ext_modules():
                self.run_command('build_ext')

    def install(self):
        """Copy the build tree into the install dir.

        Returns the list of installed files, or None (after a warning)
        when the build directory does not exist.
        """
        if os.path.isdir(self.build_dir):
            outfiles = self.copy_tree(self.build_dir, self.install_dir)
        else:
            self.warn(
                "'%s' does not exist -- no Python modules to install" % self.build_dir
            )
            return
        return outfiles

    def byte_compile(self, files):
        """Byte-compile *files* according to --compile/--optimize.

        A no-op (with a warning) when bytecode writing is disabled
        interpreter-wide (PYTHONDONTWRITEBYTECODE / -B).
        """
        if sys.dont_write_bytecode:
            self.warn('byte-compiling is disabled, skipping.')
            return

        from ..util import byte_compile

        # Get the "--root" directory supplied to the "install" command,
        # and use it as a prefix to strip off the purported filename
        # encoded in bytecode files. This is far from complete, but it
        # should at least generate usable bytecode in RPM distributions.
        install_root = self.get_finalized_command('install').root

        if self.compile:
            byte_compile(
                files,
                optimize=0,
                force=self.force,
                prefix=install_root,
                dry_run=self.dry_run,
            )
        if self.optimize > 0:
            # NOTE(review): 'verbose' is forwarded only on this optimized
            # pass, not the plain-compile pass above — confirm intentional.
            byte_compile(
                files,
                optimize=self.optimize,
                force=self.force,
                prefix=install_root,
                verbose=self.verbose,
                dry_run=self.dry_run,
            )

    # -- Utility methods -----------------------------------------------

    def _mutate_outputs(self, has_any, build_cmd, cmd_option, output_dir):
        """Map a build command's output paths into *output_dir*.

        Returns [] immediately when *has_any* is false (nothing of that
        kind in the distribution).
        """
        if not has_any:
            return []

        build_cmd = self.get_finalized_command(build_cmd)
        build_files = build_cmd.get_outputs()
        build_dir = getattr(build_cmd, cmd_option)

        # Strip '<build_dir><sep>' from each path and re-root it under
        # output_dir.
        prefix_len = len(build_dir) + len(os.sep)
        outputs = []
        for file in build_files:
            outputs.append(os.path.join(output_dir, file[prefix_len:]))

        return outputs

    def _bytecode_filenames(self, py_filenames):
        """Return the .pyc cache paths that byte-compiling would create."""
        bytecode_files = []
        for py_file in py_filenames:
            # Since build_py handles package data installation, the
            # list of outputs can contain more than just .py files.
            # Make sure we only report bytecode for the .py files.
            ext = os.path.splitext(os.path.normcase(py_file))[1]
            if ext != PYTHON_SOURCE_EXTENSION:
                continue
            if self.compile:
                bytecode_files.append(
                    importlib.util.cache_from_source(py_file, optimization='')
                )
            if self.optimize > 0:
                bytecode_files.append(
                    importlib.util.cache_from_source(
                        py_file, optimization=self.optimize
                    )
                )

        return bytecode_files

    # -- External interface --------------------------------------------
    # (called by outsiders)

    def get_outputs(self):
        """Return the list of files that would be installed if this command
        were actually run. Not affected by the "dry-run" flag or whether
        modules have actually been built yet.
        """
        pure_outputs = self._mutate_outputs(
            self.distribution.has_pure_modules(),
            'build_py',
            'build_lib',
            self.install_dir,
        )
        if self.compile:
            bytecode_outputs = self._bytecode_filenames(pure_outputs)
        else:
            bytecode_outputs = []

        ext_outputs = self._mutate_outputs(
            self.distribution.has_ext_modules(),
            'build_ext',
            'build_lib',
            self.install_dir,
        )

        return pure_outputs + bytecode_outputs + ext_outputs

    def get_inputs(self):
        """Get the list of files that are input to this command, ie. the
        files that get installed as they are named in the build tree.
        The files in this list correspond one-to-one to the output
        filenames returned by 'get_outputs()'.
        """
        inputs = []

        if self.distribution.has_pure_modules():
            build_py = self.get_finalized_command('build_py')
            inputs.extend(build_py.get_outputs())

        if self.distribution.has_ext_modules():
            build_ext = self.get_finalized_command('build_ext')
            inputs.extend(build_ext.get_outputs())

        return inputs
|
.venv/Lib/site-packages/setuptools/_distutils/command/install_scripts.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""distutils.command.install_scripts
|
| 2 |
+
|
| 3 |
+
Implements the Distutils 'install_scripts' command, for installing
|
| 4 |
+
Python scripts."""
|
| 5 |
+
|
| 6 |
+
# contributed by Bastian Kleineidam
|
| 7 |
+
|
| 8 |
+
import os
|
| 9 |
+
from ..core import Command
|
| 10 |
+
from distutils._log import log
|
| 11 |
+
from stat import ST_MODE
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class install_scripts(Command):
    """Install scripts (Python or otherwise) from the build tree.

    Copies everything from the 'build_scripts' directory into the
    configured script installation directory and, on POSIX, marks the
    installed files executable.
    """

    description = "install scripts (Python or otherwise)"

    user_options = [
        ('install-dir=', 'd', "directory to install scripts to"),
        ('build-dir=', 'b', "build directory (where to install from)"),
        ('force', 'f', "force installation (overwrite existing files)"),
        ('skip-build', None, "skip the build steps"),
    ]

    boolean_options = ['force', 'skip-build']

    def initialize_options(self):
        """Set option defaults; real values come from 'build'/'install'."""
        self.install_dir = None
        self.force = 0
        self.build_dir = None
        self.skip_build = None
        # Fix: initialize here so get_outputs() does not raise
        # AttributeError when queried before run() has executed.
        self.outfiles = []

    def finalize_options(self):
        """Pull unset options from the 'build' and 'install' commands."""
        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
        self.set_undefined_options(
            'install',
            ('install_scripts', 'install_dir'),
            ('force', 'force'),
            ('skip_build', 'skip_build'),
        )

    def run(self):
        """Copy the built scripts and make them executable on POSIX."""
        if not self.skip_build:
            self.run_command('build_scripts')
        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
        if os.name == 'posix':
            # Set the executable bits (owner, group, and world) on
            # all the scripts we just installed.
            for file in self.get_outputs():
                if self.dry_run:
                    log.info("changing mode of %s", file)
                else:
                    # Keep existing permission bits, add r-x for everyone.
                    mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
                    log.info("changing mode of %s to %o", file, mode)
                    os.chmod(file, mode)

    def get_inputs(self):
        """Return the script sources named in the distribution ([] if none)."""
        return self.distribution.scripts or []

    def get_outputs(self):
        """Return the files installed by run() ([] before run())."""
        return self.outfiles or []
|
.venv/Lib/site-packages/wheel-0.41.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/Lib/site-packages/wheel-0.41.2.dist-info/LICENSE.txt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) 2012 Daniel Holth <dholth@fastmail.fm> and contributors
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a
|
| 6 |
+
copy of this software and associated documentation files (the "Software"),
|
| 7 |
+
to deal in the Software without restriction, including without limitation
|
| 8 |
+
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
| 9 |
+
and/or sell copies of the Software, and to permit persons to whom the
|
| 10 |
+
Software is furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included
|
| 13 |
+
in all copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
|
| 18 |
+
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
|
| 19 |
+
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
| 20 |
+
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 21 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
.venv/Lib/site-packages/wheel-0.41.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: wheel
|
| 3 |
+
Version: 0.41.2
|
| 4 |
+
Summary: A built-package format for Python
|
| 5 |
+
Keywords: wheel,packaging
|
| 6 |
+
Author-email: Daniel Holth <dholth@fastmail.fm>
|
| 7 |
+
Maintainer-email: Alex Grönholm <alex.gronholm@nextday.fi>
|
| 8 |
+
Requires-Python: >=3.7
|
| 9 |
+
Description-Content-Type: text/x-rst
|
| 10 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 11 |
+
Classifier: Intended Audience :: Developers
|
| 12 |
+
Classifier: Topic :: System :: Archiving :: Packaging
|
| 13 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 14 |
+
Classifier: Programming Language :: Python
|
| 15 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.7
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 21 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 22 |
+
Requires-Dist: pytest >= 6.0.0 ; extra == "test"
|
| 23 |
+
Requires-Dist: setuptools >= 65 ; extra == "test"
|
| 24 |
+
Project-URL: Changelog, https://wheel.readthedocs.io/en/stable/news.html
|
| 25 |
+
Project-URL: Documentation, https://wheel.readthedocs.io/
|
| 26 |
+
Project-URL: Issue Tracker, https://github.com/pypa/wheel/issues
|
| 27 |
+
Provides-Extra: test
|
| 28 |
+
|
| 29 |
+
wheel
|
| 30 |
+
=====
|
| 31 |
+
|
| 32 |
+
This library is the reference implementation of the Python wheel packaging
|
| 33 |
+
standard, as defined in `PEP 427`_.
|
| 34 |
+
|
| 35 |
+
It has two different roles:
|
| 36 |
+
|
| 37 |
+
#. A setuptools_ extension for building wheels that provides the
|
| 38 |
+
``bdist_wheel`` setuptools command
|
| 39 |
+
#. A command line tool for working with wheel files
|
| 40 |
+
|
| 41 |
+
It should be noted that wheel is **not** intended to be used as a library, and
|
| 42 |
+
as such there is no stable, public API.
|
| 43 |
+
|
| 44 |
+
.. _PEP 427: https://www.python.org/dev/peps/pep-0427/
|
| 45 |
+
.. _setuptools: https://pypi.org/project/setuptools/
|
| 46 |
+
|
| 47 |
+
Documentation
|
| 48 |
+
-------------
|
| 49 |
+
|
| 50 |
+
The documentation_ can be found on Read The Docs.
|
| 51 |
+
|
| 52 |
+
.. _documentation: https://wheel.readthedocs.io/
|
| 53 |
+
|
| 54 |
+
Code of Conduct
|
| 55 |
+
---------------
|
| 56 |
+
|
| 57 |
+
Everyone interacting in the wheel project's codebases, issue trackers, chat
|
| 58 |
+
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
|
| 59 |
+
|
| 60 |
+
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
|
| 61 |
+
|
.venv/Lib/site-packages/wheel-0.41.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
wheel/__init__.py,sha256=iLBUbe2IXU3H9aeNf5_8FrG_MjpDDus0rYtkkaQx72M,59
|
| 2 |
+
wheel/__main__.py,sha256=NkMUnuTCGcOkgY0IBLgBCVC_BGGcWORx2K8jYGS12UE,455
|
| 3 |
+
wheel/_setuptools_logging.py,sha256=NoCnjJ4DFEZ45Eo-2BdXLsWJCwGkait1tp_17paleVw,746
|
| 4 |
+
wheel/bdist_wheel.py,sha256=COwdmACFXXupMH7VVEf5JmHnqVgoJMe81YuKRCIuMZE,20873
|
| 5 |
+
wheel/macosx_libfile.py,sha256=mKH4GW3FILt0jLgm5LPgj7D5XyEvBU2Fgc-jCxMfSng,16143
|
| 6 |
+
wheel/metadata.py,sha256=jGDlp6IMblnujK4u1eni8VAdn2WYycSdQ-P6jaGBUMw,5882
|
| 7 |
+
wheel/util.py,sha256=e0jpnsbbM9QhaaMSyap-_ZgUxcxwpyLDk6RHcrduPLg,621
|
| 8 |
+
wheel/wheelfile.py,sha256=A5QzHd3cpDBqDEr8O6R6jqwLKiqkLlde6VjfgdQXo5Q,7701
|
| 9 |
+
wheel/cli/__init__.py,sha256=ha9uxvzgt2c_uWoZx181Qp_IaCKra6kpd9Ary3BhxTU,4250
|
| 10 |
+
wheel/cli/convert.py,sha256=29utvAoTZzSwFBXb83G1FhmO_ssRQw5XIrcv2p08yXM,9431
|
| 11 |
+
wheel/cli/pack.py,sha256=j6mMTDkR29E-QSdGD4eziG9UHwtRpaNoCNc2CtoXlxM,4338
|
| 12 |
+
wheel/cli/tags.py,sha256=zpUvvgNYJyXkixxpKqrYgHutDsMri_R-N3hy7TOBsjU,5159
|
| 13 |
+
wheel/cli/unpack.py,sha256=Y_J7ynxPSoFFTT7H0fMgbBlVErwyDGcObgme5MBuz58,1021
|
| 14 |
+
wheel/vendored/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 15 |
+
wheel/vendored/vendor.txt,sha256=D8elx6ZKLANY-irWC6duLu0MUph8_wUrdHHZvOgCfKs,16
|
| 16 |
+
wheel/vendored/packaging/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 17 |
+
wheel/vendored/packaging/_elffile.py,sha256=hbmK8OD6Z7fY6hwinHEUcD1by7czkGiNYu7ShnFEk2k,3266
|
| 18 |
+
wheel/vendored/packaging/_manylinux.py,sha256=uZ821PBqQrokhUbwe7E0UodEraMHqzoSgTvfJ8MIl30,8813
|
| 19 |
+
wheel/vendored/packaging/_musllinux.py,sha256=mvPk7FNjjILKRLIdMxR7IvJ1uggLgCszo-L9rjfpi0M,2524
|
| 20 |
+
wheel/vendored/packaging/_parser.py,sha256=jjFjSqNf7W2-Ta6YUkywK0P4d2i0Bz_MqLOfl7O1Tkw,9399
|
| 21 |
+
wheel/vendored/packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
| 22 |
+
wheel/vendored/packaging/_tokenizer.py,sha256=czGibL-4oPofx1pCSt_hrozNbHlOPrqGv6m-0d-iTdo,5148
|
| 23 |
+
wheel/vendored/packaging/markers.py,sha256=HDPXE0_MPBSwsw_9upez8t8mdrqUGrgiOG_qyQy-W30,8161
|
| 24 |
+
wheel/vendored/packaging/requirements.py,sha256=4nOKheaBbVEQXTGSqaOGTy1Tkg7J_sEno3u8jxC-baw,3264
|
| 25 |
+
wheel/vendored/packaging/specifiers.py,sha256=NX3JjilBf4Bs1abjIG8-ZKGv0QFs5xc43vO8GokHxXE,39047
|
| 26 |
+
wheel/vendored/packaging/tags.py,sha256=fOKnZVfiU3oc9CPSzjJUsMk5VTfgOfpNhWobUH0sAlg,18065
|
| 27 |
+
wheel/vendored/packaging/utils.py,sha256=es0cCezKspzriQ-3V88h3yJzxz028euV2sUwM61kE-o,4355
|
| 28 |
+
wheel/vendored/packaging/version.py,sha256=_ULefmddLDLJ9VKRFAXhshEd0zP8OYPhcjCPfYolUbo,16295
|
| 29 |
+
wheel-0.41.2.dist-info/entry_points.txt,sha256=rTY1BbkPHhkGMm4Q3F0pIzJBzW2kMxoG1oriffvGdA0,104
|
| 30 |
+
wheel-0.41.2.dist-info/LICENSE.txt,sha256=MMI2GGeRCPPo6h0qZYx8pBe9_IkcmO8aifpP8MmChlQ,1107
|
| 31 |
+
wheel-0.41.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
|
| 32 |
+
wheel-0.41.2.dist-info/METADATA,sha256=5KX3L5F9-t2CrC391waXse_X4UDFCAvshtp3FZmdA_I,2152
|
| 33 |
+
wheel-0.41.2.dist-info/RECORD,,
|
| 34 |
+
wheel\__main__.cpython-310.pyc,,
|
| 35 |
+
wheel\vendored\packaging\_tokenizer.cpython-310.pyc,,
|
| 36 |
+
wheel\vendored\packaging\_manylinux.cpython-310.pyc,,
|
| 37 |
+
..\..\Scripts\wheel-3.10.exe,,
|
| 38 |
+
wheel\__init__.cpython-310.pyc,,
|
| 39 |
+
wheel\vendored\packaging\version.cpython-310.pyc,,
|
| 40 |
+
wheel\cli\pack.cpython-310.pyc,,
|
| 41 |
+
wheel\vendored\packaging\requirements.cpython-310.pyc,,
|
| 42 |
+
wheel\cli\__init__.cpython-310.pyc,,
|
| 43 |
+
wheel\vendored\packaging\_elffile.cpython-310.pyc,,
|
| 44 |
+
wheel\vendored\packaging\_parser.cpython-310.pyc,,
|
| 45 |
+
wheel\cli\tags.cpython-310.pyc,,
|
| 46 |
+
wheel\cli\unpack.cpython-310.pyc,,
|
| 47 |
+
wheel-0.41.2.virtualenv,,
|
| 48 |
+
..\..\Scripts\wheel.exe,,
|
| 49 |
+
wheel\vendored\packaging\__pycache__,,
|
| 50 |
+
wheel\vendored\packaging\_musllinux.cpython-310.pyc,,
|
| 51 |
+
wheel\bdist_wheel.cpython-310.pyc,,
|
| 52 |
+
..\..\Scripts\wheel3.exe,,
|
| 53 |
+
wheel-0.41.2.dist-info\__pycache__,,
|
| 54 |
+
wheel\vendored\__pycache__,,
|
| 55 |
+
wheel\_setuptools_logging.cpython-310.pyc,,
|
| 56 |
+
wheel\macosx_libfile.cpython-310.pyc,,
|
| 57 |
+
..\..\Scripts\wheel3.10.exe,,
|
| 58 |
+
wheel\metadata.cpython-310.pyc,,
|
| 59 |
+
wheel\vendored\packaging\__init__.cpython-310.pyc,,
|
| 60 |
+
wheel\vendored\packaging\_structures.cpython-310.pyc,,
|
| 61 |
+
wheel\cli\convert.cpython-310.pyc,,
|
| 62 |
+
wheel\vendored\packaging\specifiers.cpython-310.pyc,,
|
| 63 |
+
wheel\__pycache__,,
|
| 64 |
+
wheel-0.41.2.dist-info\INSTALLER,,
|
| 65 |
+
wheel\wheelfile.cpython-310.pyc,,
|
| 66 |
+
wheel\vendored\packaging\tags.cpython-310.pyc,,
|
| 67 |
+
wheel\cli\__pycache__,,
|
| 68 |
+
wheel\vendored\packaging\markers.cpython-310.pyc,,
|
| 69 |
+
wheel\util.cpython-310.pyc,,
|
| 70 |
+
wheel\vendored\packaging\utils.cpython-310.pyc,,
|
| 71 |
+
wheel\vendored\__init__.cpython-310.pyc,,
|
.venv/Lib/site-packages/wheel-0.41.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.9.0
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
.venv/Lib/site-packages/wheel-0.41.2.dist-info/entry_points.txt
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[console_scripts]
|
| 2 |
+
wheel=wheel.cli:main
|
| 3 |
+
|
| 4 |
+
[distutils.commands]
|
| 5 |
+
bdist_wheel=wheel.bdist_wheel:bdist_wheel
|
| 6 |
+
|
.venv/Lib/site-packages/wheel/__init__.py
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
__version__ = "0.41.2"
|
.venv/Lib/site-packages/wheel/__main__.py
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Wheel command line tool (enable python -m wheel syntax)
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def main():  # needed for console script
    """Entry point for the ``wheel`` console script; exits with the CLI's status."""
    if __package__ == "":
        # Running straight out of a wheel archive
        # ('python wheel-0.9.whl/wheel'): make the archive root importable
        # so 'wheel.cli' resolves.
        import os.path

        archive_root = os.path.dirname(os.path.dirname(__file__))
        sys.path.insert(0, archive_root)
    import wheel.cli

    sys.exit(wheel.cli.main())


if __name__ == "__main__":
    sys.exit(main())
|
.venv/Lib/site-packages/wheel/_setuptools_logging.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# copied from setuptools.logging, omitting monkeypatching
|
| 2 |
+
from __future__ import annotations
|
| 3 |
+
|
| 4 |
+
import logging
|
| 5 |
+
import sys
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def _not_warning(record):
    """Logging filter: keep only records strictly below WARNING severity."""
    is_warning_or_worse = record.levelno >= logging.WARNING
    return not is_warning_or_worse
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def configure():
    """
    Configure logging to emit warning and above to stderr
    and everything else to stdout. This behavior is provided
    for compatibility with distutils.log but may change in
    the future.
    """
    stderr_handler = logging.StreamHandler()
    stderr_handler.setLevel(logging.WARNING)

    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.addFilter(_not_warning)

    logging.basicConfig(
        format="{message}",
        style="{",
        handlers=[stderr_handler, stdout_handler],
        level=logging.DEBUG,
    )
|
.venv/Lib/site-packages/wheel/bdist_wheel.py
ADDED
|
@@ -0,0 +1,593 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Create a wheel (.whl) distribution.
|
| 3 |
+
|
| 4 |
+
A wheel is a built archive format.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import annotations
|
| 8 |
+
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import shutil
|
| 12 |
+
import stat
|
| 13 |
+
import struct
|
| 14 |
+
import sys
|
| 15 |
+
import sysconfig
|
| 16 |
+
import warnings
|
| 17 |
+
from email.generator import BytesGenerator, Generator
|
| 18 |
+
from email.policy import EmailPolicy
|
| 19 |
+
from glob import iglob
|
| 20 |
+
from io import BytesIO
|
| 21 |
+
from shutil import rmtree
|
| 22 |
+
from zipfile import ZIP_DEFLATED, ZIP_STORED
|
| 23 |
+
|
| 24 |
+
import setuptools
|
| 25 |
+
from setuptools import Command
|
| 26 |
+
|
| 27 |
+
from . import __version__ as wheel_version
|
| 28 |
+
from .macosx_libfile import calculate_macosx_platform_tag
|
| 29 |
+
from .metadata import pkginfo_to_metadata
|
| 30 |
+
from .util import log
|
| 31 |
+
from .vendored.packaging import tags
|
| 32 |
+
from .vendored.packaging import version as _packaging_version
|
| 33 |
+
from .wheelfile import WheelFile
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name.

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    disallowed_runs = re.compile("[^A-Za-z0-9.]+")
    return disallowed_runs.sub("-", name)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # PEP 440-compliant input: return the normalized form.
        return str(_packaging_version.Version(version))
    except _packaging_version.InvalidVersion:
        # Best effort for non-standard versions: spaces become dots,
        # remaining illegal runs collapse to a single dash.
        dotted = version.replace(" ", ".")
        return re.sub("[^A-Za-z0-9.]+", "-", dotted)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
setuptools_major_version = int(setuptools.__version__.split(".")[0])
|
| 56 |
+
|
| 57 |
+
PY_LIMITED_API_PATTERN = r"cp3\d"
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _is_32bit_interpreter():
    """Return True if this interpreter uses 32-bit (4-byte) pointers."""
    pointer_size = struct.calcsize("P")
    return pointer_size == 4
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
def python_tag():
    """Return the generic interpreter tag for this Python, e.g. ``py3``."""
    major = sys.version_info[0]
    return "py%d" % major
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def get_platform(archive_root):
    """Return our platform name 'win32', 'linux_x86_64'"""
    platform_name = sysconfig.get_platform()
    if platform_name.startswith("macosx") and archive_root is not None:
        platform_name = calculate_macosx_platform_tag(archive_root, platform_name)
    elif _is_32bit_interpreter():
        # A 32-bit interpreter on a 64-bit kernel reports the kernel's
        # platform; map it back to the matching 32-bit tag.
        thirty_two_bit_names = {
            # pip pull request #3497
            "linux-x86_64": "linux-i686",
            # packaging pull request #234
            # TODO armv8l, packaging pull request #690 => this did not land
            # in pip/packaging yet
            "linux-aarch64": "linux-armv7l",
        }
        platform_name = thirty_two_bit_names.get(platform_name, platform_name)

    return platform_name.replace("-", "_")
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def get_flag(var, fallback, expected=True, warn=True):
    """Use a fallback value for determining SOABI flags if the needed config
    var is unset or unavailable."""
    val = sysconfig.get_config_var(var)
    if val is not None:
        return val == expected

    # Config var missing: optionally warn, then trust the caller's fallback.
    if warn:
        warnings.warn(
            f"Config variable '{var}' is unset, Python ABI tag may " "be incorrect",
            RuntimeWarning,
            stacklevel=2,
        )
    return fallback
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
def get_abi_tag():
    """Return the ABI tag based on SOABI (if available) or emulate SOABI (PyPy2)."""
    soabi = sysconfig.get_config_var("SOABI")
    impl = tags.interpreter_name()
    if not soabi and impl in ("cp", "pp") and hasattr(sys, "maxunicode"):
        # No SOABI available: reconstruct the legacy flag-suffixed tag
        # (debug 'd', pymalloc 'm'; 'u' kept for historical wide-unicode).
        d = ""
        m = ""
        u = ""
        if get_flag("Py_DEBUG", hasattr(sys, "gettotalrefcount"), warn=(impl == "cp")):
            d = "d"

        # pymalloc suffix only applies before Python 3.8.
        if get_flag(
            "WITH_PYMALLOC",
            impl == "cp",
            warn=(impl == "cp" and sys.version_info < (3, 8)),
        ) and sys.version_info < (3, 8):
            m = "m"

        abi = f"{impl}{tags.interpreter_version()}{d}{m}{u}"
    elif soabi and impl == "cp":
        # CPython SOABI looks like 'cpython-310-...'; keep the version part.
        abi = "cp" + soabi.split("-")[1]
    elif soabi and impl == "pp":
        # we want something like pypy36-pp73
        abi = "-".join(soabi.split("-")[:2])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi and impl == "graalpy":
        # GraalPy keeps the first three SOABI components.
        abi = "-".join(soabi.split("-")[:3])
        abi = abi.replace(".", "_").replace("-", "_")
    elif soabi:
        # Unknown implementation: sanitize the whole SOABI string.
        abi = soabi.replace(".", "_").replace("-", "_")
    else:
        # No ABI information at all (e.g. pure-Python build).
        abi = None

    return abi
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def safer_name(name):
    """Return safe_name(name) with dashes replaced by underscores (wheel filename form)."""
    sanitized = safe_name(name)
    return sanitized.replace("-", "_")
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
def safer_version(version):
    """Return safe_version(version) with dashes replaced by underscores (wheel filename form)."""
    sanitized = safe_version(version)
    return sanitized.replace("-", "_")
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def remove_readonly(func, path, excinfo):
    """shutil.rmtree ``onerror`` hook: delegate to the ``onexc``-style handler."""
    # excinfo is a (type, value, traceback) triple; only the value is needed.
    _, exc_value, _ = excinfo
    remove_readonly_exc(func, path, exc_value)
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def remove_readonly_exc(func, path, exc):
    """shutil.rmtree ``onexc`` hook: make *path* writable and retry *func* on it.

    *exc* is accepted to satisfy the hook signature but is not inspected.
    """
    # Read-only entries (common on Windows) block deletion; clear the bit first.
    os.chmod(path, stat.S_IWRITE)
    func(path)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
class bdist_wheel(Command):
    """setuptools/distutils command that builds a ``.whl`` archive.

    Installs the project into a temporary tree, converts the egg-info
    metadata to dist-info, writes the WHEEL metadata file, and zips the
    result into the distribution directory.
    """

    description = "create a wheel distribution"

    # Maps --compression option values to zipfile compression constants.
    supported_compressions = {
        "stored": ZIP_STORED,
        "deflated": ZIP_DEFLATED,
    }

    user_options = [
        ("bdist-dir=", "b", "temporary directory for creating the distribution"),
        (
            "plat-name=",
            "p",
            "platform name to embed in generated filenames "
            "(default: %s)" % get_platform(None),
        ),
        (
            "keep-temp",
            "k",
            "keep the pseudo-installation tree around after "
            "creating the distribution archive",
        ),
        ("dist-dir=", "d", "directory to put final built distributions in"),
        ("skip-build", None, "skip rebuilding everything (for testing/debugging)"),
        (
            "relative",
            None,
            "build the archive using relative paths " "(default: false)",
        ),
        (
            "owner=",
            "u",
            "Owner name used when creating a tar file" " [default: current user]",
        ),
        (
            "group=",
            "g",
            "Group name used when creating a tar file" " [default: current group]",
        ),
        ("universal", None, "make a universal wheel" " (default: false)"),
        (
            "compression=",
            None,
            "zipfile compression (one of: {})"
            " (default: 'deflated')".format(", ".join(supported_compressions)),
        ),
        (
            "python-tag=",
            None,
            "Python implementation compatibility tag"
            " (default: '%s')" % (python_tag()),
        ),
        (
            "build-number=",
            None,
            "Build number for this particular version. "
            "As specified in PEP-0427, this must start with a digit. "
            "[default: None]",
        ),
        (
            "py-limited-api=",
            None,
            "Python tag (cp32|cp33|cpNN) for abi3 wheel tag" " (default: false)",
        ),
    ]

    boolean_options = ["keep-temp", "skip-build", "relative", "universal"]

    def initialize_options(self):
        """Set every command option to its pre-parse default."""
        self.bdist_dir = None
        self.data_dir = None
        self.plat_name = None
        self.plat_tag = None
        self.format = "zip"
        self.keep_temp = False
        self.dist_dir = None
        self.egginfo_dir = None
        self.root_is_pure = None
        self.skip_build = None
        self.relative = False
        self.owner = None
        self.group = None
        self.universal = False
        self.compression = "deflated"
        self.python_tag = python_tag()
        self.build_number = None
        self.py_limited_api = False
        self.plat_name_supplied = False

    def finalize_options(self):
        """Resolve defaults, inherit unset options from ``bdist``, and validate values.

        Raises ValueError for an unsupported compression, a malformed
        --py-limited-api value, or a build number not starting with a digit.
        """
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command("bdist").bdist_base
            self.bdist_dir = os.path.join(bdist_base, "wheel")

        egg_info = self.distribution.get_command_obj("egg_info")
        egg_info.ensure_finalized()  # needed for correct `wheel_dist_name`

        self.data_dir = self.wheel_dist_name + ".data"
        # Remember whether the user gave --plat-name before bdist fills it in.
        self.plat_name_supplied = self.plat_name is not None

        try:
            # Translate the option string to the zipfile constant.
            self.compression = self.supported_compressions[self.compression]
        except KeyError:
            raise ValueError(f"Unsupported compression: {self.compression}") from None

        need_options = ("dist_dir", "plat_name", "skip_build")

        self.set_undefined_options("bdist", *zip(need_options, need_options))

        # Pure-Python iff the project builds no extension modules or C libraries.
        self.root_is_pure = not (
            self.distribution.has_ext_modules() or self.distribution.has_c_libraries()
        )

        if self.py_limited_api and not re.match(
            PY_LIMITED_API_PATTERN, self.py_limited_api
        ):
            raise ValueError("py-limited-api must match '%s'" % PY_LIMITED_API_PATTERN)

        # Support legacy [wheel] section for setting universal
        wheel = self.distribution.get_option_dict("wheel")
        if "universal" in wheel:
            # please don't define this in your global configs
            log.warning(
                "The [wheel] section is deprecated. Use [bdist_wheel] instead.",
            )
            val = wheel["universal"][1].strip()
            if val.lower() in ("1", "true", "yes"):
                self.universal = True

        if self.build_number is not None and not self.build_number[:1].isdigit():
            raise ValueError("Build tag (build-number) must start with a digit.")

    @property
    def wheel_dist_name(self):
        """Return distribution full name with - replaced with _"""
        components = (
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        if self.build_number:
            # PEP 427 build tag is an optional third filename component.
            components += (self.build_number,)
        return "-".join(components)

    def get_tag(self):
        """Return the (python, abi, platform) tag triple used in the wheel filename."""
        # bdist sets self.plat_name if unset, we should only use it for purepy
        # wheels if the user supplied it.
        if self.plat_name_supplied:
            plat_name = self.plat_name
        elif self.root_is_pure:
            plat_name = "any"
        else:
            # macosx contains system version in platform name so need special handle
            if self.plat_name and not self.plat_name.startswith("macosx"):
                plat_name = self.plat_name
            else:
                # on macosx always limit the platform name to comply with any
                # c-extension modules in bdist_dir, since the user can specify
                # a higher MACOSX_DEPLOYMENT_TARGET via tools like CMake

                # on other platforms, and on macosx if there are no c-extension
                # modules, use the default platform name.
                plat_name = get_platform(self.bdist_dir)

            if _is_32bit_interpreter():
                if plat_name in ("linux-x86_64", "linux_x86_64"):
                    plat_name = "linux_i686"
                if plat_name in ("linux-aarch64", "linux_aarch64"):
                    # TODO armv8l, packaging pull request #690 => this did not land
                    # in pip/packaging yet
                    plat_name = "linux_armv7l"

        # Normalize to the wheel filename character set.
        plat_name = (
            plat_name.lower().replace("-", "_").replace(".", "_").replace(" ", "_")
        )

        if self.root_is_pure:
            if self.universal:
                impl = "py2.py3"
            else:
                impl = self.python_tag
            tag = (impl, "none", plat_name)
        else:
            impl_name = tags.interpreter_name()
            impl_ver = tags.interpreter_version()
            impl = impl_name + impl_ver
            # We don't work on CPython 3.1, 3.0.
            if self.py_limited_api and (impl_name + impl_ver).startswith("cp3"):
                impl = self.py_limited_api
                abi_tag = "abi3"
            else:
                abi_tag = str(get_abi_tag()).lower()
            tag = (impl, abi_tag, plat_name)
            # issue gh-374: allow overriding plat_name
            supported_tags = [
                (t.interpreter, t.abi, plat_name) for t in tags.sys_tags()
            ]
            # NOTE: assert is stripped under `python -O`; acts as a sanity
            # check that the computed tag is installable on this interpreter.
            assert (
                tag in supported_tags
            ), f"would build wheel with unsupported tag {tag}"
        return tag

    def run(self):
        """Build the project, install it into a temp tree, and zip it into a wheel."""
        build_scripts = self.reinitialize_command("build_scripts")
        build_scripts.executable = "python"
        build_scripts.force = True

        build_ext = self.reinitialize_command("build_ext")
        build_ext.inplace = False

        if not self.skip_build:
            self.run_command("build")

        install = self.reinitialize_command("install", reinit_subcommands=True)
        install.root = self.bdist_dir
        install.compile = False
        install.skip_build = self.skip_build
        install.warn_dir = False

        # A wheel without setuptools scripts is more cross-platform.
        # Use the (undocumented) `no_ep` option to setuptools'
        # install_scripts command to avoid creating entry point scripts.
        install_scripts = self.reinitialize_command("install_scripts")
        install_scripts.no_ep = True

        # Use a custom scheme for the archive, because we have to decide
        # at installation time which scheme to use.
        for key in ("headers", "scripts", "data", "purelib", "platlib"):
            setattr(install, "install_" + key, os.path.join(self.data_dir, key))

        basedir_observed = ""

        if os.name == "nt":
            # win32 barfs if any of these are ''; could be '.'?
            # (distutils.command.install:change_roots bug)
            basedir_observed = os.path.normpath(os.path.join(self.data_dir, ".."))
            self.install_libbase = self.install_lib = basedir_observed

        setattr(
            install,
            "install_purelib" if self.root_is_pure else "install_platlib",
            basedir_observed,
        )

        log.info(f"installing to {self.bdist_dir}")

        self.run_command("install")

        impl_tag, abi_tag, plat_tag = self.get_tag()
        archive_basename = f"{self.wheel_dist_name}-{impl_tag}-{abi_tag}-{plat_tag}"
        if not self.relative:
            archive_root = self.bdist_dir
        else:
            archive_root = os.path.join(
                self.bdist_dir, self._ensure_relative(install.install_base)
            )

        self.set_undefined_options("install_egg_info", ("target", "egginfo_dir"))
        distinfo_dirname = "{}-{}.dist-info".format(
            safer_name(self.distribution.get_name()),
            safer_version(self.distribution.get_version()),
        )
        distinfo_dir = os.path.join(self.bdist_dir, distinfo_dirname)
        self.egg2dist(self.egginfo_dir, distinfo_dir)

        self.write_wheelfile(distinfo_dir)

        # Make the archive
        if not os.path.exists(self.dist_dir):
            os.makedirs(self.dist_dir)

        wheel_path = os.path.join(self.dist_dir, archive_basename + ".whl")
        with WheelFile(wheel_path, "w", self.compression) as wf:
            wf.write_files(archive_root)

        # Add to 'Distribution.dist_files' so that the "upload" command works
        getattr(self.distribution, "dist_files", []).append(
            (
                "bdist_wheel",
                "{}.{}".format(*sys.version_info[:2]),  # like 3.7
                wheel_path,
            )
        )

        if not self.keep_temp:
            log.info(f"removing {self.bdist_dir}")
            if not self.dry_run:
                # Python 3.12 deprecated rmtree's onerror in favor of onexc.
                if sys.version_info < (3, 12):
                    rmtree(self.bdist_dir, onerror=remove_readonly)
                else:
                    rmtree(self.bdist_dir, onexc=remove_readonly_exc)

    def write_wheelfile(
        self, wheelfile_base, generator="bdist_wheel (" + wheel_version + ")"
    ):
        """Write the WHEEL metadata file (RFC 822 style) into *wheelfile_base*."""
        from email.message import Message

        msg = Message()
        msg["Wheel-Version"] = "1.0"  # of the spec
        msg["Generator"] = generator
        msg["Root-Is-Purelib"] = str(self.root_is_pure).lower()
        if self.build_number is not None:
            msg["Build"] = self.build_number

        # Doesn't work for bdist_wininst
        impl_tag, abi_tag, plat_tag = self.get_tag()
        # Expand compressed tag sets ("py2.py3") into one Tag header each.
        for impl in impl_tag.split("."):
            for abi in abi_tag.split("."):
                for plat in plat_tag.split("."):
                    msg["Tag"] = "-".join((impl, abi, plat))

        wheelfile_path = os.path.join(wheelfile_base, "WHEEL")
        log.info(f"creating {wheelfile_path}")
        buffer = BytesIO()
        BytesGenerator(buffer, maxheaderlen=0).flatten(msg)
        with open(wheelfile_path, "wb") as f:
            # NOTE(review): collapses the CRLF endings emitted by
            # BytesGenerator to a bare CR — looks intentional; confirm
            # against the wheel spec before changing.
            f.write(buffer.getvalue().replace(b"\r\n", b"\r"))

    def _ensure_relative(self, path):
        # copied from dir_util, deleted
        drive, path = os.path.splitdrive(path)
        if path[0:1] == os.sep:
            path = drive + path[1:]
        return path

    @property
    def license_paths(self):
        """Return the set/sequence of license file paths to copy into dist-info."""
        if setuptools_major_version >= 57:
            # Setuptools has resolved any patterns to actual file names
            return self.distribution.metadata.license_files or ()

        files = set()
        metadata = self.distribution.get_option_dict("metadata")
        if setuptools_major_version >= 42:
            # Setuptools recognizes the license_files option but does not do globbing
            patterns = self.distribution.metadata.license_files
        else:
            # Prior to those, wheel is entirely responsible for handling license files
            if "license_files" in metadata:
                patterns = metadata["license_files"][1].split()
            else:
                patterns = ()

        if "license_file" in metadata:
            warnings.warn(
                'The "license_file" option is deprecated. Use "license_files" instead.',
                DeprecationWarning,
                stacklevel=2,
            )
            files.add(metadata["license_file"][1])

        # No explicit files/patterns configured: fall back to conventional names.
        if not files and not patterns and not isinstance(patterns, list):
            patterns = ("LICEN[CS]E*", "COPYING*", "NOTICE*", "AUTHORS*")

        for pattern in patterns:
            for path in iglob(pattern):
                if path.endswith("~"):
                    log.debug(
                        f'ignoring license file "{path}" as it looks like a backup'
                    )
                    continue

                if path not in files and os.path.isfile(path):
                    log.info(
                        f'adding license file "{path}" (matched pattern "{pattern}")'
                    )
                    files.add(path)

        return files

    def egg2dist(self, egginfo_path, distinfo_path):
        """Convert an .egg-info directory into a .dist-info directory"""

        def adios(p):
            """Appropriately delete directory, file or link."""
            if os.path.exists(p) and not os.path.islink(p) and os.path.isdir(p):
                shutil.rmtree(p)
            elif os.path.exists(p):
                os.unlink(p)

        # Start from a clean dist-info location.
        adios(distinfo_path)

        if not os.path.exists(egginfo_path):
            # There is no egg-info. This is probably because the egg-info
            # file/directory is not named matching the distribution name used
            # to name the archive file. Check for this case and report
            # accordingly.
            import glob

            pat = os.path.join(os.path.dirname(egginfo_path), "*.egg-info")
            possible = glob.glob(pat)
            err = f"Egg metadata expected at {egginfo_path} but not found"
            if possible:
                alt = os.path.basename(possible[0])
                err += f" ({alt} found - possible misnamed archive file?)"

            raise ValueError(err)

        if os.path.isfile(egginfo_path):
            # .egg-info is a single file
            pkginfo_path = egginfo_path
            pkg_info = pkginfo_to_metadata(egginfo_path, egginfo_path)
            os.mkdir(distinfo_path)
        else:
            # .egg-info is a directory
            pkginfo_path = os.path.join(egginfo_path, "PKG-INFO")
            pkg_info = pkginfo_to_metadata(egginfo_path, pkginfo_path)

            # ignore common egg metadata that is useless to wheel
            shutil.copytree(
                egginfo_path,
                distinfo_path,
                ignore=lambda x, y: {
                    "PKG-INFO",
                    "requires.txt",
                    "SOURCES.txt",
                    "not-zip-safe",
                },
            )

            # delete dependency_links if it is only whitespace
            dependency_links_path = os.path.join(distinfo_path, "dependency_links.txt")
            with open(dependency_links_path, encoding="utf-8") as dependency_links_file:
                dependency_links = dependency_links_file.read().strip()
            if not dependency_links:
                adios(dependency_links_path)

        pkg_info_path = os.path.join(distinfo_path, "METADATA")
        # Emit METADATA as UTF-8 with unlimited line length (no header folding).
        serialization_policy = EmailPolicy(
            utf8=True,
            mangle_from_=False,
            max_line_length=0,
        )
        with open(pkg_info_path, "w", encoding="utf-8") as out:
            Generator(out, policy=serialization_policy).flatten(pkg_info)

        for license_path in self.license_paths:
            filename = os.path.basename(license_path)
            shutil.copy(license_path, os.path.join(distinfo_path, filename))

        adios(egginfo_path)
.venv/Lib/site-packages/wheel/cli/__init__.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Wheel command-line utility.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import argparse
|
| 8 |
+
import os
|
| 9 |
+
import sys
|
| 10 |
+
from argparse import ArgumentTypeError
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class WheelError(Exception):
    """Base exception raised by the wheel command-line tools."""
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def unpack_f(args):
    """CLI adapter: forward parsed arguments to the unpack command."""
    from .unpack import unpack as do_unpack

    do_unpack(args.wheelfile, args.dest)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def pack_f(args):
    """CLI adapter: forward parsed arguments to the pack command."""
    from .pack import pack as do_pack

    do_pack(args.directory, args.dest_dir, args.build_number)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
def convert_f(args):
    """CLI adapter: forward parsed arguments to the convert command."""
    from .convert import convert as do_convert

    do_convert(args.files, args.dest_dir, args.verbose)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def tags_f(args):
    """CLI adapter: retag each given wheel and print the resulting filenames."""
    from .tags import tags

    # Process wheels one at a time, printing each new name as it is produced
    # (same lazy interleaving as the original generator expression).
    for wheel in args.wheel:
        print(
            tags(
                wheel,
                args.python_tag,
                args.abi_tag,
                args.platform_tag,
                args.build,
                args.remove,
            )
        )
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def version_f(args):
    """CLI adapter: print the installed wheel package version."""
    from .. import __version__ as pkg_version

    print("wheel %s" % pkg_version)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def parse_build_tag(build_tag: str) -> str:
    """Validate a PEP 427 build tag supplied via ``--build``.

    A build tag must start with a digit and may not contain a dash.

    Returns:
        The tag unchanged when valid.

    Raises:
        ArgumentTypeError: if the tag is empty, does not start with a digit,
            or contains a dash.
    """
    # Guard the empty string explicitly: indexing build_tag[0] on "" raised
    # an opaque IndexError traceback instead of a clean argparse error.
    if not build_tag or not build_tag[0].isdigit():
        raise ArgumentTypeError("build tag must begin with a digit")
    if "-" in build_tag:
        raise ArgumentTypeError("invalid character ('-') in build tag")

    return build_tag
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
TAGS_HELP = """\
|
| 70 |
+
Make a new wheel with given tags. Any tags unspecified will remain the same.
|
| 71 |
+
Starting the tags with a "+" will append to the existing tags. Starting with a
|
| 72 |
+
"-" will remove a tag (use --option=-TAG syntax). Multiple tags can be
|
| 73 |
+
separated by ".". The original file will remain unless --remove is given. The
|
| 74 |
+
output filename(s) will be displayed on stdout for further processing.
|
| 75 |
+
"""
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def parser():
    """Build the top-level argument parser with all wheel subcommands."""
    root = argparse.ArgumentParser()
    commands = root.add_subparsers(help="commands")

    # wheel unpack
    unpack_cmd = commands.add_parser("unpack", help="Unpack wheel")
    unpack_cmd.add_argument("--dest", "-d", help="Destination directory", default=".")
    unpack_cmd.add_argument("wheelfile", help="Wheel file")
    unpack_cmd.set_defaults(func=unpack_f)

    # wheel pack
    pack_cmd = commands.add_parser("pack", help="Repack wheel")
    pack_cmd.add_argument("directory", help="Root directory of the unpacked wheel")
    pack_cmd.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store the wheel (default %(default)s)",
    )
    pack_cmd.add_argument("--build-number", help="Build tag to use in the wheel name")
    pack_cmd.set_defaults(func=pack_f)

    # wheel convert
    convert_cmd = commands.add_parser("convert", help="Convert egg or wininst to wheel")
    convert_cmd.add_argument("files", nargs="*", help="Files to convert")
    convert_cmd.add_argument(
        "--dest-dir",
        "-d",
        default=os.path.curdir,
        help="Directory to store wheels (default %(default)s)",
    )
    convert_cmd.add_argument("--verbose", "-v", action="store_true")
    convert_cmd.set_defaults(func=convert_f)

    # wheel tags
    tags_cmd = commands.add_parser(
        "tags", help="Add or replace the tags on a wheel", description=TAGS_HELP
    )
    tags_cmd.add_argument("wheel", nargs="*", help="Existing wheel(s) to retag")
    tags_cmd.add_argument(
        "--remove",
        action="store_true",
        help="Remove the original files, keeping only the renamed ones",
    )
    tags_cmd.add_argument(
        "--python-tag", metavar="TAG", help="Specify an interpreter tag(s)"
    )
    tags_cmd.add_argument("--abi-tag", metavar="TAG", help="Specify an ABI tag(s)")
    tags_cmd.add_argument(
        "--platform-tag", metavar="TAG", help="Specify a platform tag(s)"
    )
    tags_cmd.add_argument(
        "--build", type=parse_build_tag, metavar="BUILD", help="Specify a build tag"
    )
    tags_cmd.set_defaults(func=tags_f)

    # wheel version
    version_cmd = commands.add_parser("version", help="Print version and exit")
    version_cmd.set_defaults(func=version_f)

    # wheel help
    help_cmd = commands.add_parser("help", help="Show this help")
    help_cmd.set_defaults(func=lambda args: root.print_help())

    return root
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def main():
    """Entry point: parse arguments, dispatch to the chosen subcommand.

    Returns 0 on success, 1 when no subcommand was given or a WheelError
    was raised.
    """
    cli = parser()
    parsed = cli.parse_args()
    if not hasattr(parsed, "func"):
        # No subcommand supplied: show usage and signal failure.
        cli.print_help()
        return 1

    try:
        parsed.func(parsed)
    except WheelError as exc:
        print(exc, file=sys.stderr)
        return 1

    return 0
|
.venv/Lib/site-packages/wheel/cli/convert.py
ADDED
|
@@ -0,0 +1,273 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os.path
|
| 4 |
+
import re
|
| 5 |
+
import shutil
|
| 6 |
+
import tempfile
|
| 7 |
+
import zipfile
|
| 8 |
+
from glob import iglob
|
| 9 |
+
|
| 10 |
+
from ..bdist_wheel import bdist_wheel
|
| 11 |
+
from ..wheelfile import WheelFile
|
| 12 |
+
from . import WheelError
|
| 13 |
+
|
| 14 |
+
try:
|
| 15 |
+
from setuptools import Distribution
|
| 16 |
+
except ImportError:
|
| 17 |
+
from distutils.dist import Distribution
|
| 18 |
+
|
| 19 |
+
# Parses egg file/directory names of the form
#     name-ver(-pyver(-arch)?)?.egg
# into named groups: name, ver, pyver (e.g. "py3.10"), arch.
# NOTE(review): the "." before "egg" is unescaped, so it matches any
# character there — presumably tolerated for compatibility; confirm
# before tightening.
egg_info_re = re.compile(
    r"""
    (?P<name>.+?)-(?P<ver>.+?)
    (-(?P<pyver>py\d\.\d+)
     (-(?P<arch>.+?))?
    )?.egg$""",
    re.VERBOSE,
)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class _bdist_wheel_tag(bdist_wheel):
    # allow the client to override the default generated wheel tag
    # The default bdist_wheel implementation uses python and abi tags
    # of the running python process. This is not suitable for
    # generating/repackaging prebuild binaries.

    full_tag_supplied = False
    full_tag = None  # None or a (pytag, soabitag, plattag) triple

    def get_tag(self):
        """Return the explicitly supplied tag triple, or defer to bdist_wheel."""
        if self.full_tag_supplied and self.full_tag is not None:
            return self.full_tag
        return bdist_wheel.get_tag(self)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def egg2wheel(egg_path: str, dest_dir: str):
    """Convert a ``.egg`` file or installed-egg directory into a wheel.

    The egg name must follow the ``name-ver(-pyver(-arch)?)?.egg``
    convention so the wheel tags can be recovered from it. The resulting
    wheel is written into *dest_dir*.

    Raises:
        WheelError: if the egg file name cannot be parsed.
    """
    filename = os.path.basename(egg_path)
    match = egg_info_re.match(filename)
    if not match:
        # Fix: include the offending filename in the message (it previously
        # printed a literal "(unknown)" placeholder with no interpolation).
        raise WheelError(f"Invalid egg file name: {filename}")

    egg_info = match.groupdict()
    # Renamed from `dir` to avoid shadowing the builtin.
    tmp_dir = tempfile.mkdtemp(suffix="_e2w")
    if os.path.isfile(egg_path):
        # assume we have a bdist_egg otherwise
        with zipfile.ZipFile(egg_path) as egg:
            egg.extractall(tmp_dir)
    else:
        # support buildout-style installed eggs directories
        for pth in os.listdir(egg_path):
            src = os.path.join(egg_path, pth)
            if os.path.isfile(src):
                shutil.copy2(src, tmp_dir)
            else:
                shutil.copytree(src, os.path.join(tmp_dir, pth))

    # Normalize "py3.10" to "py310" for the wheel filename.
    pyver = egg_info["pyver"]
    if pyver:
        pyver = egg_info["pyver"] = pyver.replace(".", "")

    arch = (egg_info["arch"] or "any").replace(".", "_").replace("-", "_")

    # assume all binary eggs are for CPython
    abi = "cp" + pyver[2:] if arch != "any" else "none"

    root_is_purelib = egg_info["arch"] is None
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        # Binary egg: the tag must come from the filename, not this process.
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = egg_info["arch"] or "any"
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(tmp_dir, "{name}-{ver}.dist-info".format(**egg_info))
    bw.egg2dist(os.path.join(tmp_dir, "EGG-INFO"), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="egg2wheel")
    wheel_name = "{name}-{ver}-{pyver}-{}-{}.whl".format(abi, arch, **egg_info)
    with WheelFile(os.path.join(dest_dir, wheel_name), "w") as wf:
        wf.write_files(tmp_dir)

    shutil.rmtree(tmp_dir)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def parse_wininst_info(wininfo_name, egginfo_name):
    """Extract metadata from filenames.

    Extracts the 4 metadata items needed (name, version, pyversion, arch) from
    the installer filename and the name of the egg-info directory embedded in
    the zipfile (if any).

    The egginfo filename has the format::

        name-ver(-pyver)(-arch).egg-info

    The installer filename has the format::

        name-ver.arch(-pyver).exe

    Some things to note:

    1. The installer filename is not definitive. An installer can be renamed
       and work perfectly well as an installer. So more reliable data should
       be used whenever possible.
    2. The egg-info data should be preferred for the name and version, because
       these come straight from the distutils metadata, and are mandatory.
    3. The pyver from the egg-info data should be ignored, as it is
       constructed from the version of Python used to build the installer,
       which is irrelevant - the installer filename is correct here (even to
       the point that when it's not there, any version is implied).
    4. The architecture must be taken from the installer filename, as it is
       not included in the egg-info data.
    5. Architecture-neutral installers still have an architecture because the
       installer format itself (being executable) is architecture-specific. We
       should therefore ignore the architecture if the content is pure-python.
    """
    # Validate the embedded egg-info directory name first, if one was found.
    egg_match = None
    if egginfo_name:
        egg_match = egg_info_re.search(egginfo_name)
        if egg_match is None:
            raise ValueError(f"Egg info filename {egginfo_name} is not valid")

    # 1. Distribution name: everything up to the first '-'.
    name, dash, remainder = wininfo_name.partition("-")
    if not dash:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Drop the trailing ".exe".
    remainder = remainder[:-4]

    # 2. Python version: the piece after the last '-', if it starts with 'py'.
    head, dash, pyver = remainder.rpartition("-")
    if dash and pyver.startswith("py"):
        remainder = head
        pyver = pyver.replace(".", "")
    else:
        # Not version specific - use py2.py3. While it is possible that
        # pure-Python code is not compatible with both Python 2 and 3, there
        # is no way of knowing from the wininst format, so we assume the best
        # here (the user can always manually rename the wheel to be more
        # restrictive if needed).
        pyver = "py2.py3"

    # 3. Version and architecture, split at the last '.'.
    version, dot, arch = remainder.rpartition(".")
    if not dot:
        raise ValueError(f"Installer filename {wininfo_name} is not valid")

    # Prefer the authoritative name/version from the egg-info data (note 2).
    if egg_match is not None:
        name = egg_match.group("name")
        version = egg_match.group("ver")

    return {"name": name, "ver": version, "arch": arch, "pyver": pyver}
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def wininst2wheel(path, dest_dir):
    """Convert a bdist_wininst installer (a renamed zip) into a wheel.

    The installer archive is unpacked into a temporary directory with its
    top-level PURELIB/PLATLIB/SCRIPTS/... prefixes rewritten to wheel layout,
    the egg-info metadata is converted to .dist-info, and the result is
    written to *dest_dir* as a .whl file.

    :param path: path to the .exe installer
    :param dest_dir: directory where the wheel is written
    """
    with zipfile.ZipFile(path) as bdw:
        # Search for egg-info in the archive
        egginfo_name = None
        for filename in bdw.namelist():
            if ".egg-info" in filename:
                egginfo_name = filename
                break

        info = parse_wininst_info(os.path.basename(path), egginfo_name)

        root_is_purelib = True
        for zipinfo in bdw.infolist():
            if zipinfo.filename.startswith("PLATLIB"):
                root_is_purelib = False
                break
        if root_is_purelib:
            paths = {"purelib": ""}
        else:
            paths = {"platlib": ""}

        dist_info = "{name}-{ver}".format(**info)
        datadir = "%s.data/" % dist_info

        # rewrite paths to trick ZipFile into extracting an egg
        # XXX grab wininst .ini - between .exe, padding, and first zip file.
        members = []
        egginfo_name = ""
        for zipinfo in bdw.infolist():
            key, basename = zipinfo.filename.split("/", 1)
            key = key.lower()
            basepath = paths.get(key, None)
            if basepath is None:
                # Non-library prefixes (SCRIPTS, DATA, ...) go under the
                # wheel's .data directory.
                basepath = datadir + key.lower() + "/"
            oldname = zipinfo.filename
            newname = basepath + basename
            zipinfo.filename = newname
            del bdw.NameToInfo[oldname]
            bdw.NameToInfo[newname] = zipinfo
            # Collect member names, but omit '' (from an entry like "PLATLIB/"
            if newname:
                members.append(newname)
            # Remember egg-info name for the egg2dist call below
            if not egginfo_name:
                if newname.endswith(".egg-info"):
                    egginfo_name = newname
                elif ".egg-info/" in newname:
                    egginfo_name, sep, _ = newname.rpartition("/")
        dir = tempfile.mkdtemp(suffix="_b2w")
        bdw.extractall(dir, members)

    # egg2wheel
    abi = "none"
    pyver = info["pyver"]
    arch = (info["arch"] or "any").replace(".", "_").replace("-", "_")
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = "any"
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != "any":
        pyver = pyver.replace("py", "cp")
    wheel_name = "-".join((dist_info, pyver, abi, arch))
    if root_is_purelib:
        bw = bdist_wheel(Distribution())
    else:
        bw = _bdist_wheel_tag(Distribution())

    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info["arch"] or "any"

    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)

    dist_info_dir = os.path.join(dir, "%s.dist-info" % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator="wininst2wheel")

    # BUGFIX: the ".whl" suffix was missing here.  WheelFile validates its
    # basename against the wheel naming pattern (which requires the .whl
    # extension), so opening the output file for writing raised WheelError.
    wheel_path = os.path.join(dest_dir, wheel_name + ".whl")
    with WheelFile(wheel_path, "w") as wf:
        wf.write_files(dir)

    shutil.rmtree(dir)
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
def convert(files, dest_dir, verbose):
    """Convert every installer matched by the glob patterns into a wheel.

    ``.egg`` files are handled by egg2wheel; anything else is assumed to be a
    bdist_wininst installer and handled by wininst2wheel.

    :param files: iterable of glob patterns selecting installers
    :param dest_dir: directory where wheels are written
    :param verbose: when true, report progress on stdout
    """
    for pattern in files:
        for installer in iglob(pattern):
            is_egg = os.path.splitext(installer)[1] == ".egg"
            converter = egg2wheel if is_egg else wininst2wheel

            if verbose:
                print(f"{installer}... ", flush=True)

            converter(installer, dest_dir)
            if verbose:
                print("OK")
|
.venv/Lib/site-packages/wheel/cli/pack.py
ADDED
|
@@ -0,0 +1,124 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os.path
|
| 4 |
+
import re
|
| 5 |
+
|
| 6 |
+
from wheel.cli import WheelError
|
| 7 |
+
from wheel.wheelfile import WheelFile
|
| 8 |
+
|
| 9 |
+
# Matches an unpacked "<name>-<version>.dist-info" directory name; the version
# must begin with a digit, and "namever" captures the combined name-version.
DIST_INFO_RE = re.compile(r"^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))\.dist-info$")
# Matches a digit-led "Build: N" tag at the end of WHEEL file content
# (no re.MULTILINE, so "$" anchors at the end of the whole byte string).
BUILD_NUM_RE = re.compile(rb"Build: (\d\w*)$")
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def pack(directory: str, dest_dir: str, build_number: str | None):
    """Repack a previously unpacked wheel directory into a new wheel file.

    The .dist-info/WHEEL file must contain one or more tags so that the target
    wheel file name can be determined.

    :param directory: The unpacked wheel directory
    :param dest_dir: Destination directory (defaults to the current directory)
    """
    # Locate the single .dist-info directory inside the unpacked tree.
    candidates = [
        entry
        for entry in os.listdir(directory)
        if DIST_INFO_RE.match(entry) and os.path.isdir(os.path.join(directory, entry))
    ]
    if len(candidates) > 1:
        raise WheelError(f"Multiple .dist-info directories found in {directory}")
    elif not candidates:
        raise WheelError(f"No .dist-info directories found in {directory}")

    # The "<name>-<version>" prefix of the target wheel filename.
    dist_info_dir = candidates[0]
    name_version = DIST_INFO_RE.match(dist_info_dir).group("namever")

    # Pull the tags and any existing build number out of .dist-info/WHEEL.
    wheel_file_path = os.path.join(directory, dist_info_dir, "WHEEL")
    with open(wheel_file_path, "rb") as f:
        tags, existing_build_number = read_tags(f.read())

    if not tags:
        raise WheelError(
            "No tags present in {}/WHEEL; cannot determine target wheel "
            "filename".format(dist_info_dir)
        )

    # An explicitly requested build number wins; otherwise keep the one
    # already recorded in the WHEEL file.
    if build_number is None:
        build_number = existing_build_number

    if build_number is not None:
        if build_number:
            name_version += "-" + build_number

        # Rewrite the WHEEL file in place only when the build tag changed.
        if build_number != existing_build_number:
            with open(wheel_file_path, "rb+") as f:
                updated = set_build_number(f.read(), build_number)
                f.seek(0)
                f.truncate()
                f.write(updated)

    # Reassemble the compressed tag triple for the filename.
    tagline = compute_tagline(tags)

    # Repack the directory into the destination wheel.
    wheel_path = os.path.join(dest_dir, f"{name_version}-{tagline}.whl")
    with WheelFile(wheel_path, "w") as wf:
        print(f"Repacking wheel as {wheel_path}...", end="", flush=True)
        wf.write_files(directory)

    print("OK")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
def read_tags(input_str: bytes) -> tuple[list[str], str | None]:
    """Extract the "Tag:" values and the "Build:" number from WHEEL content.

    :param input_str: Raw bytes of a .dist-info/WHEEL file
    :return: A tuple of (list of tag strings, build number or ``None``)
    """
    found_tags: list[str] = []
    build_number = None
    for raw_line in input_str.splitlines():
        if raw_line.startswith(b"Tag: "):
            value = raw_line.split(b" ")[1].rstrip().decode("ascii")
            found_tags.append(value)
        elif raw_line.startswith(b"Build: "):
            build_number = raw_line.split(b" ")[1].rstrip().decode("ascii")

    return found_tags, build_number
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def set_build_number(wheel_file_content: bytes, build_number: str | None) -> bytes:
    """Add, replace or remove the "Build:" tag in WHEEL file content.

    A falsy *build_number* removes any existing trailing build tag; otherwise
    the existing tag is replaced, or a new one appended if none is present.

    :param wheel_file_content: The contents of .dist-info/WHEEL
    :param build_number: The build number to record, or a falsy value
    :return: The (modified) contents of .dist-info/WHEEL
    """
    if build_number:
        replacement = f"Build: {build_number}\r\n".encode("ascii")
    else:
        replacement = b""

    # Swap out an existing trailing "Build:" line, if any.
    updated, replaced_count = BUILD_NUM_RE.subn(replacement, wheel_file_content)
    if replaced_count == 0:
        # No existing tag: append the new one (no-op when removing).
        updated += replacement

    return updated
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def compute_tagline(tags: list[str]) -> str:
    """Compress a list of "impl-abi-platform" tags into one tagline.

    Each of the three components is deduplicated, sorted and joined with
    ".", and the resulting groups are joined with "-".

    :param tags: A list of tags
    :return: A tagline
    """
    split_tags = [tag.split("-") for tag in tags]
    groups = []
    for index in range(3):
        component = sorted({fields[index] for fields in split_tags})
        groups.append(".".join(component))
    return "-".join(groups)
|
.venv/Lib/site-packages/wheel/cli/unpack.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from pathlib import Path
|
| 4 |
+
|
| 5 |
+
from ..wheelfile import WheelFile
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def unpack(path: str, dest: str = ".") -> None:
    """Extract a wheel archive into a directory.

    The contents are written to {dest}/{name}-{ver}, where {name} is the
    package name and {ver} its version.

    :param path: The path to the wheel.
    :param dest: Destination directory (default to current directory).
    """
    with WheelFile(path) as wheel:
        name_version = wheel.parsed_filename.group("namever")
        target_dir = Path(dest) / name_version
        print(f"Unpacking to: {target_dir}...", end="", flush=True)
        for member in wheel.filelist:
            wheel.extract(member, target_dir)

            # Restore the permission bits recorded in the archive; ZipFile
            # does not apply them itself, due to
            # https://github.com/python/cpython/issues/59999
            mode = member.external_attr >> 16 & 0o777
            (target_dir / member.filename).chmod(mode)

    print("OK")
|
.venv/Lib/site-packages/wheel/macosx_libfile.py
ADDED
|
@@ -0,0 +1,471 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module contains function to analyse dynamic library
|
| 3 |
+
headers to extract system information
|
| 4 |
+
|
| 5 |
+
Currently only for MacOSX
|
| 6 |
+
|
| 7 |
+
Library file on macosx system starts with Mach-O or Fat field.
|
| 8 |
+
This can be distinguish by first 32 bites and it is called magic number.
|
| 9 |
+
Proper value of magic number is with suffix _MAGIC. Suffix _CIGAM means
|
| 10 |
+
reversed bytes order.
|
| 11 |
+
Both fields can occur in two types: 32 and 64 bytes.
|
| 12 |
+
|
| 13 |
+
FAT field inform that this library contains few version of library
|
| 14 |
+
(typically for different types version). It contains
|
| 15 |
+
information where Mach-O headers starts.
|
| 16 |
+
|
| 17 |
+
Each section started with Mach-O header contains one library
|
| 18 |
+
(So if file starts with this field it contains only one version).
|
| 19 |
+
|
| 20 |
+
After filed Mach-O there are section fields.
|
| 21 |
+
Each of them starts with two fields:
|
| 22 |
+
cmd - magic number for this command
|
| 23 |
+
cmdsize - total size occupied by this section information.
|
| 24 |
+
|
| 25 |
+
In this case only sections LC_VERSION_MIN_MACOSX (for macosx 10.13 and earlier)
|
| 26 |
+
and LC_BUILD_VERSION (for macosx 10.14 and newer) are interesting,
|
| 27 |
+
because them contains information about minimal system version.
|
| 28 |
+
|
| 29 |
+
Important remarks:
|
| 30 |
+
- For fat files this implementation looks for maximum number version.
|
| 31 |
+
It not check if it is 32 or 64 and do not compare it with currently built package.
|
| 32 |
+
So it is possible to false report higher version that needed.
|
| 33 |
+
- All structures signatures are taken form macosx header files.
|
| 34 |
+
- I think that binary format will be more stable than `otool` output.
|
| 35 |
+
and if apple introduce some changes both implementation will need to be updated.
|
| 36 |
+
- The system compile will set the deployment target no lower than
|
| 37 |
+
11.0 for arm64 builds. For "Universal 2" builds use the x86_64 deployment
|
| 38 |
+
target when the arm64 target is 11.0.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
from __future__ import annotations
|
| 42 |
+
|
| 43 |
+
import ctypes
|
| 44 |
+
import os
|
| 45 |
+
import sys
|
| 46 |
+
|
| 47 |
+
"""here the needed const and struct from mach-o header files"""
|
| 48 |
+
|
| 49 |
+
# Magic numbers that open a fat (multi-architecture) or Mach-O file.
# *_MAGIC is the value in the reader's byte order; *_CIGAM is the same value
# byte-swapped, i.e. the file was written with the opposite endianness.
FAT_MAGIC = 0xCAFEBABE
FAT_CIGAM = 0xBEBAFECA
FAT_MAGIC_64 = 0xCAFEBABF
FAT_CIGAM_64 = 0xBFBAFECA
MH_MAGIC = 0xFEEDFACE
MH_CIGAM = 0xCEFAEDFE
MH_MAGIC_64 = 0xFEEDFACF
MH_CIGAM_64 = 0xCFFAEDFE

# Load-command identifiers for the two commands that record the minimum
# supported macOS version (pre-10.14 and 10.14+ respectively).
LC_VERSION_MIN_MACOSX = 0x24
LC_BUILD_VERSION = 0x32

# cputype value identifying an arm64 slice in a fat binary.
CPU_TYPE_ARM64 = 0x0100000C

# Each *_fields list below is a ctypes _fields_ layout mirroring the C struct
# quoted in the string literal that follows it (taken from mach-o headers).
mach_header_fields = [
    ("magic", ctypes.c_uint32),
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("filetype", ctypes.c_uint32),
    ("ncmds", ctypes.c_uint32),
    ("sizeofcmds", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
"""
struct mach_header {
    uint32_t magic; /* mach magic number identifier */
    cpu_type_t cputype; /* cpu specifier */
    cpu_subtype_t cpusubtype; /* machine specifier */
    uint32_t filetype; /* type of file */
    uint32_t ncmds; /* number of load commands */
    uint32_t sizeofcmds; /* the size of all the load commands */
    uint32_t flags; /* flags */
};
typedef integer_t cpu_type_t;
typedef integer_t cpu_subtype_t;
"""

mach_header_fields_64 = mach_header_fields + [("reserved", ctypes.c_uint32)]
"""
struct mach_header_64 {
    uint32_t magic; /* mach magic number identifier */
    cpu_type_t cputype; /* cpu specifier */
    cpu_subtype_t cpusubtype; /* machine specifier */
    uint32_t filetype; /* type of file */
    uint32_t ncmds; /* number of load commands */
    uint32_t sizeofcmds; /* the size of all the load commands */
    uint32_t flags; /* flags */
    uint32_t reserved; /* reserved */
};
"""

fat_header_fields = [("magic", ctypes.c_uint32), ("nfat_arch", ctypes.c_uint32)]
"""
struct fat_header {
    uint32_t magic; /* FAT_MAGIC or FAT_MAGIC_64 */
    uint32_t nfat_arch; /* number of structs that follow */
};
"""

fat_arch_fields = [
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("offset", ctypes.c_uint32),
    ("size", ctypes.c_uint32),
    ("align", ctypes.c_uint32),
]
"""
struct fat_arch {
    cpu_type_t cputype; /* cpu specifier (int) */
    cpu_subtype_t cpusubtype; /* machine specifier (int) */
    uint32_t offset; /* file offset to this object file */
    uint32_t size; /* size of this object file */
    uint32_t align; /* alignment as a power of 2 */
};
"""

fat_arch_64_fields = [
    ("cputype", ctypes.c_int),
    ("cpusubtype", ctypes.c_int),
    ("offset", ctypes.c_uint64),
    ("size", ctypes.c_uint64),
    ("align", ctypes.c_uint32),
    ("reserved", ctypes.c_uint32),
]
"""
struct fat_arch_64 {
    cpu_type_t cputype; /* cpu specifier (int) */
    cpu_subtype_t cpusubtype; /* machine specifier (int) */
    uint64_t offset; /* file offset to this object file */
    uint64_t size; /* size of this object file */
    uint32_t align; /* alignment as a power of 2 */
    uint32_t reserved; /* reserved */
};
"""

segment_base_fields = [("cmd", ctypes.c_uint32), ("cmdsize", ctypes.c_uint32)]
"""base for reading segment info"""

segment_command_fields = [
    ("cmd", ctypes.c_uint32),
    ("cmdsize", ctypes.c_uint32),
    ("segname", ctypes.c_char * 16),
    ("vmaddr", ctypes.c_uint32),
    ("vmsize", ctypes.c_uint32),
    ("fileoff", ctypes.c_uint32),
    ("filesize", ctypes.c_uint32),
    ("maxprot", ctypes.c_int),
    ("initprot", ctypes.c_int),
    ("nsects", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
"""
struct segment_command { /* for 32-bit architectures */
    uint32_t cmd; /* LC_SEGMENT */
    uint32_t cmdsize; /* includes sizeof section structs */
    char segname[16]; /* segment name */
    uint32_t vmaddr; /* memory address of this segment */
    uint32_t vmsize; /* memory size of this segment */
    uint32_t fileoff; /* file offset of this segment */
    uint32_t filesize; /* amount to map from the file */
    vm_prot_t maxprot; /* maximum VM protection */
    vm_prot_t initprot; /* initial VM protection */
    uint32_t nsects; /* number of sections in segment */
    uint32_t flags; /* flags */
};
typedef int vm_prot_t;
"""

segment_command_fields_64 = [
    ("cmd", ctypes.c_uint32),
    ("cmdsize", ctypes.c_uint32),
    ("segname", ctypes.c_char * 16),
    ("vmaddr", ctypes.c_uint64),
    ("vmsize", ctypes.c_uint64),
    ("fileoff", ctypes.c_uint64),
    ("filesize", ctypes.c_uint64),
    ("maxprot", ctypes.c_int),
    ("initprot", ctypes.c_int),
    ("nsects", ctypes.c_uint32),
    ("flags", ctypes.c_uint32),
]
"""
struct segment_command_64 { /* for 64-bit architectures */
    uint32_t cmd; /* LC_SEGMENT_64 */
    uint32_t cmdsize; /* includes sizeof section_64 structs */
    char segname[16]; /* segment name */
    uint64_t vmaddr; /* memory address of this segment */
    uint64_t vmsize; /* memory size of this segment */
    uint64_t fileoff; /* file offset of this segment */
    uint64_t filesize; /* amount to map from the file */
    vm_prot_t maxprot; /* maximum VM protection */
    vm_prot_t initprot; /* initial VM protection */
    uint32_t nsects; /* number of sections in segment */
    uint32_t flags; /* flags */
};
"""

version_min_command_fields = segment_base_fields + [
    ("version", ctypes.c_uint32),
    ("sdk", ctypes.c_uint32),
]
"""
struct version_min_command {
    uint32_t cmd; /* LC_VERSION_MIN_MACOSX or
                     LC_VERSION_MIN_IPHONEOS or
                     LC_VERSION_MIN_WATCHOS or
                     LC_VERSION_MIN_TVOS */
    uint32_t cmdsize; /* sizeof(struct min_version_command) */
    uint32_t version; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
    uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
};
"""

build_version_command_fields = segment_base_fields + [
    ("platform", ctypes.c_uint32),
    ("minos", ctypes.c_uint32),
    ("sdk", ctypes.c_uint32),
    ("ntools", ctypes.c_uint32),
]
"""
struct build_version_command {
    uint32_t cmd; /* LC_BUILD_VERSION */
    uint32_t cmdsize; /* sizeof(struct build_version_command) plus */
    /* ntools * sizeof(struct build_tool_version) */
    uint32_t platform; /* platform */
    uint32_t minos; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
    uint32_t sdk; /* X.Y.Z is encoded in nibbles xxxx.yy.zz */
    uint32_t ntools; /* number of tool entries following this */
};
"""
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
def swap32(x):
    """Return the 32-bit integer *x* with its four bytes reversed."""
    b0 = (x >> 24) & 0xFF
    b1 = (x >> 16) & 0xFF
    b2 = (x >> 8) & 0xFF
    b3 = x & 0xFF
    return (b3 << 24) | (b2 << 16) | (b1 << 8) | b0
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
def get_base_class_and_magic_number(lib_file, seek=None):
    """Peek at the magic number at *seek* (default: current position) and
    choose the ctypes structure base class matching the file's byte order.

    A byte-swapped (CIGAM) magic is normalised to its MAGIC counterpart, and
    the file position is restored to *seek* before returning.

    :param lib_file: binary file object positioned inside a Mach-O/fat file
    :param seek: absolute offset of the header, or None for the current offset
    :return: (ctypes structure base class, normalised magic number)
    """
    if seek is None:
        seek = lib_file.tell()
    else:
        lib_file.seek(seek)

    raw = lib_file.read(ctypes.sizeof(ctypes.c_uint32))
    magic_number = ctypes.c_uint32.from_buffer_copy(raw).value

    # A CIGAM magic means the header was written in the opposite byte order
    # from the machine we are running on.
    if magic_number in (FAT_CIGAM, FAT_CIGAM_64, MH_CIGAM, MH_CIGAM_64):
        if sys.byteorder == "little":
            BaseClass = ctypes.BigEndianStructure
        else:
            BaseClass = ctypes.LittleEndianStructure
        magic_number = swap32(magic_number)
    else:
        BaseClass = ctypes.Structure

    lib_file.seek(seek)
    return BaseClass, magic_number
|
| 272 |
+
|
| 273 |
+
|
| 274 |
+
def read_data(struct_class, lib_file):
    """Read exactly one *struct_class* record from *lib_file* and return it."""
    record_size = ctypes.sizeof(struct_class)
    raw = lib_file.read(record_size)
    return struct_class.from_buffer_copy(raw)
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
def extract_macosx_min_system_version(path_to_lib):
    """Return the minimum macOS version required by the library at
    *path_to_lib* as an ``(x, y, z)`` tuple, or ``None`` when it cannot be
    determined.

    Both single-architecture Mach-O files and fat (multi-architecture)
    binaries are handled; for fat binaries the maximum version across the
    embedded architectures is returned.

    :param path_to_lib: path to a compiled dynamic library
    """
    with open(path_to_lib, "rb") as lib_file:
        BaseClass, magic_number = get_base_class_and_magic_number(lib_file, 0)
        if magic_number not in [FAT_MAGIC, FAT_MAGIC_64, MH_MAGIC, MH_MAGIC_64]:
            return

        # BUGFIX: this previously tested ``FAT_CIGAM_64`` instead of
        # ``FAT_MAGIC_64``.  get_base_class_and_magic_number() already
        # normalises every CIGAM magic to its MAGIC counterpart, so
        # FAT_CIGAM_64 could never appear here and 64-bit fat binaries fell
        # through to the plain Mach-O branch and were mis-parsed.
        if magic_number in [FAT_MAGIC, FAT_MAGIC_64]:

            class FatHeader(BaseClass):
                _fields_ = fat_header_fields

            fat_header = read_data(FatHeader, lib_file)
            # 32-bit and 64-bit fat files use different fat_arch layouts.
            if magic_number == FAT_MAGIC:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_fields

            else:

                class FatArch(BaseClass):
                    _fields_ = fat_arch_64_fields

            fat_arch_list = [
                read_data(FatArch, lib_file) for _ in range(fat_header.nfat_arch)
            ]

            versions_list = []
            for el in fat_arch_list:
                try:
                    version = read_mach_header(lib_file, el.offset)
                    if version is not None:
                        if el.cputype == CPU_TYPE_ARM64 and len(fat_arch_list) != 1:
                            # Xcode will not set the deployment target below 11.0.0
                            # for the arm64 architecture. Ignore the arm64 deployment
                            # in fat binaries when the target is 11.0.0, that way
                            # the other architectures can select a lower deployment
                            # target.
                            # This is safe because there is no arm64 variant for
                            # macOS 10.15 or earlier.
                            if version == (11, 0, 0):
                                continue

                        versions_list.append(version)
                except ValueError:
                    # Malformed slice: skip it and keep scanning the others.
                    pass

            if len(versions_list) > 0:
                return max(versions_list)
            else:
                return None

        else:
            try:
                return read_mach_header(lib_file, 0)
            except ValueError:
                # Malformed Mach-O header: report "unknown".
                return None
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
def read_mach_header(lib_file, seek=None):
    """
    Parse one Mach-O header and extract the minimal supported system version.

    Walks the load commands following the header and returns the version
    recorded by the first LC_VERSION_MIN_MACOSX or LC_BUILD_VERSION command,
    as an (x, y, z) tuple; returns None implicitly when neither command is
    present.

    :param lib_file: reference to opened library file with pointer
    :param seek: absolute offset of the Mach-O header, or None to read from
        the current file position
    """
    if seek is not None:
        lib_file.seek(seek)
    # Peeks the magic without consuming it; also picks the endian-correct base.
    base_class, magic_number = get_base_class_and_magic_number(lib_file)
    arch = "32" if magic_number == MH_MAGIC else "64"

    # Minimal two-field prefix (cmd, cmdsize) shared by every load command.
    class SegmentBase(base_class):
        _fields_ = segment_base_fields

    if arch == "32":

        class MachHeader(base_class):
            _fields_ = mach_header_fields

    else:

        class MachHeader(base_class):
            _fields_ = mach_header_fields_64

    mach_header = read_data(MachHeader, lib_file)
    for _i in range(mach_header.ncmds):
        # Remember the start of this command so we can re-read it as a
        # full structure, or skip it by cmdsize.
        pos = lib_file.tell()
        segment_base = read_data(SegmentBase, lib_file)
        lib_file.seek(pos)
        if segment_base.cmd == LC_VERSION_MIN_MACOSX:

            class VersionMinCommand(base_class):
                _fields_ = version_min_command_fields

            version_info = read_data(VersionMinCommand, lib_file)
            return parse_version(version_info.version)
        elif segment_base.cmd == LC_BUILD_VERSION:

            class VersionBuild(base_class):
                _fields_ = build_version_command_fields

            version_info = read_data(VersionBuild, lib_file)
            return parse_version(version_info.minos)
        else:
            # Not a version command: jump over it to the next one.
            lib_file.seek(pos + segment_base.cmdsize)
            continue
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
def parse_version(version):
    """Decode a packed Mach-O version integer into an (x, y, z) tuple.

    The value is encoded as ``xxxx.yy.zz``: a 16-bit major component
    followed by two 8-bit components.
    """
    major = version >> 16 & 0xFFFF
    minor = version >> 8 & 0xFF
    patch = version & 0xFF
    return major, minor, patch
|
| 390 |
+
|
| 391 |
+
|
| 392 |
+
def calculate_macosx_platform_tag(archive_root, platform_tag):
    """
    Calculate proper macosx platform tag basing on files which are included to wheel

    Example platform tag `macosx-10.14-x86_64`

    :param archive_root: directory tree scanned for ``.dylib``/``.so`` files
        whose minimum deployment targets may raise the tag
    :param platform_tag: base tag of the form ``macosx-<version>-<arch>``
    :return: normalized tag of the form ``macosx_<x>_<y>_<arch>``
    """
    prefix, base_version, suffix = platform_tag.split("-")
    base_version = tuple(int(x) for x in base_version.split("."))
    base_version = base_version[:2]
    # From macOS 11 on, only the major number is meaningful in the tag.
    if base_version[0] > 10:
        base_version = (base_version[0], 0)
    assert len(base_version) == 2
    if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
        deploy_target = tuple(
            int(x) for x in os.environ["MACOSX_DEPLOYMENT_TARGET"].split(".")
        )
        deploy_target = deploy_target[:2]
        if deploy_target[0] > 10:
            deploy_target = (deploy_target[0], 0)
        if deploy_target < base_version:
            # The env var cannot lower the tag below the interpreter's build.
            sys.stderr.write(
                "[WARNING] MACOSX_DEPLOYMENT_TARGET is set to a lower value ({}) than "
                "the version on which the Python interpreter was compiled ({}), and "
                "will be ignored.\n".format(
                    ".".join(str(x) for x in deploy_target),
                    ".".join(str(x) for x in base_version),
                )
            )
        else:
            base_version = deploy_target

    assert len(base_version) == 2
    start_version = base_version
    # Map of library path -> (major, minor) minimum deployment target.
    versions_dict = {}
    for dirpath, _dirnames, filenames in os.walk(archive_root):
        for filename in filenames:
            if filename.endswith(".dylib") or filename.endswith(".so"):
                lib_path = os.path.join(dirpath, filename)
                min_ver = extract_macosx_min_system_version(lib_path)
                if min_ver is not None:
                    min_ver = min_ver[0:2]
                    if min_ver[0] > 10:
                        min_ver = (min_ver[0], 0)
                    versions_dict[lib_path] = min_ver

    if len(versions_dict) > 0:
        base_version = max(base_version, max(versions_dict.values()))

    # macosx platform tag do not support minor bugfix release
    fin_base_version = "_".join([str(x) for x in base_version])
    if start_version < base_version:
        problematic_files = [k for k, v in versions_dict.items() if v > start_version]
        # BUG FIX: pick singular/plural from the number of offending files.
        # Previously len() was applied to the "\n"-joined *string*, so
        # "this file" was only selected when the listing was one character.
        if len(problematic_files) == 1:
            files_form = "this file"
        else:
            files_form = "these files"
        problematic_files = "\n".join(problematic_files)
        error_message = (
            "[WARNING] This wheel needs a higher macOS version than {} "
            "To silence this warning, set MACOSX_DEPLOYMENT_TARGET to at least "
            + fin_base_version
            + " or recreate "
            + files_form
            + " with lower "
            "MACOSX_DEPLOYMENT_TARGET: \n" + problematic_files
        )

        if "MACOSX_DEPLOYMENT_TARGET" in os.environ:
            error_message = error_message.format(
                "is set in MACOSX_DEPLOYMENT_TARGET variable."
            )
        else:
            error_message = error_message.format(
                "the version your Python interpreter is compiled against."
            )

        sys.stderr.write(error_message)

    platform_tag = prefix + "_" + fin_base_version + "_" + suffix
    return platform_tag
|
.venv/Lib/site-packages/wheel/metadata.py
ADDED
|
@@ -0,0 +1,179 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tools for converting old- to new-style metadata.
|
| 3 |
+
"""
|
| 4 |
+
from __future__ import annotations
|
| 5 |
+
|
| 6 |
+
import functools
|
| 7 |
+
import itertools
|
| 8 |
+
import os.path
|
| 9 |
+
import re
|
| 10 |
+
import textwrap
|
| 11 |
+
from email.message import Message
|
| 12 |
+
from email.parser import Parser
|
| 13 |
+
from typing import Iterator
|
| 14 |
+
|
| 15 |
+
from .vendored.packaging.requirements import Requirement
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def _nonblank(str):
|
| 19 |
+
return str and not str.startswith("#")
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@functools.singledispatch
def yield_lines(iterable):
    r"""
    Yield valid lines of a string or iterable.

    Strings are split into stripped lines with blank and ``#``-comment
    lines removed; nested iterables are flattened recursively.

    >>> list(yield_lines(''))
    []
    >>> list(yield_lines(['foo', 'bar']))
    ['foo', 'bar']
    >>> list(yield_lines('foo\nbar'))
    ['foo', 'bar']
    >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
    ['foo', 'baz #comment']
    >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
    ['foo', 'bar', 'baz', 'bing']
    """
    return itertools.chain.from_iterable(map(yield_lines, iterable))


@yield_lines.register(str)
def _(text):
    # Strings are leaves of the recursion: strip, then drop blanks/comments.
    for raw_line in text.splitlines():
        line = raw_line.strip()
        if line and not line.startswith("#"):
            yield line
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_lines = []
    for entry in yield_lines(s):
        if not entry.startswith("["):
            current_lines.append(entry)
            continue
        if not entry.endswith("]"):
            raise ValueError("Invalid section heading", entry)
        # A new header: flush whatever has accumulated so far.
        if current_section or current_lines:
            yield current_section, current_lines
        current_section = entry[1:-1].strip()
        current_lines = []

    # Emit the final segment.
    yield current_section, current_lines
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    sanitized = re.sub("[^A-Za-z0-9.-]+", "_", extra)
    return sanitized.lower()
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    pattern = re.compile(r"[^A-Za-z0-9.]+")
    return pattern.sub("-", name)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
def requires_to_requires_dist(requirement: Requirement) -> str:
    """Return the version specifier for a requirement in PEP 345/566 fashion."""
    # A direct URL reference takes precedence over any version specifiers.
    if getattr(requirement, "url", None):
        return " @ " + requirement.url

    specifiers = sorted(
        spec.operator + spec.version for spec in requirement.specifier
    )
    if not specifiers:
        return ""
    return " " + ",".join(specifiers)
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
def convert_requirements(requirements: list[str]) -> Iterator[str]:
    """Yield Requires-Dist: strings for parsed requirements strings."""
    for raw in requirements:
        parsed = Requirement(raw)
        version_spec = requires_to_requires_dist(parsed)
        extra_names = sorted(safe_extra(extra) for extra in parsed.extras)
        if extra_names:
            extra_part = f"[{','.join(extra_names)}]"
        else:
            extra_part = ""

        yield safe_name(parsed.name) + extra_part + version_spec
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
def generate_requirements(
    extras_require: dict[str, list[str]]
) -> Iterator[tuple[str, str]]:
    """
    Convert requirements from a setup()-style dictionary to
    ('Requires-Dist', 'requirement') and ('Provides-Extra', 'extra') tuples.

    extras_require is a dictionary of {extra: [requirements]} as passed to setup(),
    using the empty extra {'': [requirements]} to hold install_requires.
    """
    for extra, depends in extras_require.items():
        condition = ""
        extra = extra or ""
        if ":" in extra:  # setuptools extra:condition syntax
            extra, condition = extra.split(":", 1)

        extra = safe_extra(extra)
        if extra:
            # Declare the extra before any requirement that belongs to it.
            yield "Provides-Extra", extra
            if condition:
                # Combine the environment-marker condition with the extra.
                condition = "(" + condition + ") and "
            condition += "extra == '%s'" % extra

        if condition:
            condition = " ; " + condition

        for new_req in convert_requirements(depends):
            yield "Requires-Dist", new_req + condition
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def pkginfo_to_metadata(egg_info_path: str, pkginfo_path: str) -> Message:
    """
    Convert .egg-info directory with PKG-INFO to the Metadata 2.1 format

    :param egg_info_path: path of the ``.egg-info`` directory (used to
        locate ``requires.txt``)
    :param pkginfo_path: path of the ``PKG-INFO`` file to read
    :return: the updated metadata as an ``email.message.Message``
    """
    with open(pkginfo_path, encoding="utf-8") as headers:
        pkg_info = Parser().parse(headers)

    pkg_info.replace_header("Metadata-Version", "2.1")
    # Those will be regenerated from `requires.txt`.
    del pkg_info["Provides-Extra"]
    del pkg_info["Requires-Dist"]
    requires_path = os.path.join(egg_info_path, "requires.txt")
    if os.path.exists(requires_path):
        with open(requires_path, encoding="utf-8") as requires_file:
            requires = requires_file.read()

        # Sort sections so the unconditional (None) section comes first.
        parsed_requirements = sorted(split_sections(requires), key=lambda x: x[0] or "")
        for extra, reqs in parsed_requirements:
            for key, value in generate_requirements({extra: reqs}):
                # Avoid emitting duplicate header lines.
                if (key, value) not in pkg_info.items():
                    pkg_info[key] = value

    # Metadata 2.1 carries the long description in the message body, not in
    # a Description header.
    description = pkg_info["Description"]
    if description:
        description_lines = pkg_info["Description"].splitlines()
        dedented_description = "\n".join(
            # if the first line of long_description is blank,
            # the first line here will be indented.
            (
                description_lines[0].lstrip(),
                textwrap.dedent("\n".join(description_lines[1:])),
                "\n",
            )
        )
        pkg_info.set_payload(dedented_description)
        del pkg_info["Description"]

    return pkg_info
|
.venv/Lib/site-packages/wheel/util.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import base64
|
| 4 |
+
import logging
|
| 5 |
+
|
| 6 |
+
log = logging.getLogger("wheel")
|
| 7 |
+
|
| 8 |
+
# ensure Python logging is configured
|
| 9 |
+
try:
|
| 10 |
+
__import__("setuptools.logging")
|
| 11 |
+
except ImportError:
|
| 12 |
+
# setuptools < ??
|
| 13 |
+
from . import _setuptools_logging
|
| 14 |
+
|
| 15 |
+
_setuptools_logging.configure()
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
def urlsafe_b64encode(data: bytes) -> bytes:
    """urlsafe_b64encode without padding"""
    encoded = base64.urlsafe_b64encode(data)
    return encoded.rstrip(b"=")
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
def urlsafe_b64decode(data: bytes) -> bytes:
    """urlsafe_b64decode without padding"""
    # Restore the padding that urlsafe_b64encode() stripped off.
    padding = b"=" * (4 - (len(data) & 3))
    padded = data + padding
    return base64.urlsafe_b64decode(padded)
|
.venv/Lib/site-packages/wheel/vendored/__init__.py
ADDED
|
File without changes
|
.venv/Lib/site-packages/wheel/vendored/packaging/__init__.py
ADDED
|
File without changes
|
.venv/Lib/site-packages/wheel/vendored/packaging/_elffile.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ELF file parser.
|
| 3 |
+
|
| 4 |
+
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
| 5 |
+
interface to ``ZipFile``. Only the read interface is implemented.
|
| 6 |
+
|
| 7 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
| 8 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
import enum
|
| 12 |
+
import os
|
| 13 |
+
import struct
|
| 14 |
+
from typing import IO, Optional, Tuple
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ELFInvalid(ValueError):
    """Raised when a file is not a well-formed ELF binary."""
    pass


class EIClass(enum.IntEnum):
    """``e_ident[EI_CLASS]``: the word size (bitness) of the binary."""
    C32 = 1
    C64 = 2


class EIData(enum.IntEnum):
    """``e_ident[EI_DATA]``: the byte order (endianness) of the binary."""
    Lsb = 1
    Msb = 2


class EMachine(enum.IntEnum):
    """``e_machine`` values for the architectures this module recognizes."""
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class ELFFile:
    """
    Representation of an ELF executable.

    Parses just enough of the ELF header on construction to expose the
    machine type, flags, and program-header table location; raises
    ``ELFInvalid`` when the stream is not a valid ELF binary.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            # e_ident: 16 identification bytes at the start of every ELF file.
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        # Read exactly the bytes required by *fmt* from the current position.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.

        Returns ``None`` when no ``PT_INTERP`` entry exists (e.g. a
        statically linked executable).
        """
        for index in range(self._e_phnum):
            # Seek to the index-th entry of the program header table.
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            # Read the NUL-terminated interpreter path from its file offset.
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
|
.venv/Lib/site-packages/wheel/vendored/packaging/_manylinux.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import collections
|
| 2 |
+
import contextlib
|
| 3 |
+
import functools
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import warnings
|
| 8 |
+
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Tuple
|
| 9 |
+
|
| 10 |
+
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
| 11 |
+
|
| 12 |
+
EF_ARM_ABIMASK = 0xFF000000
|
| 13 |
+
EF_ARM_ABI_VER5 = 0x05000000
|
| 14 |
+
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    # Yield a parsed ELFFile for *path*, or None when the file cannot be
    # opened or is not a valid ELF binary.
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        yield None
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _is_linux_armhf(executable: str) -> bool:
    # hard-float ABI can be detected from the ELF header of the running
    # process
    # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    with _parse_elf(executable) as f:
        # 32-bit little-endian ARM with EABI v5 flags and the hard-float bit.
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.Arm
            and f.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
            and f.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        )
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _is_linux_i686(executable: str) -> bool:
    # True when *executable* is a 32-bit little-endian x86 ELF binary.
    with _parse_elf(executable) as f:
        return (
            f is not None
            and f.capacity == EIClass.C32
            and f.encoding == EIData.Lsb
            and f.machine == EMachine.I386
        )
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
def _have_compatible_abi(executable: str, arch: str) -> bool:
    """Check whether the interpreter binary's ELF ABI matches *arch*."""
    # The 32-bit architectures need an ELF-header inspection; the 64-bit
    # ones are accepted from the architecture name alone.
    inspectors = {
        "armv7l": _is_linux_armhf,
        "i686": _is_linux_i686,
    }
    inspector = inspectors.get(arch)
    if inspector is not None:
        return inspector(executable)
    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
# If glibc ever changes its major version, we need to know what the last
|
| 60 |
+
# minor version was, so we can build the complete list of all versions.
|
| 61 |
+
# For now, guess what the highest minor version might be, assume it will
|
| 62 |
+
# be 50 for testing. Once this actually happens, update the dictionary
|
| 63 |
+
# with the actual value.
|
| 64 |
+
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class _GLibCVersion(NamedTuple):
|
| 68 |
+
major: int
|
| 69 |
+
minor: int
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
def _glibc_version_string_confstr() -> Optional[str]:
|
| 73 |
+
"""
|
| 74 |
+
Primary implementation of glibc_version_string using os.confstr.
|
| 75 |
+
"""
|
| 76 |
+
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
| 77 |
+
# to be broken or missing. This strategy is used in the standard library
|
| 78 |
+
# platform module.
|
| 79 |
+
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
| 80 |
+
try:
|
| 81 |
+
# Should be a string like "glibc 2.17".
|
| 82 |
+
version_string: str = getattr(os, "confstr")("CS_GNU_LIBC_VERSION")
|
| 83 |
+
assert version_string is not None
|
| 84 |
+
_, version = version_string.rsplit()
|
| 85 |
+
except (AssertionError, AttributeError, OSError, ValueError):
|
| 86 |
+
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
| 87 |
+
return None
|
| 88 |
+
return version
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.

    Returns the glibc version string (e.g. ``"2.17"``), or ``None`` when
    ctypes is unavailable or the process is not linked against glibc.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    # Prefer the fast confstr path; fall back to ctypes when it yields nothing.
    confstr_version = _glibc_version_string_confstr()
    if confstr_version:
        return confstr_version
    return _glibc_version_string_ctypes()
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
| 141 |
+
"""Parse glibc version.
|
| 142 |
+
|
| 143 |
+
We use a regexp instead of str.split because we want to discard any
|
| 144 |
+
random junk that might come after the minor version -- this might happen
|
| 145 |
+
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
| 146 |
+
uses version strings like "2.20-2014.11"). See gh-3588.
|
| 147 |
+
"""
|
| 148 |
+
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
| 149 |
+
if not m:
|
| 150 |
+
warnings.warn(
|
| 151 |
+
f"Expected glibc version with 2 components major.minor,"
|
| 152 |
+
f" got: {version_str}",
|
| 153 |
+
RuntimeWarning,
|
| 154 |
+
)
|
| 155 |
+
return -1, -1
|
| 156 |
+
return int(m.group("major")), int(m.group("minor"))
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    # Cached lookup of the running glibc version; (-1, -1) means "no glibc".
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
# From PEP 513, PEP 600
|
| 168 |
+
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
    # Decide whether the manylinux tag *name* (requiring glibc *version* on
    # *arch*) is compatible with this system, honoring the optional
    # _manylinux override module described in PEP 513 / PEP 600.
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux  # noqa
    except ImportError:
        # No override module installed: the glibc check above is decisive.
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 style override: a callable that may return None to defer.
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-tag boolean attributes (PEP 513 / 571 / 599).
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
_LEGACY_MANYLINUX_MAP = {
|
| 195 |
+
# CentOS 7 w/ glibc 2.17 (PEP 599)
|
| 196 |
+
(2, 17): "manylinux2014",
|
| 197 |
+
# CentOS 6 w/ glibc 2.12 (PEP 571)
|
| 198 |
+
(2, 12): "manylinux2010",
|
| 199 |
+
# CentOS 5 w/ glibc 2.5 (PEP 513)
|
| 200 |
+
(2, 5): "manylinux1",
|
| 201 |
+
}
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def platform_tags(linux: str, arch: str) -> Iterator[str]:
    # Yield every manylinux tag (newest first) supported by the running
    # interpreter's glibc for *arch*, derived from the base *linux* tag.
    if not _have_compatible_abi(sys.executable, arch):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if arch in {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for glibc_max in glibc_max_list:
        if glibc_max.major == too_old_glibc2.major:
            min_minor = too_old_glibc2.minor
        else:
            # For other glibc major versions oldest supported is (x, 0).
            min_minor = -1
        for glibc_minor in range(glibc_max.minor, min_minor, -1):
            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
            tag = "manylinux_{}_{}".format(*glibc_version)
            if _is_compatible(tag, arch, glibc_version):
                yield linux.replace("linux", tag)
            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
            if glibc_version in _LEGACY_MANYLINUX_MAP:
                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                if _is_compatible(legacy_tag, arch, glibc_version):
                    yield linux.replace("linux", legacy_tag)
|
.venv/Lib/site-packages/wheel/vendored/packaging/_musllinux.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""PEP 656 support.
|
| 2 |
+
|
| 3 |
+
This module implements logic to detect if the currently running Python is
|
| 4 |
+
linked against musl, and what musl version is used.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import functools
|
| 8 |
+
import re
|
| 9 |
+
import subprocess
|
| 10 |
+
import sys
|
| 11 |
+
from typing import Iterator, NamedTuple, Optional
|
| 12 |
+
|
| 13 |
+
from ._elffile import ELFFile
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class _MuslVersion(NamedTuple):
    # (major, minor) version of the detected musl runtime.
    major: int
    minor: int
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
|
| 22 |
+
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
| 23 |
+
if len(lines) < 2 or lines[0][:4] != "musl":
|
| 24 |
+
return None
|
| 25 |
+
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
| 26 |
+
if not m:
|
| 27 |
+
return None
|
| 28 |
+
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect the musl runtime version used by *executable*.

    The executable's ELF header names its dynamic loader; when that loader is
    musl, invoking it with no arguments prints a version banner, which is then
    parsed for the version number. Returns ``None`` when the file cannot be
    read, is not a valid ELF binary, or is not linked against musl.
    """
    try:
        with open(executable, "rb") as fp:
            loader = ELFFile(fp).interpreter
    except (OSError, TypeError, ValueError):
        # Unreadable file, not an ELF object, or a malformed header.
        return None
    if loader is None or "musl" not in loader:
        return None
    # musl's loader writes its version banner to stderr.
    result = subprocess.run([loader], stderr=subprocess.PIPE, universal_newlines=True)
    return _parse_musl_version(result.stderr)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def platform_tags(arch: str) -> Iterator[str]:
    """Generate the musllinux tags supported by the running interpreter.

    :param arch: The part of the platform tag after the ``linux_`` prefix,
        e.g. ``x86_64``; the ``linux_`` prefix itself is assumed to hold.
    :returns: An iterator of compatible musllinux tags, newest first.
    """
    version = _get_musl_version(sys.executable)
    if version is None:
        # Python is not dynamically linked against musl: no musllinux tags.
        return
    # Most specific (highest minor) tag first, down to minor 0.
    for compat_minor in range(version.minor, -1, -1):
        yield f"musllinux_{version.major}_{compat_minor}_{arch}"
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
if __name__ == "__main__": # pragma: no cover
|
| 71 |
+
import sysconfig
|
| 72 |
+
|
| 73 |
+
plat = sysconfig.get_platform()
|
| 74 |
+
assert plat.startswith("linux-"), "not linux"
|
| 75 |
+
|
| 76 |
+
print("plat:", plat)
|
| 77 |
+
print("musl:", _get_musl_version(sys.executable))
|
| 78 |
+
print("tags:", end=" ")
|
| 79 |
+
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
|
| 80 |
+
print(t, end="\n ")
|
.venv/Lib/site-packages/wheel/vendored/packaging/_parser.py
ADDED
|
@@ -0,0 +1,328 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Handwritten parser of dependency specifiers.
|
| 2 |
+
|
| 3 |
+
The docstring for each __parse_* function contains ENBF-inspired grammar representing
|
| 4 |
+
the implementation.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
import ast
|
| 8 |
+
from typing import Any, List, NamedTuple, Optional, Tuple, Union
|
| 9 |
+
|
| 10 |
+
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class Node:
    """Base class for one parsed marker element (variable, literal, or op)."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}('{self}')>"

    def serialize(self) -> str:
        """Render this node back into dependency-specifier syntax."""
        raise NotImplementedError


class Variable(Node):
    """An environment-marker variable such as ``python_version``."""

    def serialize(self) -> str:
        return str(self)


class Value(Node):
    """A literal string; serialized wrapped in double quotes."""

    def serialize(self) -> str:
        return f'"{self}"'


class Op(Node):
    """A comparison operator such as ``==`` or ``not in``."""

    def serialize(self) -> str:
        return str(self)


MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]


class ParsedRequirement(NamedTuple):
    """Structured result of parsing a PEP 508 dependency specifier."""

    name: str
    url: str
    extras: List[str]
    specifier: str
    marker: Optional[MarkerList]
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# --------------------------------------------------------------------------------------
|
| 61 |
+
# Recursive descent parser for dependency specifier
|
| 62 |
+
# --------------------------------------------------------------------------------------
|
| 63 |
+
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a PEP 508 dependency specifier string into its components.

    Raises a tokenizer ``ParserSyntaxError`` on malformed input.
    """
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Anything left after the details is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?
    """

    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # URL form: "name @ <url> [; marker]".
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        # The URL token is greedy (matches up to whitespace), so a marker
        # must be separated from it by whitespace.
        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Specifier form: "name >=1.0,<2 [; marker]" (specifier may be empty).
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?
    """

    if not tokenizer.check("SEMICOLON"):
        # Widen the error span back to *span_start* for a clearer message.
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
    """
    # Extras are optional; no opening bracket means no extras at all.
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    # enclosing_tokens() consumes the brackets and errors on a missing "]".
    with tokenizer.enclosing_tokens("LEFT_BRACKET", "RIGHT_BRACKET"):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    # An empty bracket pair ("pkg[]") yields no extras.
    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two identifiers in a row: the separating comma is missing.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    # The parentheses are optional: enclosing_tokens() only demands the
    # closing one when the opening one was actually present.
    with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        # Keep the separating comma in the accumulated specifier text.
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
|
| 228 |
+
|
| 229 |
+
|
| 230 |
+
# --------------------------------------------------------------------------------------
|
| 231 |
+
# Recursive descent parser for marker expression
|
| 232 |
+
# --------------------------------------------------------------------------------------
|
| 233 |
+
def parse_marker(source: str) -> MarkerList:
    """Parse an environment-marker expression string into a MarkerList."""
    return _parse_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+
    """
    # Flat left-to-right list: [atom, "and"/"or", atom, ...]; grouping into
    # precedence levels happens at evaluation time, not here.
    parts: MarkerList = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        bool_op = tokenizer.read().text
        parts += [bool_op, _parse_marker_atom(tokenizer)]
    return parts
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """

    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        # Parenthesized group: recurse into a full marker expression.
        with tokenizer.enclosing_tokens("LEFT_PARENTHESIS", "RIGHT_PARENTHESIS"):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    # Whitespace is permitted (and skipped) around every element.
    tokenizer.consume("WS")
    lhs = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    comparison = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    rhs = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (lhs, comparison, rhs)
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Dotted spellings such as "os.name" are aliases for "os_name".
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name in a Variable node, folding aliases.

    ``python_implementation`` is folded into its canonical spelling
    ``platform_python_implementation``.
    """
    if env_var in ("platform_python_implementation", "python_implementation"):
        return Variable("platform_python_implementation")
    return Variable(env_var)
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted marker literal and wrap its text in a Value node."""
    literal = ast.literal_eval(python_str)
    return Value(str(literal))
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        # raise_syntax_error never returns; "return" satisfies type checkers.
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of "
            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )
|
.venv/Lib/site-packages/wheel/vendored/packaging/_structures.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class InfinityType:
    """Sentinel that orders greater than every other object.

    Used as a sort key extreme; equal only to other InfinityType instances.
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Shared singleton instance; compare against this rather than constructing.
Infinity = InfinityType()
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class NegativeInfinityType:
|
| 36 |
+
def __repr__(self) -> str:
|
| 37 |
+
return "-Infinity"
|
| 38 |
+
|
| 39 |
+
def __hash__(self) -> int:
|
| 40 |
+
return hash(repr(self))
|
| 41 |
+
|
| 42 |
+
def __lt__(self, other: object) -> bool:
|
| 43 |
+
return True
|
| 44 |
+
|
| 45 |
+
def __le__(self, other: object) -> bool:
|
| 46 |
+
return True
|
| 47 |
+
|
| 48 |
+
def __eq__(self, other: object) -> bool:
|
| 49 |
+
return isinstance(other, self.__class__)
|
| 50 |
+
|
| 51 |
+
def __gt__(self, other: object) -> bool:
|
| 52 |
+
return False
|
| 53 |
+
|
| 54 |
+
def __ge__(self, other: object) -> bool:
|
| 55 |
+
return False
|
| 56 |
+
|
| 57 |
+
def __neg__(self: object) -> InfinityType:
|
| 58 |
+
return Infinity
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
NegativeInfinity = NegativeInfinityType()
|
.venv/Lib/site-packages/wheel/vendored/packaging/_tokenizer.py
ADDED
|
@@ -0,0 +1,188 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import contextlib
|
| 2 |
+
import re
|
| 3 |
+
from dataclasses import dataclass
|
| 4 |
+
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
|
| 5 |
+
|
| 6 |
+
from .specifiers import Specifier
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@dataclass
class Token:
    """A single lexed token."""

    # Name of the matching rule in the tokenizer's rule table.
    name: str
    # Exact text matched in the source string.
    text: str
    # Offset of the match within the source string.
    position: int
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        # Keep the offending source and span so __str__ can point at the
        # exact location of the problem.
        self.span = span
        self.message = message
        self.source = source
        super().__init__()

    def __str__(self) -> str:
        start, end = self.span
        caret_line = " " * start + "~" * (end - start) + "^"
        return "\n ".join([self.message, self.source, caret_line])
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# Token-name -> pattern table driving Tokenizer for PEP 508 specifiers.
# Plain strings are compiled by Tokenizer; multi-line patterns are
# pre-compiled here with re.VERBOSE. Order is irrelevant: the parser always
# asks for a specific token by name.
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    # Single- or double-quoted string with no escapes (markers need none).
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    # Recognized environment-marker variable names (dotted or underscored).
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    # Version specifier pieces are borrowed from Specifier's own regexes.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    # Greedy: a URL runs until the next whitespace character.
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "WS": r"[ \t]+",
    "END": r"$",
}
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next
    token matches a named rule, and to consume matched tokens.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        # Pre-compile every rule; re.compile() passes through patterns that
        # are already compiled.
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # Token loaded by check() but not yet consumed by read().
        self.next_token: Optional[Token] = None
        # Current offset into self.source.
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The matching token is consumed and returned.
        """
        if not self.check(name):
            # raise_syntax_error() is NoReturn, so no "raise" is needed here.
            self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position.

        The span defaults to the current position at either end.
        """
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(self, open_token: str, close_token: str) -> Iterator[bool]:
        """Match an optional *open_token* ... *close_token* pair.

        Yields whether the opening token was present; on exit, the closing
        token is required only if the opening one was read.
        """
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield open_position is not None

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected closing {close_token}",
                span_start=open_position,
            )

        self.read()
|
.venv/Lib/site-packages/wheel/vendored/packaging/markers.py
ADDED
|
@@ -0,0 +1,245 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import operator
|
| 6 |
+
import os
|
| 7 |
+
import platform
|
| 8 |
+
import sys
|
| 9 |
+
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
| 10 |
+
|
| 11 |
+
from ._parser import MarkerAtom, MarkerList, Op, Value, Variable, parse_marker
|
| 12 |
+
from ._tokenizer import ParserSyntaxError
|
| 13 |
+
from .specifiers import InvalidSpecifier, Specifier
|
| 14 |
+
from .utils import canonicalize_name
|
| 15 |
+
|
| 16 |
+
# Public API of this module.
__all__ = [
    "InvalidMarker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "Marker",
    "default_environment",
]

# Signature of a binary comparison used to evaluate one marker item.
Operator = Callable[[str, str], bool]
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def _normalize_extra_values(results: Any) -> Any:
    """
    Normalize extra values.
    """
    # Only the first parsed marker item is inspected here; when it compares
    # against "extra", rewrite the literal side to its normalized name form.
    if isinstance(results[0], tuple):
        lhs, op, rhs = results[0]
        if isinstance(lhs, Variable) and lhs.value == "extra":
            normalized_extra = canonicalize_name(rhs.value)
            rhs = Value(normalized_extra)
        elif isinstance(rhs, Variable) and rhs.value == "extra":
            normalized_extra = canonicalize_name(lhs.value)
            lhs = Value(normalized_extra)
        results[0] = lhs, op, rhs
    return results
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
def _format_marker(
|
| 63 |
+
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
|
| 64 |
+
) -> str:
|
| 65 |
+
|
| 66 |
+
assert isinstance(marker, (list, tuple, str))
|
| 67 |
+
|
| 68 |
+
# Sometimes we have a structure like [[...]] which is a single item list
|
| 69 |
+
# where the single item is itself it's own list. In that case we want skip
|
| 70 |
+
# the rest of this function so that we don't get extraneous () on the
|
| 71 |
+
# outside.
|
| 72 |
+
if (
|
| 73 |
+
isinstance(marker, list)
|
| 74 |
+
and len(marker) == 1
|
| 75 |
+
and isinstance(marker[0], (list, tuple))
|
| 76 |
+
):
|
| 77 |
+
return _format_marker(marker[0])
|
| 78 |
+
|
| 79 |
+
if isinstance(marker, list):
|
| 80 |
+
inner = (_format_marker(m, first=False) for m in marker)
|
| 81 |
+
if first:
|
| 82 |
+
return " ".join(inner)
|
| 83 |
+
else:
|
| 84 |
+
return "(" + " ".join(inner) + ")"
|
| 85 |
+
elif isinstance(marker, tuple):
|
| 86 |
+
return " ".join([m.serialize() for m in marker])
|
| 87 |
+
else:
|
| 88 |
+
return marker
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
# Serialized marker operator -> plain string comparison. _eval_op() tries
# PEP 440 version semantics first and falls back to this table.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate a single ``lhs <op> rhs`` marker comparison.

    Raises UndefinedComparison when the operator is not applicable.
    """
    # Prefer PEP 440 version semantics when "op + rhs" forms a valid
    # version specifier; otherwise fall back to string comparison.
    try:
        spec = Specifier("".join([op.serialize(), rhs]))
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs, prereleases=True)

    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
    """Normalize marker operand strings prior to comparison.

    PEP 685 (https://peps.python.org/pep-0685/) requires extra names to be
    normalized with the PEP 503 rules before they are compared; every other
    environment marker has no such standard and is compared verbatim.
    """
    if key != "extra":
        return values
    return tuple(canonicalize_name(v) for v in values)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
    """Evaluate a parsed marker expression against ``environment``.

    The parsed form alternates operands with the literal strings "and"/"or".
    Because "and" binds tighter than "or", booleans are collected into groups
    of and-ed terms; an "or" token starts a fresh group.  The whole expression
    is true when any group is entirely true.
    """
    or_groups: List[List[bool]] = [[]]

    def eval_atom(atom) -> bool:
        # Exactly one side of an atom is a Variable naming an environment
        # key; the other side is a literal Value.
        lhs, op, rhs = atom
        if isinstance(lhs, Variable):
            key = lhs.value
            left, right = environment[key], rhs.value
        else:
            key = rhs.value
            left, right = lhs.value, environment[key]
        left, right = _normalize(left, right, key=key)
        return _eval_op(left, op, right)

    for token in markers:
        assert isinstance(token, (list, tuple, str))

        if isinstance(token, list):
            # Parenthesised sub-expression: evaluate recursively.
            or_groups[-1].append(_evaluate_markers(token, environment))
        elif isinstance(token, tuple):
            or_groups[-1].append(eval_atom(token))
        else:
            assert token in ["and", "or"]
            if token == "or":
                or_groups.append([])

    return any(all(conjunct) for conjunct in or_groups)
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def format_full_version(info: "sys._version_info") -> str:
    """Build a PEP 440-style version string from a ``version_info`` struct.

    A non-"final" release level is appended as its first letter plus the
    serial number, e.g. ``3.12.0b1``.
    """
    base = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel == "final":
        return base
    return base + info.releaselevel[0] + str(info.serial)
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
def default_environment() -> Dict[str, str]:
    """Return the PEP 508 marker environment of the running interpreter.

    Keys are the marker variable names (``os_name``, ``sys_platform``, ...);
    values are plain strings.
    """
    impl = sys.implementation
    return {
        "implementation_name": impl.name,
        "implementation_version": format_full_version(impl.version),
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # python_version is only major.minor per PEP 508.
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
class Marker:
    """A parsed PEP 508 environment marker expression."""

    def __init__(self, marker: str) -> None:
        # Note: packaging.requirements.Requirement builds Marker instances
        # without calling this constructor (via Marker.__new__); any logic
        # added here must be mirrored/adapted there.
        try:
            self._markers = _normalize_extra_values(parse_marker(marker))
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e
        # ``_markers`` is a recursive structure:
        #     MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
        # For example:
        #     python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
        # parses into nested lists of (Variable, Op, Value) triples joined by
        # the literal strings 'and'/'or'.

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash on the normalized string form so equal markers hash equal.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented
        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        env = default_environment()
        env["extra"] = ""
        if environment is not None:
            env.update(environment)
            # The API historically allowed setting extra to None; keep
            # treating that as the empty string for backwards compatibility.
            if env["extra"] is None:
                env["extra"] = ""

        return _evaluate_markers(self._markers, env)
|
.venv/Lib/site-packages/wheel/vendored/packaging/requirements.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import urllib.parse
|
| 6 |
+
from typing import Any, List, Optional, Set
|
| 7 |
+
|
| 8 |
+
from ._parser import parse_requirement
|
| 9 |
+
from ._tokenizer import ParserSyntaxError
|
| 10 |
+
from .markers import Marker, _normalize_extra_values
|
| 11 |
+
from .specifiers import SpecifierSet
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class InvalidRequirement(ValueError):
    """Raised when a requirement string is not valid PEP 508 syntax."""
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #   If so how do we do that? Do we need to test against the _name_ of
    #   the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        """Parse ``requirement_string`` and populate name/url/extras/specifier/marker.

        :raises InvalidRequirement: on PEP 508 syntax errors or a malformed URL.
        """
        try:
            parsed = parse_requirement(requirement_string)
        except ParserSyntaxError as e:
            raise InvalidRequirement(str(e)) from e

        self.name: str = parsed.name
        if parsed.url:
            parsed_url = urllib.parse.urlparse(parsed.url)
            if parsed_url.scheme == "file":
                # A file: URL must round-trip unchanged through urlparse.
                if urllib.parse.urlunparse(parsed_url) != parsed.url:
                    raise InvalidRequirement("Invalid URL given")
            # NOTE(review): the second disjunct (both scheme and netloc
            # missing) is logically subsumed by the first; kept verbatim from
            # upstream packaging.
            elif not (parsed_url.scheme and parsed_url.netloc) or (
                not parsed_url.scheme and not parsed_url.netloc
            ):
                raise InvalidRequirement(f"Invalid URL: {parsed.url}")
            self.url: Optional[str] = parsed.url
        else:
            self.url = None
        self.extras: Set[str] = set(parsed.extras if parsed.extras else [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Optional[Marker] = None
        if parsed.marker is not None:
            # Build the Marker without invoking Marker.__init__ (which would
            # re-parse the string); mirrors the note in packaging.markers.
            self.marker = Marker.__new__(Marker)
            self.marker._markers = _normalize_extra_values(parsed.marker)

    def __str__(self) -> str:
        """Reassemble the canonical PEP 508 string form of the requirement."""
        parts: List[str] = [self.name]

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            parts.append(f"[{formatted_extras}]")

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append(f"@ {self.url}")
            # A URL followed by a marker needs a separating space before ';'.
            if self.marker:
                parts.append(" ")

        if self.marker:
            parts.append(f"; {self.marker}")

        return "".join(parts)

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonical string form so equal requirements hash equal.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        return (
            self.name == other.name
            and self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
|
.venv/Lib/site-packages/wheel/vendored/packaging/specifiers.py
ADDED
|
@@ -0,0 +1,1006 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
|
| 3 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 4 |
+
# for complete details.
|
| 5 |
+
"""
|
| 6 |
+
.. testsetup::
|
| 7 |
+
|
| 8 |
+
from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
| 9 |
+
from packaging.version import Version
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import abc
|
| 13 |
+
import itertools
|
| 14 |
+
import re
|
| 15 |
+
from typing import (
|
| 16 |
+
Callable,
|
| 17 |
+
Iterable,
|
| 18 |
+
Iterator,
|
| 19 |
+
List,
|
| 20 |
+
Optional,
|
| 21 |
+
Set,
|
| 22 |
+
Tuple,
|
| 23 |
+
TypeVar,
|
| 24 |
+
Union,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
from .utils import canonicalize_version
|
| 28 |
+
from .version import Version
|
| 29 |
+
|
| 30 |
+
# A version accepted by the public API either pre-parsed or as a raw string.
UnparsedVersion = Union[Version, str]
# TypeVar bound to UnparsedVersion so filter() preserves its element type.
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
# Signature shared by the Specifier._compare_* operator implementations.
CallableOperator = Callable[[Version, str], bool]
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return ``version`` as a Version, parsing it when given as a string."""
    if isinstance(version, Version):
        return version
    return Version(version)
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class InvalidSpecifier(ValueError):
    """Raised when a :class:`Specifier` is built from an invalid string.

    >>> Specifier("lolwat")
    Traceback (most recent call last):
        ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface shared by :class:`Specifier` and ``SpecifierSet``."""

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier-like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier-like
        objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> Optional[bool]:
        """Whether or not pre-releases as a whole are allowed.

        This can be set to either ``True`` or ``False`` to explicitly enable or disable
        prereleases or it can be set to ``None`` (the default) to use default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class Specifier(BaseSpecifier):
|
| 109 |
+
"""This class abstracts handling of version specifiers.
|
| 110 |
+
|
| 111 |
+
.. tip::
|
| 112 |
+
|
| 113 |
+
It is generally not required to instantiate this manually. You should instead
|
| 114 |
+
prefer to work with :class:`SpecifierSet` instead, which can parse
|
| 115 |
+
comma-separated version specifiers (which is what package metadata contains).
|
| 116 |
+
"""
|
| 117 |
+
|
| 118 |
+
_operator_regex_str = r"""
|
| 119 |
+
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
| 120 |
+
"""
|
| 121 |
+
_version_regex_str = r"""
|
| 122 |
+
(?P<version>
|
| 123 |
+
(?:
|
| 124 |
+
# The identity operators allow for an escape hatch that will
|
| 125 |
+
# do an exact string match of the version you wish to install.
|
| 126 |
+
# This will not be parsed by PEP 440 and we cannot determine
|
| 127 |
+
# any semantic meaning from it. This operator is discouraged
|
| 128 |
+
# but included entirely as an escape hatch.
|
| 129 |
+
(?<====) # Only match for the identity operator
|
| 130 |
+
\s*
|
| 131 |
+
[^\s;)]* # The arbitrary version can be just about anything,
|
| 132 |
+
# we match everything except for whitespace, a
|
| 133 |
+
# semi-colon for marker support, and a closing paren
|
| 134 |
+
# since versions can be enclosed in them.
|
| 135 |
+
)
|
| 136 |
+
|
|
| 137 |
+
(?:
|
| 138 |
+
# The (non)equality operators allow for wild card and local
|
| 139 |
+
# versions to be specified so we have to define these two
|
| 140 |
+
# operators separately to enable that.
|
| 141 |
+
(?<===|!=) # Only match for equals and not equals
|
| 142 |
+
|
| 143 |
+
\s*
|
| 144 |
+
v?
|
| 145 |
+
(?:[0-9]+!)? # epoch
|
| 146 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 147 |
+
|
| 148 |
+
# You cannot use a wild card and a pre-release, post-release, a dev or
|
| 149 |
+
# local version together so group them with a | and make them optional.
|
| 150 |
+
(?:
|
| 151 |
+
\.\* # Wild card syntax of .*
|
| 152 |
+
|
|
| 153 |
+
(?: # pre release
|
| 154 |
+
[-_\.]?
|
| 155 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 156 |
+
[-_\.]?
|
| 157 |
+
[0-9]*
|
| 158 |
+
)?
|
| 159 |
+
(?: # post release
|
| 160 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 161 |
+
)?
|
| 162 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 163 |
+
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
| 164 |
+
)?
|
| 165 |
+
)
|
| 166 |
+
|
|
| 167 |
+
(?:
|
| 168 |
+
# The compatible operator requires at least two digits in the
|
| 169 |
+
# release segment.
|
| 170 |
+
(?<=~=) # Only match for the compatible operator
|
| 171 |
+
|
| 172 |
+
\s*
|
| 173 |
+
v?
|
| 174 |
+
(?:[0-9]+!)? # epoch
|
| 175 |
+
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
| 176 |
+
(?: # pre release
|
| 177 |
+
[-_\.]?
|
| 178 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 179 |
+
[-_\.]?
|
| 180 |
+
[0-9]*
|
| 181 |
+
)?
|
| 182 |
+
(?: # post release
|
| 183 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 184 |
+
)?
|
| 185 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 186 |
+
)
|
| 187 |
+
|
|
| 188 |
+
(?:
|
| 189 |
+
# All other operators only allow a sub set of what the
|
| 190 |
+
# (non)equality operators do. Specifically they do not allow
|
| 191 |
+
# local versions to be specified nor do they allow the prefix
|
| 192 |
+
# matching wild cards.
|
| 193 |
+
(?<!==|!=|~=) # We have special cases for these
|
| 194 |
+
# operators so we want to make sure they
|
| 195 |
+
# don't match here.
|
| 196 |
+
|
| 197 |
+
\s*
|
| 198 |
+
v?
|
| 199 |
+
(?:[0-9]+!)? # epoch
|
| 200 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 201 |
+
(?: # pre release
|
| 202 |
+
[-_\.]?
|
| 203 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 204 |
+
[-_\.]?
|
| 205 |
+
[0-9]*
|
| 206 |
+
)?
|
| 207 |
+
(?: # post release
|
| 208 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 209 |
+
)?
|
| 210 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 211 |
+
)
|
| 212 |
+
)
|
| 213 |
+
"""
|
| 214 |
+
|
| 215 |
+
_regex = re.compile(
|
| 216 |
+
r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
|
| 217 |
+
re.VERBOSE | re.IGNORECASE,
|
| 218 |
+
)
|
| 219 |
+
|
| 220 |
+
_operators = {
|
| 221 |
+
"~=": "compatible",
|
| 222 |
+
"==": "equal",
|
| 223 |
+
"!=": "not_equal",
|
| 224 |
+
"<=": "less_than_equal",
|
| 225 |
+
">=": "greater_than_equal",
|
| 226 |
+
"<": "less_than",
|
| 227 |
+
">": "greater_than",
|
| 228 |
+
"===": "arbitrary",
|
| 229 |
+
}
|
| 230 |
+
|
| 231 |
+
def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
|
| 232 |
+
"""Initialize a Specifier instance.
|
| 233 |
+
|
| 234 |
+
:param spec:
|
| 235 |
+
The string representation of a specifier which will be parsed and
|
| 236 |
+
normalized before use.
|
| 237 |
+
:param prereleases:
|
| 238 |
+
This tells the specifier if it should accept prerelease versions if
|
| 239 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
| 240 |
+
given specifiers.
|
| 241 |
+
:raises InvalidSpecifier:
|
| 242 |
+
If the given specifier is invalid (i.e. bad syntax).
|
| 243 |
+
"""
|
| 244 |
+
match = self._regex.search(spec)
|
| 245 |
+
if not match:
|
| 246 |
+
raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
|
| 247 |
+
|
| 248 |
+
self._spec: Tuple[str, str] = (
|
| 249 |
+
match.group("operator").strip(),
|
| 250 |
+
match.group("version").strip(),
|
| 251 |
+
)
|
| 252 |
+
|
| 253 |
+
# Store whether or not this Specifier should accept prereleases
|
| 254 |
+
self._prereleases = prereleases
|
| 255 |
+
|
| 256 |
+
@property
|
| 257 |
+
def prereleases(self) -> bool:
|
| 258 |
+
# If there is an explicit prereleases set for this, then we'll just
|
| 259 |
+
# blindly use that.
|
| 260 |
+
if self._prereleases is not None:
|
| 261 |
+
return self._prereleases
|
| 262 |
+
|
| 263 |
+
# Look at all of our specifiers and determine if they are inclusive
|
| 264 |
+
# operators, and if they are if they are including an explicit
|
| 265 |
+
# prerelease.
|
| 266 |
+
operator, version = self._spec
|
| 267 |
+
if operator in ["==", ">=", "<=", "~=", "==="]:
|
| 268 |
+
# The == specifier can include a trailing .*, if it does we
|
| 269 |
+
# want to remove before parsing.
|
| 270 |
+
if operator == "==" and version.endswith(".*"):
|
| 271 |
+
version = version[:-2]
|
| 272 |
+
|
| 273 |
+
# Parse the version, and if it is a pre-release than this
|
| 274 |
+
# specifier allows pre-releases.
|
| 275 |
+
if Version(version).is_prerelease:
|
| 276 |
+
return True
|
| 277 |
+
|
| 278 |
+
return False
|
| 279 |
+
|
| 280 |
+
@prereleases.setter
|
| 281 |
+
def prereleases(self, value: bool) -> None:
|
| 282 |
+
self._prereleases = value
|
| 283 |
+
|
| 284 |
+
@property
|
| 285 |
+
def operator(self) -> str:
|
| 286 |
+
"""The operator of this specifier.
|
| 287 |
+
|
| 288 |
+
>>> Specifier("==1.2.3").operator
|
| 289 |
+
'=='
|
| 290 |
+
"""
|
| 291 |
+
return self._spec[0]
|
| 292 |
+
|
| 293 |
+
@property
|
| 294 |
+
def version(self) -> str:
|
| 295 |
+
"""The version of this specifier.
|
| 296 |
+
|
| 297 |
+
>>> Specifier("==1.2.3").version
|
| 298 |
+
'1.2.3'
|
| 299 |
+
"""
|
| 300 |
+
return self._spec[1]
|
| 301 |
+
|
| 302 |
+
def __repr__(self) -> str:
|
| 303 |
+
"""A representation of the Specifier that shows all internal state.
|
| 304 |
+
|
| 305 |
+
>>> Specifier('>=1.0.0')
|
| 306 |
+
<Specifier('>=1.0.0')>
|
| 307 |
+
>>> Specifier('>=1.0.0', prereleases=False)
|
| 308 |
+
<Specifier('>=1.0.0', prereleases=False)>
|
| 309 |
+
>>> Specifier('>=1.0.0', prereleases=True)
|
| 310 |
+
<Specifier('>=1.0.0', prereleases=True)>
|
| 311 |
+
"""
|
| 312 |
+
pre = (
|
| 313 |
+
f", prereleases={self.prereleases!r}"
|
| 314 |
+
if self._prereleases is not None
|
| 315 |
+
else ""
|
| 316 |
+
)
|
| 317 |
+
|
| 318 |
+
return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
|
| 319 |
+
|
| 320 |
+
def __str__(self) -> str:
|
| 321 |
+
"""A string representation of the Specifier that can be round-tripped.
|
| 322 |
+
|
| 323 |
+
>>> str(Specifier('>=1.0.0'))
|
| 324 |
+
'>=1.0.0'
|
| 325 |
+
>>> str(Specifier('>=1.0.0', prereleases=False))
|
| 326 |
+
'>=1.0.0'
|
| 327 |
+
"""
|
| 328 |
+
return "{}{}".format(*self._spec)
|
| 329 |
+
|
| 330 |
+
@property
|
| 331 |
+
def _canonical_spec(self) -> Tuple[str, str]:
|
| 332 |
+
canonical_version = canonicalize_version(
|
| 333 |
+
self._spec[1],
|
| 334 |
+
strip_trailing_zero=(self._spec[0] != "~="),
|
| 335 |
+
)
|
| 336 |
+
return self._spec[0], canonical_version
|
| 337 |
+
|
| 338 |
+
def __hash__(self) -> int:
|
| 339 |
+
return hash(self._canonical_spec)
|
| 340 |
+
|
| 341 |
+
def __eq__(self, other: object) -> bool:
|
| 342 |
+
"""Whether or not the two Specifier-like objects are equal.
|
| 343 |
+
|
| 344 |
+
:param other: The other object to check against.
|
| 345 |
+
|
| 346 |
+
The value of :attr:`prereleases` is ignored.
|
| 347 |
+
|
| 348 |
+
>>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
|
| 349 |
+
True
|
| 350 |
+
>>> (Specifier("==1.2.3", prereleases=False) ==
|
| 351 |
+
... Specifier("==1.2.3", prereleases=True))
|
| 352 |
+
True
|
| 353 |
+
>>> Specifier("==1.2.3") == "==1.2.3"
|
| 354 |
+
True
|
| 355 |
+
>>> Specifier("==1.2.3") == Specifier("==1.2.4")
|
| 356 |
+
False
|
| 357 |
+
>>> Specifier("==1.2.3") == Specifier("~=1.2.3")
|
| 358 |
+
False
|
| 359 |
+
"""
|
| 360 |
+
if isinstance(other, str):
|
| 361 |
+
try:
|
| 362 |
+
other = self.__class__(str(other))
|
| 363 |
+
except InvalidSpecifier:
|
| 364 |
+
return NotImplemented
|
| 365 |
+
elif not isinstance(other, self.__class__):
|
| 366 |
+
return NotImplemented
|
| 367 |
+
|
| 368 |
+
return self._canonical_spec == other._canonical_spec
|
| 369 |
+
|
| 370 |
+
def _get_operator(self, op: str) -> CallableOperator:
|
| 371 |
+
operator_callable: CallableOperator = getattr(
|
| 372 |
+
self, f"_compare_{self._operators[op]}"
|
| 373 |
+
)
|
| 374 |
+
return operator_callable
|
| 375 |
+
|
| 376 |
+
def _compare_compatible(self, prospective: Version, spec: str) -> bool:
|
| 377 |
+
|
| 378 |
+
# Compatible releases have an equivalent combination of >= and ==. That
|
| 379 |
+
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
|
| 380 |
+
# implement this in terms of the other specifiers instead of
|
| 381 |
+
# implementing it ourselves. The only thing we need to do is construct
|
| 382 |
+
# the other specifiers.
|
| 383 |
+
|
| 384 |
+
# We want everything but the last item in the version, but we want to
|
| 385 |
+
# ignore suffix segments.
|
| 386 |
+
prefix = ".".join(
|
| 387 |
+
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
|
| 388 |
+
)
|
| 389 |
+
|
| 390 |
+
# Add the prefix notation to the end of our string
|
| 391 |
+
prefix += ".*"
|
| 392 |
+
|
| 393 |
+
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
|
| 394 |
+
prospective, prefix
|
| 395 |
+
)
|
| 396 |
+
|
| 397 |
+
def _compare_equal(self, prospective: Version, spec: str) -> bool:
|
| 398 |
+
|
| 399 |
+
# We need special logic to handle prefix matching
|
| 400 |
+
if spec.endswith(".*"):
|
| 401 |
+
# In the case of prefix matching we want to ignore local segment.
|
| 402 |
+
normalized_prospective = canonicalize_version(prospective.public)
|
| 403 |
+
# Get the normalized version string ignoring the trailing .*
|
| 404 |
+
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
|
| 405 |
+
# Split the spec out by dots, and pretend that there is an implicit
|
| 406 |
+
# dot in between a release segment and a pre-release segment.
|
| 407 |
+
split_spec = _version_split(normalized_spec)
|
| 408 |
+
|
| 409 |
+
# Split the prospective version out by dots, and pretend that there
|
| 410 |
+
# is an implicit dot in between a release segment and a pre-release
|
| 411 |
+
# segment.
|
| 412 |
+
split_prospective = _version_split(normalized_prospective)
|
| 413 |
+
|
| 414 |
+
# 0-pad the prospective version before shortening it to get the correct
|
| 415 |
+
# shortened version.
|
| 416 |
+
padded_prospective, _ = _pad_version(split_prospective, split_spec)
|
| 417 |
+
|
| 418 |
+
# Shorten the prospective version to be the same length as the spec
|
| 419 |
+
# so that we can determine if the specifier is a prefix of the
|
| 420 |
+
# prospective version or not.
|
| 421 |
+
shortened_prospective = padded_prospective[: len(split_spec)]
|
| 422 |
+
|
| 423 |
+
return shortened_prospective == split_spec
|
| 424 |
+
else:
|
| 425 |
+
# Convert our spec string into a Version
|
| 426 |
+
spec_version = Version(spec)
|
| 427 |
+
|
| 428 |
+
# If the specifier does not have a local segment, then we want to
|
| 429 |
+
# act as if the prospective version also does not have a local
|
| 430 |
+
# segment.
|
| 431 |
+
if not spec_version.local:
|
| 432 |
+
prospective = Version(prospective.public)
|
| 433 |
+
|
| 434 |
+
return prospective == spec_version
|
| 435 |
+
|
| 436 |
+
def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
    """Implement the ``!=`` operator as the negation of ``==``."""
    matches_equal = self._compare_equal(prospective, spec)
    return not matches_equal
|
| 438 |
+
|
| 439 |
+
def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
    """Implement the ``<=`` operator.

    Local version identifiers are not permitted in version specifiers, so
    the local label is stripped from the prospective version before the
    comparison.
    """
    public_prospective = Version(prospective.public)
    return public_prospective <= Version(spec)
|
| 445 |
+
|
| 446 |
+
def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
    """Implement the ``>=`` operator.

    Local version identifiers are not permitted in version specifiers, so
    the local label is stripped from the prospective version before the
    comparison.
    """
    public_prospective = Version(prospective.public)
    return public_prospective >= Version(spec)
|
| 452 |
+
|
| 453 |
+
def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
    """Implement the exclusive ``<`` operator."""
    # Work with the spec as a parsed Version.
    spec = Version(spec_str)

    # Anything not strictly below the spec fails immediately.
    if not prospective < spec:
        return False

    # Unless the specifier itself is a pre-release, a pre-release of the
    # *same* release as the spec must not match: <3.1 should not match
    # 3.1.dev0, although it does match 3.0.dev0.
    if (
        prospective.is_prerelease
        and not spec.is_prerelease
        and Version(prospective.base_version) == Version(spec.base_version)
    ):
        return False

    # Strictly smaller, and not a pre-release of the spec's own release.
    return True
|
| 477 |
+
|
| 478 |
+
def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
    """Implement the exclusive ``>`` operator."""
    # Work with the spec as a parsed Version.
    spec = Version(spec_str)

    # Anything not strictly above the spec fails immediately.
    if not prospective > spec:
        return False

    # Both special cases below only apply when the candidate shares the
    # spec's release segment.
    same_release = Version(prospective.base_version) == Version(spec.base_version)

    # Unless the specifier itself is a post-release, a post-release of the
    # spec's own release must not match: >3.1 should not match 3.1.post0,
    # although it does match 3.2.post0.
    if prospective.is_postrelease and not spec.is_postrelease and same_release:
        return False

    # A local variant of the spec's release compares greater but is still
    # disallowed by PEP 440.
    if prospective.local is not None and same_release:
        return False

    # Strictly greater, and not a post/local variant of the spec's release.
    return True
|
| 508 |
+
|
| 509 |
+
def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
|
| 510 |
+
return str(prospective).lower() == str(spec).lower()
|
| 511 |
+
|
| 512 |
+
def __contains__(self, item: Union[str, Version]) -> bool:
    """Support the ``in`` operator.

    :param item: A version string or :class:`Version` to test.

    Delegates to :meth:`contains` with no ``prereleases`` argument.

    >>> "1.2.3" in Specifier(">=1.2.3")
    True
    >>> "1.3.0a1" in Specifier(">=1.2.3")
    False
    >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
    True
    """
    return self.contains(item)
|
| 532 |
+
|
| 533 |
+
def contains(
    self, item: UnparsedVersion, prereleases: Optional[bool] = None
) -> bool:
    """Return whether or not the item is contained in this specifier.

    :param item:
        A version string or :class:`Version` instance to test.
    :param prereleases:
        Whether prereleases may match this specifier.  ``None`` (the
        default) defers to :attr:`prereleases`.

    >>> Specifier(">=1.2.3").contains("1.2.3")
    True
    >>> Specifier(">=1.2.3").contains("1.3.0a1")
    False
    >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
    True
    """
    # Resolve the prerelease policy: explicit argument wins, otherwise the
    # specifier's own setting.
    allow_prereleases = self.prereleases if prereleases is None else prereleases

    # Coerce strings to Version so e.g. "2.0" in Specifier(">=2") works.
    candidate = _coerce_version(item)

    # Short-circuit: a prerelease can never match when prereleases are
    # not being accepted.
    if candidate.is_prerelease and not allow_prereleases:
        return False

    # Dispatch to the comparison implementation for this operator.
    operator_callable: CallableOperator = self._get_operator(self.operator)
    return operator_callable(candidate, self.version)
|
| 578 |
+
|
| 579 |
+
def filter(
    self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
) -> Iterator[UnparsedVersionVar]:
    """Filter items in the given iterable that match this specifier.

    :param iterable:
        Version strings and/or :class:`Version` instances to filter.
    :param prereleases:
        Whether prereleases may appear in the result.  ``None`` (the
        default) decides intelligently: prereleases are yielded only when
        nothing else matches (per the :pep:`440` SHOULD rule).

    >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
    ['1.3']
    >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
    ['1.5a1']
    >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
    ['1.3', '1.5a1']
    """
    yielded = False
    held_back: list = []

    # Containment is checked permissively; prerelease policy is applied
    # separately below so matching prereleases can be held in reserve.
    kw = {"prereleases": True if prereleases is None else prereleases}

    for candidate in iterable:
        parsed = _coerce_version(candidate)

        if not self.contains(parsed, **kw):
            continue

        if parsed.is_prerelease and not (prereleases or self.prereleases):
            # A matching prerelease while prereleases are disallowed: keep
            # it aside in case nothing else matches.
            held_back.append(candidate)
        else:
            yielded = True
            yield candidate

    # Nothing matched outright — fall back to the reserved prereleases.
    if not yielded:
        yield from held_back
|
| 639 |
+
|
| 640 |
+
|
| 641 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
| 642 |
+
|
| 643 |
+
|
| 644 |
+
def _version_split(version: str) -> List[str]:
|
| 645 |
+
result: List[str] = []
|
| 646 |
+
for item in version.split("."):
|
| 647 |
+
match = _prefix_regex.search(item)
|
| 648 |
+
if match:
|
| 649 |
+
result.extend(match.groups())
|
| 650 |
+
else:
|
| 651 |
+
result.append(item)
|
| 652 |
+
return result
|
| 653 |
+
|
| 654 |
+
|
| 655 |
+
def _is_not_suffix(segment: str) -> bool:
|
| 656 |
+
return not any(
|
| 657 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
| 658 |
+
)
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
|
| 662 |
+
left_split, right_split = [], []
|
| 663 |
+
|
| 664 |
+
# Get the release segment of our versions
|
| 665 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
| 666 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
| 667 |
+
|
| 668 |
+
# Get the rest of our versions
|
| 669 |
+
left_split.append(left[len(left_split[0]) :])
|
| 670 |
+
right_split.append(right[len(right_split[0]) :])
|
| 671 |
+
|
| 672 |
+
# Insert our padding
|
| 673 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
| 674 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
| 675 |
+
|
| 676 |
+
return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
|
| 677 |
+
|
| 678 |
+
|
| 679 |
+
class SpecifierSet(BaseSpecifier):
    """Abstraction over a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list
    of specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            A specifier, or comma-separated list of specifiers, which will
            be parsed and normalized before use.
        :param prereleases:
            Whether this set should accept prerelease versions.  ``None``
            (the default) autodetects from the given specifiers.

        :raises InvalidSpecifier:
            If any of the given ``specifiers`` cannot be parsed.
        """
        # Break the comma-separated string into individual specifier
        # strings, ignoring surrounding whitespace and empty entries, and
        # parse each one into a Specifier.
        pieces = (piece.strip() for piece in specifiers.split(","))
        self._specs = frozenset(Specifier(piece) for piece in pieces if piece)

        # Remember the explicit prerelease policy (None = autodetect later).
        self._prereleases = prereleases

    @property
    def prereleases(self) -> Optional[bool]:
        # An explicit override always wins.
        if self._prereleases is not None:
            return self._prereleases

        # With neither specifiers nor an override, there is nothing to
        # infer the answer from.
        if not self._specs:
            return None

        # Otherwise prereleases are allowed iff any member specifier
        # allows them.
        return any(spec.prereleases for spec in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set showing all internal state.

        The ordering of individual specifiers may not match the input.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        if self._prereleases is None:
            pre = ""
        else:
            pre = f", prereleases={self.prereleases!r}"

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A round-trippable string form of the specifier set.

        The ordering of individual specifiers may not match the input.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        """
        return ",".join(sorted(map(str, self._specs)))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Return a SpecifierSet combining the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        combined = SpecifierSet()
        combined._specs = frozenset(self._specs | other._specs)

        # Merge the prerelease overrides: an unset side inherits the other
        # side's value; conflicting explicit values are an error.
        if self._prereleases is None:
            combined._prereleases = other._prereleases
        elif other._prereleases is None or self._prereleases == other._prereleases:
            combined._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return combined

    def __eq__(self, other: object) -> bool:
        """Whether the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        """
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """Iterate over the underlying :class:`Specifier` instances.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Support the ``in`` operator.

        :param item: The item to check for.

        Delegates to :meth:`contains` with no ``prereleases`` argument.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        """
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: Optional[bool] = None,
        installed: Optional[bool] = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            A version string or :class:`Version` instance to test.
        :param prereleases:
            Whether prereleases may match.  ``None`` (the default) defers
            to :attr:`prereleases`.

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        # Work with a parsed Version throughout.
        if not isinstance(item, Version):
            item = Version(item)

        # Resolve the prerelease policy: explicit argument wins, otherwise
        # whatever the set itself decides.
        if prereleases is None:
            prereleases = self.prereleases

        # Short-circuit: a prerelease can never be contained when
        # prereleases are not being accepted.
        # NOTE(review): this means 1.0.dev1 is not contained in
        # >=1.0.devabc alone, but is in >=1.0.devabc,>0.0.dev0.
        if not prereleases and item.is_prerelease:
            return False

        # For "installed" checks a prerelease is compared by its release
        # segment only.
        if installed and item.is_prerelease:
            item = Version(item.base_version)

        # Every member specifier must agree.  all() over an empty set is
        # True by design: an empty SpecifierSet contains everything.
        return all(spec.contains(item, prereleases=prereleases) for spec in self._specs)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable that match this set.

        :param iterable:
            Version strings and/or :class:`Version` instances to filter.
        :param prereleases:
            Whether prereleases may appear in the result.  ``None`` (the
            default) decides intelligently: prereleases are returned only
            when nothing else matches (per the :pep:`440` SHOULD rule).

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet filters on prerelease status alone.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        """
        # Resolve the prerelease policy as in contains().
        if prereleases is None:
            prereleases = self.prereleases

        # With specifiers present, chain each specifier's filter over the
        # iterable — a logical AND across the whole set.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)

        # With no specifiers, filter out prereleases unless they are all
        # that's available (or explicitly allowed).
        accepted: List[UnparsedVersionVar] = []
        held_back: List[UnparsedVersionVar] = []

        for item in iterable:
            parsed = _coerce_version(item)

            if parsed.is_prerelease and not prereleases:
                # Keep early prereleases aside in case nothing final shows
                # up; once a final release is seen, stop collecting them.
                if not accepted:
                    held_back.append(item)
            else:
                accepted.append(item)

        # Only prereleases were found and the policy was "autodetect":
        # fall back to them.
        if not accepted and held_back and prereleases is None:
            return iter(held_back)

        return iter(accepted)
|
.venv/Lib/site-packages/wheel/vendored/packaging/tags.py
ADDED
|
@@ -0,0 +1,546 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import logging
|
| 6 |
+
import platform
|
| 7 |
+
import subprocess
|
| 8 |
+
import sys
|
| 9 |
+
import sysconfig
|
| 10 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
| 11 |
+
from typing import (
|
| 12 |
+
Dict,
|
| 13 |
+
FrozenSet,
|
| 14 |
+
Iterable,
|
| 15 |
+
Iterator,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Sequence,
|
| 19 |
+
Tuple,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from . import _manylinux, _musllinux
|
| 25 |
+
|
| 26 |
+
logger = logging.getLogger(__name__)
|
| 27 |
+
|
| 28 |
+
PythonVersion = Sequence[int]
|
| 29 |
+
MacVersion = Tuple[int, int]
|
| 30 |
+
|
| 31 |
+
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
|
| 32 |
+
"python": "py", # Generic.
|
| 33 |
+
"cpython": "cp",
|
| 34 |
+
"pypy": "pp",
|
| 35 |
+
"ironpython": "ip",
|
| 36 |
+
"jython": "jy",
|
| 37 |
+
}
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
_32_BIT_INTERPRETER = sys.maxsize <= 2**32
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality
    checking is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags compare case-insensitively, so normalize at construction.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Every element of a Set[Tag] is re-hashed on each .isdisjoint()
        # call, which can happen hundreds of times while scanning package
        # links.  Pre-computing the hash once here is a significant speedup
        # for downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # The cheap hash comparison goes first so mismatches short-circuit.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
|
| 114 |
+
value = sysconfig.get_config_var(name)
|
| 115 |
+
if value is None and warn:
|
| 116 |
+
logger.debug(
|
| 117 |
+
"Config variable '%s' is unset, Python ABI tag may be incorrect", name
|
| 118 |
+
)
|
| 119 |
+
return value
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _normalize_string(string: str) -> str:
|
| 123 |
+
return string.replace(".", "_").replace("-", "_")
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def _abi3_applies(python_version: PythonVersion) -> bool:
|
| 127 |
+
"""
|
| 128 |
+
Determine if the Python version supports abi3.
|
| 129 |
+
|
| 130 |
+
PEP 384 was first implemented in Python 3.2.
|
| 131 |
+
"""
|
| 132 |
+
return len(python_version) > 1 and tuple(python_version) >= (3, 2)
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """
    Return the CPython ABI tags for *py_version*, most specific first.

    The first entry is the fully-flagged tag (``cp<ver><d><m><u>``); a plain
    ``cp<ver>`` fallback is appended only for 3.8+ debug builds, which can
    also load normal extension modules.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    # ABI flag suffixes: "d" (debug), "m" (pymalloc), "u" (UCS-4).
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        # The "m" flag was dropped in 3.8 and "u" in 3.3; only probe them
        # for older versions.
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            # Major-only version: no concrete ABI can be derived.
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    # Most specific tags first: concrete ABIs, then abi3, then none.
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        # abi3 wheels built for older minor versions (down to 3.2) still load.
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
|
| 226 |
+
|
| 227 |
+
|
| 228 |
+
def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #            => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        # A missing or malformed EXT_SUFFIX means a broken interpreter config.
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        # Derive e.g. "pp39" from the running interpreter.
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    else:
        # Copy so the caller's iterable is not mutated by the append below.
        abis = list(abis)
    platforms = list(platforms or platform_tags())
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
|
| 301 |
+
"""
|
| 302 |
+
Yields Python versions in descending order.
|
| 303 |
+
|
| 304 |
+
After the latest version, the major-only version will be yielded, and then
|
| 305 |
+
all previous versions of that major version.
|
| 306 |
+
"""
|
| 307 |
+
if len(py_version) > 1:
|
| 308 |
+
yield f"py{_version_nodot(py_version[:2])}"
|
| 309 |
+
yield f"py{py_version[0]}"
|
| 310 |
+
if len(py_version) > 1:
|
| 311 |
+
for minor in range(py_version[1] - 1, -1, -1):
|
| 312 |
+
yield f"py{_version_nodot((py_version[0], minor))}"
|
| 313 |
+
|
| 314 |
+
|
| 315 |
+
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    # Platform-specific pure-interpreter tags come first (most specific).
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    # Fully generic "any"-platform tags come last.
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Return the arch a 32-bit interpreter actually runs as on macOS."""
    if not is_32bit:
        return arch
    # 32-bit interpreters report either PowerPC or Intel 32-bit.
    return "ppc" if arch.startswith("ppc") else "i386"
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
|
| 351 |
+
formats = [cpu_arch]
|
| 352 |
+
if cpu_arch == "x86_64":
|
| 353 |
+
if version < (10, 4):
|
| 354 |
+
return []
|
| 355 |
+
formats.extend(["intel", "fat64", "fat32"])
|
| 356 |
+
|
| 357 |
+
elif cpu_arch == "i386":
|
| 358 |
+
if version < (10, 4):
|
| 359 |
+
return []
|
| 360 |
+
formats.extend(["intel", "fat32", "fat"])
|
| 361 |
+
|
| 362 |
+
elif cpu_arch == "ppc64":
|
| 363 |
+
# TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
|
| 364 |
+
if version > (10, 5) or version < (10, 4):
|
| 365 |
+
return []
|
| 366 |
+
formats.append("fat64")
|
| 367 |
+
|
| 368 |
+
elif cpu_arch == "ppc":
|
| 369 |
+
if version > (10, 6):
|
| 370 |
+
return []
|
| 371 |
+
formats.extend(["fat32", "fat"])
|
| 372 |
+
|
| 373 |
+
if cpu_arch in {"arm64", "x86_64"}:
|
| 374 |
+
formats.append("universal2")
|
| 375 |
+
|
| 376 |
+
if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
|
| 377 |
+
formats.append("universal")
|
| 378 |
+
|
| 379 |
+
return formats
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version.
            # Re-query in a subprocess with SYSTEM_VERSION_COMPAT=0 so the OS
            # reports its true version (-sS skips site/user customizations).
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                universal_newlines=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number.  The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number.   The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            # Yield 10.16 down to 10.4 compatibility tags for x86_64.
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            # Non-x86_64 (i.e. arm64) can only match pre-11 "universal2" wheels.
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
|
| 468 |
+
|
| 469 |
+
|
| 470 |
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """
    Yield the Linux platform tags: manylinux, then musllinux, then the plain
    ``linux_<arch>`` tag, most specific first.
    """
    linux = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel must advertise the
        # corresponding 32-bit platform tag.
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv7l"
    _, arch = linux.split("_", 1)
    yield from _manylinux.platform_tags(linux, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
def _generic_platforms() -> Iterator[str]:
|
| 484 |
+
yield _normalize_string(sysconfig.get_platform())
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
|
| 497 |
+
|
| 498 |
+
|
| 499 |
+
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    name = sys.implementation.name
    # Fall back to the full implementation name when no short form is known.
    return INTERPRETER_SHORT_NAMES.get(name) or name
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        # Config variable missing/empty: derive "<major><minor>" ourselves.
        version = _version_nodot(sys.version_info[:2])
    return version
|
| 520 |
+
|
| 521 |
+
|
| 522 |
+
def _version_nodot(version: PythonVersion) -> str:
|
| 523 |
+
return "".join(map(str, version))
|
| 524 |
+
|
| 525 |
+
|
| 526 |
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    interp_name = interpreter_name()
    # Interpreter-specific tags first (most specific / highest priority).
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Choose the interpreter tag used for the <interp>-none-any fallback.
    if interp_name == "pp":
        interp = "pp3"
    elif interp_name == "cp":
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
|
.venv/Lib/site-packages/wheel/vendored/packaging/utils.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
import re
|
| 6 |
+
from typing import FrozenSet, NewType, Tuple, Union, cast
|
| 7 |
+
|
| 8 |
+
from .tags import Tag, parse_tag
|
| 9 |
+
from .version import InvalidVersion, Version
|
| 10 |
+
|
| 11 |
+
BuildTag = Union[Tuple[()], Tuple[int, str]]
|
| 12 |
+
NormalizedName = NewType("NormalizedName", str)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
_canonicalize_regex = re.compile(r"[-_.]+")
|
| 28 |
+
# PEP 427: The build number must start with a digit.
|
| 29 |
+
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
def canonicalize_name(name: str) -> NormalizedName:
    """Normalize a project name: runs of ``-``, ``_``, ``.`` become a single
    ``-`` and the result is lowercased."""
    # This is taken from PEP 503.
    value = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, value)
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.

    With ``strip_trailing_zero`` (the default), trailing ``.0`` components of
    the release segment are removed (``1.2.0`` -> ``1.2``).
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    parts = []

    # Epoch
    if parsed.epoch != 0:
        parts.append(f"{parsed.epoch}!")

    # Release segment
    release_segment = ".".join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release_segment = re.sub(r"(\.0)+$", "", release_segment)
    parts.append(release_segment)

    # Pre-release
    if parsed.pre is not None:
        parts.append("".join(str(x) for x in parsed.pre))

    # Post-release
    if parsed.post is not None:
        parts.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        parts.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        parts.append(f"+{parsed.local}")

    return "".join(parts)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """
    Parse a wheel filename into ``(name, version, build tag, tags)``.

    :raises InvalidWheelFilename: if *filename* does not follow PEP 427.
    """
    # Fix: the f-strings below had lost their {filename} placeholder (they
    # read "... : (unknown)"), so errors no longer named the offending file.
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    # PEP 427: name-version[-build]-interp-abi-platform => 4 or 5 dashes.
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Only split off the last three (tag) fields; the name may contain dashes
    # only in escaped form, so the leading fields stay intact.
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)
    version = Version(parts[1])
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """
    Parse an sdist filename into ``(canonical name, version)``.

    :raises InvalidSdistFilename: if the extension or structure is invalid.
    """
    # Fix: the error f-strings had lost their {filename} placeholder
    # (they read "... : (unknown)"), so errors no longer named the file.
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)
    version = Version(version_part)
    return (name, version)
|
.venv/Lib/site-packages/wheel/vendored/packaging/version.py
ADDED
|
@@ -0,0 +1,563 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from packaging.version import parse, Version
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
import collections
|
| 11 |
+
import itertools
|
| 12 |
+
import re
|
| 13 |
+
from typing import Callable, Optional, SupportsInt, Tuple, Union
|
| 14 |
+
|
| 15 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
| 16 |
+
|
| 17 |
+
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
|
| 18 |
+
|
| 19 |
+
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
|
| 20 |
+
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
|
| 21 |
+
SubLocalType = Union[InfiniteTypes, int, str]
|
| 22 |
+
LocalType = Union[
|
| 23 |
+
NegativeInfinityType,
|
| 24 |
+
Tuple[
|
| 25 |
+
Union[
|
| 26 |
+
SubLocalType,
|
| 27 |
+
Tuple[SubLocalType, str],
|
| 28 |
+
Tuple[NegativeInfinityType, SubLocalType],
|
| 29 |
+
],
|
| 30 |
+
...,
|
| 31 |
+
],
|
| 32 |
+
]
|
| 33 |
+
CmpKey = Tuple[
|
| 34 |
+
int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
|
| 35 |
+
]
|
| 36 |
+
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
|
| 37 |
+
|
| 38 |
+
_Version = collections.namedtuple(
|
| 39 |
+
"_Version", ["epoch", "release", "dev", "pre", "post", "local"]
|
| 40 |
+
)
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def parse(version: str) -> "Version":
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    # Thin convenience wrapper kept for API compatibility.
    return Version(version)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class _BaseVersion:
    """Base class providing total ordering and hashing via a pre-computed
    comparison key (``_key``) supplied by subclasses."""

    _key: CmpKey  # set by subclasses; encodes the full ordering semantics

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    # Anchor the shared, unanchored pattern so the whole string (modulo
    # surrounding whitespace) must be a version.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            # Either spelled post number can match, never both (alternation
            # in the pattern).
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be round-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        _epoch: int = self._version.epoch
        return _epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        _release: Tuple[int, ...] = self._version.release
        return _release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        _pre: Optional[Tuple[str, int]] = self._version.pre
        return _pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
def _parse_letter_version(
|
| 453 |
+
letter: str, number: Union[str, bytes, SupportsInt]
|
| 454 |
+
) -> Optional[Tuple[str, int]]:
|
| 455 |
+
|
| 456 |
+
if letter:
|
| 457 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
| 458 |
+
# not a numeral associated with it.
|
| 459 |
+
if number is None:
|
| 460 |
+
number = 0
|
| 461 |
+
|
| 462 |
+
# We normalize any letters to their lower case form
|
| 463 |
+
letter = letter.lower()
|
| 464 |
+
|
| 465 |
+
# We consider some words to be alternate spellings of other words and
|
| 466 |
+
# in those cases we want to normalize the spellings to our preferred
|
| 467 |
+
# spelling.
|
| 468 |
+
if letter == "alpha":
|
| 469 |
+
letter = "a"
|
| 470 |
+
elif letter == "beta":
|
| 471 |
+
letter = "b"
|
| 472 |
+
elif letter in ["c", "pre", "preview"]:
|
| 473 |
+
letter = "rc"
|
| 474 |
+
elif letter in ["rev", "r"]:
|
| 475 |
+
letter = "post"
|
| 476 |
+
|
| 477 |
+
return letter, int(number)
|
| 478 |
+
if not letter and number:
|
| 479 |
+
# We assume if we are given a number, but we are not given a letter
|
| 480 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
| 481 |
+
letter = "post"
|
| 482 |
+
|
| 483 |
+
return letter, int(number)
|
| 484 |
+
|
| 485 |
+
return None
|
| 486 |
+
|
| 487 |
+
|
| 488 |
+
# Local version segments may be separated by any of ".", "_" or "-".
_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local: str) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is None:
        return None

    segments = []
    for segment in _local_version_separators.split(local):
        # Purely numeric segments become ints; everything else is
        # lower-cased text.
        if segment.isdigit():
            segments.append(int(segment))
        else:
            segments.append(segment.lower())
    return tuple(segments)
|
| 501 |
+
|
| 502 |
+
|
| 503 |
+
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[Tuple[SubLocalType]],
) -> CmpKey:
    """Build the tuple used as the sort key for a parsed version.

    Missing segments are replaced with ``Infinity``/``NegativeInfinity``
    sentinels so plain tuple comparison yields PEP 440 ordering.
    """

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: PrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: PrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: PrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: LocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|
.venv/Lib/site-packages/wheel/vendored/vendor.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
packaging==23.0
|
.venv/Lib/site-packages/wheel/wheelfile.py
ADDED
|
@@ -0,0 +1,196 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import csv
|
| 4 |
+
import hashlib
|
| 5 |
+
import os.path
|
| 6 |
+
import re
|
| 7 |
+
import stat
|
| 8 |
+
import time
|
| 9 |
+
from io import StringIO, TextIOWrapper
|
| 10 |
+
from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo
|
| 11 |
+
|
| 12 |
+
from wheel.cli import WheelError
|
| 13 |
+
from wheel.util import log, urlsafe_b64decode, urlsafe_b64encode
|
| 14 |
+
|
| 15 |
+
# Non-greedy matching of an optional build number may be too clever (more
# invalid wheel filenames will match). Separate regex for .dist-info?
# Groups: name-ver (combined as "namever"), optional build tag, then the
# python/abi/platform compatibility tags, per the wheel filename convention.
WHEEL_INFO_RE = re.compile(
    r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]+?))(-(?P<build>\d[^\s-]*))?
     -(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>\S+)\.whl$""",
    re.VERBOSE,
)
|
| 22 |
+
MINIMUM_TIMESTAMP = 315532800 # 1980-01-01 00:00:00 UTC
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def get_zipinfo_datetime(timestamp=None):
|
| 26 |
+
# Some applications need reproducible .whl files, but they can't do this without
|
| 27 |
+
# forcing the timestamp of the individual ZipInfo objects. See issue #143.
|
| 28 |
+
timestamp = int(os.environ.get("SOURCE_DATE_EPOCH", timestamp or time.time()))
|
| 29 |
+
timestamp = max(timestamp, MINIMUM_TIMESTAMP)
|
| 30 |
+
return time.gmtime(timestamp)[0:6]
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class WheelFile(ZipFile):
    """A ZipFile derivative class that also reads SHA-256 hashes from
    .dist-info/RECORD and checks any read files against those.
    """

    # Algorithm used when hashing files written into the archive.
    _default_algorithm = hashlib.sha256

    def __init__(self, file, mode="r", compression=ZIP_DEFLATED):
        """Open *file* as a wheel; in read mode, load expected hashes from RECORD.

        :raises WheelError: if the filename is not a valid wheel name, if
            RECORD is missing, or if RECORD lists an unsupported/weak hash.
        """
        basename = os.path.basename(file)
        self.parsed_filename = WHEEL_INFO_RE.match(basename)
        if not basename.endswith(".whl") or self.parsed_filename is None:
            raise WheelError(f"Bad wheel filename {basename!r}")

        ZipFile.__init__(self, file, mode, compression=compression, allowZip64=True)

        self.dist_info_path = "{}.dist-info".format(
            self.parsed_filename.group("namever")
        )
        self.record_path = self.dist_info_path + "/RECORD"
        self._file_hashes = {}  # arcname -> (algorithm, digest) or (None, None)
        self._file_sizes = {}  # arcname -> size in bytes
        if mode == "r":
            # Ignore RECORD and any embedded wheel signatures
            self._file_hashes[self.record_path] = None, None
            self._file_hashes[self.record_path + ".jws"] = None, None
            self._file_hashes[self.record_path + ".p7s"] = None, None

            # Fill in the expected hashes by reading them from RECORD
            try:
                record = self.open(self.record_path)
            except KeyError:
                raise WheelError(f"Missing {self.record_path} file") from None

            with record:
                for line in csv.reader(
                    TextIOWrapper(record, newline="", encoding="utf-8")
                ):
                    path, hash_sum, size = line
                    if not hash_sum:
                        continue

                    algorithm, hash_sum = hash_sum.split("=")
                    try:
                        hashlib.new(algorithm)
                    except ValueError:
                        raise WheelError(
                            f"Unsupported hash algorithm: {algorithm}"
                        ) from None

                    if algorithm.lower() in {"md5", "sha1"}:
                        raise WheelError(
                            "Weak hash algorithm ({}) is not permitted by PEP "
                            "427".format(algorithm)
                        )

                    self._file_hashes[path] = (
                        algorithm,
                        urlsafe_b64decode(hash_sum.encode("ascii")),
                    )

    def open(self, name_or_info, mode="r", pwd=None):
        """Open a member file; in read mode, verify its hash against RECORD.

        :raises WheelError: if the file has no RECORD entry, or (while
            reading) its content does not match the recorded hash.
        """

        def _update_crc(newdata):
            eof = ef._eof
            update_crc_orig(newdata)
            running_hash.update(newdata)
            if eof and running_hash.digest() != expected_hash:
                raise WheelError(f"Hash mismatch for file '{ef_name}'")

        ef_name = (
            name_or_info.filename if isinstance(name_or_info, ZipInfo) else name_or_info
        )
        if (
            mode == "r"
            and not ef_name.endswith("/")
            and ef_name not in self._file_hashes
        ):
            raise WheelError(f"No hash found for file '{ef_name}'")

        ef = ZipFile.open(self, name_or_info, mode, pwd)
        if mode == "r" and not ef_name.endswith("/"):
            algorithm, expected_hash = self._file_hashes[ef_name]
            if expected_hash is not None:
                # Monkey patch the _update_crc method to also check for the hash from
                # RECORD
                running_hash = hashlib.new(algorithm)
                update_crc_orig, ef._update_crc = ef._update_crc, _update_crc

        return ef

    def write_files(self, base_dir):
        """Recursively add every file under *base_dir*, .dist-info files last."""
        log.info(f"creating '{self.filename}' and adding '{base_dir}' to it")
        deferred = []
        for root, dirnames, filenames in os.walk(base_dir):
            # Sort the directory names so that `os.walk` will walk them in a
            # defined order on the next iteration.
            dirnames.sort()
            for name in sorted(filenames):
                path = os.path.normpath(os.path.join(root, name))
                if os.path.isfile(path):
                    arcname = os.path.relpath(path, base_dir).replace(os.path.sep, "/")
                    if arcname == self.record_path:
                        pass
                    elif root.endswith(".dist-info"):
                        deferred.append((path, arcname))
                    else:
                        self.write(path, arcname)

        deferred.sort()
        for path, arcname in deferred:
            self.write(path, arcname)

    def write(self, filename, arcname=None, compress_type=None):
        """Add *filename* to the archive, preserving its mode and mtime."""
        with open(filename, "rb") as f:
            st = os.fstat(f.fileno())
            data = f.read()

        zinfo = ZipInfo(
            arcname or filename, date_time=get_zipinfo_datetime(st.st_mtime)
        )
        zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16
        zinfo.compress_type = compress_type or self.compression
        self.writestr(zinfo, data, compress_type)

    def writestr(self, zinfo_or_arcname, data, compress_type=None):
        """Write *data* as a member and record its hash/size for RECORD."""
        if isinstance(zinfo_or_arcname, str):
            zinfo_or_arcname = ZipInfo(
                zinfo_or_arcname, date_time=get_zipinfo_datetime()
            )
            zinfo_or_arcname.compress_type = self.compression
            # Regular file with rw-rw-r-- permissions.
            zinfo_or_arcname.external_attr = (0o664 | stat.S_IFREG) << 16

        if isinstance(data, str):
            data = data.encode("utf-8")

        ZipFile.writestr(self, zinfo_or_arcname, data, compress_type)
        fname = (
            zinfo_or_arcname.filename
            if isinstance(zinfo_or_arcname, ZipInfo)
            else zinfo_or_arcname
        )
        log.info(f"adding '{fname}'")
        if fname != self.record_path:
            hash_ = self._default_algorithm(data)
            self._file_hashes[fname] = (
                hash_.name,
                urlsafe_b64encode(hash_.digest()).decode("ascii"),
            )
            self._file_sizes[fname] = len(data)

    def close(self):
        """Write RECORD (in write mode) and close the underlying ZipFile."""
        # Write RECORD
        if self.fp is not None and self.mode == "w" and self._file_hashes:
            data = StringIO()
            writer = csv.writer(data, delimiter=",", quotechar='"', lineterminator="\n")
            writer.writerows(
                (
                    (fname, algorithm + "=" + hash_, self._file_sizes[fname])
                    for fname, (algorithm, hash_) in self._file_hashes.items()
                )
            )
            # RECORD itself gets an entry with empty hash and size.
            writer.writerow((format(self.record_path), "", ""))
            self.writestr(self.record_path, data.getvalue())

        ZipFile.close(self)
|
.venv/Scripts/activate
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly


if [ "${BASH_SOURCE-}" = "$0" ]; then
    echo "You must source this script: \$ source $0" >&2
    exit 33
fi

deactivate () {
    unset -f pydoc >/dev/null 2>&1 || true

    # reset old environment variables
    # ! [ -z ${VAR+_} ] returns true if VAR is declared at all
    if ! [ -z "${_OLD_VIRTUAL_PATH:+_}" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if ! [ -z "${_OLD_VIRTUAL_PYTHONHOME+_}" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # The hash command must be called to get it to forget past
    # commands. Without forgetting past commands the $PATH changes
    # we made may not be respected
    hash -r 2>/dev/null

    if ! [ -z "${_OLD_VIRTUAL_PS1+_}" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    unset VIRTUAL_ENV_PROMPT
    if [ ! "${1-}" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

# NOTE: Windows-style path baked in at venv creation time; converted to a
# POSIX path below when running under cygwin/msys.
VIRTUAL_ENV='D:\Projetos 2\pac\.venv'
if ([ "$OSTYPE" = "cygwin" ] || [ "$OSTYPE" = "msys" ]) && $(command -v cygpath &> /dev/null) ; then
    VIRTUAL_ENV=$(cygpath -u "$VIRTUAL_ENV")
fi
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/Scripts:$PATH"
export PATH

# Template placeholder: an explicit prompt was not configured, so fall back
# to the venv directory's basename.
if [ "x" != x ] ; then
    VIRTUAL_ENV_PROMPT=""
else
    VIRTUAL_ENV_PROMPT=$(basename "$VIRTUAL_ENV")
fi
export VIRTUAL_ENV_PROMPT

# unset PYTHONHOME if set
if ! [ -z "${PYTHONHOME+_}" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT-}" ] ; then
    _OLD_VIRTUAL_PS1="${PS1-}"
    PS1="(${VIRTUAL_ENV_PROMPT}) ${PS1-}"
    export PS1
fi

# Make sure to unalias pydoc if it's already there
alias pydoc 2>/dev/null >/dev/null && unalias pydoc || true

pydoc () {
    python -m pydoc "$@"
}

# The hash command must be called to get it to forget past
# commands. Without forgetting past commands the $PATH changes
# we made may not be respected
hash -r 2>/dev/null
|
.venv/Scripts/activate.bat
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
@set "VIRTUAL_ENV=D:\Projetos 2\pac\.venv"

@REM No explicit prompt was configured; default to the venv folder name.
@set "VIRTUAL_ENV_PROMPT="
@if NOT DEFINED VIRTUAL_ENV_PROMPT (
    @for %%d in ("%VIRTUAL_ENV%") do @set "VIRTUAL_ENV_PROMPT=%%~nxd"
)

@if defined _OLD_VIRTUAL_PROMPT (
    @set "PROMPT=%_OLD_VIRTUAL_PROMPT%"
) else (
    @if not defined PROMPT (
        @set "PROMPT=$P$G"
    )
    @if not defined VIRTUAL_ENV_DISABLE_PROMPT (
        @set "_OLD_VIRTUAL_PROMPT=%PROMPT%"
    )
)
@if not defined VIRTUAL_ENV_DISABLE_PROMPT (
    @set "PROMPT=(%VIRTUAL_ENV_PROMPT%) %PROMPT%"
)

@REM Don't use () to avoid problems with them in %PATH%
@if defined _OLD_VIRTUAL_PYTHONHOME @goto ENDIFVHOME
    @set "_OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%"
:ENDIFVHOME

@set PYTHONHOME=

@REM if defined _OLD_VIRTUAL_PATH (
@if not defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH1
    @set "PATH=%_OLD_VIRTUAL_PATH%"
:ENDIFVPATH1
@REM ) else (
@if defined _OLD_VIRTUAL_PATH @goto ENDIFVPATH2
    @set "_OLD_VIRTUAL_PATH=%PATH%"
:ENDIFVPATH2

@set "PATH=%VIRTUAL_ENV%\Scripts;%PATH%"
|
.venv/Scripts/activate.fish
ADDED
|
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file must be used using `source bin/activate.fish` *within a running fish ( http://fishshell.com ) session*.
# Do not run it directly.

function _bashify_path -d "Converts a fish path to something bash can recognize"
    # Join the list elements with ':' to form a PATH-style string.
    set fishy_path $argv
    set bashy_path $fishy_path[1]
    for path_part in $fishy_path[2..-1]
        set bashy_path "$bashy_path:$path_part"
    end
    echo $bashy_path
end

function _fishify_path -d "Converts a bash path to something fish can recognize"
    # Split a ':'-separated PATH string into one entry per line.
    echo $argv | tr ':' '\n'
end
|
| 16 |
+
|
| 17 |
+
function deactivate -d 'Exit virtualenv mode and return to the normal environment.'
|
| 18 |
+
# reset old environment variables
|
| 19 |
+
if test -n "$_OLD_VIRTUAL_PATH"
|
| 20 |
+
# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
|
| 21 |
+
if test (echo $FISH_VERSION | head -c 1) -lt 3
|
| 22 |
+
set -gx PATH (_fishify_path "$_OLD_VIRTUAL_PATH")
|
| 23 |
+
else
|
| 24 |
+
set -gx PATH $_OLD_VIRTUAL_PATH
|
| 25 |
+
end
|
| 26 |
+
set -e _OLD_VIRTUAL_PATH
|
| 27 |
+
end
|
| 28 |
+
|
| 29 |
+
if test -n "$_OLD_VIRTUAL_PYTHONHOME"
|
| 30 |
+
set -gx PYTHONHOME "$_OLD_VIRTUAL_PYTHONHOME"
|
| 31 |
+
set -e _OLD_VIRTUAL_PYTHONHOME
|
| 32 |
+
end
|
| 33 |
+
|
| 34 |
+
if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
|
| 35 |
+
and functions -q _old_fish_prompt
|
| 36 |
+
# Set an empty local `$fish_function_path` to allow the removal of `fish_prompt` using `functions -e`.
|
| 37 |
+
set -l fish_function_path
|
| 38 |
+
|
| 39 |
+
# Erase virtualenv's `fish_prompt` and restore the original.
|
| 40 |
+
functions -e fish_prompt
|
| 41 |
+
functions -c _old_fish_prompt fish_prompt
|
| 42 |
+
functions -e _old_fish_prompt
|
| 43 |
+
set -e _OLD_FISH_PROMPT_OVERRIDE
|
| 44 |
+
end
|
| 45 |
+
|
| 46 |
+
set -e VIRTUAL_ENV
|
| 47 |
+
set -e VIRTUAL_ENV_PROMPT
|
| 48 |
+
|
| 49 |
+
if test "$argv[1]" != 'nondestructive'
|
| 50 |
+
# Self-destruct!
|
| 51 |
+
functions -e pydoc
|
| 52 |
+
functions -e deactivate
|
| 53 |
+
functions -e _bashify_path
|
| 54 |
+
functions -e _fishify_path
|
| 55 |
+
end
|
| 56 |
+
end
|
| 57 |
+
|
| 58 |
+
# Unset irrelevant variables.
|
| 59 |
+
deactivate nondestructive
|
| 60 |
+
|
| 61 |
+
set -gx VIRTUAL_ENV 'D:\Projetos 2\pac\.venv'
|
| 62 |
+
|
| 63 |
+
# https://github.com/fish-shell/fish-shell/issues/436 altered PATH handling
|
| 64 |
+
if test (echo $FISH_VERSION | head -c 1) -lt 3
|
| 65 |
+
set -gx _OLD_VIRTUAL_PATH (_bashify_path $PATH)
|
| 66 |
+
else
|
| 67 |
+
set -gx _OLD_VIRTUAL_PATH $PATH
|
| 68 |
+
end
|
| 69 |
+
set -gx PATH "$VIRTUAL_ENV"'/Scripts' $PATH
|
| 70 |
+
|
| 71 |
+
# Prompt override provided?
|
| 72 |
+
# If not, just use the environment name.
|
| 73 |
+
if test -n ''
|
| 74 |
+
set -gx VIRTUAL_ENV_PROMPT ''
|
| 75 |
+
else
|
| 76 |
+
set -gx VIRTUAL_ENV_PROMPT (basename "$VIRTUAL_ENV")
|
| 77 |
+
end
|
| 78 |
+
|
| 79 |
+
# Unset `$PYTHONHOME` if set.
|
| 80 |
+
if set -q PYTHONHOME
|
| 81 |
+
set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
|
| 82 |
+
set -e PYTHONHOME
|
| 83 |
+
end
|
| 84 |
+
|
| 85 |
+
function pydoc
|
| 86 |
+
python -m pydoc $argv
|
| 87 |
+
end
|
| 88 |
+
|
| 89 |
+
if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
|
| 90 |
+
# Copy the current `fish_prompt` function as `_old_fish_prompt`.
|
| 91 |
+
functions -c fish_prompt _old_fish_prompt
|
| 92 |
+
|
| 93 |
+
function fish_prompt
|
| 94 |
+
# Run the user's prompt first; it might depend on (pipe)status.
|
| 95 |
+
set -l prompt (_old_fish_prompt)
|
| 96 |
+
|
| 97 |
+
printf '(%s) ' $VIRTUAL_ENV_PROMPT
|
| 98 |
+
|
| 99 |
+
string join -- \n $prompt # handle multi-line prompts
|
| 100 |
+
end
|
| 101 |
+
|
| 102 |
+
set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
|
| 103 |
+
end
|