Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/index.py +508 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py +384 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/markers.py +162 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py +358 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py +447 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/t64.exe +3 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/util.py +1984 -0
- llava/lib/python3.10/site-packages/pip/_vendor/distlib/wheel.py +1100 -0
- llava/lib/python3.10/site-packages/pip/_vendor/packaging/_elffile.py +110 -0
- llava/lib/python3.10/site-packages/pip/_vendor/packaging/markers.py +331 -0
- llava/lib/python3.10/site-packages/pip/_vendor/packaging/metadata.py +863 -0
- llava/lib/python3.10/site-packages/pip/_vendor/packaging/specifiers.py +1020 -0
- llava/lib/python3.10/site-packages/pip/_vendor/packaging/utils.py +163 -0
- llava/lib/python3.10/site-packages/pip/_vendor/packaging/version.py +582 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/actor_pool_map_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/base_physical_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/map_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/output_splitter.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/task_pool_map_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/union_operator.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/__pycache__/iterator_impl.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/iterator_impl.py +41 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/stream_split_iterator.py +285 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/aggregate.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_all_to_all_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_arrow_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_items_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_numpy_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_pandas_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_read_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_udf_map_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_write_op.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/planner.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/random_shuffle.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/randomize_blocks.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/repartition.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/sort.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/aggregate.py +89 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/exchange/__init__.py +0 -0
- minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/exchange/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -1336,3 +1336,4 @@ minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_
|
|
| 1336 |
parrot/lib/libtsan.so filter=lfs diff=lfs merge=lfs -text
|
| 1337 |
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/idna/__pycache__/idnadata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1338 |
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1336 |
parrot/lib/libtsan.so filter=lfs diff=lfs merge=lfs -text
|
| 1337 |
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/idna/__pycache__/idnadata.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1338 |
minigpt2/lib/python3.10/site-packages/ray/_private/runtime_env/agent/thirdparty_files/__pycache__/typing_extensions.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1339 |
+
llava/lib/python3.10/site-packages/pip/_vendor/distlib/t64.exe filter=lfs diff=lfs merge=lfs -text
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-310.pyc
ADDED
|
Binary file (43.1 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-310.pyc
ADDED
|
Binary file (10.2 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-310.pyc
ADDED
|
Binary file (11.6 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-310.pyc
ADDED
|
Binary file (20.3 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-310.pyc
ADDED
|
Binary file (28.5 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/index.py
ADDED
|
@@ -0,0 +1,508 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2013-2023 Vinay Sajip.
|
| 4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
| 5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 6 |
+
#
|
| 7 |
+
import hashlib
|
| 8 |
+
import logging
|
| 9 |
+
import os
|
| 10 |
+
import shutil
|
| 11 |
+
import subprocess
|
| 12 |
+
import tempfile
|
| 13 |
+
try:
|
| 14 |
+
from threading import Thread
|
| 15 |
+
except ImportError: # pragma: no cover
|
| 16 |
+
from dummy_threading import Thread
|
| 17 |
+
|
| 18 |
+
from . import DistlibException
|
| 19 |
+
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
|
| 20 |
+
urlparse, build_opener, string_types)
|
| 21 |
+
from .util import zip_dir, ServerProxy
|
| 22 |
+
|
| 23 |
+
logger = logging.getLogger(__name__)
|
| 24 |
+
|
| 25 |
+
DEFAULT_INDEX = 'https://pypi.org/pypi'
|
| 26 |
+
DEFAULT_REALM = 'pypi'
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class PackageIndex(object):
|
| 30 |
+
"""
|
| 31 |
+
This class represents a package index compatible with PyPI, the Python
|
| 32 |
+
Package Index.
|
| 33 |
+
"""
|
| 34 |
+
|
| 35 |
+
boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
|
| 36 |
+
|
| 37 |
+
def __init__(self, url=None):
|
| 38 |
+
"""
|
| 39 |
+
Initialise an instance.
|
| 40 |
+
|
| 41 |
+
:param url: The URL of the index. If not specified, the URL for PyPI is
|
| 42 |
+
used.
|
| 43 |
+
"""
|
| 44 |
+
self.url = url or DEFAULT_INDEX
|
| 45 |
+
self.read_configuration()
|
| 46 |
+
scheme, netloc, path, params, query, frag = urlparse(self.url)
|
| 47 |
+
if params or query or frag or scheme not in ('http', 'https'):
|
| 48 |
+
raise DistlibException('invalid repository: %s' % self.url)
|
| 49 |
+
self.password_handler = None
|
| 50 |
+
self.ssl_verifier = None
|
| 51 |
+
self.gpg = None
|
| 52 |
+
self.gpg_home = None
|
| 53 |
+
with open(os.devnull, 'w') as sink:
|
| 54 |
+
# Use gpg by default rather than gpg2, as gpg2 insists on
|
| 55 |
+
# prompting for passwords
|
| 56 |
+
for s in ('gpg', 'gpg2'):
|
| 57 |
+
try:
|
| 58 |
+
rc = subprocess.check_call([s, '--version'], stdout=sink,
|
| 59 |
+
stderr=sink)
|
| 60 |
+
if rc == 0:
|
| 61 |
+
self.gpg = s
|
| 62 |
+
break
|
| 63 |
+
except OSError:
|
| 64 |
+
pass
|
| 65 |
+
|
| 66 |
+
def _get_pypirc_command(self):
|
| 67 |
+
"""
|
| 68 |
+
Get the distutils command for interacting with PyPI configurations.
|
| 69 |
+
:return: the command.
|
| 70 |
+
"""
|
| 71 |
+
from .util import _get_pypirc_command as cmd
|
| 72 |
+
return cmd()
|
| 73 |
+
|
| 74 |
+
def read_configuration(self):
|
| 75 |
+
"""
|
| 76 |
+
Read the PyPI access configuration as supported by distutils. This populates
|
| 77 |
+
``username``, ``password``, ``realm`` and ``url`` attributes from the
|
| 78 |
+
configuration.
|
| 79 |
+
"""
|
| 80 |
+
from .util import _load_pypirc
|
| 81 |
+
cfg = _load_pypirc(self)
|
| 82 |
+
self.username = cfg.get('username')
|
| 83 |
+
self.password = cfg.get('password')
|
| 84 |
+
self.realm = cfg.get('realm', 'pypi')
|
| 85 |
+
self.url = cfg.get('repository', self.url)
|
| 86 |
+
|
| 87 |
+
def save_configuration(self):
|
| 88 |
+
"""
|
| 89 |
+
Save the PyPI access configuration. You must have set ``username`` and
|
| 90 |
+
``password`` attributes before calling this method.
|
| 91 |
+
"""
|
| 92 |
+
self.check_credentials()
|
| 93 |
+
from .util import _store_pypirc
|
| 94 |
+
_store_pypirc(self)
|
| 95 |
+
|
| 96 |
+
def check_credentials(self):
|
| 97 |
+
"""
|
| 98 |
+
Check that ``username`` and ``password`` have been set, and raise an
|
| 99 |
+
exception if not.
|
| 100 |
+
"""
|
| 101 |
+
if self.username is None or self.password is None:
|
| 102 |
+
raise DistlibException('username and password must be set')
|
| 103 |
+
pm = HTTPPasswordMgr()
|
| 104 |
+
_, netloc, _, _, _, _ = urlparse(self.url)
|
| 105 |
+
pm.add_password(self.realm, netloc, self.username, self.password)
|
| 106 |
+
self.password_handler = HTTPBasicAuthHandler(pm)
|
| 107 |
+
|
| 108 |
+
def register(self, metadata): # pragma: no cover
|
| 109 |
+
"""
|
| 110 |
+
Register a distribution on PyPI, using the provided metadata.
|
| 111 |
+
|
| 112 |
+
:param metadata: A :class:`Metadata` instance defining at least a name
|
| 113 |
+
and version number for the distribution to be
|
| 114 |
+
registered.
|
| 115 |
+
:return: The HTTP response received from PyPI upon submission of the
|
| 116 |
+
request.
|
| 117 |
+
"""
|
| 118 |
+
self.check_credentials()
|
| 119 |
+
metadata.validate()
|
| 120 |
+
d = metadata.todict()
|
| 121 |
+
d[':action'] = 'verify'
|
| 122 |
+
request = self.encode_request(d.items(), [])
|
| 123 |
+
self.send_request(request)
|
| 124 |
+
d[':action'] = 'submit'
|
| 125 |
+
request = self.encode_request(d.items(), [])
|
| 126 |
+
return self.send_request(request)
|
| 127 |
+
|
| 128 |
+
def _reader(self, name, stream, outbuf):
|
| 129 |
+
"""
|
| 130 |
+
Thread runner for reading lines of from a subprocess into a buffer.
|
| 131 |
+
|
| 132 |
+
:param name: The logical name of the stream (used for logging only).
|
| 133 |
+
:param stream: The stream to read from. This will typically a pipe
|
| 134 |
+
connected to the output stream of a subprocess.
|
| 135 |
+
:param outbuf: The list to append the read lines to.
|
| 136 |
+
"""
|
| 137 |
+
while True:
|
| 138 |
+
s = stream.readline()
|
| 139 |
+
if not s:
|
| 140 |
+
break
|
| 141 |
+
s = s.decode('utf-8').rstrip()
|
| 142 |
+
outbuf.append(s)
|
| 143 |
+
logger.debug('%s: %s' % (name, s))
|
| 144 |
+
stream.close()
|
| 145 |
+
|
| 146 |
+
def get_sign_command(self, filename, signer, sign_password, keystore=None): # pragma: no cover
|
| 147 |
+
"""
|
| 148 |
+
Return a suitable command for signing a file.
|
| 149 |
+
|
| 150 |
+
:param filename: The pathname to the file to be signed.
|
| 151 |
+
:param signer: The identifier of the signer of the file.
|
| 152 |
+
:param sign_password: The passphrase for the signer's
|
| 153 |
+
private key used for signing.
|
| 154 |
+
:param keystore: The path to a directory which contains the keys
|
| 155 |
+
used in verification. If not specified, the
|
| 156 |
+
instance's ``gpg_home`` attribute is used instead.
|
| 157 |
+
:return: The signing command as a list suitable to be
|
| 158 |
+
passed to :class:`subprocess.Popen`.
|
| 159 |
+
"""
|
| 160 |
+
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
|
| 161 |
+
if keystore is None:
|
| 162 |
+
keystore = self.gpg_home
|
| 163 |
+
if keystore:
|
| 164 |
+
cmd.extend(['--homedir', keystore])
|
| 165 |
+
if sign_password is not None:
|
| 166 |
+
cmd.extend(['--batch', '--passphrase-fd', '0'])
|
| 167 |
+
td = tempfile.mkdtemp()
|
| 168 |
+
sf = os.path.join(td, os.path.basename(filename) + '.asc')
|
| 169 |
+
cmd.extend(['--detach-sign', '--armor', '--local-user',
|
| 170 |
+
signer, '--output', sf, filename])
|
| 171 |
+
logger.debug('invoking: %s', ' '.join(cmd))
|
| 172 |
+
return cmd, sf
|
| 173 |
+
|
| 174 |
+
def run_command(self, cmd, input_data=None):
|
| 175 |
+
"""
|
| 176 |
+
Run a command in a child process , passing it any input data specified.
|
| 177 |
+
|
| 178 |
+
:param cmd: The command to run.
|
| 179 |
+
:param input_data: If specified, this must be a byte string containing
|
| 180 |
+
data to be sent to the child process.
|
| 181 |
+
:return: A tuple consisting of the subprocess' exit code, a list of
|
| 182 |
+
lines read from the subprocess' ``stdout``, and a list of
|
| 183 |
+
lines read from the subprocess' ``stderr``.
|
| 184 |
+
"""
|
| 185 |
+
kwargs = {
|
| 186 |
+
'stdout': subprocess.PIPE,
|
| 187 |
+
'stderr': subprocess.PIPE,
|
| 188 |
+
}
|
| 189 |
+
if input_data is not None:
|
| 190 |
+
kwargs['stdin'] = subprocess.PIPE
|
| 191 |
+
stdout = []
|
| 192 |
+
stderr = []
|
| 193 |
+
p = subprocess.Popen(cmd, **kwargs)
|
| 194 |
+
# We don't use communicate() here because we may need to
|
| 195 |
+
# get clever with interacting with the command
|
| 196 |
+
t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
|
| 197 |
+
t1.start()
|
| 198 |
+
t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
|
| 199 |
+
t2.start()
|
| 200 |
+
if input_data is not None:
|
| 201 |
+
p.stdin.write(input_data)
|
| 202 |
+
p.stdin.close()
|
| 203 |
+
|
| 204 |
+
p.wait()
|
| 205 |
+
t1.join()
|
| 206 |
+
t2.join()
|
| 207 |
+
return p.returncode, stdout, stderr
|
| 208 |
+
|
| 209 |
+
def sign_file(self, filename, signer, sign_password, keystore=None): # pragma: no cover
|
| 210 |
+
"""
|
| 211 |
+
Sign a file.
|
| 212 |
+
|
| 213 |
+
:param filename: The pathname to the file to be signed.
|
| 214 |
+
:param signer: The identifier of the signer of the file.
|
| 215 |
+
:param sign_password: The passphrase for the signer's
|
| 216 |
+
private key used for signing.
|
| 217 |
+
:param keystore: The path to a directory which contains the keys
|
| 218 |
+
used in signing. If not specified, the instance's
|
| 219 |
+
``gpg_home`` attribute is used instead.
|
| 220 |
+
:return: The absolute pathname of the file where the signature is
|
| 221 |
+
stored.
|
| 222 |
+
"""
|
| 223 |
+
cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
|
| 224 |
+
keystore)
|
| 225 |
+
rc, stdout, stderr = self.run_command(cmd,
|
| 226 |
+
sign_password.encode('utf-8'))
|
| 227 |
+
if rc != 0:
|
| 228 |
+
raise DistlibException('sign command failed with error '
|
| 229 |
+
'code %s' % rc)
|
| 230 |
+
return sig_file
|
| 231 |
+
|
| 232 |
+
def upload_file(self, metadata, filename, signer=None, sign_password=None,
|
| 233 |
+
filetype='sdist', pyversion='source', keystore=None):
|
| 234 |
+
"""
|
| 235 |
+
Upload a release file to the index.
|
| 236 |
+
|
| 237 |
+
:param metadata: A :class:`Metadata` instance defining at least a name
|
| 238 |
+
and version number for the file to be uploaded.
|
| 239 |
+
:param filename: The pathname of the file to be uploaded.
|
| 240 |
+
:param signer: The identifier of the signer of the file.
|
| 241 |
+
:param sign_password: The passphrase for the signer's
|
| 242 |
+
private key used for signing.
|
| 243 |
+
:param filetype: The type of the file being uploaded. This is the
|
| 244 |
+
distutils command which produced that file, e.g.
|
| 245 |
+
``sdist`` or ``bdist_wheel``.
|
| 246 |
+
:param pyversion: The version of Python which the release relates
|
| 247 |
+
to. For code compatible with any Python, this would
|
| 248 |
+
be ``source``, otherwise it would be e.g. ``3.2``.
|
| 249 |
+
:param keystore: The path to a directory which contains the keys
|
| 250 |
+
used in signing. If not specified, the instance's
|
| 251 |
+
``gpg_home`` attribute is used instead.
|
| 252 |
+
:return: The HTTP response received from PyPI upon submission of the
|
| 253 |
+
request.
|
| 254 |
+
"""
|
| 255 |
+
self.check_credentials()
|
| 256 |
+
if not os.path.exists(filename):
|
| 257 |
+
raise DistlibException('not found: %s' % filename)
|
| 258 |
+
metadata.validate()
|
| 259 |
+
d = metadata.todict()
|
| 260 |
+
sig_file = None
|
| 261 |
+
if signer:
|
| 262 |
+
if not self.gpg:
|
| 263 |
+
logger.warning('no signing program available - not signed')
|
| 264 |
+
else:
|
| 265 |
+
sig_file = self.sign_file(filename, signer, sign_password,
|
| 266 |
+
keystore)
|
| 267 |
+
with open(filename, 'rb') as f:
|
| 268 |
+
file_data = f.read()
|
| 269 |
+
md5_digest = hashlib.md5(file_data).hexdigest()
|
| 270 |
+
sha256_digest = hashlib.sha256(file_data).hexdigest()
|
| 271 |
+
d.update({
|
| 272 |
+
':action': 'file_upload',
|
| 273 |
+
'protocol_version': '1',
|
| 274 |
+
'filetype': filetype,
|
| 275 |
+
'pyversion': pyversion,
|
| 276 |
+
'md5_digest': md5_digest,
|
| 277 |
+
'sha256_digest': sha256_digest,
|
| 278 |
+
})
|
| 279 |
+
files = [('content', os.path.basename(filename), file_data)]
|
| 280 |
+
if sig_file:
|
| 281 |
+
with open(sig_file, 'rb') as f:
|
| 282 |
+
sig_data = f.read()
|
| 283 |
+
files.append(('gpg_signature', os.path.basename(sig_file),
|
| 284 |
+
sig_data))
|
| 285 |
+
shutil.rmtree(os.path.dirname(sig_file))
|
| 286 |
+
request = self.encode_request(d.items(), files)
|
| 287 |
+
return self.send_request(request)
|
| 288 |
+
|
| 289 |
+
def upload_documentation(self, metadata, doc_dir): # pragma: no cover
|
| 290 |
+
"""
|
| 291 |
+
Upload documentation to the index.
|
| 292 |
+
|
| 293 |
+
:param metadata: A :class:`Metadata` instance defining at least a name
|
| 294 |
+
and version number for the documentation to be
|
| 295 |
+
uploaded.
|
| 296 |
+
:param doc_dir: The pathname of the directory which contains the
|
| 297 |
+
documentation. This should be the directory that
|
| 298 |
+
contains the ``index.html`` for the documentation.
|
| 299 |
+
:return: The HTTP response received from PyPI upon submission of the
|
| 300 |
+
request.
|
| 301 |
+
"""
|
| 302 |
+
self.check_credentials()
|
| 303 |
+
if not os.path.isdir(doc_dir):
|
| 304 |
+
raise DistlibException('not a directory: %r' % doc_dir)
|
| 305 |
+
fn = os.path.join(doc_dir, 'index.html')
|
| 306 |
+
if not os.path.exists(fn):
|
| 307 |
+
raise DistlibException('not found: %r' % fn)
|
| 308 |
+
metadata.validate()
|
| 309 |
+
name, version = metadata.name, metadata.version
|
| 310 |
+
zip_data = zip_dir(doc_dir).getvalue()
|
| 311 |
+
fields = [(':action', 'doc_upload'),
|
| 312 |
+
('name', name), ('version', version)]
|
| 313 |
+
files = [('content', name, zip_data)]
|
| 314 |
+
request = self.encode_request(fields, files)
|
| 315 |
+
return self.send_request(request)
|
| 316 |
+
|
| 317 |
+
def get_verify_command(self, signature_filename, data_filename,
|
| 318 |
+
keystore=None):
|
| 319 |
+
"""
|
| 320 |
+
Return a suitable command for verifying a file.
|
| 321 |
+
|
| 322 |
+
:param signature_filename: The pathname to the file containing the
|
| 323 |
+
signature.
|
| 324 |
+
:param data_filename: The pathname to the file containing the
|
| 325 |
+
signed data.
|
| 326 |
+
:param keystore: The path to a directory which contains the keys
|
| 327 |
+
used in verification. If not specified, the
|
| 328 |
+
instance's ``gpg_home`` attribute is used instead.
|
| 329 |
+
:return: The verifying command as a list suitable to be
|
| 330 |
+
passed to :class:`subprocess.Popen`.
|
| 331 |
+
"""
|
| 332 |
+
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
|
| 333 |
+
if keystore is None:
|
| 334 |
+
keystore = self.gpg_home
|
| 335 |
+
if keystore:
|
| 336 |
+
cmd.extend(['--homedir', keystore])
|
| 337 |
+
cmd.extend(['--verify', signature_filename, data_filename])
|
| 338 |
+
logger.debug('invoking: %s', ' '.join(cmd))
|
| 339 |
+
return cmd
|
| 340 |
+
|
| 341 |
+
def verify_signature(self, signature_filename, data_filename,
|
| 342 |
+
keystore=None):
|
| 343 |
+
"""
|
| 344 |
+
Verify a signature for a file.
|
| 345 |
+
|
| 346 |
+
:param signature_filename: The pathname to the file containing the
|
| 347 |
+
signature.
|
| 348 |
+
:param data_filename: The pathname to the file containing the
|
| 349 |
+
signed data.
|
| 350 |
+
:param keystore: The path to a directory which contains the keys
|
| 351 |
+
used in verification. If not specified, the
|
| 352 |
+
instance's ``gpg_home`` attribute is used instead.
|
| 353 |
+
:return: True if the signature was verified, else False.
|
| 354 |
+
"""
|
| 355 |
+
if not self.gpg:
|
| 356 |
+
raise DistlibException('verification unavailable because gpg '
|
| 357 |
+
'unavailable')
|
| 358 |
+
cmd = self.get_verify_command(signature_filename, data_filename,
|
| 359 |
+
keystore)
|
| 360 |
+
rc, stdout, stderr = self.run_command(cmd)
|
| 361 |
+
if rc not in (0, 1):
|
| 362 |
+
raise DistlibException('verify command failed with error code %s' % rc)
|
| 363 |
+
return rc == 0
|
| 364 |
+
|
| 365 |
+
def download_file(self, url, destfile, digest=None, reporthook=None):
|
| 366 |
+
"""
|
| 367 |
+
This is a convenience method for downloading a file from an URL.
|
| 368 |
+
Normally, this will be a file from the index, though currently
|
| 369 |
+
no check is made for this (i.e. a file can be downloaded from
|
| 370 |
+
anywhere).
|
| 371 |
+
|
| 372 |
+
The method is just like the :func:`urlretrieve` function in the
|
| 373 |
+
standard library, except that it allows digest computation to be
|
| 374 |
+
done during download and checking that the downloaded data
|
| 375 |
+
matched any expected value.
|
| 376 |
+
|
| 377 |
+
:param url: The URL of the file to be downloaded (assumed to be
|
| 378 |
+
available via an HTTP GET request).
|
| 379 |
+
:param destfile: The pathname where the downloaded file is to be
|
| 380 |
+
saved.
|
| 381 |
+
:param digest: If specified, this must be a (hasher, value)
|
| 382 |
+
tuple, where hasher is the algorithm used (e.g.
|
| 383 |
+
``'md5'``) and ``value`` is the expected value.
|
| 384 |
+
:param reporthook: The same as for :func:`urlretrieve` in the
|
| 385 |
+
standard library.
|
| 386 |
+
"""
|
| 387 |
+
if digest is None:
|
| 388 |
+
digester = None
|
| 389 |
+
logger.debug('No digest specified')
|
| 390 |
+
else:
|
| 391 |
+
if isinstance(digest, (list, tuple)):
|
| 392 |
+
hasher, digest = digest
|
| 393 |
+
else:
|
| 394 |
+
hasher = 'md5'
|
| 395 |
+
digester = getattr(hashlib, hasher)()
|
| 396 |
+
logger.debug('Digest specified: %s' % digest)
|
| 397 |
+
# The following code is equivalent to urlretrieve.
|
| 398 |
+
# We need to do it this way so that we can compute the
|
| 399 |
+
# digest of the file as we go.
|
| 400 |
+
with open(destfile, 'wb') as dfp:
|
| 401 |
+
# addinfourl is not a context manager on 2.x
|
| 402 |
+
# so we have to use try/finally
|
| 403 |
+
sfp = self.send_request(Request(url))
|
| 404 |
+
try:
|
| 405 |
+
headers = sfp.info()
|
| 406 |
+
blocksize = 8192
|
| 407 |
+
size = -1
|
| 408 |
+
read = 0
|
| 409 |
+
blocknum = 0
|
| 410 |
+
if "content-length" in headers:
|
| 411 |
+
size = int(headers["Content-Length"])
|
| 412 |
+
if reporthook:
|
| 413 |
+
reporthook(blocknum, blocksize, size)
|
| 414 |
+
while True:
|
| 415 |
+
block = sfp.read(blocksize)
|
| 416 |
+
if not block:
|
| 417 |
+
break
|
| 418 |
+
read += len(block)
|
| 419 |
+
dfp.write(block)
|
| 420 |
+
if digester:
|
| 421 |
+
digester.update(block)
|
| 422 |
+
blocknum += 1
|
| 423 |
+
if reporthook:
|
| 424 |
+
reporthook(blocknum, blocksize, size)
|
| 425 |
+
finally:
|
| 426 |
+
sfp.close()
|
| 427 |
+
|
| 428 |
+
# check that we got the whole file, if we can
|
| 429 |
+
if size >= 0 and read < size:
|
| 430 |
+
raise DistlibException(
|
| 431 |
+
'retrieval incomplete: got only %d out of %d bytes'
|
| 432 |
+
% (read, size))
|
| 433 |
+
# if we have a digest, it must match.
|
| 434 |
+
if digester:
|
| 435 |
+
actual = digester.hexdigest()
|
| 436 |
+
if digest != actual:
|
| 437 |
+
raise DistlibException('%s digest mismatch for %s: expected '
|
| 438 |
+
'%s, got %s' % (hasher, destfile,
|
| 439 |
+
digest, actual))
|
| 440 |
+
logger.debug('Digest verified: %s', digest)
|
| 441 |
+
|
| 442 |
+
def send_request(self, req):
|
| 443 |
+
"""
|
| 444 |
+
Send a standard library :class:`Request` to PyPI and return its
|
| 445 |
+
response.
|
| 446 |
+
|
| 447 |
+
:param req: The request to send.
|
| 448 |
+
:return: The HTTP response from PyPI (a standard library HTTPResponse).
|
| 449 |
+
"""
|
| 450 |
+
handlers = []
|
| 451 |
+
if self.password_handler:
|
| 452 |
+
handlers.append(self.password_handler)
|
| 453 |
+
if self.ssl_verifier:
|
| 454 |
+
handlers.append(self.ssl_verifier)
|
| 455 |
+
opener = build_opener(*handlers)
|
| 456 |
+
return opener.open(req)
|
| 457 |
+
|
| 458 |
+
def encode_request(self, fields, files):
|
| 459 |
+
"""
|
| 460 |
+
Encode fields and files for posting to an HTTP server.
|
| 461 |
+
|
| 462 |
+
:param fields: The fields to send as a list of (fieldname, value)
|
| 463 |
+
tuples.
|
| 464 |
+
:param files: The files to send as a list of (fieldname, filename,
|
| 465 |
+
file_bytes) tuple.
|
| 466 |
+
"""
|
| 467 |
+
# Adapted from packaging, which in turn was adapted from
|
| 468 |
+
# http://code.activestate.com/recipes/146306
|
| 469 |
+
|
| 470 |
+
parts = []
|
| 471 |
+
boundary = self.boundary
|
| 472 |
+
for k, values in fields:
|
| 473 |
+
if not isinstance(values, (list, tuple)):
|
| 474 |
+
values = [values]
|
| 475 |
+
|
| 476 |
+
for v in values:
|
| 477 |
+
parts.extend((
|
| 478 |
+
b'--' + boundary,
|
| 479 |
+
('Content-Disposition: form-data; name="%s"' %
|
| 480 |
+
k).encode('utf-8'),
|
| 481 |
+
b'',
|
| 482 |
+
v.encode('utf-8')))
|
| 483 |
+
for key, filename, value in files:
|
| 484 |
+
parts.extend((
|
| 485 |
+
b'--' + boundary,
|
| 486 |
+
('Content-Disposition: form-data; name="%s"; filename="%s"' %
|
| 487 |
+
(key, filename)).encode('utf-8'),
|
| 488 |
+
b'',
|
| 489 |
+
value))
|
| 490 |
+
|
| 491 |
+
parts.extend((b'--' + boundary + b'--', b''))
|
| 492 |
+
|
| 493 |
+
body = b'\r\n'.join(parts)
|
| 494 |
+
ct = b'multipart/form-data; boundary=' + boundary
|
| 495 |
+
headers = {
|
| 496 |
+
'Content-type': ct,
|
| 497 |
+
'Content-length': str(len(body))
|
| 498 |
+
}
|
| 499 |
+
return Request(self.url, body, headers)
|
| 500 |
+
|
| 501 |
+
def search(self, terms, operator=None): # pragma: no cover
|
| 502 |
+
if isinstance(terms, string_types):
|
| 503 |
+
terms = {'name': terms}
|
| 504 |
+
rpc_proxy = ServerProxy(self.url, timeout=3.0)
|
| 505 |
+
try:
|
| 506 |
+
return rpc_proxy.search(terms, operator or 'and')
|
| 507 |
+
finally:
|
| 508 |
+
rpc_proxy('close')()
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/manifest.py
ADDED
|
@@ -0,0 +1,384 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2012-2023 Python Software Foundation.
|
| 4 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 5 |
+
#
|
| 6 |
+
"""
|
| 7 |
+
Class representing the list of files in a distribution.
|
| 8 |
+
|
| 9 |
+
Equivalent to distutils.filelist, but fixes some problems.
|
| 10 |
+
"""
|
| 11 |
+
import fnmatch
|
| 12 |
+
import logging
|
| 13 |
+
import os
|
| 14 |
+
import re
|
| 15 |
+
import sys
|
| 16 |
+
|
| 17 |
+
from . import DistlibException
|
| 18 |
+
from .compat import fsdecode
|
| 19 |
+
from .util import convert_path
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
__all__ = ['Manifest']
|
| 23 |
+
|
| 24 |
+
logger = logging.getLogger(__name__)

# NOTE(review): described as "a \ followed by some spaces + EOL", but the
# pattern '\\\\w*\n' actually matches a literal backslash followed by 'w'
# characters and a newline — confirm intent before relying on it.
_COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
# Matches a '#' comment up to end-of-line, or a trailing newline at EOF.
_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)

#
# Due to the different results returned by fnmatch.translate, we need
# to do slightly different processing for Python 2.7 and 3.2 ... this needed
# to be brought in for Python 3.6 onwards.
#
_PYTHON_VERSION = sys.version_info[:2]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class Manifest(object):
    """
    A list of files built by exploring the filesystem and filtered by applying various
    patterns to what we find there.
    """

    def __init__(self, base=None):
        """
        Initialise an instance.

        :param base: The base directory to explore under.
        """
        self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
        # Comparisons in add() rely on the prefix ending with the separator.
        self.prefix = self.base + os.sep
        # None means "not yet scanned"; findall() is invoked lazily by
        # _include_pattern when needed.
        self.allfiles = None
        # The selected files, as absolute normalised paths.
        self.files = set()

    #
    # Public API
    #

    def findall(self):
        """Find all files under the base and set ``allfiles`` to the absolute
        pathnames of files found.
        """
        from stat import S_ISREG, S_ISDIR, S_ISLNK

        self.allfiles = allfiles = []
        root = self.base
        stack = [root]
        pop = stack.pop
        push = stack.append

        # Iterative depth-first walk to avoid recursion.
        while stack:
            root = pop()
            names = os.listdir(root)

            for name in names:
                fullname = os.path.join(root, name)

                # Avoid excess stat calls -- just one will do, thank you!
                stat = os.stat(fullname)
                mode = stat.st_mode
                if S_ISREG(mode):
                    allfiles.append(fsdecode(fullname))
                elif S_ISDIR(mode) and not S_ISLNK(mode):
                    # NOTE(review): os.stat follows symlinks, so S_ISLNK(mode)
                    # is always False here and symlinked directories are
                    # traversed; os.lstat would be needed to skip them.
                    push(fullname)

    def add(self, item):
        """
        Add a file to the manifest.

        :param item: The pathname to add. This can be relative to the base.
        """
        # Anything not already under the base prefix is treated as relative.
        if not item.startswith(self.prefix):
            item = os.path.join(self.base, item)
        self.files.add(os.path.normpath(item))

    def add_many(self, items):
        """
        Add a list of files to the manifest.

        :param items: The pathnames to add. These can be relative to the base.
        """
        for item in items:
            self.add(item)

    def sorted(self, wantdirs=False):
        """
        Return sorted files in directory order

        :param wantdirs: If true, also include the ancestor directories of the
                         files (up to the base).
        """

        def add_dir(dirs, d):
            # Add d and, recursively, all its ancestors up to the base.
            dirs.add(d)
            logger.debug('add_dir added %s', d)
            if d != self.base:
                parent, _ = os.path.split(d)
                assert parent not in ('', '/')
                add_dir(dirs, parent)

        result = set(self.files)  # make a copy!
        if wantdirs:
            dirs = set()
            for f in result:
                add_dir(dirs, os.path.dirname(f))
            result |= dirs
        # Sorting (head, tail) pairs yields directory order.
        return [os.path.join(*path_tuple) for path_tuple in
                sorted(os.path.split(path) for path in result)]

    def clear(self):
        """Clear all collected files."""
        self.files = set()
        self.allfiles = []

    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                     compatible with distutils ``MANIFEST.in`` files:

                     http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, anchor=True)

        elif action == 'global-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, anchor=False)

        elif action == 'recursive-include':
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, prefix=thedir)

        elif action == 'graft':
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:  # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException(
                'invalid action %r' % action)

    #
    # Private API
    #

    def _parse_directive(self, directive):
        """
        Validate a directive.
        :param directive: The directive to validate.
        :return: A tuple of action, patterns, thedir, dir_patterns
        """
        words = directive.split()
        if len(words) == 1 and words[0] not in ('include', 'exclude',
                                                'global-include',
                                                'global-exclude',
                                                'recursive-include',
                                                'recursive-exclude',
                                                'graft', 'prune'):
            # no action given, let's use the default 'include'
            words.insert(0, 'include')

        action = words[0]
        patterns = thedir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistlibException(
                    '%r expects <pattern1> <pattern2> ...' % action)

            patterns = [convert_path(word) for word in words[1:]]

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistlibException(
                    '%r expects <dir> <pattern1> <pattern2> ...' % action)

            thedir = convert_path(words[1])
            patterns = [convert_path(word) for word in words[2:]]

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistlibException(
                    '%r expects a single <dir_pattern>' % action)

            dir_pattern = convert_path(words[1])

        else:
            raise DistlibException('unknown action %r' % action)

        return action, patterns, thedir, dir_pattern

    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?' match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match. 'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found

    def _exclude_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.

        Other parameters are the same as for 'include_pattern()', above.
        The list 'self.files' is modified in place. Return True if files are
        found.

        This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
        packaging source distributions
        """
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
        # Iterate over a snapshot: self.files is mutated in the loop.
        for f in list(self.files):
            if pattern_re.search(f):
                self.files.remove(f)
                found = True
        return found

    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex. If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # ditch start and end characters
            # Translating '_' and splitting on it recovers the constant
            # wrapper that fnmatch.translate puts around every pattern.
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            pattern_re = ''

        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                # Re-wrap as: start + base + prefix + sep + '.*' + pattern + end
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:  # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])

        return re.compile(pattern_re)

    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex. Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS. So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/markers.py
ADDED
|
@@ -0,0 +1,162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2012-2023 Vinay Sajip.
|
| 4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
| 5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 6 |
+
#
|
| 7 |
+
"""
|
| 8 |
+
Parser for the environment markers micro-language defined in PEP 508.
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
# Note: In PEP 345, the micro-language was Python compatible, so the ast
|
| 12 |
+
# module could be used to parse it. However, PEP 508 introduced operators such
|
| 13 |
+
# as ~= and === which aren't in Python, necessitating a different approach.
|
| 14 |
+
|
| 15 |
+
import os
|
| 16 |
+
import re
|
| 17 |
+
import sys
|
| 18 |
+
import platform
|
| 19 |
+
|
| 20 |
+
from .compat import string_types
|
| 21 |
+
from .util import in_venv, parse_marker
|
| 22 |
+
from .version import LegacyVersion as LV
|
| 23 |
+
|
| 24 |
+
__all__ = ['interpret']
|
| 25 |
+
|
| 26 |
+
# Matches a bare or quoted dotted version token such as 1.2, '3.10' or "3.10rc1".
_VERSION_PATTERN = re.compile(r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
# Marker names whose comparisons are done on parsed versions, not raw strings
# (see Evaluator.evaluate).
_VERSION_MARKERS = {'python_version', 'python_full_version'}
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def _is_version_marker(s):
    """Return True if *s* is the name of a version-valued environment marker."""
    if not isinstance(s, string_types):
        return False
    return s in _VERSION_MARKERS
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _is_literal(o):
    """Return True if *o* is a non-empty string beginning with a quote character."""
    return isinstance(o, string_types) and bool(o) and o[0] in '\'"'
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def _get_versions(s):
    """Extract every version-like token in *s* as a set of parsed versions."""
    found = set()
    for m in _VERSION_PATTERN.finditer(s):
        found.add(LV(m.groups()[0]))
    return found
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class Evaluator(object):
    """
    This class is used to evaluate marker expressions.
    """

    # Binary operators supported in marker expressions, mapped to their
    # implementations.
    operations = {
        '==': lambda x, y: x == y,
        '===': lambda x, y: x == y,
        '~=': lambda x, y: x == y or x > y,
        '!=': lambda x, y: x != y,
        '<': lambda x, y: x < y,
        '<=': lambda x, y: x == y or x < y,
        '>': lambda x, y: x > y,
        '>=': lambda x, y: x == y or x > y,
        'and': lambda x, y: x and y,
        'or': lambda x, y: x or y,
        'in': lambda x, y: x in y,
        'not in': lambda x, y: x not in y,
    }

    def evaluate(self, expr, context):
        """
        Evaluate a marker expression returned by the :func:`parse_requirement`
        function in the specified context.
        """
        # Leaves of the parse tree are strings: either quoted literals or
        # variable names to look up in the context.
        if isinstance(expr, string_types):
            if expr[0] in '\'"':
                return expr[1:-1]
            if expr not in context:
                raise SyntaxError('unknown variable: %s' % expr)
            return context[expr]

        # Interior nodes are dicts with 'op', 'lhs' and 'rhs' keys.
        assert isinstance(expr, dict)
        op = expr['op']
        if op not in self.operations:
            raise NotImplementedError('op not implemented: %s' % op)
        elhs = expr['lhs']
        erhs = expr['rhs']
        # Comparing two literals is meaningless and rejected outright.
        if _is_literal(elhs) and _is_literal(erhs):
            raise SyntaxError('invalid comparison: %s %s %s' % (elhs, op, erhs))

        lhs = self.evaluate(elhs, context)
        rhs = self.evaluate(erhs, context)
        # Comparisons involving python_version / python_full_version are done
        # on parsed versions rather than raw strings.
        if ((_is_version_marker(elhs) or _is_version_marker(erhs)) and
                op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
            lhs = LV(lhs)
            rhs = LV(rhs)
        elif _is_version_marker(elhs) and op in ('in', 'not in'):
            lhs = LV(lhs)
            rhs = _get_versions(rhs)
        return self.operations[op](lhs, rhs)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
# Matches the leading major.minor portion of a version string.
_DIGITS = re.compile(r'\d+\.\d+')
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
def default_context():
    """Build the default marker-evaluation context from the running interpreter
    and platform."""

    def _full_version(info):
        # major.minor.micro, with a release-level suffix for non-final builds.
        version = '%s.%s.%s' % (info.major, info.minor, info.micro)
        if info.releaselevel != 'final':
            version += info.releaselevel[0] + str(info.serial)
        return version

    if hasattr(sys, 'implementation'):
        impl_name = sys.implementation.name
        impl_version = _full_version(sys.implementation.version)
    else:
        # Very old interpreters without sys.implementation.
        impl_name = ''
        impl_version = '0'

    full_version = platform.python_version()
    # python_version is just the major.minor prefix.
    short_version = _DIGITS.match(full_version).group(0)
    return {
        'implementation_name': impl_name,
        'implementation_version': impl_version,
        'os_name': os.name,
        'platform_machine': platform.machine(),
        'platform_python_implementation': platform.python_implementation(),
        'platform_release': platform.release(),
        'platform_system': platform.system(),
        'platform_version': platform.version(),
        'platform_in_venv': str(in_venv()),
        'python_full_version': full_version,
        'python_version': short_version,
        'sys_platform': sys.platform,
    }
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# Computed once at import time; interpret() copies it for each evaluation.
DEFAULT_CONTEXT = default_context()
del default_context

# Shared evaluator instance used by interpret().
evaluator = Evaluator()
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    :raises SyntaxError: If the marker cannot be parsed, or if anything other
                         than a comment follows it.
    """
    try:
        expr, rest = parse_marker(marker)
    except Exception as e:
        raise SyntaxError('Unable to interpret marker syntax: %s: %s' % (marker, e))
    # Anything left over after the marker, other than a comment, is an error.
    if rest and rest[0] != '#':
        raise SyntaxError('unexpected trailing data in marker: %s: %s' % (marker, rest))
    # Layer any caller-supplied names over the defaults without mutating them.
    ctx = dict(DEFAULT_CONTEXT)
    if execution_context:
        ctx.update(execution_context)
    return evaluator.evaluate(expr, ctx)
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/resources.py
ADDED
|
@@ -0,0 +1,358 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2013-2017 Vinay Sajip.
|
| 4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
| 5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 6 |
+
#
|
| 7 |
+
from __future__ import unicode_literals
|
| 8 |
+
|
| 9 |
+
import bisect
|
| 10 |
+
import io
|
| 11 |
+
import logging
|
| 12 |
+
import os
|
| 13 |
+
import pkgutil
|
| 14 |
+
import sys
|
| 15 |
+
import types
|
| 16 |
+
import zipimport
|
| 17 |
+
|
| 18 |
+
from . import DistlibException
|
| 19 |
+
from .util import cached_property, get_cache_base, Cache
|
| 20 |
+
|
| 21 |
+
logger = logging.getLogger(__name__)
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
# Module-level ResourceCache, created lazily on first use (see Resource.file_path).
cache = None  # created when needed
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class ResourceCache(Cache):
    """File-system cache used to expose resources as real files on disk."""

    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # Already a real file on disk; nothing to cache.
            return path
        result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
        target_dir = os.path.dirname(result)
        if not os.path.isdir(target_dir):
            os.makedirs(target_dir)
        stale = True if not os.path.exists(result) else self.is_stale(resource, path)
        if stale:
            # write the bytes of the resource to the cache location
            with open(result, 'wb') as f:
                f.write(resource.bytes)
        return result
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class ResourceBase(object):
    """State common to resources and resource containers: the finder that
    located the entry and its name."""

    def __init__(self, finder, name):
        self.name = name
        self.finder = finder
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False  # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        # Materialise the resource as a real file on disk via the module-level
        # cache, which is created lazily on first use.
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        # Raw content of the resource; computed once (cached_property).
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        # Size in bytes, delegated to the finder.
        return self.finder.get_size(self)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class ResourceContainer(ResourceBase):
    """A resource which contains other resources (e.g. a directory)."""
    is_container = True  # Backwards compatibility

    @cached_property
    def resources(self):
        # Names of the immediate children, as reported by the finder.
        return self.finder.get_resources(self)
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    # Bytecode (and, on Jython, class files) are not considered resources.
    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        # Resources are located relative to the module's containing directory.
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        # Canonicalise the path (resolves symlinks).
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):  # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        # Existence check; subclasses with other storage can override.
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # A None prefix tells ResourceCache.get the resource is already a
        # real file and needs no caching.
        return None, resource.path

    def find(self, resource_name):
        """Return a :class:`Resource` or :class:`ResourceContainer` for the
        named resource, or None if it does not exist.
        """
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        # Caller is responsible for closing the returned stream.
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        # Immediate child names of a container, minus bytecode artefacts.
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        return set([f for f in os.listdir(resource.path) if allowed(f)])

    def is_container(self, resource):
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """Yield the named resource and everything beneath it. Containers are
        processed FIFO; leaf children are yielded while their parent is
        being processed.
        """
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            # Resource names always use '/' as the separator.
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child
|
| 206 |
+
|
| 207 |
+
|
| 208 |
+
class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # Length of the archive path plus the separator; used to strip the
        # archive prefix from absolute paths.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted list of entry names, for bisect-based prefix searches.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        # No realpath for in-archive paths; they are logical, not physical.
        return path

    def _find(self, path):
        """Return True if *path* exists in the archive (file or directory)."""
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            # Directories are implicit: look for any entry with this prefix.
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        # (archive, relative-path) pair for the resource cache.
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        # zipimporter has no stream API, so wrap the bytes.
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        # Index 3 of the zipimport directory entry is the uncompressed size.
        path = resource.path[self.prefix_len:]
        return self._files[path][3]

    def get_resources(self, resource):
        """Return the set of names of a container's immediate children."""
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        # Walk the contiguous run of index entries sharing this prefix.
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        # A path is a directory if some archive entry lies beneath it.
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
# Map loader type -> finder factory. A None loader (e.g. namespace-style
# imports) falls back to the plain file-system finder.
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    # See issue #146
    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    pass
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
def register_finder(loader, finder_maker):
    """Register *finder_maker* as the finder factory for *loader*'s type."""
    _finder_registry[type(loader)] = finder_maker
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
# Cache of package name -> ResourceFinder instance, populated by finder().
_finder_cache = {}
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    # Fast path: one finder per package, cached for the process lifetime.
    try:
        return _finder_cache[package]
    except KeyError:
        pass
    if package not in sys.modules:
        __import__(package)
    module = sys.modules[package]
    if getattr(module, '__path__', None) is None:
        # Only packages (which have __path__) can contain resources.
        raise DistlibException('You cannot get a finder for a module, '
                               'only for a package')
    loader = getattr(module, '__loader__', None)
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        raise DistlibException('Unable to locate finder for %r' % package)
    result = finder_maker(module)
    _finder_cache[package] = result
    return result
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
# Throwaway module object reused by finder_for_path() to satisfy the
# finder-factory interface, which expects a module with __file__/__loader__.
_dummy_module = types.ModuleType(str('__dummy__'))
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path, or ``None``
             if no finder factory is registered for the path's importer.
    """
    # Invoke any path hooks so the importer lands in the cache.
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        return None
    # Dress up a dummy module so the factory sees the expected attributes.
    module = _dummy_module
    module.__file__ = os.path.join(path, '')
    module.__loader__ = loader
    return finder_maker(module)
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/scripts.py
ADDED
|
@@ -0,0 +1,447 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2013-2023 Vinay Sajip.
|
| 4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
| 5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 6 |
+
#
|
| 7 |
+
from io import BytesIO
|
| 8 |
+
import logging
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import struct
|
| 12 |
+
import sys
|
| 13 |
+
import time
|
| 14 |
+
from zipfile import ZipInfo
|
| 15 |
+
|
| 16 |
+
from .compat import sysconfig, detect_encoding, ZipFile
|
| 17 |
+
from .resources import finder
|
| 18 |
+
from .util import (FileOperator, get_export_entry, convert_path, get_executable, get_platform, in_venv)
|
| 19 |
+
|
| 20 |
+
logger = logging.getLogger(__name__)
|
| 21 |
+
|
| 22 |
+
# Windows side-by-side manifest embedded in generated launcher executables;
# %s is filled with the executable's base name by get_manifest().
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
 <assemblyIdentity version="1.0.0.0"
 processorArchitecture="X86"
 name="%s"
 type="win32"/>

 <!-- Identify the application security requirements. -->
 <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
 <security>
 <requestedPrivileges>
 <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
 </requestedPrivileges>
 </security>
 </trustInfo>
 </assembly>'''.strip()

# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')

# Template for scripts generated from export entries; %(module)s,
# %(import_name)s and %(func)s are filled in by _get_script_text().
SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
import re
import sys
from %(module)s import %(import_name)s
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(%(func)s())
'''
|
| 50 |
+
|
| 51 |
+
# Pre-fetch the contents of all executable wrapper stubs.
# This is to address https://github.com/pypa/pip/issues/12666.
# When updating pip, we rename the old pip in place before installing the
# new version. If we try to fetch a wrapper *after* that rename, the finder
# machinery will be confused as the package is no longer available at the
# location where it was imported from. So we load everything into memory in
# advance.

if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):
    # Issue 31: don't hardcode an absolute package name, but
    # determine it relative to the current package
    DISTLIB_PACKAGE = __name__.rsplit('.', 1)[0]

    # Map wrapper file name (e.g. 't64.exe') -> its bytes.
    WRAPPERS = {
        r.name: r.bytes
        for r in finder(DISTLIB_PACKAGE).iterator("")
        if r.name.endswith(".exe")
    }
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def enquote_executable(executable):
    """Return *executable*, double-quoting the path if it contains spaces.

    For a '/usr/bin/env <interpreter>' invocation only the interpreter part
    is quoted, so the env prefix stays callable; already-quoted values are
    returned unchanged.
    """
    if ' ' not in executable:
        return executable
    # make sure we quote only the executable in case of env
    # for example /usr/bin/env "/dir with spaces/bin/jython"
    # instead of "/usr/bin/env /dir with spaces/bin/jython"
    # otherwise whole
    if executable.startswith('/usr/bin/env '):
        env, interpreter = executable.split(' ', 1)
        if ' ' in interpreter and not interpreter.startswith('"'):
            executable = '%s "%s"' % (env, interpreter)
    elif not executable.startswith('"'):
        executable = '"%s"' % executable
    return executable
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
# Keep the old name around (for now), as there is at least one project using it!
_enquote_executable = enquote_executable
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    # Template used for generated entry-point scripts.
    script_template = SCRIPT_TEMPLATE

    # Interpreter path to use in shebangs; None means "auto-detect".
    executable = None  # for shebangs

    def __init__(self, source_dir, target_dir, add_launchers=True, dry_run=False, fileop=None):
        # source_dir: where scripts to copy live; target_dir: where output goes.
        self.source_dir = source_dir
        self.target_dir = target_dir
        # Whether to wrap scripts in .exe launchers on Windows.
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java' and os._name == 'posix')
        # Which name variants to generate; see get_script_filenames().
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (os.name == 'java' and os._name == 'nt')
        self.version_info = sys.version_info

    def _get_alternate_executable(self, executable, options):
        # For GUI scripts on Windows, switch python.exe -> pythonw.exe.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover

        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            # Return an executable usable in a Jython shebang; shell scripts
            # cannot be shebang targets directly, so route through env.
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _build_shebang(self, executable, post_interp):
        """
        Build a shebang line. In the simple case (on Windows, or a shebang line
        which is not too long or contains spaces) use a simple formulation for
        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
        shebang which allows the script to run either under Python or sh, using
        suitable quoting. Thanks to Harald Nordgren for his input.

        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
                  https://hg.mozilla.org/mozilla-central/file/tip/mach
        """
        if os.name != 'posix':
            simple_shebang = True
        elif getattr(sys, "cross_compiling", False):
            # In a cross-compiling environment, the shebang will likely be a
            # script; this *must* be invoked with the "safe" version of the
            # shebang, or else using os.exec() to run the entry script will
            # fail, raising "OSError 8 [Errno 8] Exec format error".
            simple_shebang = False
        else:
            # Add 3 for '#!' prefix and newline suffix.
            shebang_length = len(executable) + len(post_interp) + 3
            if sys.platform == 'darwin':
                max_shebang_length = 512
            else:
                max_shebang_length = 127
            simple_shebang = ((b' ' not in executable) and (shebang_length <= max_shebang_length))

        if simple_shebang:
            result = b'#!' + executable + post_interp + b'\n'
        else:
            # sh/Python polyglot: sh sees an exec line, Python sees a string.
            result = b'#!/bin/sh\n'
            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
            result += b"' '''\n"
        return result

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        """Return the shebang bytes for a script in *encoding*.

        Resolves the interpreter path, applies option-based substitutions,
        and verifies the result is decodable from UTF-8 (and *encoding*).
        """
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False  # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(sysconfig.get_path('scripts'), 'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            if os.name == 'nt':
                # for Python builds from source on Windows, no Python executables with
                # a version suffix are created, so we use python.exe
                executable = os.path.join(sysconfig.get_config_var('BINDIR'),
                                          'python%s' % (sysconfig.get_config_var('EXE')))
            else:
                executable = os.path.join(
                    sysconfig.get_config_var('BINDIR'),
                    'python%s%s' % (sysconfig.get_config_var('VERSION'), sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)

        # Normalise case for Windows - COMMENTED OUT
        # executable = os.path.normcase(executable)
        # N.B. The normalising operation above has been commented out: See
        # issue #124. Although paths in Windows are generally case-insensitive,
        # they aren't always. For example, a path containing a ẞ (which is a
        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
        # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by
        # Windows as equivalent in path names.

        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp and
                '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = self._build_shebang(executable, post_interp)
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError('The shebang (%r) is not decodable from utf-8' % shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError('The shebang (%r) is not decodable '
                                 'from the script encoding (%r)' % (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        # Render the entry-point script body for an export entry.
        return self.script_template % dict(
            module=entry.prefix, import_name=entry.suffix.split('.')[0], func=entry.suffix)

    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        """Return the Windows manifest XML for the named executable."""
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        """Write *script_bytes* under each name in *names*.

        On Windows (with launchers enabled) the script is embedded as
        __main__.py in a zip appended to a launcher stub; otherwise the
        shebang is simply prepended. Written paths are appended to
        *filenames*.
        """
        use_launcher = self.add_launchers and self._is_nt
        if not use_launcher:
            script_bytes = shebang + script_bytes
        else:  # pragma: no cover
            # 't' = console launcher, 'w' = windowed (GUI) launcher.
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                # Honour SOURCE_DATE_EPOCH for reproducible builds.
                source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
                if source_date_epoch:
                    date_time = time.gmtime(int(source_date_epoch))[:6]
                    zinfo = ZipInfo(filename='__main__.py', date_time=date_time)
                    zf.writestr(zinfo, script_bytes)
                else:
                    zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            script_bytes = launcher + shebang + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)  # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass  # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith('.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
            filenames.append(outname)

    # Separator between the base name and the version suffix in the
    # 'X.Y' variant (e.g. 'pip-3.10').
    variant_separator = '-'

    def get_script_filenames(self, name):
        """Return the set of file names to generate for script *name*,
        one per enabled entry in self.variants."""
        result = set()
        if '' in self.variants:
            result.add(name)
        if 'X' in self.variants:
            result.add('%s%s' % (name, self.version_info[0]))
        if 'X.Y' in self.variants:
            result.add('%s%s%s.%s' % (name, self.variant_separator, self.version_info[0], self.version_info[1]))
        return result

    def _make_script(self, entry, filenames, options=None):
        """Generate a script from an export entry specification."""
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        scriptnames = self.get_script_filenames(entry.name)
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        """Copy *script* from source_dir to target_dir, rewriting its
        shebang if the first line invokes Python."""
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                logger.warning('%s is an empty file (skipping)', script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script, self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
        if f:
            f.close()

    @property
    def dry_run(self):
        # Delegate dry-run state to the file operator.
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            """Return launcher stub bytes: kind 't' (console) or 'w' (GUI),
            matching the current pointer size and architecture."""
            if struct.calcsize('P') == 8:  # 64-bit
                bits = '64'
            else:
                bits = '32'
            platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
            name = '%s%s%s.exe' % (kind, bits, platform_suffix)
            if name not in WRAPPERS:
                msg = ('Unable to find resource %s in package %s' %
                       (name, DISTLIB_PACKAGE))
                raise ValueError(msg)
            return WRAPPERS[name]

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/t64.exe
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:81a618f21cb87db9076134e70388b6e9cb7c2106739011b6a51772d22cae06b7
|
| 3 |
+
size 108032
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/util.py
ADDED
|
@@ -0,0 +1,1984 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#
|
| 2 |
+
# Copyright (C) 2012-2023 The Python Software Foundation.
|
| 3 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 4 |
+
#
|
| 5 |
+
import codecs
|
| 6 |
+
from collections import deque
|
| 7 |
+
import contextlib
|
| 8 |
+
import csv
|
| 9 |
+
from glob import iglob as std_iglob
|
| 10 |
+
import io
|
| 11 |
+
import json
|
| 12 |
+
import logging
|
| 13 |
+
import os
|
| 14 |
+
import py_compile
|
| 15 |
+
import re
|
| 16 |
+
import socket
|
| 17 |
+
try:
|
| 18 |
+
import ssl
|
| 19 |
+
except ImportError: # pragma: no cover
|
| 20 |
+
ssl = None
|
| 21 |
+
import subprocess
|
| 22 |
+
import sys
|
| 23 |
+
import tarfile
|
| 24 |
+
import tempfile
|
| 25 |
+
import textwrap
|
| 26 |
+
|
| 27 |
+
try:
|
| 28 |
+
import threading
|
| 29 |
+
except ImportError: # pragma: no cover
|
| 30 |
+
import dummy_threading as threading
|
| 31 |
+
import time
|
| 32 |
+
|
| 33 |
+
from . import DistlibException
|
| 34 |
+
from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib,
|
| 35 |
+
xmlrpclib, HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile,
|
| 36 |
+
fsdecode, unquote, urlparse)
|
| 37 |
+
|
| 38 |
+
logger = logging.getLogger(__name__)
|
| 39 |
+
|
| 40 |
+
#
|
| 41 |
+
# Requirement parsing code as per PEP 508
|
| 42 |
+
#
|
| 43 |
+
|
| 44 |
+
IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
|
| 45 |
+
VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
|
| 46 |
+
COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
|
| 47 |
+
MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
|
| 48 |
+
OR = re.compile(r'^or\b\s*')
|
| 49 |
+
AND = re.compile(r'^and\b\s*')
|
| 50 |
+
NON_SPACE = re.compile(r'(\S+)\s*')
|
| 51 |
+
STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).

    This is a small recursive-descent parser: ``marker`` handles ``or``,
    ``marker_and`` handles ``and``, ``marker_expr`` handles comparisons and
    parenthesised sub-expressions, and ``marker_var`` handles identifiers and
    quoted string literals. Each helper returns ``(result, remaining)`` where
    ``remaining`` is the unconsumed input.
    """

    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            # Must be a quoted string literal; q is the quote character used,
            # oq the other quote character (allowed unescaped inside).
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            oq = '\'"'.replace(q, '')
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' % remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                # while-else: input exhausted without hitting the closing quote
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip()  # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        # Parenthesised sub-expression, or a (possibly chained) comparison.
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                # left-associative: fold each comparison into the lhs
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        # One or more marker_exprs joined by 'and' (binds tighter than 'or').
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        # One or more marker_ands joined by 'or' (lowest precedence).
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.

    The returned Container has attributes ``name``, ``extras``,
    ``constraints`` (list of (op, version) tuples or None), ``marker``,
    ``url`` and ``requirement`` (a normalized "name op ver, ..." string).
    Returns None for blank lines and comment lines.
    """
    remaining = req.strip()
    if not remaining or remaining.startswith('#'):
        # blank line or comment: nothing to parse
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    if remaining and remaining[0] == '[':
        # optional [extra1, extra2, ...] part
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' % ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would break things
                        # See issue #148
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
                if not versions:
                    versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                # bare constraint list: "name >= 1.0, < 2.0"
                versions, remaining = get_versions(remaining)
            else:
                # parenthesised constraints: "name (>= 1.0)" or "name (1.0)"
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    if remaining:
        # optional environment marker after ';'
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()

        mark_expr, remaining = parse_marker(remaining)

    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
    return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs)
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files.

    ``rules`` is an iterable of ``(base, suffix, dest)`` glob rules applied
    under ``resources_root``; returns a dict mapping each matched resource
    file (relative to ``resources_root``) to its destination path. A rule
    whose ``dest`` is None removes earlier matches.
    """

    def rel_to(root, path):
        # Normalize separators to '/' and return path relative to root,
        # with no leading '/'.
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        for abs_base in iglob(os.path.join(resources_root, base)):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = rel_to(resources_root, abs_path)
                if dest is None:
                    # a None destination removes any earlier mapping
                    destinations.pop(resource_file, None)
                else:
                    tail = rel_to(abs_base, abs_path)
                    prefix = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = prefix + '/' + tail
    return destinations
|
| 292 |
+
|
| 293 |
+
|
| 294 |
+
def in_venv():
    """Return True if running inside a virtual environment."""
    # virtualenv sets sys.real_prefix on the interpreter it creates
    if hasattr(sys, 'real_prefix'):
        return True
    # PEP 405 venvs: prefix differs from base_prefix
    return sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def get_executable():
    """Return the path of the running interpreter, as text."""
    # The __PYVENV_LAUNCHER__ environment dance is no longer needed: the
    # stub launcher now makes sys.executable point to the stub on OS X.
    # Deliberately NOT normcased -- see issue #143.
    exe = sys.executable
    if isinstance(exe, text_type):
        return exe
    return fsdecode(exe)
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """
    Prompt the user repeatedly until the first character of the (lowercased)
    response is one of ``allowed_chars``; return that character.

    :param prompt: The text shown to the user on each attempt.
    :param allowed_chars: Characters accepted as valid answers.
    :param error_prompt: Optional extra text prefixed to the prompt after an
                         invalid answer.
    :param default: Optional response used when the user enters nothing.
    """
    p = prompt
    while True:
        s = raw_input(p)
        # reset to the plain prompt; may be replaced below on error
        p = prompt
        if not s and default:
            s = default
        if s:
            c = s[0].lower()
            if c in allowed_chars:
                break
        if error_prompt:
            # NOTE(review): if the first response is empty and no default is
            # given, `c` is unbound here and this raises NameError -- confirm
            # callers always supply a default or non-empty input
            p = '%c: %s\n%s' % (c, error_prompt, prompt)
    return c
|
| 336 |
+
|
| 337 |
+
|
| 338 |
+
def extract_by_key(d, keys):
    """Return a new dict with only the entries of ``d`` whose key is in
    ``keys``.

    ``keys`` may be an iterable of keys or a single whitespace-separated
    string of keys.
    """
    if isinstance(keys, string_types):
        keys = keys.split()
    return {k: d[k] for k in keys if k in d}
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
def read_exports(stream):
    """
    Read exports from ``stream`` and return a dict mapping group names to
    dicts of ``{name: export entry}``.

    The data is first tried as JSON (the ``extensions -> python.exports ->
    exports`` structure); on any failure it falls back to the legacy INI
    format parsed with configparser.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        for group, entries in result.items():
            for k, v in entries.items():
                # convert each "name = value" pair into an export entry object
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # not JSON: rewind and fall through to the INI parser below
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # read_file replaced readfp in newer configparser versions
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # retry with the text dedented, in case the data was indented
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            # entry.dist = self
            entries[name] = entry
    return result
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
def write_exports(exports, stream):
    """Write ``exports`` (a dict of ``{group: {name: export entry}}``) to
    ``stream`` in INI format.

    On Python 3 the (binary) stream is wrapped in a UTF-8 text writer.
    Each entry is serialized as ``prefix[:suffix] [flag, ...]``.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for group, entries in exports.items():
        # TODO check group, entries for valid values
        cp.add_section(group)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(group, entry.name, value)
    cp.write(stream)
|
| 412 |
+
|
| 413 |
+
|
| 414 |
+
@contextlib.contextmanager
def tempdir():
    """Context manager yielding a freshly created temporary directory,
    removed (with its contents) on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
@contextlib.contextmanager
def chdir(d):
    """Context manager running its body with ``d`` as the current working
    directory, restoring the previous one on exit."""
    saved = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(saved)
|
| 431 |
+
|
| 432 |
+
|
| 433 |
+
@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Context manager temporarily setting the global default socket
    timeout, restoring the previous value on exit."""
    saved = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(saved)
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
class cached_property(object):
    """Non-data descriptor that computes an attribute on first access and
    then stores the result on the instance, so subsequent lookups bypass
    the descriptor entirely (the wrapped function runs at most once per
    instance)."""

    def __init__(self, func):
        # the wrapped getter; its __name__ is the attribute we shadow
        self.func = func

    def __get__(self, obj, cls=None):
        # class-level access returns the descriptor itself
        if obj is None:
            return self
        result = self.func(obj)
        # cache on the instance under the function's own name
        object.__setattr__(obj, self.func.__name__, result)
        return result
|
| 457 |
+
|
| 458 |
+
|
| 459 |
+
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    # nothing to do on Unix-ish systems, or for an empty path
    if os.sep == '/' or not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    # drop any '.' components before rejoining with the native separator
    components = [part for part in pathname.split('/') if part != os.curdir]
    if not components:
        return os.curdir
    return os.path.join(*components)
|
| 484 |
+
|
| 485 |
+
|
| 486 |
+
class FileOperator(object):
|
| 487 |
+
|
| 488 |
+
def __init__(self, dry_run=False):
|
| 489 |
+
self.dry_run = dry_run
|
| 490 |
+
self.ensured = set()
|
| 491 |
+
self._init_record()
|
| 492 |
+
|
| 493 |
+
def _init_record(self):
|
| 494 |
+
self.record = False
|
| 495 |
+
self.files_written = set()
|
| 496 |
+
self.dirs_created = set()
|
| 497 |
+
|
| 498 |
+
def record_as_written(self, path):
|
| 499 |
+
if self.record:
|
| 500 |
+
self.files_written.add(path)
|
| 501 |
+
|
| 502 |
+
def newer(self, source, target):
|
| 503 |
+
"""Tell if the target is newer than the source.
|
| 504 |
+
|
| 505 |
+
Returns true if 'source' exists and is more recently modified than
|
| 506 |
+
'target', or if 'source' exists and 'target' doesn't.
|
| 507 |
+
|
| 508 |
+
Returns false if both exist and 'target' is the same age or younger
|
| 509 |
+
than 'source'. Raise PackagingFileError if 'source' does not exist.
|
| 510 |
+
|
| 511 |
+
Note that this test is not very accurate: files created in the same
|
| 512 |
+
second will have the same "age".
|
| 513 |
+
"""
|
| 514 |
+
if not os.path.exists(source):
|
| 515 |
+
raise DistlibException("file '%r' does not exist" % os.path.abspath(source))
|
| 516 |
+
if not os.path.exists(target):
|
| 517 |
+
return True
|
| 518 |
+
|
| 519 |
+
return os.stat(source).st_mtime > os.stat(target).st_mtime
|
| 520 |
+
|
| 521 |
+
def copy_file(self, infile, outfile, check=True):
|
| 522 |
+
"""Copy a file respecting dry-run and force flags.
|
| 523 |
+
"""
|
| 524 |
+
self.ensure_dir(os.path.dirname(outfile))
|
| 525 |
+
logger.info('Copying %s to %s', infile, outfile)
|
| 526 |
+
if not self.dry_run:
|
| 527 |
+
msg = None
|
| 528 |
+
if check:
|
| 529 |
+
if os.path.islink(outfile):
|
| 530 |
+
msg = '%s is a symlink' % outfile
|
| 531 |
+
elif os.path.exists(outfile) and not os.path.isfile(outfile):
|
| 532 |
+
msg = '%s is a non-regular file' % outfile
|
| 533 |
+
if msg:
|
| 534 |
+
raise ValueError(msg + ' which would be overwritten')
|
| 535 |
+
shutil.copyfile(infile, outfile)
|
| 536 |
+
self.record_as_written(outfile)
|
| 537 |
+
|
| 538 |
+
def copy_stream(self, instream, outfile, encoding=None):
    """Copy the contents of the open stream *instream* into the file
    *outfile*, binary when *encoding* is None, text otherwise."""
    assert not os.path.isdir(outfile)
    self.ensure_dir(os.path.dirname(outfile))
    logger.info('Copying stream %s to %s', instream, outfile)
    if not self.dry_run:
        if encoding is None:
            outstream = open(outfile, 'wb')
        else:
            outstream = codecs.open(outfile, 'w', encoding=encoding)
        # 'with' gives the same close-on-exit guarantee as the original
        # try/finally around copyfileobj.
        with outstream:
            shutil.copyfileobj(instream, outstream)
    self.record_as_written(outfile)
|
| 552 |
+
|
| 553 |
+
def write_binary_file(self, path, data):
    """Write *data* (bytes) to *path*, replacing any existing file.

    The write is recorded even in dry-run mode, matching the original.
    """
    self.ensure_dir(os.path.dirname(path))
    if not self.dry_run:
        if os.path.exists(path):
            os.remove(path)
        with open(path, 'wb') as stream:
            stream.write(data)
    self.record_as_written(path)
|
| 561 |
+
|
| 562 |
+
def write_text_file(self, path, data, encoding):
    """Encode the text *data* with *encoding* and write it to *path*."""
    self.write_binary_file(path, data.encode(encoding))
|
| 564 |
+
|
| 565 |
+
def set_mode(self, bits, mask, files):
    """OR *bits* into each file's mode and clamp with *mask*.

    No-op on non-POSIX platforms (including Jython on non-POSIX).
    """
    if os.name != 'posix' and not (os.name == 'java' and os._name == 'posix'):
        return
    # Set the executable bits (owner, group, and world) on
    # all the files specified.
    for fn in files:
        if self.dry_run:
            logger.info("changing mode of %s", fn)
        else:
            new_mode = (os.stat(fn).st_mode | bits) & mask
            logger.info("changing mode of %s to %o", fn, new_mode)
            os.chmod(fn, new_mode)
|
| 576 |
+
|
| 577 |
+
set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)
|
| 578 |
+
|
| 579 |
+
def ensure_dir(self, path):
    """Create *path* and any missing ancestors, respecting dry-run.

    Paths already handled are cached in ``self.ensured`` so repeated calls
    skip the filesystem check; created directories are added to
    ``self.dirs_created`` when recording is on.
    """
    path = os.path.abspath(path)
    if path not in self.ensured and not os.path.exists(path):
        self.ensured.add(path)
        # Only the parent is needed; the original unpacked an unused basename.
        d = os.path.dirname(path)
        self.ensure_dir(d)
        # Lazy %-args, consistent with the file's other logger calls
        # (the original eagerly formatted with '%').
        logger.info('Creating %s', path)
        if not self.dry_run:
            os.mkdir(path)
        if self.record:
            self.dirs_created.add(path)
|
| 590 |
+
|
| 591 |
+
def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
    """Byte-compile the Python source at *path* to its cache (.pyc) location.

    :param path: Path of the .py source file.
    :param optimize: Passed (inverted) to cache_from_source to pick the
                     cache file name — presumably its debug_override flag;
                     confirm against the compat implementation.
    :param force: Recompile even when the cached file is newer than the source.
    :param prefix: If given, *path* must start with it; the remainder is used
                   as the file name embedded in the compiled file (diagpath).
    :param hashed_invalidation: Truthy enables hash-based pyc invalidation on
                   Pythons that support PycInvalidationMode (3.7+); a bare
                   True is mapped to CHECKED_HASH.
    :return: the cache path, even in dry-run mode.
    """
    dpath = cache_from_source(path, not optimize)
    logger.info('Byte-compiling %s to %s', path, dpath)
    if not self.dry_run:
        # Skip work when the cached file is already up to date, unless forced.
        if force or self.newer(path, dpath):
            if not prefix:
                diagpath = None
            else:
                assert path.startswith(prefix)
                diagpath = path[len(prefix):]
            compile_kwargs = {}
            if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
                if not isinstance(hashed_invalidation, py_compile.PycInvalidationMode):
                    hashed_invalidation = py_compile.PycInvalidationMode.CHECKED_HASH
                compile_kwargs['invalidation_mode'] = hashed_invalidation
            # doraise=True so compile errors propagate instead of printing.
            py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)  # raise error
    self.record_as_written(dpath)
    return dpath
|
| 609 |
+
|
| 610 |
+
def ensure_removed(self, path):
    """Remove *path* if it exists, honouring dry-run and the change record.

    A real directory (not a symlink to one) is removed as a tree; links and
    files are unlinked. Removed entries are also dropped from the recorded
    dirs_created / files_written sets when recording is on.
    """
    if os.path.exists(path):
        if os.path.isdir(path) and not os.path.islink(path):
            logger.debug('Removing directory tree at %s', path)
            if not self.dry_run:
                shutil.rmtree(path)
            if self.record:
                if path in self.dirs_created:
                    self.dirs_created.remove(path)
        else:
            # symlinks (even to directories) are treated as single entries
            if os.path.islink(path):
                s = 'link'
            else:
                s = 'file'
            logger.debug('Removing %s %s', s, path)
            if not self.dry_run:
                os.remove(path)
            if self.record:
                if path in self.files_written:
                    self.files_written.remove(path)
|
| 630 |
+
|
| 631 |
+
def is_writable(self, path):
    """Report whether *path* — or, if it doesn't exist, its nearest
    existing ancestor — is writable. Returns False when the walk reaches
    the filesystem root without finding an existing path."""
    while True:
        if os.path.exists(path):
            return os.access(path, os.W_OK)
        parent = os.path.dirname(path)
        if parent == path:
            # reached the root without finding anything that exists
            return False
        path = parent
|
| 642 |
+
|
| 643 |
+
def commit(self):
    """
    Commit recorded changes, turn off recording, return
    changes.
    """
    assert self.record
    changes = (self.files_written, self.dirs_created)
    self._init_record()
    return changes
|
| 652 |
+
|
| 653 |
+
def rollback(self):
    """Undo the recorded changes: delete written files, then remove the
    created directories deepest-first. Always resets the recording state,
    even in dry-run mode (matching commit())."""
    if not self.dry_run:
        for f in list(self.files_written):
            if os.path.exists(f):
                os.remove(f)
        # dirs should all be empty now, except perhaps for
        # __pycache__ subdirs
        # reverse so that subdirs appear before their parents
        dirs = sorted(self.dirs_created, reverse=True)
        for d in dirs:
            flist = os.listdir(d)
            if flist:
                # Only a __pycache__ leftover is tolerated; anything else
                # indicates the rollback invariant was violated.
                assert flist == ['__pycache__']
                sd = os.path.join(d, flist[0])
                os.rmdir(sd)
            os.rmdir(d)  # should fail if non-empty
    self._init_record()
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
def resolve(module_name, dotted_path):
    """Import *module_name* and resolve *dotted_path* attributes within it.

    :param module_name: A (possibly dotted) module name.
    :param dotted_path: A dotted attribute path inside the module, or None
                        to return the module itself.
    :return: The resolved module or attribute.
    """
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        # __import__('a.b') returns the TOP-LEVEL package 'a', which broke
        # attribute resolution for dotted module names; import_module
        # returns the innermost module.
        import importlib
        mod = importlib.import_module(module_name)
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result
|
| 685 |
+
|
| 686 |
+
|
| 687 |
+
class ExportEntry(object):
    """A parsed export/entry-point: ``name = prefix:suffix [flags]``."""

    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        """Resolve and cache the object this entry points at."""
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, self.suffix, self.flags)

    def __eq__(self, other):
        """Field-by-field equality against another ExportEntry."""
        if not isinstance(other, ExportEntry):
            return False
        return ((self.name, self.prefix, self.suffix, self.flags) ==
                (other.name, other.prefix, other.suffix, other.flags))

    # identity-based hashing, deliberately not consistent with __eq__
    __hash__ = object.__hash__
|
| 711 |
+
|
| 712 |
+
|
| 713 |
+
# name = callable [flags]; the callable is word chars joined by ':' or '.'
ENTRY_RE = re.compile(
    r'''(?P<name>([^\[]\S*))
        \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
        \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
    ''', re.VERBOSE)


def get_export_entry(specification):
    """Parse an export specification ``name = prefix[:suffix] [flags]``.

    :return: An ExportEntry, or None when *specification* doesn't match.
    :raises DistlibException: for malformed input — stray '[' / ']'
        characters, or more than one ':' in the callable part.
    """
    m = ENTRY_RE.search(specification)
    if not m:
        result = None
        # No match at all, but bracket characters suggest a botched flags
        # section rather than a plain non-entry string.
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
    else:
        d = m.groupdict()
        name = d['name']
        path = d['callable']
        colons = path.count(':')
        if colons == 0:
            # just a module path, no attribute suffix
            prefix, suffix = path, None
        else:
            if colons != 1:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            prefix, suffix = path.split(':')
        flags = d['flags']
        if flags is None:
            # flags group absent: bracket chars mean a malformed flags list
            if '[' in specification or ']' in specification:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            flags = []
        else:
            flags = [f.strip() for f in flags.split(',')]
        result = ExportEntry(name, prefix, suffix, flags)
    return result
|
| 749 |
+
|
| 750 |
+
|
| 751 |
+
def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.

    Note: only the PARENT directory is created/checked here; the suffix
    directory itself is merely joined onto the path.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        # Last resort: a fresh temporary directory.
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)
|
| 790 |
+
|
| 791 |
+
|
| 792 |
+
def path_to_cache_dir(path, use_abspath=True):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, tail = os.path.splitdrive(os.path.abspath(path) if use_abspath else path)
    if drive:
        drive = drive.replace(':', '---')
    return drive + tail.replace(os.sep, '--') + '.cache'
|
| 807 |
+
|
| 808 |
+
|
| 809 |
+
def ensure_slash(s):
    """Return *s* guaranteed to end with a single trailing '/'."""
    return s if s.endswith('/') else s + '/'
|
| 813 |
+
|
| 814 |
+
|
| 815 |
+
def parse_credentials(netloc):
    """Split a URL netloc into (username, password, host-part).

    Credentials are percent-decoded; either may be None when absent.
    """
    username = password = None
    if '@' in netloc:
        creds, netloc = netloc.rsplit('@', 1)
        if ':' in creds:
            username, password = creds.split(':', 1)
        else:
            username = creds
    if username:
        username = unquote(username)
    if password:
        password = unquote(password)
    return username, password, netloc
|
| 828 |
+
|
| 829 |
+
|
| 830 |
+
def get_process_umask():
    """Return the current process umask without changing it permanently.

    (os.umask can only be read by setting it, so a dummy value is set and
    immediately restored.)
    """
    current = os.umask(0o22)
    os.umask(current)
    return current
|
| 834 |
+
|
| 835 |
+
|
| 836 |
+
def is_string_sequence(seq):
    """Return True if every element of *seq* is a string.

    An empty sequence is vacuously a string sequence. The original
    implementation ended with ``assert i is not None``, which raised
    AssertionError on empty input (and was silently skipped under -O);
    this version handles empty input explicitly.
    """
    return all(isinstance(s, string_types) for s in seq)
|
| 845 |
+
|
| 846 |
+
|
| 847 |
+
# name-version pattern for dist filenames, case-insensitive
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
# trailing '-pyX[.Y]' marker
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')


def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    pv_match = PYTHON_VERSION.search(filename)
    if pv_match:
        pyver = pv_match.group(1)
        filename = filename[:pv_match.start()]
    if project_name and len(filename) > len(project_name) + 1:
        # Prefer an exact project-name prefix split when one is supplied.
        name_match = re.match(re.escape(project_name) + r'\b', filename)
        if name_match:
            cut = name_match.end()
            return filename[:cut], filename[cut + 1:], pyver
    nv_match = PROJECT_NAME_AND_VERSION.match(filename)
    if nv_match:
        return nv_match.group(1), nv_match.group(3), pyver
    return None
|
| 875 |
+
|
| 876 |
+
|
| 877 |
+
# Allow spaces in name because of legacy dists like "Twisted Core"
|
| 878 |
+
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')


def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    :raises DistlibException: when *p* doesn't match the expected form.
    """
    matched = NAME_VERSION_RE.match(p)
    if matched is None:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = matched.groupdict()
    return groups['name'].strip().lower(), groups['ver']
|
| 896 |
+
|
| 897 |
+
|
| 898 |
+
def get_extras(requested, available):
    """Compute the effective set of extras from *requested* vs *available*.

    '*' selects all available extras; '-name' removes one; a literal '-'
    is kept as-is. Unknown extras are warned about but still honoured.
    """
    requested = set(requested or [])
    available = set(available or [])
    result = set()
    if '*' in requested:
        requested.discard('*')
        result |= available
    for extra in requested:
        if extra == '-':
            result.add(extra)
        elif extra.startswith('-'):
            unwanted = extra[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            result.discard(unwanted)
        else:
            if extra not in available:
                logger.warning('undeclared extra: %s' % extra)
            result.add(extra)
    return result
|
| 919 |
+
|
| 920 |
+
|
| 921 |
+
#
|
| 922 |
+
# Extended metadata functionality
|
| 923 |
+
#
|
| 924 |
+
|
| 925 |
+
|
| 926 |
+
def _get_external_data(url):
    """Fetch and decode JSON metadata from *url*.

    Returns an empty dict on any failure; failures are logged rather than
    raised (best-effort semantics preserved from the original).
    """
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        # Guard against a missing Content-Type header: previously ct could be
        # None and ct.startswith raised AttributeError, which was then logged
        # as an unexpected exception instead of an unexpected response.
        if not ct or not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            reader = codecs.getreader('utf-8')(resp)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result
|
| 945 |
+
|
| 946 |
+
|
| 947 |
+
# Base URL for the externally hosted per-project JSON metadata.
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'


def get_project_data(name):
    """Fetch the project-level JSON metadata for *name*."""
    url = '%s/%s/project.json' % (name[0].upper(), name)
    return _get_external_data(urljoin(_external_data_base_url, url))


def get_package_data(name, version):
    """Fetch the JSON metadata for a specific *version* of *name*."""
    url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    return _get_external_data(urljoin(_external_data_base_url, url))
|
| 961 |
+
|
| 962 |
+
|
| 963 |
+
class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        # warn when group/other permission bits are set on the cache dir
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix, use_abspath=True):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix, use_abspath=use_abspath)

    def clear(self):
        """
        Clear the cache.

        :return: the list of entries which could not be removed.
        """
        not_removed = []
        for fn in os.listdir(self.base):
            fn = os.path.join(self.base, fn)
            try:
                if os.path.islink(fn) or os.path.isfile(fn):
                    os.remove(fn)
                elif os.path.isdir(fn):
                    shutil.rmtree(fn)
            except Exception:
                # best-effort removal: collect what couldn't be deleted
                not_removed.append(fn)
        return not_removed
|
| 1005 |
+
|
| 1006 |
+
|
| 1007 |
+
class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """

    def __init__(self):
        # event name -> deque of subscriber callables
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        existing = self._subscribers.get(event)
        if existing is None:
            self._subscribers[event] = deque([subscriber])
        elif append:
            existing.append(subscriber)
        else:
            existing.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        if event not in self._subscribers:
            raise ValueError('No subscribers: %r' % event)
        self._subscribers[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        result = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                # a failing subscriber contributes None rather than aborting
                logger.exception('Exception during event publication')
                value = None
            result.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, result)
        return result
|
| 1075 |
+
|
| 1076 |
+
|
| 1077 |
+
#
|
| 1078 |
+
# Simple sequencing
|
| 1079 |
+
#
|
| 1080 |
+
class Sequencer(object):
    """A small dependency sequencer: nodes plus pred->succ edges, able to
    list the steps leading to a given final step and to report strongly
    connected components (cycles)."""

    def __init__(self):
        # succ -> set(preds) and pred -> set(succs); _nodes holds nodes
        # added explicitly that (so far) have no edges.
        self._preds = {}
        self._succs = {}
        self._nodes = set()  # nodes with no preds/succs

    def add_node(self, node):
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        # Drop from the isolated-node set; optionally strip all edges
        # touching the node and garbage-collect empty edge sets.
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        """Record that *pred* must run before *succ*."""
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        """Remove the pred->succ edge; ValueError if it doesn't exist."""
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        # Known steps appear in an edge map or the isolated-node set.
        return (step in self._preds or step in self._succs or step in self._nodes)

    def get_steps(self, final):
        """Return the steps, in execution order, needed to reach *final*.

        Implemented as a breadth-first walk backwards over predecessors,
        then reversed on return.
        """
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        # Tarjan's algorithm over the successor graph; each returned tuple
        # is one strongly connected component.
        # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node], lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node], index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node:
                        break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        """Render the graph in Graphviz dot format."""
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append(' %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append(' %s;' % node)
        result.append('}')
        return '\n'.join(result)
|
| 1215 |
+
|
| 1216 |
+
|
| 1217 |
+
#
|
| 1218 |
+
# Unarchiving functionality for zip, tar, tgz, tbz, whl
|
| 1219 |
+
#
|
| 1220 |
+
|
| 1221 |
+
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl')


def unarchive(archive_filename, dest_dir, format=None, check=True):
    """Extract a zip/whl/tar/tgz/tbz archive into *dest_dir*.

    :param archive_filename: path of the archive to extract.
    :param dest_dir: destination directory.
    :param format: 'zip', 'tgz', 'tbz' or 'tar'; inferred from the filename
                   extension when None.
    :param check: when true, verify every member path stays inside
                  *dest_dir* (guards against path-traversal members).
    :raises ValueError: for unknown formats or unsafe member paths.

    NOTE(review): the tar open ``mode`` is only assigned when *format* is
    inferred (format is None); passing an explicit tar format appears to hit
    an unbound ``mode`` — confirm intended usage before changing.
    """

    def check_path(path):
        # Reject any member whose absolute path would escape dest_dir.
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        if not p.startswith(dest_dir) or p[plen] != os.sep:
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    plen = len(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
            mode = 'r:gz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
            mode = 'r:bz2'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
            mode = 'r'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, mode)
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')

        # Limit extraction of dangerous items, if this Python
        # allows it easily. If not, just trust the input.
        # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
        def extraction_filter(member, path):
            """Run tarfile.tar_filter, but raise the expected ValueError"""
            # This is only called if the current Python has tarfile filters
            try:
                return tarfile.tar_filter(member, path)
            except tarfile.FilterError as exc:
                raise ValueError(str(exc))

        archive.extraction_filter = extraction_filter

        archive.extractall(dest_dir)

    finally:
        if archive:
            archive.close()
|
| 1290 |
+
|
| 1291 |
+
|
| 1292 |
+
def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    buf = io.BytesIO()
    base_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for root, _dirs, filenames in os.walk(directory):
            # archive names are the paths relative to *directory*
            rel = root[base_len:]
            for fname in filenames:
                zf.write(os.path.join(root, fname), os.path.join(rel, fname))
    return buf
|
| 1304 |
+
|
| 1305 |
+
|
| 1306 |
+
#
|
| 1307 |
+
# Simple progress bar
|
| 1308 |
+
#
|
| 1309 |
+
|
| 1310 |
+
# SI-style magnitude suffixes used by Progress.speed
UNITS = ('', 'K', 'M', 'G', 'T', 'P')


class Progress(object):
    """Simple progress tracker with percentage, ETA and speed reporting.

    ``max`` may be None for unbounded progress, in which case percentage
    and ETA are reported as unknown.
    """
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None   # wall-clock time of the first update
        self.elapsed = 0      # seconds since the first update
        self.done = False

    def update(self, curval):
        """Set the current value and refresh the elapsed time."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        self.update(self.min)
        return self

    def stop(self):
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        """Progress as a fixed-width percentage string, ' ?? %' if unbounded."""
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result

    def format_duration(self, duration):
        # NOTE(review): Python precedence parses this condition as
        # ((duration <= 0) and self.max is None) or (self.cur == self.min);
        # it is unclear whether 'or' was meant to bind inside the 'and' —
        # confirm intent before changing.
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        # elif duration < 1:
        # result = '--:--:--'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result

    @property
    def ETA(self):
        """'Done: <elapsed>' when finished, else 'ETA : <estimate>'."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
            # import pdb; pdb.set_trace()
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                # import pdb; pdb.set_trace()
                # remaining time = (total/done - 1) * elapsed
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        """Throughput as a human-readable '<n> <unit>B/s' string."""
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)
|
| 1401 |
+
|
| 1402 |
+
|
| 1403 |
+
#
|
| 1404 |
+
# Glob functionality
|
| 1405 |
+
#
|
| 1406 |
+
|
| 1407 |
+
RICH_GLOB = re.compile(r'\{([^}]*)\}')
|
| 1408 |
+
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
|
| 1409 |
+
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
|
| 1410 |
+
|
| 1411 |
+
|
| 1412 |
+
def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    # Validate the pattern up front; expansion itself is delegated to
    # the recursive _iglob() generator.
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        raise ValueError("""invalid glob %r: recursive glob "**" must be used alone""" % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        raise ValueError("""invalid glob %r: mismatching set marker '{' or '}'""" % path_glob)
    return _iglob(path_glob)
|
| 1421 |
+
|
| 1422 |
+
|
| 1423 |
+
def _iglob(path_glob):
    """Recursive worker for iglob().

    Expands one {a,b,...} alternation at a time, handles a '**' segment
    by walking the tree below it, and falls back to the standard glob
    for plain patterns.
    """
    pieces = RICH_GLOB.split(path_glob, 1)
    if len(pieces) > 1:
        # split(..., 1) on a match yields exactly [prefix, group, suffix]
        assert len(pieces) == 3, pieces
        prefix, options, suffix = pieces
        for choice in options.split(','):
            for path in _iglob(''.join((prefix, choice, suffix))):
                yield path
    elif '**' not in path_glob:
        for item in std_iglob(path_glob):
            yield item
    else:
        prefix, radical = path_glob.split('**', 1)
        if prefix == '':
            prefix = '.'
        if radical == '':
            radical = '*'
        else:
            # we support both '/' and '\\' separators after '**'
            radical = radical.lstrip('/')
            radical = radical.lstrip('\\')
        for path, _dirs, _files in os.walk(prefix):
            path = os.path.normpath(path)
            for fn in _iglob(os.path.join(path, radical)):
                yield fn
|
| 1449 |
+
|
| 1450 |
+
|
| 1451 |
+
if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError)

    #
    # HTTPSConnection which verifies certificates/matches domains
    #

    class HTTPSConnection(httplib.HTTPSConnection):
        ca_certs = None  # set this to the path to the certs file (.pem)
        check_domain = True  # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            """Open the TCP connection and wrap it in TLS, verifying the
            server certificate and hostname when ca_certs is set."""
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            # NOTE(review): PROTOCOL_SSLv23 is the legacy "negotiate the
            # best available protocol" constant; behaviour kept as-is.
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            if hasattr(ssl, 'OP_NO_SSLv2'):
                context.options |= ssl.OP_NO_SSLv2
            if getattr(self, 'cert_file', None):
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
                if getattr(ssl, 'HAS_SNI', False):
                    # Send the hostname (SNI) so virtual-hosted servers
                    # present the matching certificate.
                    kwargs['server_hostname'] = self.host

            self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    # Tear the socket down before propagating so no data
                    # flows over an unverified connection.
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):

        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            # Translate a verification failure buried in a URLError into
            # the more specific CertificateError for callers.
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise

    #
    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing a http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):

        def http_open(self, req):
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)
|
| 1538 |
+
|
| 1539 |
+
|
| 1540 |
+
#
|
| 1541 |
+
# XML-RPC with timeouts
|
| 1542 |
+
#
|
| 1543 |
+
class Transport(xmlrpclib.Transport):
    """xmlrpclib Transport that records a timeout for use by callers."""

    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        """Return an HTTP connection for *host*, reusing the cached one
        when the host has not changed."""
        conn_host, extra_headers, x509 = self.get_host_info(host)
        if not self._connection or host != self._connection[0]:
            self._extra_headers = extra_headers
            self._connection = host, httplib.HTTPConnection(conn_host)
        return self._connection[1]
|
| 1555 |
+
|
| 1556 |
+
|
| 1557 |
+
if ssl:

    class SafeTransport(xmlrpclib.SafeTransport):
        """HTTPS variant of Transport; forwards the timeout to the
        underlying HTTPSConnection via the host-info kwargs dict."""

        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            # get_host_info's third element carries x509/connection kwargs;
            # inject our timeout into it before building the connection.
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None, **kwargs)
            return self._connection[1]
|
| 1574 |
+
|
| 1575 |
+
|
| 1576 |
+
class ServerProxy(xmlrpclib.ServerProxy):
    """xmlrpclib.ServerProxy accepting an optional ``timeout`` keyword,
    honoured by installing one of the timeout-aware transports above."""

    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            # scheme = splittype(uri)  # deprecated as of Python 3.8
            scheme = urlparse(uri)[0]
            use_datetime = kwargs.get('use_datetime', 0)
            transport_cls = SafeTransport if scheme == 'https' else Transport
            kwargs['transport'] = t = transport_cls(timeout, use_datetime=use_datetime)
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
|
| 1593 |
+
|
| 1594 |
+
|
| 1595 |
+
#
|
| 1596 |
+
# CSV functionality. This is provided because on 2.x, the csv module can't
|
| 1597 |
+
# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
|
| 1598 |
+
#
|
| 1599 |
+
|
| 1600 |
+
|
| 1601 |
+
def _csv_open(fn, mode, **kwargs):
|
| 1602 |
+
if sys.version_info[0] < 3:
|
| 1603 |
+
mode += 'b'
|
| 1604 |
+
else:
|
| 1605 |
+
kwargs['newline'] = ''
|
| 1606 |
+
# Python 3 determines encoding from locale. Force 'utf-8'
|
| 1607 |
+
# file encoding to match other forced utf-8 encoding
|
| 1608 |
+
kwargs['encoding'] = 'utf-8'
|
| 1609 |
+
return open(fn, mode, **kwargs)
|
| 1610 |
+
|
| 1611 |
+
|
| 1612 |
+
class CSVBase(object):
    """Shared context-manager plumbing and csv dialect defaults for the
    CSVReader/CSVWriter classes."""

    # The strs are used because we need native str in the csv API
    # (2.x won't take Unicode).
    defaults = {
        'delimiter': str(','),
        'quotechar': str('"'),
        'lineterminator': str('\n')
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()
|
| 1624 |
+
|
| 1625 |
+
|
| 1626 |
+
class CSVReader(CSVBase):
    """Iterate the rows of a CSV source given either an open binary
    ``stream`` keyword or a ``path`` keyword."""

    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # needs to be a text stream
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        """Return the next row as a list of text values, decoding
        utf-8 bytes on Python 2."""
        row = next(self.reader)
        if sys.version_info[0] < 3:
            for i, cell in enumerate(row):
                if not isinstance(cell, text_type):
                    row[i] = cell.decode('utf-8')
        return row

    __next__ = next
|
| 1651 |
+
|
| 1652 |
+
|
| 1653 |
+
class CSVWriter(CSVBase):
    """Write rows to the CSV file *fn*, encoding text as utf-8 bytes on
    Python 2."""

    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            encoded = []
            for item in row:
                if isinstance(item, text_type):
                    item = item.encode('utf-8')
                encoded.append(item)
            row = encoded
        self.writer.writerow(row)
|
| 1668 |
+
|
| 1669 |
+
|
| 1670 |
+
#
|
| 1671 |
+
# Configurator functionality
|
| 1672 |
+
#
|
| 1673 |
+
|
| 1674 |
+
|
| 1675 |
+
class Configurator(BaseConfigurator):
    """Extends BaseConfigurator with an 'inc://' value converter and
    dict-style access that instantiates '()' factory specs on demand."""

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        # *base* is the directory used to resolve relative inc:// paths.
        super(Configurator, self).__init__(config)
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        """Instantiate the object described by *config*: a dict whose
        '()' key names the factory, with optional '[]' positional args,
        '.' post-construction properties, and remaining valid-identifier
        keys passed as keyword arguments."""

        def convert(o):
            # Recursively convert containers, nested '()' specs, and
            # scalar values (via the base class's convert()).
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result

        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            # Apply '.' entries as attributes on the constructed object.
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result

    def __getitem__(self, key):
        # Lazily instantiate '()' specs the first time they are accessed,
        # caching the constructed object back into the config.
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            self.config[key] = result = self.configure_custom(result)
        return result

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result
|
| 1729 |
+
|
| 1730 |
+
|
| 1731 |
+
class SubprocessMixin(object):
    """
    Mixin for running subprocesses and capturing their output
    """

    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            chunk = stream.readline()
            if not chunk:
                break
            if progress is not None:
                progress(chunk, context)
            else:
                if verbose:
                    sys.stderr.write(chunk.decode('utf-8'))
                else:
                    sys.stderr.write('.')
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """Run *cmd*, draining stdout and stderr concurrently through
        reader() on two threads; return the finished Popen object."""
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
        stdout_thread = threading.Thread(target=self.reader, args=(proc.stdout, 'stdout'))
        stdout_thread.start()
        stderr_thread = threading.Thread(target=self.reader, args=(proc.stderr, 'stderr'))
        stderr_thread.start()
        proc.wait()
        stdout_thread.join()
        stderr_thread.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return proc
|
| 1775 |
+
|
| 1776 |
+
|
| 1777 |
+
def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    return re.sub('[-_.]+', '-', name.lower())
|
| 1781 |
+
|
| 1782 |
+
|
| 1783 |
+
# def _get_pypirc_command():
|
| 1784 |
+
# """
|
| 1785 |
+
# Get the distutils command for interacting with PyPI configurations.
|
| 1786 |
+
# :return: the command.
|
| 1787 |
+
# """
|
| 1788 |
+
# from distutils.core import Distribution
|
| 1789 |
+
# from distutils.config import PyPIRCCommand
|
| 1790 |
+
# d = Distribution()
|
| 1791 |
+
# return PyPIRCCommand(d)
|
| 1792 |
+
|
| 1793 |
+
|
| 1794 |
+
class PyPIRCFile(object):
    """Reader/writer for the distutils-style ~/.pypirc configuration
    file holding PyPI repository credentials."""

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'

    def __init__(self, fn=None, url=None):
        if fn is None:
            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
        self.filename = fn
        self.url = url

    def read(self):
        """Return a dict describing the server matching self.url (or the
        default repository): 'username', 'password', 'repository',
        'server' and 'realm'.  Returns {} when the file is absent or no
        server matches."""
        result = {}

        if os.path.exists(self.filename):
            repository = self.url or self.DEFAULT_REPOSITORY

            config = configparser.RawConfigParser()
            config.read(self.filename)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in index_servers.split('\n') if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                        # NOTE(review): this assignment is never used, since
                        # the iteration below sits in the 'else' branch --
                        # looks like an upstream quirk; confirm before changing.
                else:
                    for server in _servers:
                        result = {'server': server}
                        result['username'] = config.get(server, 'username')

                        # optional params
                        for key, default in (('repository', self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM),
                                             ('password', None)):
                            if config.has_option(server, key):
                                result[key] = config.get(server, key)
                            else:
                                result[key] = default

                        # work around people having "repository" for the "pypi"
                        # section of their config set to the HTTP (rather than
                        # HTTPS) URL
                        if (server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                            result['repository'] = self.DEFAULT_REPOSITORY
                        elif (result['server'] != repository and result['repository'] != repository):
                            # This server doesn't match the requested
                            # repository: discard it.
                            result = {}
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                result = {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }
        return result

    def update(self, username, password):
        """Write (or overwrite) the [pypi] section of the file with the
        given credentials."""
        # import pdb; pdb.set_trace()
        config = configparser.RawConfigParser()
        fn = self.filename
        config.read(fn)
        if not config.has_section('pypi'):
            config.add_section('pypi')
        config.set('pypi', 'username', username)
        config.set('pypi', 'password', password)
        with open(fn, 'w') as f:
            config.write(f)
|
| 1869 |
+
|
| 1870 |
+
|
| 1871 |
+
def _load_pypirc(index):
    """
    Read the PyPI access configuration as supported by distutils.
    """
    pypirc = PyPIRCFile(url=index.url)
    return pypirc.read()
|
| 1876 |
+
|
| 1877 |
+
|
| 1878 |
+
def _store_pypirc(index):
    """Persist the index's credentials to the default ~/.pypirc file."""
    rc_file = PyPIRCFile()
    rc_file.update(index.username, index.password)
|
| 1880 |
+
|
| 1881 |
+
|
| 1882 |
+
#
|
| 1883 |
+
# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
|
| 1884 |
+
# tweaks
|
| 1885 |
+
#
|
| 1886 |
+
|
| 1887 |
+
|
| 1888 |
+
def get_host_platform():
    """Return a string that identifies the current platform. This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions. Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        # The build architecture is embedded in sys.version on Windows.
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != 'posix' or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == 'linux':
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)

    elif osname[:5] == 'sunos':
        if release[0] >= '5':  # SunOS 5 == Solaris 2
            osname = 'solaris'
            release = '%d.%s' % (int(release[0]) - 3, release[2:])
            # We can't use 'platform.architecture()[0]' because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647: '32bit', 9223372036854775807: '64bit'}
            machine += '.%s' % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == 'aix':
        from _aix_support import aix_platform
        return aix_platform()
    elif osname[:6] == 'cygwin':
        osname = 'cygwin'
        rel_re = re.compile(r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == 'darwin':
        import _osx_support
        try:
            from distutils import sysconfig
        except ImportError:
            import sysconfig
        osname, release, machine = _osx_support.get_platform_osx(sysconfig.get_config_vars(), osname, release, machine)

    return '%s-%s-%s' % (osname, release, machine)
|
| 1969 |
+
|
| 1970 |
+
|
| 1971 |
+
# Maps the VSCMD_ARG_TGT_ARCH cross-compilation target names used by
# Visual Studio to the corresponding platform tags.
_TARGET_TO_PLAT = {
    'x86': 'win32',
    'x64': 'win-amd64',
    'arm': 'win-arm32',
}
|
| 1976 |
+
|
| 1977 |
+
|
| 1978 |
+
def get_platform():
    """Return the platform tag, honouring Windows cross-compilation via
    the VSCMD_ARG_TGT_ARCH environment variable set by Visual Studio."""
    if os.name != 'nt':
        return get_host_platform()
    target = os.environ.get('VSCMD_ARG_TGT_ARCH')
    if target in _TARGET_TO_PLAT:
        return _TARGET_TO_PLAT[target]
    return get_host_platform()
|
llava/lib/python3.10/site-packages/pip/_vendor/distlib/wheel.py
ADDED
|
@@ -0,0 +1,1100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
#
|
| 3 |
+
# Copyright (C) 2013-2023 Vinay Sajip.
|
| 4 |
+
# Licensed to the Python Software Foundation under a contributor agreement.
|
| 5 |
+
# See LICENSE.txt and CONTRIBUTORS.txt.
|
| 6 |
+
#
|
| 7 |
+
from __future__ import unicode_literals
|
| 8 |
+
|
| 9 |
+
import base64
|
| 10 |
+
import codecs
|
| 11 |
+
import datetime
|
| 12 |
+
from email import message_from_file
|
| 13 |
+
import hashlib
|
| 14 |
+
import json
|
| 15 |
+
import logging
|
| 16 |
+
import os
|
| 17 |
+
import posixpath
|
| 18 |
+
import re
|
| 19 |
+
import shutil
|
| 20 |
+
import sys
|
| 21 |
+
import tempfile
|
| 22 |
+
import zipfile
|
| 23 |
+
|
| 24 |
+
from . import __version__, DistlibException
|
| 25 |
+
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
|
| 26 |
+
from .database import InstalledDistribution
|
| 27 |
+
from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME
|
| 28 |
+
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache, cached_property, get_cache_base,
|
| 29 |
+
read_exports, tempdir, get_platform)
|
| 30 |
+
from .version import NormalizedVersion, UnsupportedVersionError
|
| 31 |
+
|
| 32 |
+
logger = logging.getLogger(__name__)

# Module-level cache instance; created lazily when first needed.
cache = None  # created when needed
|
| 35 |
+
|
| 36 |
+
# Implementation prefix for wheel tags: pp=PyPy, jy=Jython, ip=IronPython,
# cp=CPython.
if hasattr(sys, 'pypy_version_info'):  # pragma: no cover
    IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'):  # pragma: no cover
    IMP_PREFIX = 'jy'
elif sys.platform == 'cli':  # pragma: no cover
    IMP_PREFIX = 'ip'
else:
    IMP_PREFIX = 'cp'

# Version suffix without a dot, e.g. '310' for Python 3.10.
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX:  # pragma: no cover
    VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX

# Platform tag with '-' and '.' normalised to '_'.
ARCH = get_platform().replace('-', '_').replace('.', '_')
|
| 52 |
+
|
| 53 |
+
# ABI tag: prefer the interpreter's SOABI config var; otherwise derive one
# from build-configuration flags.
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
    ABI = ABI.replace('cpython-', 'cp').split('-')[0]
else:

    def _derive_abi():
        """Reconstruct a CPython-style ABI tag ('cp<ver>[d][m][u]') from
        config vars when SOABI is unavailable."""
        parts = ['cp', VER_SUFFIX]
        if sysconfig.get_config_var('Py_DEBUG'):
            parts.append('d')
        if IMP_PREFIX == 'cp':
            vi = sys.version_info[:2]
            # The 'm' (pymalloc) ABI flag was dropped in Python 3.8.
            if vi < (3, 8):
                wpm = sysconfig.get_config_var('WITH_PYMALLOC')
                if wpm is None:
                    wpm = True
                if wpm:
                    parts.append('m')
                # The 'u' (wide unicode) flag only applies before 3.3.
                if vi < (3, 3):
                    us = sysconfig.get_config_var('Py_UNICODE_SIZE')
                    if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
                        parts.append('u')
        return ''.join(parts)

    ABI = _derive_abi()
    del _derive_abi
|
| 78 |
+
|
| 79 |
+
FILENAME_RE = re.compile(
|
| 80 |
+
r'''
|
| 81 |
+
(?P<nm>[^-]+)
|
| 82 |
+
-(?P<vn>\d+[^-]*)
|
| 83 |
+
(-(?P<bn>\d+[^-]*))?
|
| 84 |
+
-(?P<py>\w+\d+(\.\w+\d+)*)
|
| 85 |
+
-(?P<bi>\w+)
|
| 86 |
+
-(?P<ar>\w+(\.\w+)*)
|
| 87 |
+
\.whl$
|
| 88 |
+
''', re.IGNORECASE | re.VERBOSE)
|
| 89 |
+
|
| 90 |
+
NAME_VERSION_RE = re.compile(r'''
|
| 91 |
+
(?P<nm>[^-]+)
|
| 92 |
+
-(?P<vn>\d+[^-]*)
|
| 93 |
+
(-(?P<bn>\d+[^-]*))?$
|
| 94 |
+
''', re.IGNORECASE | re.VERBOSE)
|
| 95 |
+
|
| 96 |
+
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
|
| 97 |
+
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
|
| 98 |
+
SHEBANG_PYTHON = b'#!python'
|
| 99 |
+
SHEBANG_PYTHONW = b'#!pythonw'
|
| 100 |
+
|
| 101 |
+
if os.sep == '/':
|
| 102 |
+
to_posix = lambda o: o
|
| 103 |
+
else:
|
| 104 |
+
to_posix = lambda o: o.replace(os.sep, '/')
|
| 105 |
+
|
| 106 |
+
if sys.version_info[0] < 3:
|
| 107 |
+
import imp
|
| 108 |
+
else:
|
| 109 |
+
imp = None
|
| 110 |
+
import importlib.machinery
|
| 111 |
+
import importlib.util
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
def _get_suffixes():
    """Return the filename suffixes recognised for extension modules."""
    # On Python 3, ``imp`` is None and importlib supplies the list directly.
    if not imp:
        return importlib.machinery.EXTENSION_SUFFIXES
    return [entry[0] for entry in imp.get_suffixes()]
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def _load_dynamic(name, path):
    """Load and return the extension module *name* found at *path*."""
    # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
    if imp:
        return imp.load_dynamic(name, path)
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    # Register before executing, per the importlib recipe, so that
    # self-imports during execution resolve to this module.
    sys.modules[name] = module
    spec.loader.exec_module(module)
    return module
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class Mounter(object):
    """A meta-path importer serving extension modules from mounted wheels."""

    def __init__(self):
        # wheel pathname -> iterable of (module name, extension path) pairs
        self.impure_wheels = {}
        # module name -> extension path, aggregated over all mounted wheels
        self.libs = {}

    def add(self, pathname, extensions):
        """Register the extensions contributed by the wheel at *pathname*."""
        self.impure_wheels[pathname] = extensions
        self.libs.update(extensions)

    def remove(self, pathname):
        """Unregister the wheel at *pathname* and its extensions."""
        extensions = self.impure_wheels.pop(pathname)
        for name, _path in extensions:
            self.libs.pop(name, None)

    def find_module(self, fullname, path=None):
        """Return self if we can serve *fullname*, else None."""
        return self if fullname in self.libs else None

    def load_module(self, fullname):
        """Import (or return the cached) extension module *fullname*."""
        try:
            return sys.modules[fullname]
        except KeyError:
            pass
        if fullname not in self.libs:
            raise ImportError('unable to find extension for %s' % fullname)
        module = _load_dynamic(fullname, self.libs[fullname])
        module.__loader__ = self
        parts = fullname.rsplit('.', 1)
        if len(parts) > 1:
            # Dotted name: record the parent package.
            module.__package__ = parts[0]
        return module
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
# Process-wide singleton hook shared by all mounted wheels.
_hook = Mounter()
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class Wheel(object):
    """
    Class to build and install from Wheel files (PEP 427).
    """

    # Wheel spec version this implementation writes and expects.
    wheel_version = (1, 1)
    # Default digest algorithm for RECORD entries.
    hash_kind = 'sha256'
|
| 180 |
+
|
| 181 |
+
def __init__(self, filename=None, sign=False, verify=False):
    """
    Initialise an instance using a (valid) filename.

    *filename* may be None (placeholder 'dummy-0.1'), a bare
    'name-version[-build]' string, or a full wheel filename with an
    optional directory part. *sign* and *verify* are stored as flags.
    """
    self.sign = sign
    self.should_verify = verify
    # Defaults for a pure, build-less wheel; replaced below when a full
    # wheel filename supplies explicit tags.
    self.buildver = ''
    self.pyver = [PYVER]
    self.abi = ['none']
    self.arch = ['any']
    self.dirname = os.getcwd()
    if filename is None:
        self.name = 'dummy'
        self.version = '0.1'
        # The filename property derives the canonical wheel filename.
        self._filename = self.filename
    else:
        m = NAME_VERSION_RE.match(filename)
        if m:
            info = m.groupdict('')
            self.name = info['nm']
            # Reinstate the local version separator
            self.version = info['vn'].replace('_', '-')
            self.buildver = info['bn']
            self._filename = self.filename
        else:
            dirname, filename = os.path.split(filename)
            m = FILENAME_RE.match(filename)
            if not m:
                raise DistlibException('Invalid name or '
                                       'filename: %r' % filename)
            if dirname:
                self.dirname = os.path.abspath(dirname)
            self._filename = filename
            info = m.groupdict('')
            self.name = info['nm']
            self.version = info['vn']
            self.buildver = info['bn']
            # Compressed ('.'-separated) tag sets per PEP 427.
            self.pyver = info['py'].split('.')
            self.abi = info['bi'].split('.')
            self.arch = info['ar'].split('.')
|
| 221 |
+
|
| 222 |
+
@property
def filename(self):
    """
    Build and return a filename from the various components.
    """
    build_part = '-' + self.buildver if self.buildver else ''
    # replace - with _ as a local version separator
    version_part = self.version.replace('-', '_')
    return '%s-%s%s-%s-%s-%s.whl' % (self.name, version_part, build_part,
                                     '.'.join(self.pyver),
                                     '.'.join(self.abi),
                                     '.'.join(self.arch))
|
| 237 |
+
|
| 238 |
+
@property
def exists(self):
    """True if the wheel file is present in ``self.dirname``."""
    return os.path.isfile(os.path.join(self.dirname, self.filename))
|
| 242 |
+
|
| 243 |
+
@property
def tags(self):
    """Yield every (pyver, abi, arch) compatibility triple for this wheel."""
    for py_tag in self.pyver:
        for abi_tag in self.abi:
            for arch_tag in self.arch:
                yield py_tag, abi_tag, arch_tag
|
| 249 |
+
|
| 250 |
+
@cached_property
def metadata(self):
    """
    The distribution metadata read from the wheel, as a Metadata instance.

    Tries each known metadata filename in turn inside the wheel's
    .dist-info directory; raises ValueError if none is present.
    """
    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    info_dir = '%s.dist-info' % name_ver
    wrapper = codecs.getreader('utf-8')
    with ZipFile(pathname, 'r') as zf:
        # Reads the WHEEL file (result unused here); raises if absent.
        self.get_wheel_metadata(zf)
        # wv = wheel_metadata['Wheel-Version'].split('.', 1)
        # file_version = tuple([int(i) for i in wv])
        # if file_version < (1, 1):
        #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
        #            LEGACY_METADATA_FILENAME]
        # else:
        #     fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
        fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
        result = None
        for fn in fns:
            try:
                metadata_filename = posixpath.join(info_dir, fn)
                with zf.open(metadata_filename) as bf:
                    wf = wrapper(bf)
                    result = Metadata(fileobj=wf)
                    if result:
                        break
            except KeyError:
                # Entry not in the archive; try the next candidate name.
                pass
        if not result:
            raise ValueError('Invalid wheel, because metadata is '
                             'missing: looked in %s' % ', '.join(fns))
    return result
|
| 281 |
+
|
| 282 |
+
def get_wheel_metadata(self, zf):
    """Read the WHEEL metadata file from open zip *zf* and return a dict."""
    info_dir = '%s-%s.dist-info' % (self.name, self.version)
    metadata_filename = posixpath.join(info_dir, 'WHEEL')
    with zf.open(metadata_filename) as bf:
        # WHEEL is an RFC 822-style headers file; parse via email.
        message = message_from_file(codecs.getreader('utf-8')(bf))
    return dict(message)
|
| 290 |
+
|
| 291 |
+
@cached_property
def info(self):
    """The wheel's WHEEL metadata as a dict (cached after first read)."""
    wheel_path = os.path.join(self.dirname, self.filename)
    with ZipFile(wheel_path, 'r') as zf:
        return self.get_wheel_metadata(zf)
|
| 297 |
+
|
| 298 |
+
def process_shebang(self, data):
    """
    Normalise the shebang of script *data* (bytes) to '#!python' or
    '#!pythonw', preserving any interpreter arguments. Scripts without a
    shebang get '#!python' prepended using the file's own line ending.
    """
    m = SHEBANG_RE.match(data)
    if m:
        end = m.end()
        shebang, data_after_shebang = data[:end], data[end:]
        # Preserve any arguments after the interpreter
        if b'pythonw' in shebang.lower():
            shebang_python = SHEBANG_PYTHONW
        else:
            shebang_python = SHEBANG_PYTHON
        m = SHEBANG_DETAIL_RE.match(shebang)
        if m:
            args = b' ' + m.groups()[-1]
        else:
            args = b''
        shebang = shebang_python + args
        data = shebang + data_after_shebang
    else:
        # No shebang present: detect the newline convention from the
        # first terminator found (LF, CRLF or bare CR), then prepend.
        cr = data.find(b'\r')
        lf = data.find(b'\n')
        if cr < 0 or cr > lf:
            term = b'\n'
        else:
            if data[cr:cr + 2] == b'\r\n':
                term = b'\r\n'
            else:
                term = b'\r'
        data = SHEBANG_PYTHON + term + data
    return data
|
| 327 |
+
|
| 328 |
+
def get_hash(self, data, hash_kind=None):
    """
    Return a (hash_kind, digest) pair for *data* (bytes), where the digest
    is urlsafe base64 with trailing '=' padding stripped, per PEP 427.
    """
    if hash_kind is None:
        hash_kind = self.hash_kind
    hasher = getattr(hashlib, hash_kind, None)
    if hasher is None:
        raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
    encoded = base64.urlsafe_b64encode(hasher(data).digest())
    return hash_kind, encoded.rstrip(b'=').decode('ascii')
|
| 338 |
+
|
| 339 |
+
def write_record(self, records, record_path, archive_record_path):
    """Write *records*, plus the RECORD entry itself, to *record_path*."""
    rows = list(records)    # copy - we append the RECORD row below
    # RECORD's own entry carries no hash or size, by convention.
    rows.append((archive_record_path, '', ''))
    with CSVWriter(record_path) as writer:
        for row in rows:
            writer.writerow(row)
|
| 345 |
+
|
| 346 |
+
def write_records(self, info, libdir, archive_paths):
    """
    Hash and size every archived file, write the RECORD file, and append
    its (archive path, filesystem path) pair to *archive_paths*.
    """
    distinfo, info_dir = info
    records = []
    for arc_path, fs_path in archive_paths:
        with open(fs_path, 'rb') as f:
            contents = f.read()
        digest = '%s=%s' % self.get_hash(contents)
        records.append((arc_path, digest, os.path.getsize(fs_path)))

    record_fs_path = os.path.join(distinfo, 'RECORD')
    record_arc_path = to_posix(os.path.join(info_dir, 'RECORD'))
    self.write_record(records, record_fs_path, record_arc_path)
    archive_paths.append((record_arc_path, record_fs_path))
|
| 361 |
+
|
| 362 |
+
def build_zip(self, pathname, archive_paths):
    """Create a deflate-compressed zip at *pathname* from (arcname, path) pairs."""
    with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
        for arcname, src_path in archive_paths:
            logger.debug('Wrote %s to %s in wheel', src_path, arcname)
            zf.write(src_path, arcname)
|
| 367 |
+
|
| 368 |
+
def build(self, paths, tags=None, wheel_version=None):
    """
    Build a wheel from files in specified paths, and use any specified tags
    when determining the name of the wheel.

    *paths* maps location keys ('purelib' or 'platlib', plus optionally
    'data', 'headers', 'scripts') to directories. Returns the pathname of
    the wheel written.
    """
    if tags is None:
        tags = {}

    # Exactly one of purelib/platlib is expected in paths; platlib means
    # the wheel is impure and gets implementation-specific default tags.
    libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
    if libkey == 'platlib':
        is_pure = 'false'
        default_pyver = [IMPVER]
        default_abi = [ABI]
        default_arch = [ARCH]
    else:
        is_pure = 'true'
        default_pyver = [PYVER]
        default_abi = ['none']
        default_arch = ['any']

    self.pyver = tags.get('pyver', default_pyver)
    self.abi = tags.get('abi', default_abi)
    self.arch = tags.get('arch', default_arch)

    libdir = paths[libkey]

    name_ver = '%s-%s' % (self.name, self.version)
    data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver

    archive_paths = []

    # First, stuff which is not in site-packages
    for key in ('data', 'headers', 'scripts'):
        if key not in paths:
            continue
        path = paths[key]
        if os.path.isdir(path):
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = fsdecode(os.path.join(root, fn))
                    rp = os.path.relpath(p, path)
                    ap = to_posix(os.path.join(data_dir, key, rp))
                    archive_paths.append((ap, p))
                    # Scripts get their shebangs rewritten in place;
                    # .exe launchers are binary and left untouched.
                    if key == 'scripts' and not p.endswith('.exe'):
                        with open(p, 'rb') as f:
                            data = f.read()
                        data = self.process_shebang(data)
                        with open(p, 'wb') as f:
                            f.write(data)

    # Now, stuff which is in site-packages, other than the
    # distinfo stuff.
    path = libdir
    distinfo = None
    for root, dirs, files in os.walk(path):
        if root == path:
            # At the top level only, save distinfo for later
            # and skip it for now
            for i, dn in enumerate(dirs):
                dn = fsdecode(dn)
                if dn.endswith('.dist-info'):
                    distinfo = os.path.join(root, dn)
                    # Removing from dirs prunes os.walk's descent.
                    del dirs[i]
                    break
            assert distinfo, '.dist-info directory expected, not found'

        for fn in files:
            # comment out next suite to leave .pyc files in
            if fsdecode(fn).endswith(('.pyc', '.pyo')):
                continue
            p = os.path.join(root, fn)
            rp = to_posix(os.path.relpath(p, path))
            archive_paths.append((rp, p))

    # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
    # RECORD/INSTALLER/SHARED/WHEEL are generated, not copied.
    files = os.listdir(distinfo)
    for fn in files:
        if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
            p = fsdecode(os.path.join(distinfo, fn))
            ap = to_posix(os.path.join(info_dir, fn))
            archive_paths.append((ap, p))

    wheel_metadata = [
        'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
        'Generator: distlib %s' % __version__,
        'Root-Is-Purelib: %s' % is_pure,
    ]
    for pyver, abi, arch in self.tags:
        wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
    p = os.path.join(distinfo, 'WHEEL')
    with open(p, 'w') as f:
        f.write('\n'.join(wheel_metadata))
    ap = to_posix(os.path.join(info_dir, 'WHEEL'))
    archive_paths.append((ap, p))

    # sort the entries by archive path. Not needed by any spec, but it
    # keeps the archive listing and RECORD tidier than they would otherwise
    # be. Use the number of path segments to keep directory entries together,
    # and keep the dist-info stuff at the end.
    def sorter(t):
        ap = t[0]
        n = ap.count('/')
        if '.dist-info' in ap:
            n += 10000
        return (n, ap)

    archive_paths = sorted(archive_paths, key=sorter)

    # Now, at last, RECORD.
    # Paths in here are archive paths - nothing else makes sense.
    self.write_records((distinfo, info_dir), libdir, archive_paths)
    # Now, ready to build the zip file
    pathname = os.path.join(self.dirname, self.filename)
    self.build_zip(pathname, archive_paths)
    return pathname
|
| 484 |
+
|
| 485 |
+
def skip_entry(self, arcname):
    """
    Determine whether an archive entry should be skipped when verifying
    or installing.
    """
    # The signature file (RECORD.jws) won't be in RECORD, and we don't
    # currently do anything with it. Directories are also skipped, as
    # they won't be in RECORD either. See:
    #
    # https://github.com/pypa/wheel/issues/294
    # https://github.com/pypa/wheel/issues/287
    # https://github.com/pypa/wheel/pull/289
    #
    if arcname.endswith('/'):
        return True
    return arcname.endswith('/RECORD.jws')
|
| 500 |
+
|
| 501 |
+
def install(self, paths, maker, **kwargs):
    """
    Install a wheel to the specified paths. If kwarg ``warner`` is
    specified, it should be a callable, which will be called with two
    tuples indicating the wheel version of this software and the wheel
    version in the file, if there is a discrepancy in the versions.
    This can be used to issue any warnings to raise any exceptions.
    If kwarg ``lib_only`` is True, only the purelib/platlib files are
    installed, and the headers, scripts, data and dist-info metadata are
    not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
    bytecode will try to use file-hash based invalidation (PEP-552) on
    supported interpreter versions (CPython 3.7+).

    The return value is a :class:`InstalledDistribution` instance unless
    ``options.lib_only`` is True, in which case the return value is ``None``.
    """

    dry_run = maker.dry_run
    warner = kwargs.get('warner')
    lib_only = kwargs.get('lib_only', False)
    bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)

    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver

    metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
    wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
    record_name = posixpath.join(info_dir, 'RECORD')

    wrapper = codecs.getreader('utf-8')

    with ZipFile(pathname, 'r') as zf:
        # Parse WHEEL and warn the caller on a version mismatch.
        with zf.open(wheel_metadata_name) as bwf:
            wf = wrapper(bwf)
            message = message_from_file(wf)
        wv = message['Wheel-Version'].split('.', 1)
        file_version = tuple([int(i) for i in wv])
        if (file_version != self.wheel_version) and warner:
            warner(self.wheel_version, file_version)

        if message['Root-Is-Purelib'] == 'true':
            libdir = paths['purelib']
        else:
            libdir = paths['platlib']

        # Index RECORD rows (path, digest, size) by archive path.
        records = {}
        with zf.open(record_name) as bf:
            with CSVReader(stream=bf) as reader:
                for row in reader:
                    p = row[0]
                    records[p] = row

        data_pfx = posixpath.join(data_dir, '')
        info_pfx = posixpath.join(info_dir, '')
        script_pfx = posixpath.join(data_dir, 'scripts', '')

        # make a new instance rather than a copy of maker's,
        # as we mutate it
        fileop = FileOperator(dry_run=dry_run)
        fileop.record = True  # so we can rollback if needed

        bc = not sys.dont_write_bytecode  # Double negatives. Lovely!

        outfiles = []  # for RECORD writing

        # for script copying/shebang processing
        workdir = tempfile.mkdtemp()
        # set target dir later
        # we default add_launchers to False, as the
        # Python Launcher should be used instead
        maker.source_dir = workdir
        maker.target_dir = None
        try:
            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                if self.skip_entry(u_arcname):
                    continue
                # Verify each entry's size and digest against RECORD
                # before writing anything.
                row = records[u_arcname]
                if row[2] and str(zinfo.file_size) != row[2]:
                    raise DistlibException('size mismatch for '
                                           '%s' % u_arcname)
                if row[1]:
                    kind, value = row[1].split('=', 1)
                    with zf.open(arcname) as bf:
                        data = bf.read()
                    _, digest = self.get_hash(data, kind)
                    if digest != value:
                        raise DistlibException('digest mismatch for '
                                               '%s' % arcname)

                if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
                    logger.debug('lib_only: skipping %s', u_arcname)
                    continue
                is_script = (u_arcname.startswith(script_pfx) and not u_arcname.endswith('.exe'))

                if u_arcname.startswith(data_pfx):
                    # Entry under <name>.data/<where>/...: map <where>
                    # (e.g. 'scripts', 'headers') through paths.
                    _, where, rp = u_arcname.split('/', 2)
                    outfile = os.path.join(paths[where], convert_path(rp))
                else:
                    # meant for site-packages.
                    if u_arcname in (wheel_metadata_name, record_name):
                        continue
                    outfile = os.path.join(libdir, convert_path(u_arcname))
                if not is_script:
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, outfile)
                    # Issue #147: permission bits aren't preserved. Using
                    # zf.extract(zinfo, libdir) should have worked, but didn't,
                    # see https://www.thetopsites.net/article/53834422.shtml
                    # So ... manually preserve permission bits as given in zinfo
                    if os.name == 'posix':
                        # just set the normal permission bits
                        os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF)
                    outfiles.append(outfile)
                    # Double check the digest of the written file
                    if not dry_run and row[1]:
                        with open(outfile, 'rb') as bf:
                            data = bf.read()
                            _, newdigest = self.get_hash(data, kind)
                            if newdigest != digest:
                                raise DistlibException('digest mismatch '
                                                       'on write for '
                                                       '%s' % outfile)
                    if bc and outfile.endswith('.py'):
                        try:
                            pyc = fileop.byte_compile(outfile, hashed_invalidation=bc_hashed_invalidation)
                            outfiles.append(pyc)
                        except Exception:
                            # Don't give up if byte-compilation fails,
                            # but log it and perhaps warn the user
                            logger.warning('Byte-compilation failed', exc_info=True)
                else:
                    # Scripts go via workdir so the maker can rewrite
                    # shebangs / generate launchers into target_dir.
                    fn = os.path.basename(convert_path(arcname))
                    workname = os.path.join(workdir, fn)
                    with zf.open(arcname) as bf:
                        fileop.copy_stream(bf, workname)

                    dn, fn = os.path.split(outfile)
                    maker.target_dir = dn
                    filenames = maker.make(fn)
                    fileop.set_executable_mode(filenames)
                    outfiles.extend(filenames)

            if lib_only:
                logger.debug('lib_only: returning None')
                dist = None
            else:
                # Generate scripts

                # Try to get pydist.json so we can see if there are
                # any commands to generate. If this fails (e.g. because
                # of a legacy wheel), log a warning but don't give up.
                commands = None
                file_version = self.info['Wheel-Version']
                if file_version == '1.0':
                    # Use legacy info
                    ep = posixpath.join(info_dir, 'entry_points.txt')
                    try:
                        with zf.open(ep) as bwf:
                            epdata = read_exports(bwf)
                        commands = {}
                        for key in ('console', 'gui'):
                            k = '%s_scripts' % key
                            if k in epdata:
                                commands['wrap_%s' % key] = d = {}
                                for v in epdata[k].values():
                                    s = '%s:%s' % (v.prefix, v.suffix)
                                    if v.flags:
                                        s += ' [%s]' % ','.join(v.flags)
                                    d[v.name] = s
                    except Exception:
                        logger.warning('Unable to read legacy script '
                                       'metadata, so cannot generate '
                                       'scripts')
                else:
                    try:
                        with zf.open(metadata_name) as bwf:
                            wf = wrapper(bwf)
                            commands = json.load(wf).get('extensions')
                            if commands:
                                commands = commands.get('python.commands')
                    except Exception:
                        logger.warning('Unable to read JSON metadata, so '
                                       'cannot generate scripts')
                if commands:
                    console_scripts = commands.get('wrap_console', {})
                    gui_scripts = commands.get('wrap_gui', {})
                    if console_scripts or gui_scripts:
                        script_dir = paths.get('scripts', '')
                        if not os.path.isdir(script_dir):
                            raise ValueError('Valid script path not '
                                             'specified')
                        maker.target_dir = script_dir
                        for k, v in console_scripts.items():
                            script = '%s = %s' % (k, v)
                            filenames = maker.make(script)
                            fileop.set_executable_mode(filenames)

                        if gui_scripts:
                            options = {'gui': True}
                            for k, v in gui_scripts.items():
                                script = '%s = %s' % (k, v)
                                filenames = maker.make(script, options)
                                fileop.set_executable_mode(filenames)

                p = os.path.join(libdir, info_dir)
                dist = InstalledDistribution(p)

                # Write SHARED
                paths = dict(paths)  # don't change passed in dict
                del paths['purelib']
                del paths['platlib']
                paths['lib'] = libdir
                p = dist.write_shared_locations(paths, dry_run)
                if p:
                    outfiles.append(p)

                # Write RECORD
                dist.write_installed_files(outfiles, paths['prefix'], dry_run)
            return dist
        except Exception:  # pragma: no cover
            logger.exception('installation failed.')
            # Undo any files already written before re-raising.
            fileop.rollback()
            raise
        finally:
            shutil.rmtree(workdir)
|
| 733 |
+
|
| 734 |
+
def _get_dylib_cache(self):
    """Return (creating on first use) the shared extension-module cache."""
    global cache
    if cache is None:
        version_dir = '%s.%s' % sys.version_info[:2]
        # Use native string to avoid issues on 2.x: see Python #20140.
        base = os.path.join(get_cache_base(), str('dylib-cache'), version_dir)
        cache = Cache(base)
    return cache
|
| 741 |
+
|
| 742 |
+
def _get_extensions(self):
    """
    Extract this wheel's extension modules (listed in the EXTENSIONS
    index file) into the dylib cache, re-extracting when the wheel copy
    is newer, and return a list of (module name, cached path) pairs.
    """
    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    info_dir = '%s.dist-info' % name_ver
    arcname = posixpath.join(info_dir, 'EXTENSIONS')
    wrapper = codecs.getreader('utf-8')
    result = []
    with ZipFile(pathname, 'r') as zf:
        try:
            with zf.open(arcname) as bf:
                wf = wrapper(bf)
                # EXTENSIONS maps module names to archive-relative paths.
                extensions = json.load(wf)
                cache = self._get_dylib_cache()
                prefix = cache.prefix_to_dir(self.filename, use_abspath=False)
                cache_base = os.path.join(cache.base, prefix)
                if not os.path.isdir(cache_base):
                    os.makedirs(cache_base)
                for name, relpath in extensions.items():
                    dest = os.path.join(cache_base, convert_path(relpath))
                    if not os.path.exists(dest):
                        extract = True
                    else:
                        # Re-extract only if the archived copy is newer
                        # than the cached file on disk.
                        file_time = os.stat(dest).st_mtime
                        file_time = datetime.datetime.fromtimestamp(file_time)
                        info = zf.getinfo(relpath)
                        wheel_time = datetime.datetime(*info.date_time)
                        extract = wheel_time > file_time
                    if extract:
                        zf.extract(relpath, cache_base)
                    result.append((name, dest))
        except KeyError:
            # No EXTENSIONS entry: the wheel bundles no extensions.
            pass
    return result
|
| 775 |
+
|
| 776 |
+
def is_compatible(self):
    """
    Determine if a wheel is compatible with the running system.
    """
    # Delegates to the module-level is_compatible() function defined
    # elsewhere in this module.
    return is_compatible(self)
|
| 781 |
+
|
| 782 |
+
def is_mountable(self):
    """
    Determine if a wheel is asserted as mountable by its metadata.
    """
    return True  # for now - metadata details TBD
|
| 787 |
+
|
| 788 |
+
def mount(self, append=False):
    """
    Add this wheel to sys.path and register any bundled extension
    modules with the import machinery.

    With append=True the wheel goes at the end of sys.path instead of
    the front. Raises DistlibException if the wheel is incompatible or
    not mountable.
    """
    pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
    if not self.is_compatible():
        raise DistlibException(
            'Wheel %s not compatible with this Python.' % pathname)
    if not self.is_mountable():
        raise DistlibException(
            'Wheel %s is marked as not mountable.' % pathname)
    if pathname in sys.path:
        logger.debug('%s already in path', pathname)
        return
    if append:
        sys.path.append(pathname)
    else:
        sys.path.insert(0, pathname)
    extensions = self._get_extensions()
    if extensions:
        # Install the shared meta-path hook once, then register
        # this wheel's extensions with it.
        if _hook not in sys.meta_path:
            sys.meta_path.append(_hook)
        _hook.add(pathname, extensions)
|
| 808 |
+
|
| 809 |
+
def unmount(self):
    """Remove this wheel from sys.path and unregister its extensions."""
    pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
    if pathname not in sys.path:
        logger.debug('%s not in path', pathname)
        return
    sys.path.remove(pathname)
    if pathname in _hook.impure_wheels:
        _hook.remove(pathname)
    # Drop the meta-path hook once no mounted wheel needs it.
    if not _hook.impure_wheels and _hook in sys.meta_path:
        sys.meta_path.remove(_hook)
|
| 820 |
+
|
| 821 |
+
def verify(self):
    """
    Check every archive entry against the wheel's RECORD: reject paths
    containing '..', and raise DistlibException on any size or digest
    mismatch.
    """
    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    # data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver

    # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
    wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
    record_name = posixpath.join(info_dir, 'RECORD')

    wrapper = codecs.getreader('utf-8')

    with ZipFile(pathname, 'r') as zf:
        # Parse WHEEL to confirm it is present and well-formed.
        with zf.open(wheel_metadata_name) as bwf:
            wf = wrapper(bwf)
            message_from_file(wf)
        # wv = message['Wheel-Version'].split('.', 1)
        # file_version = tuple([int(i) for i in wv])
        # TODO version verification

        # Index RECORD rows (path, digest, size) by archive path.
        records = {}
        with zf.open(record_name) as bf:
            with CSVReader(stream=bf) as reader:
                for row in reader:
                    p = row[0]
                    records[p] = row

        for zinfo in zf.infolist():
            arcname = zinfo.filename
            if isinstance(arcname, text_type):
                u_arcname = arcname
            else:
                u_arcname = arcname.decode('utf-8')
            # See issue #115: some wheels have .. in their entries, but
            # in the filename ... e.g. __main__..py ! So the check is
            # updated to look for .. in the directory portions
            p = u_arcname.split('/')
            if '..' in p:
                raise DistlibException('invalid entry in '
                                       'wheel: %r' % u_arcname)

            if self.skip_entry(u_arcname):
                continue
            row = records[u_arcname]
            if row[2] and str(zinfo.file_size) != row[2]:
                raise DistlibException('size mismatch for '
                                       '%s' % u_arcname)
            if row[1]:
                kind, value = row[1].split('=', 1)
                with zf.open(arcname) as bf:
                    data = bf.read()
                _, digest = self.get_hash(data, kind)
                if digest != value:
                    raise DistlibException('digest mismatch for '
                                           '%s' % arcname)
|
| 876 |
+
|
| 877 |
+
def update(self, modifier, dest_dir=None, **kwargs):
    """
    Update the contents of a wheel in a generic way. The modifier should
    be a callable which expects a dictionary argument: its keys are
    archive-entry paths, and its values are absolute filesystem paths
    where the contents the corresponding archive entries can be found. The
    modifier is free to change the contents of the files pointed to, add
    new entries and remove entries, before returning. This method will
    extract the entire contents of the wheel to a temporary location, call
    the modifier, and then use the passed (and possibly updated)
    dictionary to write a new wheel. If ``dest_dir`` is specified, the new
    wheel is written there -- otherwise, the original wheel is overwritten.

    The modifier should return True if it updated the wheel, else False.
    This method returns the same value the modifier returns.
    """

    def get_version(path_map, info_dir):
        # Read the distribution version from the extracted metadata file,
        # preferring the legacy metadata filename and falling back to
        # PKG-INFO. Returns (version, path); both None when no metadata
        # file was found in the archive.
        version = path = None
        key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
        if key not in path_map:
            key = '%s/PKG-INFO' % info_dir
        if key in path_map:
            path = path_map[key]
            version = Metadata(path=path).version
        return version, path

    def update_version(version, path):
        # Bump (or add) a local version segment so the rebuilt wheel is
        # distinguishable from the original, then rewrite the metadata
        # file at *path*. Non-PEP 440 versions are left untouched.
        updated = None
        try:
            NormalizedVersion(version)
            i = version.find('-')
            if i < 0:
                updated = '%s+1' % version
            else:
                parts = [int(s) for s in version[i + 1:].split('.')]
                parts[-1] += 1
                updated = '%s+%s' % (version[:i], '.'.join(str(i) for i in parts))
        except UnsupportedVersionError:
            logger.debug('Cannot update non-compliant (PEP-440) '
                         'version %r', version)
        if updated:
            md = Metadata(path=path)
            md.version = updated
            legacy = path.endswith(LEGACY_METADATA_FILENAME)
            md.write(path=path, legacy=legacy)
            logger.debug('Version updated from %r to %r', version, updated)

    pathname = os.path.join(self.dirname, self.filename)
    name_ver = '%s-%s' % (self.name, self.version)
    info_dir = '%s.dist-info' % name_ver
    record_name = posixpath.join(info_dir, 'RECORD')
    with tempdir() as workdir:
        with ZipFile(pathname, 'r') as zf:
            path_map = {}
            for zinfo in zf.infolist():
                arcname = zinfo.filename
                if isinstance(arcname, text_type):
                    u_arcname = arcname
                else:
                    u_arcname = arcname.decode('utf-8')
                # RECORD is regenerated below by write_records, so the
                # original one is deliberately not extracted.
                if u_arcname == record_name:
                    continue
                # NOTE(review): unlike verify(), this rejects any '..'
                # substring anywhere in the entry name, which would also
                # reject odd-but-legal filenames such as '__main__..py' --
                # confirm this stricter check is intended here.
                if '..' in u_arcname:
                    raise DistlibException('invalid entry in '
                                           'wheel: %r' % u_arcname)
                zf.extract(zinfo, workdir)
                path = os.path.join(workdir, convert_path(u_arcname))
                path_map[u_arcname] = path

        # Remember the version.
        original_version, _ = get_version(path_map, info_dir)
        # Files extracted. Call the modifier.
        modified = modifier(path_map, **kwargs)
        if modified:
            # Something changed - need to build a new wheel.
            current_version, path = get_version(path_map, info_dir)
            if current_version and (current_version == original_version):
                # Add or update local version to signify changes.
                update_version(current_version, path)
            # Decide where the new wheel goes.
            if dest_dir is None:
                fd, newpath = tempfile.mkstemp(suffix='.whl', prefix='wheel-update-', dir=workdir)
                os.close(fd)
            else:
                if not os.path.isdir(dest_dir):
                    raise DistlibException('Not a directory: %r' % dest_dir)
                newpath = os.path.join(dest_dir, self.filename)
            archive_paths = list(path_map.items())
            distinfo = os.path.join(workdir, info_dir)
            info = distinfo, info_dir
            self.write_records(info, workdir, archive_paths)
            self.build_zip(newpath, archive_paths)
            # Overwriting in place: the new wheel was built inside the
            # temporary workdir, copy it over the original file.
            if dest_dir is None:
                shutil.copyfile(newpath, pathname)
    return modified
|
| 973 |
+
|
| 974 |
+
|
| 975 |
+
def _get_glibc_version():
|
| 976 |
+
import platform
|
| 977 |
+
ver = platform.libc_ver()
|
| 978 |
+
result = []
|
| 979 |
+
if ver[0] == 'glibc':
|
| 980 |
+
for s in ver[1].split('.'):
|
| 981 |
+
result.append(int(s) if s.isdigit() else 0)
|
| 982 |
+
result = tuple(result)
|
| 983 |
+
return result
|
| 984 |
+
|
| 985 |
+
|
| 986 |
+
def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """

    class _Version:
        # Tiny helper pairing a comparable (major, minor) tuple with the
        # undotted string form used inside tags (e.g. (3, 10) -> "310").
        def __init__(self, major, minor):
            self.major = major
            self.major_minor = (major, minor)
            self.string = ''.join((str(major), str(minor)))

        def __str__(self):
            return self.string

    # Current minor version first, then every older minor down to X.0.
    versions = [
        _Version(sys.version_info.major, minor_version)
        for minor_version in range(sys.version_info.minor, -1, -1)
    ]
    # Collect ABI tags advertised by the interpreter's extension-module
    # suffixes (e.g. ".abi3.so" -> "abi3"), then prepend the primary ABI.
    abis = []
    for suffix in _get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')
    result = []

    arches = [ARCH]
    if sys.platform == 'darwin':
        # macOS platform tags encode "<name>_<major>_<minor>_<arch>"; add
        # compatible fat/universal variants for every older minor release.
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:  # already there
                        arches.append(s)
                minor -= 1

    # Most specific - our Python version, ABI and arch
    for i, version_object in enumerate(versions):
        version = str(version_object)
        add_abis = []

        # Only the exact interpreter version gets the full ABI list; older
        # versions only get the stable (limited API) ABI added below.
        if i == 0:
            add_abis = abis

        if IMP_PREFIX == 'cp' and version_object.major_minor >= (3, 2):
            limited_api_abi = 'abi' + str(version_object.major)
            if limited_api_abi not in add_abis:
                add_abis.append(limited_api_abi)

        for abi in add_abis:
            for arch in arches:
                result.append((''.join((IMP_PREFIX, version)), abi, arch))
                # manylinux
                if abi != 'none' and sys.platform.startswith('linux'):
                    arch = arch.replace('linux_', '')
                    parts = _get_glibc_version()
                    if len(parts) == 2:
                        # Gate the legacy manylinux aliases on the glibc
                        # version each one requires (PEP 513/571/599),
                        # plus the PEP 600 perennial tag.
                        if parts >= (2, 5):
                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux1_%s' % arch))
                        if parts >= (2, 12):
                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2010_%s' % arch))
                        if parts >= (2, 17):
                            result.append((''.join((IMP_PREFIX, version)), abi, 'manylinux2014_%s' % arch))
                        result.append((''.join(
                            (IMP_PREFIX, version)), abi, 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch)))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version_object in enumerate(versions):
        version = str(version_object)
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            # Major-version-only tag, e.g. "cp3".
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version_object in enumerate(versions):
        version = str(version_object)
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))

    return set(result)
|
| 1083 |
+
|
| 1084 |
+
|
| 1085 |
+
# Tags supported by the running interpreter, computed once at import time.
COMPATIBLE_TAGS = compatible_tags()

# The helper is only needed at import time; drop it from the module namespace.
del compatible_tags
|
| 1088 |
+
|
| 1089 |
+
|
| 1090 |
+
def is_compatible(wheel, tags=None):
    """Return True if *wheel* matches any of the given (pyver, abi, arch) tags.

    ``wheel`` may be a Wheel instance or a wheel filename; ``tags``
    defaults to the tags supported by the running interpreter.
    """
    if not isinstance(wheel, Wheel):
        wheel = Wheel(wheel)  # treat the argument as a wheel filename
    candidate_tags = COMPATIBLE_TAGS if tags is None else tags
    return any(
        ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch
        for ver, abi, arch in candidate_tags
    )
|
llava/lib/python3.10/site-packages/pip/_vendor/packaging/_elffile.py
ADDED
|
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
ELF file parser.
|
| 3 |
+
|
| 4 |
+
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
| 5 |
+
interface to ``ZipFile``. Only the read interface is implemented.
|
| 6 |
+
|
| 7 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
| 8 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import enum
|
| 14 |
+
import os
|
| 15 |
+
import struct
|
| 16 |
+
from typing import IO
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class ELFInvalid(ValueError):
    """Raised when a file cannot be parsed as a valid ELF binary."""
    pass
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class EIClass(enum.IntEnum):
    # Values of the EI_CLASS byte of e_ident: 32- vs 64-bit object format.
    C32 = 1
    C64 = 2
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class EIData(enum.IntEnum):
    # Values of the EI_DATA byte of e_ident: little- vs big-endian encoding.
    Lsb = 1
    Msb = 2
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class EMachine(enum.IntEnum):
    # e_machine values for the architectures this parser distinguishes.
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183  # AArch64; the misspelled member name is part of the public API.
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class ELFFile:
    """
    Representation of an ELF executable.
    """

    def __init__(self, f: IO[bytes]) -> None:
        """Parse the ELF identification and header from *f*.

        Raises ELFInvalid when *f* does not contain a recognizable ELF
        header. The stream is kept open and is seeked/read again by
        :attr:`interpreter`.
        """
        self._f = f

        try:
            # e_ident is 16 bytes: magic, class, data encoding, version, ...
            ident = self._read("16B")
        except struct.error as e:
            raise ELFInvalid("unable to parse identification") from e
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            # Keyed by (capacity, encoding); unknown combinations are rejected.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError as e:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            ) from e

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> tuple[int, ...]:
        # Read exactly as many bytes as *fmt* describes from the current
        # position; struct.unpack raises struct.error on short reads.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> str | None:
        """
        The path recorded in the ``PT_INTERP`` section header.

        Returns None when no PT_INTERP entry exists (e.g. statically
        linked executables).
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                # Truncated/unreadable program-header entry: skip it.
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            # Seek to p_offset and read p_filesz bytes (NUL-terminated path).
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
|
llava/lib/python3.10/site-packages/pip/_vendor/packaging/markers.py
ADDED
|
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import operator
|
| 8 |
+
import os
|
| 9 |
+
import platform
|
| 10 |
+
import sys
|
| 11 |
+
from typing import Any, Callable, TypedDict, cast
|
| 12 |
+
|
| 13 |
+
from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
|
| 14 |
+
from ._parser import parse_marker as _parse_marker
|
| 15 |
+
from ._tokenizer import ParserSyntaxError
|
| 16 |
+
from .specifiers import InvalidSpecifier, Specifier
|
| 17 |
+
from .utils import canonicalize_name
|
| 18 |
+
|
| 19 |
+
__all__ = [
|
| 20 |
+
"InvalidMarker",
|
| 21 |
+
"Marker",
|
| 22 |
+
"UndefinedComparison",
|
| 23 |
+
"UndefinedEnvironmentName",
|
| 24 |
+
"default_environment",
|
| 25 |
+
]
|
| 26 |
+
|
| 27 |
+
Operator = Callable[[str, str], bool]
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.

    Raised by :class:`Marker` when the marker string cannot be parsed.
    """
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.

    Raised during marker evaluation when an operator has no defined
    meaning for the operands (see ``_eval_op``).
    """
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.

    Kept for backwards compatibility with users catching this exception.
    """
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class Environment(TypedDict):
    """The PEP 508 marker variables, as a typed dictionary.

    An instance describes one concrete interpreter/platform combination;
    :func:`default_environment` produces one for the running interpreter.
    """

    implementation_name: str
    """The implementation's identifier, e.g. ``'cpython'``."""

    implementation_version: str
    """
    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
    ``'7.3.13'`` for PyPy3.10 v7.3.13.
    """

    os_name: str
    """
    The value of :py:data:`os.name`. The name of the operating system dependent module
    imported, e.g. ``'posix'``.
    """

    platform_machine: str
    """
    Returns the machine type, e.g. ``'i386'``.

    An empty string if the value cannot be determined.
    """

    platform_release: str
    """
    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.

    An empty string if the value cannot be determined.
    """

    platform_system: str
    """
    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.

    An empty string if the value cannot be determined.
    """

    platform_version: str
    """
    The system's release version, e.g. ``'#3 on degas'``.

    An empty string if the value cannot be determined.
    """

    python_full_version: str
    """
    The Python version as string ``'major.minor.patchlevel'``.

    Note that unlike the Python :py:data:`sys.version`, this value will always include
    the patchlevel (it defaults to 0).
    """

    platform_python_implementation: str
    """
    A string identifying the Python implementation, e.g. ``'CPython'``.
    """

    python_version: str
    """The Python version as string ``'major.minor'``."""

    sys_platform: str
    """
    This string contains a platform identifier that can be used to append
    platform-specific components to :py:data:`sys.path`, for instance.

    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
    returned by ``uname -s`` with the first part of the version as returned by
    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
    was built.
    """
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def _normalize_extra_values(results: Any) -> Any:
|
| 122 |
+
"""
|
| 123 |
+
Normalize extra values.
|
| 124 |
+
"""
|
| 125 |
+
if isinstance(results[0], tuple):
|
| 126 |
+
lhs, op, rhs = results[0]
|
| 127 |
+
if isinstance(lhs, Variable) and lhs.value == "extra":
|
| 128 |
+
normalized_extra = canonicalize_name(rhs.value)
|
| 129 |
+
rhs = Value(normalized_extra)
|
| 130 |
+
elif isinstance(rhs, Variable) and rhs.value == "extra":
|
| 131 |
+
normalized_extra = canonicalize_name(lhs.value)
|
| 132 |
+
lhs = Value(normalized_extra)
|
| 133 |
+
results[0] = lhs, op, rhs
|
| 134 |
+
return results
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def _format_marker(
|
| 138 |
+
marker: list[str] | MarkerAtom | str, first: bool | None = True
|
| 139 |
+
) -> str:
|
| 140 |
+
assert isinstance(marker, (list, tuple, str))
|
| 141 |
+
|
| 142 |
+
# Sometimes we have a structure like [[...]] which is a single item list
|
| 143 |
+
# where the single item is itself it's own list. In that case we want skip
|
| 144 |
+
# the rest of this function so that we don't get extraneous () on the
|
| 145 |
+
# outside.
|
| 146 |
+
if (
|
| 147 |
+
isinstance(marker, list)
|
| 148 |
+
and len(marker) == 1
|
| 149 |
+
and isinstance(marker[0], (list, tuple))
|
| 150 |
+
):
|
| 151 |
+
return _format_marker(marker[0])
|
| 152 |
+
|
| 153 |
+
if isinstance(marker, list):
|
| 154 |
+
inner = (_format_marker(m, first=False) for m in marker)
|
| 155 |
+
if first:
|
| 156 |
+
return " ".join(inner)
|
| 157 |
+
else:
|
| 158 |
+
return "(" + " ".join(inner) + ")"
|
| 159 |
+
elif isinstance(marker, tuple):
|
| 160 |
+
return " ".join([m.serialize() for m in marker])
|
| 161 |
+
else:
|
| 162 |
+
return marker
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
# Plain (non-version) comparison and containment operators, used by
# _eval_op when the right-hand side is not a valid PEP 440 specifier.
_operators: dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
|
| 175 |
+
|
| 176 |
+
|
| 177 |
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate a single ``lhs <op> rhs`` marker comparison.

    Comparisons whose right-hand side parses as a PEP 440 specifier are
    delegated to :class:`Specifier`; anything else falls back to the
    plain operators in ``_operators``.
    """
    symbol = op.serialize()
    try:
        spec = Specifier(symbol + rhs)
    except InvalidSpecifier:
        spec = None
    if spec is not None:
        return spec.contains(lhs, prereleases=True)

    comparator: Operator | None = _operators.get(symbol)
    if comparator is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
    return comparator(lhs, rhs)
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
def _normalize(*values: str, key: str) -> tuple[str, ...]:
|
| 193 |
+
# PEP 685 – Comparison of extra names for optional distribution dependencies
|
| 194 |
+
# https://peps.python.org/pep-0685/
|
| 195 |
+
# > When comparing extra names, tools MUST normalize the names being
|
| 196 |
+
# > compared using the semantics outlined in PEP 503 for names
|
| 197 |
+
if key == "extra":
|
| 198 |
+
return tuple(canonicalize_name(v) for v in values)
|
| 199 |
+
|
| 200 |
+
# other environment markers don't have such standards
|
| 201 |
+
return values
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
    # The parsed marker is a flat sequence in disjunctive normal form:
    # "or" starts a new group, and items within a group are "and"-ed.
    groups: list[list[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            # Exactly one side is a Variable: resolve it against the
            # environment and compare with the literal on the other side.
            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]

            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    # Any fully-true "and" group satisfies the whole expression.
    return any(all(item) for item in groups)
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
def format_full_version(info: sys._version_info) -> str:
    """Render a ``sys.version_info``-like object as ``X.Y.Z[{a|b|c|r}N]``."""
    base = f"{info.major}.{info.minor}.{info.micro}"
    release_kind = info.releaselevel
    if release_kind == "final":
        return base
    # Non-final releases append the level's first letter plus the serial,
    # e.g. "3.13.0a2".
    return base + release_kind[0] + str(info.serial)
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
def default_environment() -> Environment:
|
| 243 |
+
iver = format_full_version(sys.implementation.version)
|
| 244 |
+
implementation_name = sys.implementation.name
|
| 245 |
+
return {
|
| 246 |
+
"implementation_name": implementation_name,
|
| 247 |
+
"implementation_version": iver,
|
| 248 |
+
"os_name": os.name,
|
| 249 |
+
"platform_machine": platform.machine(),
|
| 250 |
+
"platform_release": platform.release(),
|
| 251 |
+
"platform_system": platform.system(),
|
| 252 |
+
"platform_version": platform.version(),
|
| 253 |
+
"python_full_version": platform.python_version(),
|
| 254 |
+
"platform_python_implementation": platform.python_implementation(),
|
| 255 |
+
"python_version": ".".join(platform.python_version_tuple()[:2]),
|
| 256 |
+
"sys_platform": sys.platform,
|
| 257 |
+
}
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
class Marker:
    """A parsed PEP 508 environment marker, evaluated via :meth:`evaluate`."""

    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'or',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'and',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash on the normalized string form so equal markers hash equal.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(self, environment: dict[str, str] | None = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = cast("dict[str, str]", default_environment())
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(
            self._markers, _repair_python_full_version(current_environment)
        )
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
def _repair_python_full_version(env: dict[str, str]) -> dict[str, str]:
|
| 325 |
+
"""
|
| 326 |
+
Work around platform.python_version() returning something that is not PEP 440
|
| 327 |
+
compliant for non-tagged Python builds.
|
| 328 |
+
"""
|
| 329 |
+
if env["python_full_version"].endswith("+"):
|
| 330 |
+
env["python_full_version"] += "local"
|
| 331 |
+
return env
|
llava/lib/python3.10/site-packages/pip/_vendor/packaging/metadata.py
ADDED
|
@@ -0,0 +1,863 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import email.feedparser
|
| 4 |
+
import email.header
|
| 5 |
+
import email.message
|
| 6 |
+
import email.parser
|
| 7 |
+
import email.policy
|
| 8 |
+
import pathlib
|
| 9 |
+
import sys
|
| 10 |
+
import typing
|
| 11 |
+
from typing import (
|
| 12 |
+
Any,
|
| 13 |
+
Callable,
|
| 14 |
+
Generic,
|
| 15 |
+
Literal,
|
| 16 |
+
TypedDict,
|
| 17 |
+
cast,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
from . import licenses, requirements, specifiers, utils
|
| 21 |
+
from . import version as version_module
|
| 22 |
+
from .licenses import NormalizedLicenseExpression
|
| 23 |
+
|
| 24 |
+
T = typing.TypeVar("T")  # Generic enriched-value type carried by _Validator[T].
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
if sys.version_info >= (3, 11):  # pragma: no cover
    # On 3.11+ the builtin is used; the re-assignment makes the name importable
    # from this module on every supported Python version.
    ExceptionGroup = ExceptionGroup
else:  # pragma: no cover

    class ExceptionGroup(Exception):
        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.

        If :external:exc:`ExceptionGroup` is already defined by Python itself,
        that version is used instead.
        """

        message: str
        exceptions: list[Exception]

        def __init__(self, message: str, exceptions: list[Exception]) -> None:
            self.message = message
            self.exceptions = exceptions

        def __repr__(self) -> str:
            # e.g. ExceptionGroup('invalid metadata', [InvalidMetadata(...)])
            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class InvalidMetadata(ValueError):
    """Raised when a core metadata field holds data that fails validation.

    The human-readable explanation travels as the normal ``ValueError``
    message; the offending field's name is kept on :attr:`field`.
    """

    field: str
    """The name of the field that contains invalid data."""

    def __init__(self, field: str, message: str) -> None:
        super().__init__(message)
        self.field = field
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# formats offer some very basic primitives in *some* way then we can support
# serializing to and from that format.
class RawMetadata(TypedDict, total=False):
    """A dictionary of raw core metadata.

    Each field in core metadata maps to a key of this dictionary (when data is
    provided). The key is lower-case and underscores are used instead of dashes
    compared to the equivalent core metadata field. Any core metadata field that
    can be specified multiple times or can hold multiple values in a single
    field have a key with a plural name. See :class:`Metadata` whose attributes
    match the keys of this dictionary.

    Core metadata fields that can be specified multiple times are stored as a
    list or dict depending on which is appropriate for the field. Any fields
    which hold multiple values in a single field are stored as a list.

    """

    # total=False: every key is optional; only fields present in the parsed
    # input appear in an instance.

    # Metadata 1.0 - PEP 241
    metadata_version: str
    name: str
    version: str
    platforms: list[str]
    summary: str
    description: str
    keywords: list[str]
    home_page: str
    author: str
    author_email: str
    license: str

    # Metadata 1.1 - PEP 314
    supported_platforms: list[str]
    download_url: str
    classifiers: list[str]
    requires: list[str]
    provides: list[str]
    obsoletes: list[str]

    # Metadata 1.2 - PEP 345
    maintainer: str
    maintainer_email: str
    requires_dist: list[str]
    provides_dist: list[str]
    obsoletes_dist: list[str]
    requires_python: str
    requires_external: list[str]
    project_urls: dict[str, str]

    # Metadata 2.0
    # PEP 426 attempted to completely revamp the metadata format
    # but got stuck without ever being able to build consensus on
    # it and ultimately ended up withdrawn.
    #
    # However, a number of tools had started emitting METADATA with
    # `2.0` Metadata-Version, so for historical reasons, this version
    # was skipped.

    # Metadata 2.1 - PEP 566
    description_content_type: str
    provides_extra: list[str]

    # Metadata 2.2 - PEP 643
    dynamic: list[str]

    # Metadata 2.3 - PEP 685
    # No new fields were added in PEP 685, just some edge case were
    # tightened up to provide better interoptability.

    # Metadata 2.4 - PEP 639
    license_expression: str
    license_files: list[str]
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
# RawMetadata keys whose value is a single str.
_STRING_FIELDS = {
    "author",
    "author_email",
    "description",
    "description_content_type",
    "download_url",
    "home_page",
    "license",
    "license_expression",
    "maintainer",
    "maintainer_email",
    "metadata_version",
    "name",
    "requires_python",
    "summary",
    "version",
}

# RawMetadata keys whose value is a list[str] (multiple-use headers).
_LIST_FIELDS = {
    "classifiers",
    "dynamic",
    "license_files",
    "obsoletes",
    "obsoletes_dist",
    "platforms",
    "provides",
    "provides_dist",
    "provides_extra",
    "requires",
    "requires_dist",
    "requires_external",
    "supported_platforms",
}

# RawMetadata keys whose value is a dict[str, str].
_DICT_FIELDS = {
    "project_urls",
}
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def _parse_keywords(data: str) -> list[str]:
|
| 176 |
+
"""Split a string of comma-separated keywords into a list of keywords."""
|
| 177 |
+
return [k.strip() for k in data.split(",")]
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def _parse_project_urls(data: list[str]) -> dict[str, str]:
|
| 181 |
+
"""Parse a list of label/URL string pairings separated by a comma."""
|
| 182 |
+
urls = {}
|
| 183 |
+
for pair in data:
|
| 184 |
+
# Our logic is slightly tricky here as we want to try and do
|
| 185 |
+
# *something* reasonable with malformed data.
|
| 186 |
+
#
|
| 187 |
+
# The main thing that we have to worry about, is data that does
|
| 188 |
+
# not have a ',' at all to split the label from the Value. There
|
| 189 |
+
# isn't a singular right answer here, and we will fail validation
|
| 190 |
+
# later on (if the caller is validating) so it doesn't *really*
|
| 191 |
+
# matter, but since the missing value has to be an empty str
|
| 192 |
+
# and our return value is dict[str, str], if we let the key
|
| 193 |
+
# be the missing value, then they'd have multiple '' values that
|
| 194 |
+
# overwrite each other in a accumulating dict.
|
| 195 |
+
#
|
| 196 |
+
# The other potentional issue is that it's possible to have the
|
| 197 |
+
# same label multiple times in the metadata, with no solid "right"
|
| 198 |
+
# answer with what to do in that case. As such, we'll do the only
|
| 199 |
+
# thing we can, which is treat the field as unparseable and add it
|
| 200 |
+
# to our list of unparsed fields.
|
| 201 |
+
parts = [p.strip() for p in pair.split(",", 1)]
|
| 202 |
+
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
|
| 203 |
+
|
| 204 |
+
# TODO: The spec doesn't say anything about if the keys should be
|
| 205 |
+
# considered case sensitive or not... logically they should
|
| 206 |
+
# be case-preserving and case-insensitive, but doing that
|
| 207 |
+
# would open up more cases where we might have duplicate
|
| 208 |
+
# entries.
|
| 209 |
+
label, url = parts
|
| 210 |
+
if label in urls:
|
| 211 |
+
# The label already exists in our set of urls, so this field
|
| 212 |
+
# is unparseable, and we can just add the whole thing to our
|
| 213 |
+
# unparseable data and stop processing it.
|
| 214 |
+
raise KeyError("duplicate labels in project urls")
|
| 215 |
+
urls[label] = url
|
| 216 |
+
|
| 217 |
+
return urls
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
|
| 221 |
+
"""Get the body of the message."""
|
| 222 |
+
# If our source is a str, then our caller has managed encodings for us,
|
| 223 |
+
# and we don't need to deal with it.
|
| 224 |
+
if isinstance(source, str):
|
| 225 |
+
payload = msg.get_payload()
|
| 226 |
+
assert isinstance(payload, str)
|
| 227 |
+
return payload
|
| 228 |
+
# If our source is a bytes, then we're managing the encoding and we need
|
| 229 |
+
# to deal with it.
|
| 230 |
+
else:
|
| 231 |
+
bpayload = msg.get_payload(decode=True)
|
| 232 |
+
assert isinstance(bpayload, bytes)
|
| 233 |
+
try:
|
| 234 |
+
return bpayload.decode("utf8", "strict")
|
| 235 |
+
except UnicodeDecodeError as exc:
|
| 236 |
+
raise ValueError("payload in an invalid encoding") from exc
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
# The various parse_FORMAT functions here are intended to be as lenient as
|
| 240 |
+
# possible in their parsing, while still returning a correctly typed
|
| 241 |
+
# RawMetadata.
|
| 242 |
+
#
|
| 243 |
+
# To aid in this, we also generally want to do as little touching of the
|
| 244 |
+
# data as possible, except where there are possibly some historic holdovers
|
| 245 |
+
# that make valid data awkward to work with.
|
| 246 |
+
#
|
| 247 |
+
# While this is a lower level, intermediate format than our ``Metadata``
|
| 248 |
+
# class, some light touch ups can make a massive difference in usability.
|
| 249 |
+
|
| 250 |
+
# Map METADATA fields to RawMetadata.
# Keys are the lower-cased email header names; values are the matching
# RawMetadata keys (underscored, pluralized for multi-use fields).
_EMAIL_TO_RAW_MAPPING = {
    "author": "author",
    "author-email": "author_email",
    "classifier": "classifiers",
    "description": "description",
    "description-content-type": "description_content_type",
    "download-url": "download_url",
    "dynamic": "dynamic",
    "home-page": "home_page",
    "keywords": "keywords",
    "license": "license",
    "license-expression": "license_expression",
    "license-file": "license_files",
    "maintainer": "maintainer",
    "maintainer-email": "maintainer_email",
    "metadata-version": "metadata_version",
    "name": "name",
    "obsoletes": "obsoletes",
    "obsoletes-dist": "obsoletes_dist",
    "platform": "platforms",
    "project-url": "project_urls",
    "provides": "provides",
    "provides-dist": "provides_dist",
    "provides-extra": "provides_extra",
    "requires": "requires",
    "requires-dist": "requires_dist",
    "requires-external": "requires_external",
    "requires-python": "requires_python",
    "summary": "summary",
    "supported-platform": "supported_platforms",
    "version": "version",
}
# Inverse view: RawMetadata key -> METADATA header name.
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).

    This function returns a two-item tuple of dicts. The first dict is of
    recognized fields from the core metadata specification. Fields that can be
    parsed and translated into Python's built-in types are converted
    appropriately. All other fields are left as-is. Fields that are allowed to
    appear multiple times are stored as lists.

    The second dict contains all other fields from the metadata. This includes
    any unrecognized fields. It also includes any fields which are expected to
    be parsed into a built-in type but were not formatted appropriately. Finally,
    any fields that are expected to appear only once but are repeated are
    included in this dict.

    """
    raw: dict[str, str | list[str] | dict[str, str]] = {}
    unparsed: dict[str, list[str]] = {}

    # compat32 is the legacy policy: it hands back raw Header objects for
    # non-ASCII data rather than transparently decoding, which lets us detect
    # bad encodings below instead of silently accepting them.
    if isinstance(data, str):
        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
    else:
        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)

    # We have to wrap parsed.keys() in a set, because in the case of multiple
    # values for a key (a list), the key will appear multiple times in the
    # list of keys, but we're avoiding that by using get_all().
    for name in frozenset(parsed.keys()):
        # Header names in RFC are case insensitive, so we'll normalize to all
        # lower case to make comparisons easier.
        name = name.lower()

        # We use get_all() here, even for fields that aren't multiple use,
        # because otherwise someone could have e.g. two Name fields, and we
        # would just silently ignore it rather than doing something about it.
        headers = parsed.get_all(name) or []

        # The way the email module works when parsing bytes is that it
        # unconditionally decodes the bytes as ascii using the surrogateescape
        # handler. When you pull that data back out (such as with get_all() ),
        # it looks to see if the str has any surrogate escapes, and if it does
        # it wraps it in a Header object instead of returning the string.
        #
        # As such, we'll look for those Header objects, and fix up the encoding.
        value = []
        # Flag if we have run into any issues processing the headers, thus
        # signalling that the data belongs in 'unparsed'.
        valid_encoding = True
        for h in headers:
            # It's unclear if this can return more types than just a Header or
            # a str, so we'll just assert here to make sure.
            assert isinstance(h, (email.header.Header, str))

            # If it's a header object, we need to do our little dance to get
            # the real data out of it. In cases where there is invalid data
            # we're going to end up with mojibake, but there's no obvious, good
            # way around that without reimplementing parts of the Header object
            # ourselves.
            #
            # That should be fine since, if mojibacked happens, this key is
            # going into the unparsed dict anyways.
            if isinstance(h, email.header.Header):
                # The Header object stores it's data as chunks, and each chunk
                # can be independently encoded, so we'll need to check each
                # of them.
                chunks: list[tuple[bytes, str | None]] = []
                # NOTE(review): "bin" shadows the builtin of the same name;
                # harmless within this loop, but worth knowing when editing.
                for bin, encoding in email.header.decode_header(h):
                    try:
                        bin.decode("utf8", "strict")
                    except UnicodeDecodeError:
                        # Enable mojibake.
                        encoding = "latin1"
                        valid_encoding = False
                    else:
                        encoding = "utf8"
                    chunks.append((bin, encoding))

                # Turn our chunks back into a Header object, then let that
                # Header object do the right thing to turn them into a
                # string for us.
                value.append(str(email.header.make_header(chunks)))
            # This is already a string, so just add it.
            else:
                value.append(h)

        # We've processed all of our values to get them into a list of str,
        # but we may have mojibake data, in which case this is an unparsed
        # field.
        if not valid_encoding:
            unparsed[name] = value
            continue

        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
        if raw_name is None:
            # This is a bit of a weird situation, we've encountered a key that
            # we don't know what it means, so we don't know whether it's meant
            # to be a list or not.
            #
            # Since we can't really tell one way or another, we'll just leave it
            # as a list, even though it may be a single item list, because that's
            # what makes the most sense for email headers.
            unparsed[name] = value
            continue

        # If this is one of our string fields, then we'll check to see if our
        # value is a list of a single item. If it is then we'll assume that
        # it was emitted as a single string, and unwrap the str from inside
        # the list.
        #
        # If it's any other kind of data, then we haven't the faintest clue
        # what we should parse it as, and we have to just add it to our list
        # of unparsed stuff.
        if raw_name in _STRING_FIELDS and len(value) == 1:
            raw[raw_name] = value[0]
        # If this is one of our list of string fields, then we can just assign
        # the value, since email *only* has strings, and our get_all() call
        # above ensures that this is a list.
        elif raw_name in _LIST_FIELDS:
            raw[raw_name] = value
        # Special Case: Keywords
        # The keywords field is implemented in the metadata spec as a str,
        # but it conceptually is a list of strings, and is serialized using
        # ", ".join(keywords), so we'll do some light data massaging to turn
        # this into what it logically is.
        elif raw_name == "keywords" and len(value) == 1:
            raw[raw_name] = _parse_keywords(value[0])
        # Special Case: Project-URL
        # The project urls is implemented in the metadata spec as a list of
        # specially-formatted strings that represent a key and a value, which
        # is fundamentally a mapping, however the email format doesn't support
        # mappings in a sane way, so it was crammed into a list of strings
        # instead.
        #
        # We will do a little light data massaging to turn this into a map as
        # it logically should be.
        elif raw_name == "project_urls":
            try:
                raw[raw_name] = _parse_project_urls(value)
            except KeyError:
                unparsed[name] = value
        # Nothing that we've done has managed to parse this, so it'll just
        # throw it in our unparseable data and move on.
        else:
            unparsed[name] = value

    # We need to support getting the Description from the message payload in
    # addition to getting it from the headers. This does mean, though, there
    # is the possibility of it being set both ways, in which case we put both
    # in 'unparsed' since we don't know which is right.
    try:
        payload = _get_payload(parsed, data)
    except ValueError:
        unparsed.setdefault("description", []).append(
            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
        )
    else:
        if payload:
            # Check to see if we've already got a description, if so then both
            # it, and this body move to unparseable.
            if "description" in raw:
                description_header = cast(str, raw.pop("description"))
                unparsed.setdefault("description", []).extend(
                    [description_header, payload]
                )
            elif "description" in unparsed:
                unparsed["description"].append(payload)
            else:
                raw["description"] = payload

    # We need to cast our `raw` to a metadata, because a TypedDict only support
    # literal key names, but we're computing our key names on purpose, but the
    # way this function is implemented, our `TypedDict` can only have valid key
    # names.
    return cast(RawMetadata, raw), unparsed
|
| 460 |
+
|
| 461 |
+
|
| 462 |
+
# Module-level sentinel, distinct from None by construction.
_NOT_FOUND = object()


# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]

# Fields every valid metadata document must provide; see Metadata.from_raw().
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
class _Validator(Generic[T]):
    """Validate a metadata field.

    All _process_*() methods correspond to a core metadata field. The method is
    called with the field's raw value. If the raw value is valid it is returned
    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
    as appropriate).
    """

    name: str
    raw_name: str
    added: _MetadataVersion

    def __init__(
        self,
        *,
        added: _MetadataVersion = "1.0",
    ) -> None:
        # `added` records the metadata version that introduced the field;
        # Metadata.from_raw() uses it to reject too-new fields.
        self.added = added

    def __set_name__(self, _owner: Metadata, name: str) -> None:
        # Descriptor hook: capture the attribute name and its METADATA
        # header spelling when the class body is executed.
        self.name = name
        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]

    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
        # With Python 3.8, the caching can be replaced with functools.cached_property().
        # No need to check the cache as attribute lookup will resolve into the
        # instance's __dict__ before __get__ is called.
        cache = instance.__dict__
        value = instance._raw.get(self.name)

        # To make the _process_* methods easier, we'll check if the value is None
        # and if this field is NOT a required attribute, and if both of those
        # things are true, we'll skip the converter. This will mean that the
        # converters never have to deal with the None union.
        if self.name in _REQUIRED_ATTRS or value is not None:
            try:
                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
            except AttributeError:
                pass
            else:
                value = converter(value)

        # Cache the enriched value on the instance, then drop the raw entry so
        # each field is converted at most once.
        cache[self.name] = value
        try:
            del instance._raw[self.name]  # type: ignore[misc]
        except KeyError:
            pass

        return cast(T, value)

    def _invalid_metadata(
        self, msg: str, cause: Exception | None = None
    ) -> InvalidMetadata:
        # Build (not raise) an InvalidMetadata; "{field}" placeholders in msg
        # are filled with the repr of the METADATA header name.
        exc = InvalidMetadata(
            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
        )
        exc.__cause__ = cause
        return exc

    def _process_metadata_version(self, value: str) -> _MetadataVersion:
        # Implicitly makes Metadata-Version required.
        if value not in _VALID_METADATA_VERSIONS:
            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
        return cast(_MetadataVersion, value)

    def _process_name(self, value: str) -> str:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        # Validate the name as a side-effect.
        try:
            utils.canonicalize_name(value, validate=True)
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            # Deliberately return the original (non-canonicalized) name.
            return value

    def _process_version(self, value: str) -> version_module.Version:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_summary(self, value: str) -> str:
        """Check the field contains no newlines."""
        if "\n" in value:
            raise self._invalid_metadata("{field} must be a single line")
        return value

    def _process_description_content_type(self, value: str) -> str:
        content_types = {"text/plain", "text/x-rst", "text/markdown"}
        # Let the email machinery parse the content-type header for us.
        message = email.message.EmailMessage()
        message["content-type"] = value

        content_type, parameters = (
            # Defaults to `text/plain` if parsing failed.
            message.get_content_type().lower(),
            message["content-type"].params,
        )
        # Check if content-type is valid or defaulted to `text/plain` and thus was
        # not parseable.
        if content_type not in content_types or content_type not in value.lower():
            raise self._invalid_metadata(
                f"{{field}} must be one of {list(content_types)}, not {value!r}"
            )

        charset = parameters.get("charset", "UTF-8")
        if charset != "UTF-8":
            # NOTE(review): list(charset) renders the charset as a list of
            # individual characters in the error message; charset!r was
            # probably intended -- confirm against upstream packaging.
            raise self._invalid_metadata(
                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
            )

        markdown_variants = {"GFM", "CommonMark"}
        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
        if content_type == "text/markdown" and variant not in markdown_variants:
            raise self._invalid_metadata(
                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
                f"not {variant!r}",
            )
        return value

    def _process_dynamic(self, value: list[str]) -> list[str]:
        # NOTE: the input is lower-cased twice -- once to validate each entry
        # and once more to build the returned list.
        for dynamic_field in map(str.lower, value):
            if dynamic_field in {"name", "version", "metadata-version"}:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not allowed as a dynamic field"
                )
            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not a valid dynamic field"
                )
        return list(map(str.lower, value))

    def _process_provides_extra(
        self,
        value: list[str],
    ) -> list[utils.NormalizedName]:
        normalized_names = []
        try:
            # `name` leaks from the loop on purpose: the except clause reports
            # the first offending entry.
            for name in value:
                normalized_names.append(utils.canonicalize_name(name, validate=True))
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{name!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return normalized_names

    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
        try:
            return specifiers.SpecifierSet(value)
        except specifiers.InvalidSpecifier as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_requires_dist(
        self,
        value: list[str],
    ) -> list[requirements.Requirement]:
        reqs = []
        try:
            # `req` leaks from the loop on purpose: the except clause reports
            # the first offending entry.
            for req in value:
                reqs.append(requirements.Requirement(req))
        except requirements.InvalidRequirement as exc:
            raise self._invalid_metadata(
                f"{req!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return reqs

    def _process_license_expression(
        self, value: str
    ) -> NormalizedLicenseExpression | None:
        try:
            return licenses.canonicalize_license_expression(value)
        except ValueError as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_license_files(self, value: list[str]) -> list[str]:
        # Enforce PEP 639 constraints: relative, fully-resolved, '/'-delimited
        # paths with no parent-directory escapes.
        paths = []
        for path in value:
            if ".." in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "parent directory indicators are not allowed"
                )
            if "*" in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be resolved"
                )
            if (
                pathlib.PurePosixPath(path).is_absolute()
                or pathlib.PureWindowsPath(path).is_absolute()
            ):
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be relative"
                )
            if pathlib.PureWindowsPath(path).as_posix() != path:
                # Rejects backslash-delimited (Windows-style) paths.
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "paths must use '/' delimiter"
                )
            paths.append(path)
        return paths
|
| 686 |
+
|
| 687 |
+
|
| 688 |
+
class Metadata:
|
| 689 |
+
"""Representation of distribution metadata.
|
| 690 |
+
|
| 691 |
+
Compared to :class:`RawMetadata`, this class provides objects representing
|
| 692 |
+
metadata fields instead of only using built-in types. Any invalid metadata
|
| 693 |
+
will cause :exc:`InvalidMetadata` to be raised (with a
|
| 694 |
+
:py:attr:`~BaseException.__cause__` attribute as appropriate).
|
| 695 |
+
"""
|
| 696 |
+
|
| 697 |
+
_raw: RawMetadata
|
| 698 |
+
|
| 699 |
+
@classmethod
|
| 700 |
+
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
|
| 701 |
+
"""Create an instance from :class:`RawMetadata`.
|
| 702 |
+
|
| 703 |
+
If *validate* is true, all metadata will be validated. All exceptions
|
| 704 |
+
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
| 705 |
+
"""
|
| 706 |
+
ins = cls()
|
| 707 |
+
ins._raw = data.copy() # Mutations occur due to caching enriched values.
|
| 708 |
+
|
| 709 |
+
if validate:
|
| 710 |
+
exceptions: list[Exception] = []
|
| 711 |
+
try:
|
| 712 |
+
metadata_version = ins.metadata_version
|
| 713 |
+
metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
|
| 714 |
+
except InvalidMetadata as metadata_version_exc:
|
| 715 |
+
exceptions.append(metadata_version_exc)
|
| 716 |
+
metadata_version = None
|
| 717 |
+
|
| 718 |
+
# Make sure to check for the fields that are present, the required
|
| 719 |
+
# fields (so their absence can be reported).
|
| 720 |
+
fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
|
| 721 |
+
# Remove fields that have already been checked.
|
| 722 |
+
fields_to_check -= {"metadata_version"}
|
| 723 |
+
|
| 724 |
+
for key in fields_to_check:
|
| 725 |
+
try:
|
| 726 |
+
if metadata_version:
|
| 727 |
+
# Can't use getattr() as that triggers descriptor protocol which
|
| 728 |
+
# will fail due to no value for the instance argument.
|
| 729 |
+
try:
|
| 730 |
+
field_metadata_version = cls.__dict__[key].added
|
| 731 |
+
except KeyError:
|
| 732 |
+
exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
|
| 733 |
+
exceptions.append(exc)
|
| 734 |
+
continue
|
| 735 |
+
field_age = _VALID_METADATA_VERSIONS.index(
|
| 736 |
+
field_metadata_version
|
| 737 |
+
)
|
| 738 |
+
if field_age > metadata_age:
|
| 739 |
+
field = _RAW_TO_EMAIL_MAPPING[key]
|
| 740 |
+
exc = InvalidMetadata(
|
| 741 |
+
field,
|
| 742 |
+
f"{field} introduced in metadata version "
|
| 743 |
+
f"{field_metadata_version}, not {metadata_version}",
|
| 744 |
+
)
|
| 745 |
+
exceptions.append(exc)
|
| 746 |
+
continue
|
| 747 |
+
getattr(ins, key)
|
| 748 |
+
except InvalidMetadata as exc:
|
| 749 |
+
exceptions.append(exc)
|
| 750 |
+
|
| 751 |
+
if exceptions:
|
| 752 |
+
raise ExceptionGroup("invalid metadata", exceptions)
|
| 753 |
+
|
| 754 |
+
return ins
|
| 755 |
+
|
| 756 |
+
@classmethod
|
| 757 |
+
def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
|
| 758 |
+
"""Parse metadata from email headers.
|
| 759 |
+
|
| 760 |
+
If *validate* is true, the metadata will be validated. All exceptions
|
| 761 |
+
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
| 762 |
+
"""
|
| 763 |
+
raw, unparsed = parse_email(data)
|
| 764 |
+
|
| 765 |
+
if validate:
|
| 766 |
+
exceptions: list[Exception] = []
|
| 767 |
+
for unparsed_key in unparsed:
|
| 768 |
+
if unparsed_key in _EMAIL_TO_RAW_MAPPING:
|
| 769 |
+
message = f"{unparsed_key!r} has invalid data"
|
| 770 |
+
else:
|
| 771 |
+
message = f"unrecognized field: {unparsed_key!r}"
|
| 772 |
+
exceptions.append(InvalidMetadata(unparsed_key, message))
|
| 773 |
+
|
| 774 |
+
if exceptions:
|
| 775 |
+
raise ExceptionGroup("unparsed", exceptions)
|
| 776 |
+
|
| 777 |
+
try:
|
| 778 |
+
return cls.from_raw(raw, validate=validate)
|
| 779 |
+
except ExceptionGroup as exc_group:
|
| 780 |
+
raise ExceptionGroup(
|
| 781 |
+
"invalid or unparsed metadata", exc_group.exceptions
|
| 782 |
+
) from None
|
| 783 |
+
|
| 784 |
+
    # --- Core metadata fields -------------------------------------------
    # Each attribute is a _Validator descriptor.  The `added` argument
    # records the metadata version that introduced the field; from_raw()
    # compares it against the declared Metadata-Version during validation.
    metadata_version: _Validator[_MetadataVersion] = _Validator()
    """:external:ref:`core-metadata-metadata-version`
    (required; validated to be a valid metadata version)"""
    # `name` is not normalized/typed to NormalizedName so as to provide access to
    # the original/raw name.
    name: _Validator[str] = _Validator()
    """:external:ref:`core-metadata-name`
    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
    *validate* parameter)"""
    version: _Validator[version_module.Version] = _Validator()
    """:external:ref:`core-metadata-version` (required)"""
    dynamic: _Validator[list[str] | None] = _Validator(
        added="2.2",
    )
    """:external:ref:`core-metadata-dynamic`
    (validated against core metadata field names and lowercased)"""
    platforms: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-platform`"""
    supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-supported-platform`"""
    summary: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
    description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
    """:external:ref:`core-metadata-description`"""
    description_content_type: _Validator[str | None] = _Validator(added="2.1")
    """:external:ref:`core-metadata-description-content-type` (validated)"""
    keywords: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-keywords`"""
    home_page: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-home-page`"""
    download_url: _Validator[str | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-download-url`"""
    author: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author`"""
    author_email: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author-email`"""
    maintainer: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer`"""
    maintainer_email: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer-email`"""
    license: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-license`"""
    license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(
        added="2.4"
    )
    """:external:ref:`core-metadata-license-expression`"""
    license_files: _Validator[list[str] | None] = _Validator(added="2.4")
    """:external:ref:`core-metadata-license-file`"""
    classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-classifier`"""
    requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-dist`"""
    requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-python`"""
    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
    # don't do any processing on the values.
    requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-requires-external`"""
    project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-project-url`"""
    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
    # regardless of metadata version.
    provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
        added="2.1",
    )
    """:external:ref:`core-metadata-provides-extra`"""
    provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-provides-dist`"""
    obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-obsoletes-dist`"""
    requires: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Requires`` (deprecated)"""
    provides: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Provides`` (deprecated)"""
    obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Obsoletes`` (deprecated)"""
|
llava/lib/python3.10/site-packages/pip/_vendor/packaging/specifiers.py
ADDED
|
@@ -0,0 +1,1020 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
| 8 |
+
from pip._vendor.packaging.version import Version
|
| 9 |
+
"""
|
| 10 |
+
|
| 11 |
+
from __future__ import annotations
|
| 12 |
+
|
| 13 |
+
import abc
|
| 14 |
+
import itertools
|
| 15 |
+
import re
|
| 16 |
+
from typing import Callable, Iterable, Iterator, TypeVar, Union
|
| 17 |
+
|
| 18 |
+
from .utils import canonicalize_version
|
| 19 |
+
from .version import Version
|
| 20 |
+
|
| 21 |
+
# A version argument may be supplied either pre-parsed or as its string form.
UnparsedVersion = Union[Version, str]
# Bound TypeVar so filter() can be declared to yield the same concrete type
# (Version or str) that it was given.
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
# Signature shared by the Specifier._compare_* operator implementations.
CallableOperator = Callable[[Version, str], bool]
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return *version* as a :class:`Version`, parsing it if it is a string."""
    return version if isinstance(version, Version) else Version(version)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class InvalidSpecifier(ValueError):
    """Error raised when a specifier string cannot be parsed.

    Constructing a :class:`Specifier` from malformed text fails with this
    exception:

    >>> Specifier("lolwat")
    Traceback (most recent call last):
    ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface shared by specifier-like objects.

    Concrete implementations (e.g. ``Specifier`` and ``SpecifierSet``) must
    provide string conversion, hashing, equality, pre-release handling, and
    version containment/filtering.
    """

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Return the string form of this specifier-like object, representative
        of the specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Return a hash value for this specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Return whether the two specifier-like objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> bool | None:
        """Whether pre-releases as a whole are allowed.

        ``True`` or ``False`` enables or disables pre-releases explicitly;
        ``None`` (the default) defers to the default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: bool | None = None) -> bool:
        """
        Return whether the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Yield only those items from *iterable* that are contained within this
        specifier.
        """
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class Specifier(BaseSpecifier):
|
| 100 |
+
"""This class abstracts handling of version specifiers.
|
| 101 |
+
|
| 102 |
+
.. tip::
|
| 103 |
+
|
| 104 |
+
It is generally not required to instantiate this manually. You should instead
|
| 105 |
+
prefer to work with :class:`SpecifierSet` instead, which can parse
|
| 106 |
+
comma-separated version specifiers (which is what package metadata contains).
|
| 107 |
+
"""
|
| 108 |
+
|
| 109 |
+
_operator_regex_str = r"""
|
| 110 |
+
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
| 111 |
+
"""
|
| 112 |
+
_version_regex_str = r"""
|
| 113 |
+
(?P<version>
|
| 114 |
+
(?:
|
| 115 |
+
# The identity operators allow for an escape hatch that will
|
| 116 |
+
# do an exact string match of the version you wish to install.
|
| 117 |
+
# This will not be parsed by PEP 440 and we cannot determine
|
| 118 |
+
# any semantic meaning from it. This operator is discouraged
|
| 119 |
+
# but included entirely as an escape hatch.
|
| 120 |
+
(?<====) # Only match for the identity operator
|
| 121 |
+
\s*
|
| 122 |
+
[^\s;)]* # The arbitrary version can be just about anything,
|
| 123 |
+
# we match everything except for whitespace, a
|
| 124 |
+
# semi-colon for marker support, and a closing paren
|
| 125 |
+
# since versions can be enclosed in them.
|
| 126 |
+
)
|
| 127 |
+
|
|
| 128 |
+
(?:
|
| 129 |
+
# The (non)equality operators allow for wild card and local
|
| 130 |
+
# versions to be specified so we have to define these two
|
| 131 |
+
# operators separately to enable that.
|
| 132 |
+
(?<===|!=) # Only match for equals and not equals
|
| 133 |
+
|
| 134 |
+
\s*
|
| 135 |
+
v?
|
| 136 |
+
(?:[0-9]+!)? # epoch
|
| 137 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 138 |
+
|
| 139 |
+
# You cannot use a wild card and a pre-release, post-release, a dev or
|
| 140 |
+
# local version together so group them with a | and make them optional.
|
| 141 |
+
(?:
|
| 142 |
+
\.\* # Wild card syntax of .*
|
| 143 |
+
|
|
| 144 |
+
(?: # pre release
|
| 145 |
+
[-_\.]?
|
| 146 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 147 |
+
[-_\.]?
|
| 148 |
+
[0-9]*
|
| 149 |
+
)?
|
| 150 |
+
(?: # post release
|
| 151 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 152 |
+
)?
|
| 153 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 154 |
+
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
| 155 |
+
)?
|
| 156 |
+
)
|
| 157 |
+
|
|
| 158 |
+
(?:
|
| 159 |
+
# The compatible operator requires at least two digits in the
|
| 160 |
+
# release segment.
|
| 161 |
+
(?<=~=) # Only match for the compatible operator
|
| 162 |
+
|
| 163 |
+
\s*
|
| 164 |
+
v?
|
| 165 |
+
(?:[0-9]+!)? # epoch
|
| 166 |
+
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
| 167 |
+
(?: # pre release
|
| 168 |
+
[-_\.]?
|
| 169 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 170 |
+
[-_\.]?
|
| 171 |
+
[0-9]*
|
| 172 |
+
)?
|
| 173 |
+
(?: # post release
|
| 174 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 175 |
+
)?
|
| 176 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 177 |
+
)
|
| 178 |
+
|
|
| 179 |
+
(?:
|
| 180 |
+
# All other operators only allow a sub set of what the
|
| 181 |
+
# (non)equality operators do. Specifically they do not allow
|
| 182 |
+
# local versions to be specified nor do they allow the prefix
|
| 183 |
+
# matching wild cards.
|
| 184 |
+
(?<!==|!=|~=) # We have special cases for these
|
| 185 |
+
# operators so we want to make sure they
|
| 186 |
+
# don't match here.
|
| 187 |
+
|
| 188 |
+
\s*
|
| 189 |
+
v?
|
| 190 |
+
(?:[0-9]+!)? # epoch
|
| 191 |
+
[0-9]+(?:\.[0-9]+)* # release
|
| 192 |
+
(?: # pre release
|
| 193 |
+
[-_\.]?
|
| 194 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
| 195 |
+
[-_\.]?
|
| 196 |
+
[0-9]*
|
| 197 |
+
)?
|
| 198 |
+
(?: # post release
|
| 199 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
| 200 |
+
)?
|
| 201 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
| 202 |
+
)
|
| 203 |
+
)
|
| 204 |
+
"""
|
| 205 |
+
|
| 206 |
+
_regex = re.compile(
|
| 207 |
+
r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
|
| 208 |
+
re.VERBOSE | re.IGNORECASE,
|
| 209 |
+
)
|
| 210 |
+
|
| 211 |
+
_operators = {
|
| 212 |
+
"~=": "compatible",
|
| 213 |
+
"==": "equal",
|
| 214 |
+
"!=": "not_equal",
|
| 215 |
+
"<=": "less_than_equal",
|
| 216 |
+
">=": "greater_than_equal",
|
| 217 |
+
"<": "less_than",
|
| 218 |
+
">": "greater_than",
|
| 219 |
+
"===": "arbitrary",
|
| 220 |
+
}
|
| 221 |
+
|
| 222 |
+
def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
|
| 223 |
+
"""Initialize a Specifier instance.
|
| 224 |
+
|
| 225 |
+
:param spec:
|
| 226 |
+
The string representation of a specifier which will be parsed and
|
| 227 |
+
normalized before use.
|
| 228 |
+
:param prereleases:
|
| 229 |
+
This tells the specifier if it should accept prerelease versions if
|
| 230 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
| 231 |
+
given specifiers.
|
| 232 |
+
:raises InvalidSpecifier:
|
| 233 |
+
If the given specifier is invalid (i.e. bad syntax).
|
| 234 |
+
"""
|
| 235 |
+
match = self._regex.search(spec)
|
| 236 |
+
if not match:
|
| 237 |
+
raise InvalidSpecifier(f"Invalid specifier: {spec!r}")
|
| 238 |
+
|
| 239 |
+
self._spec: tuple[str, str] = (
|
| 240 |
+
match.group("operator").strip(),
|
| 241 |
+
match.group("version").strip(),
|
| 242 |
+
)
|
| 243 |
+
|
| 244 |
+
# Store whether or not this Specifier should accept prereleases
|
| 245 |
+
self._prereleases = prereleases
|
| 246 |
+
|
| 247 |
+
# https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
|
| 248 |
+
@property # type: ignore[override]
|
| 249 |
+
def prereleases(self) -> bool:
|
| 250 |
+
# If there is an explicit prereleases set for this, then we'll just
|
| 251 |
+
# blindly use that.
|
| 252 |
+
if self._prereleases is not None:
|
| 253 |
+
return self._prereleases
|
| 254 |
+
|
| 255 |
+
# Look at all of our specifiers and determine if they are inclusive
|
| 256 |
+
# operators, and if they are if they are including an explicit
|
| 257 |
+
# prerelease.
|
| 258 |
+
operator, version = self._spec
|
| 259 |
+
if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]:
|
| 260 |
+
# The == specifier can include a trailing .*, if it does we
|
| 261 |
+
# want to remove before parsing.
|
| 262 |
+
if operator == "==" and version.endswith(".*"):
|
| 263 |
+
version = version[:-2]
|
| 264 |
+
|
| 265 |
+
# Parse the version, and if it is a pre-release than this
|
| 266 |
+
# specifier allows pre-releases.
|
| 267 |
+
if Version(version).is_prerelease:
|
| 268 |
+
return True
|
| 269 |
+
|
| 270 |
+
return False
|
| 271 |
+
|
| 272 |
+
@prereleases.setter
|
| 273 |
+
def prereleases(self, value: bool) -> None:
|
| 274 |
+
self._prereleases = value
|
| 275 |
+
|
| 276 |
+
@property
|
| 277 |
+
def operator(self) -> str:
|
| 278 |
+
"""The operator of this specifier.
|
| 279 |
+
|
| 280 |
+
>>> Specifier("==1.2.3").operator
|
| 281 |
+
'=='
|
| 282 |
+
"""
|
| 283 |
+
return self._spec[0]
|
| 284 |
+
|
| 285 |
+
@property
|
| 286 |
+
def version(self) -> str:
|
| 287 |
+
"""The version of this specifier.
|
| 288 |
+
|
| 289 |
+
>>> Specifier("==1.2.3").version
|
| 290 |
+
'1.2.3'
|
| 291 |
+
"""
|
| 292 |
+
return self._spec[1]
|
| 293 |
+
|
| 294 |
+
def __repr__(self) -> str:
|
| 295 |
+
"""A representation of the Specifier that shows all internal state.
|
| 296 |
+
|
| 297 |
+
>>> Specifier('>=1.0.0')
|
| 298 |
+
<Specifier('>=1.0.0')>
|
| 299 |
+
>>> Specifier('>=1.0.0', prereleases=False)
|
| 300 |
+
<Specifier('>=1.0.0', prereleases=False)>
|
| 301 |
+
>>> Specifier('>=1.0.0', prereleases=True)
|
| 302 |
+
<Specifier('>=1.0.0', prereleases=True)>
|
| 303 |
+
"""
|
| 304 |
+
pre = (
|
| 305 |
+
f", prereleases={self.prereleases!r}"
|
| 306 |
+
if self._prereleases is not None
|
| 307 |
+
else ""
|
| 308 |
+
)
|
| 309 |
+
|
| 310 |
+
return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
|
| 311 |
+
|
| 312 |
+
def __str__(self) -> str:
|
| 313 |
+
"""A string representation of the Specifier that can be round-tripped.
|
| 314 |
+
|
| 315 |
+
>>> str(Specifier('>=1.0.0'))
|
| 316 |
+
'>=1.0.0'
|
| 317 |
+
>>> str(Specifier('>=1.0.0', prereleases=False))
|
| 318 |
+
'>=1.0.0'
|
| 319 |
+
"""
|
| 320 |
+
return "{}{}".format(*self._spec)
|
| 321 |
+
|
| 322 |
+
@property
|
| 323 |
+
def _canonical_spec(self) -> tuple[str, str]:
|
| 324 |
+
canonical_version = canonicalize_version(
|
| 325 |
+
self._spec[1],
|
| 326 |
+
strip_trailing_zero=(self._spec[0] != "~="),
|
| 327 |
+
)
|
| 328 |
+
return self._spec[0], canonical_version
|
| 329 |
+
|
| 330 |
+
def __hash__(self) -> int:
|
| 331 |
+
return hash(self._canonical_spec)
|
| 332 |
+
|
| 333 |
+
def __eq__(self, other: object) -> bool:
|
| 334 |
+
"""Whether or not the two Specifier-like objects are equal.
|
| 335 |
+
|
| 336 |
+
:param other: The other object to check against.
|
| 337 |
+
|
| 338 |
+
The value of :attr:`prereleases` is ignored.
|
| 339 |
+
|
| 340 |
+
>>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
|
| 341 |
+
True
|
| 342 |
+
>>> (Specifier("==1.2.3", prereleases=False) ==
|
| 343 |
+
... Specifier("==1.2.3", prereleases=True))
|
| 344 |
+
True
|
| 345 |
+
>>> Specifier("==1.2.3") == "==1.2.3"
|
| 346 |
+
True
|
| 347 |
+
>>> Specifier("==1.2.3") == Specifier("==1.2.4")
|
| 348 |
+
False
|
| 349 |
+
>>> Specifier("==1.2.3") == Specifier("~=1.2.3")
|
| 350 |
+
False
|
| 351 |
+
"""
|
| 352 |
+
if isinstance(other, str):
|
| 353 |
+
try:
|
| 354 |
+
other = self.__class__(str(other))
|
| 355 |
+
except InvalidSpecifier:
|
| 356 |
+
return NotImplemented
|
| 357 |
+
elif not isinstance(other, self.__class__):
|
| 358 |
+
return NotImplemented
|
| 359 |
+
|
| 360 |
+
return self._canonical_spec == other._canonical_spec
|
| 361 |
+
|
| 362 |
+
def _get_operator(self, op: str) -> CallableOperator:
|
| 363 |
+
operator_callable: CallableOperator = getattr(
|
| 364 |
+
self, f"_compare_{self._operators[op]}"
|
| 365 |
+
)
|
| 366 |
+
return operator_callable
|
| 367 |
+
|
| 368 |
+
def _compare_compatible(self, prospective: Version, spec: str) -> bool:
|
| 369 |
+
# Compatible releases have an equivalent combination of >= and ==. That
|
| 370 |
+
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
|
| 371 |
+
# implement this in terms of the other specifiers instead of
|
| 372 |
+
# implementing it ourselves. The only thing we need to do is construct
|
| 373 |
+
# the other specifiers.
|
| 374 |
+
|
| 375 |
+
# We want everything but the last item in the version, but we want to
|
| 376 |
+
# ignore suffix segments.
|
| 377 |
+
prefix = _version_join(
|
| 378 |
+
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
|
| 379 |
+
)
|
| 380 |
+
|
| 381 |
+
# Add the prefix notation to the end of our string
|
| 382 |
+
prefix += ".*"
|
| 383 |
+
|
| 384 |
+
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
|
| 385 |
+
prospective, prefix
|
| 386 |
+
)
|
| 387 |
+
|
| 388 |
+
def _compare_equal(self, prospective: Version, spec: str) -> bool:
|
| 389 |
+
# We need special logic to handle prefix matching
|
| 390 |
+
if spec.endswith(".*"):
|
| 391 |
+
# In the case of prefix matching we want to ignore local segment.
|
| 392 |
+
normalized_prospective = canonicalize_version(
|
| 393 |
+
prospective.public, strip_trailing_zero=False
|
| 394 |
+
)
|
| 395 |
+
# Get the normalized version string ignoring the trailing .*
|
| 396 |
+
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
|
| 397 |
+
# Split the spec out by bangs and dots, and pretend that there is
|
| 398 |
+
# an implicit dot in between a release segment and a pre-release segment.
|
| 399 |
+
split_spec = _version_split(normalized_spec)
|
| 400 |
+
|
| 401 |
+
# Split the prospective version out by bangs and dots, and pretend
|
| 402 |
+
# that there is an implicit dot in between a release segment and
|
| 403 |
+
# a pre-release segment.
|
| 404 |
+
split_prospective = _version_split(normalized_prospective)
|
| 405 |
+
|
| 406 |
+
# 0-pad the prospective version before shortening it to get the correct
|
| 407 |
+
# shortened version.
|
| 408 |
+
padded_prospective, _ = _pad_version(split_prospective, split_spec)
|
| 409 |
+
|
| 410 |
+
# Shorten the prospective version to be the same length as the spec
|
| 411 |
+
# so that we can determine if the specifier is a prefix of the
|
| 412 |
+
# prospective version or not.
|
| 413 |
+
shortened_prospective = padded_prospective[: len(split_spec)]
|
| 414 |
+
|
| 415 |
+
return shortened_prospective == split_spec
|
| 416 |
+
else:
|
| 417 |
+
# Convert our spec string into a Version
|
| 418 |
+
spec_version = Version(spec)
|
| 419 |
+
|
| 420 |
+
# If the specifier does not have a local segment, then we want to
|
| 421 |
+
# act as if the prospective version also does not have a local
|
| 422 |
+
# segment.
|
| 423 |
+
if not spec_version.local:
|
| 424 |
+
prospective = Version(prospective.public)
|
| 425 |
+
|
| 426 |
+
return prospective == spec_version
|
| 427 |
+
|
| 428 |
+
def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
|
| 429 |
+
return not self._compare_equal(prospective, spec)
|
| 430 |
+
|
| 431 |
+
def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
|
| 432 |
+
# NB: Local version identifiers are NOT permitted in the version
|
| 433 |
+
# specifier, so local version labels can be universally removed from
|
| 434 |
+
# the prospective version.
|
| 435 |
+
return Version(prospective.public) <= Version(spec)
|
| 436 |
+
|
| 437 |
+
def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
|
| 438 |
+
# NB: Local version identifiers are NOT permitted in the version
|
| 439 |
+
# specifier, so local version labels can be universally removed from
|
| 440 |
+
# the prospective version.
|
| 441 |
+
return Version(prospective.public) >= Version(spec)
|
| 442 |
+
|
| 443 |
+
def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
|
| 444 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
| 445 |
+
# it as a version.
|
| 446 |
+
spec = Version(spec_str)
|
| 447 |
+
|
| 448 |
+
# Check to see if the prospective version is less than the spec
|
| 449 |
+
# version. If it's not we can short circuit and just return False now
|
| 450 |
+
# instead of doing extra unneeded work.
|
| 451 |
+
if not prospective < spec:
|
| 452 |
+
return False
|
| 453 |
+
|
| 454 |
+
# This special case is here so that, unless the specifier itself
|
| 455 |
+
# includes is a pre-release version, that we do not accept pre-release
|
| 456 |
+
# versions for the version mentioned in the specifier (e.g. <3.1 should
|
| 457 |
+
# not match 3.1.dev0, but should match 3.0.dev0).
|
| 458 |
+
if not spec.is_prerelease and prospective.is_prerelease:
|
| 459 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 460 |
+
return False
|
| 461 |
+
|
| 462 |
+
# If we've gotten to here, it means that prospective version is both
|
| 463 |
+
# less than the spec version *and* it's not a pre-release of the same
|
| 464 |
+
# version in the spec.
|
| 465 |
+
return True
|
| 466 |
+
|
| 467 |
+
def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
|
| 468 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
| 469 |
+
# it as a version.
|
| 470 |
+
spec = Version(spec_str)
|
| 471 |
+
|
| 472 |
+
# Check to see if the prospective version is greater than the spec
|
| 473 |
+
# version. If it's not we can short circuit and just return False now
|
| 474 |
+
# instead of doing extra unneeded work.
|
| 475 |
+
if not prospective > spec:
|
| 476 |
+
return False
|
| 477 |
+
|
| 478 |
+
# This special case is here so that, unless the specifier itself
|
| 479 |
+
# includes is a post-release version, that we do not accept
|
| 480 |
+
# post-release versions for the version mentioned in the specifier
|
| 481 |
+
# (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
|
| 482 |
+
if not spec.is_postrelease and prospective.is_postrelease:
|
| 483 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 484 |
+
return False
|
| 485 |
+
|
| 486 |
+
# Ensure that we do not allow a local version of the version mentioned
|
| 487 |
+
# in the specifier, which is technically greater than, to match.
|
| 488 |
+
if prospective.local is not None:
|
| 489 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
| 490 |
+
return False
|
| 491 |
+
|
| 492 |
+
# If we've gotten to here, it means that prospective version is both
|
| 493 |
+
# greater than the spec version *and* it's not a pre-release of the
|
| 494 |
+
# same version in the spec.
|
| 495 |
+
return True
|
| 496 |
+
|
| 497 |
+
def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
|
| 498 |
+
return str(prospective).lower() == str(spec).lower()
|
| 499 |
+
|
| 500 |
+
    def __contains__(self, item: str | Version) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in Specifier(">=1.2.3")
        True
        >>> Version("1.2.3") in Specifier(">=1.2.3")
        True
        >>> "1.0.0" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
        True
        """
        # Delegate to contains() so the prerelease policy is applied uniformly.
        return self.contains(item)
|
| 520 |
+
|
| 521 |
+
def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
|
| 522 |
+
"""Return whether or not the item is contained in this specifier.
|
| 523 |
+
|
| 524 |
+
:param item:
|
| 525 |
+
The item to check for, which can be a version string or a
|
| 526 |
+
:class:`Version` instance.
|
| 527 |
+
:param prereleases:
|
| 528 |
+
Whether or not to match prereleases with this Specifier. If set to
|
| 529 |
+
``None`` (the default), it uses :attr:`prereleases` to determine
|
| 530 |
+
whether or not prereleases are allowed.
|
| 531 |
+
|
| 532 |
+
>>> Specifier(">=1.2.3").contains("1.2.3")
|
| 533 |
+
True
|
| 534 |
+
>>> Specifier(">=1.2.3").contains(Version("1.2.3"))
|
| 535 |
+
True
|
| 536 |
+
>>> Specifier(">=1.2.3").contains("1.0.0")
|
| 537 |
+
False
|
| 538 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1")
|
| 539 |
+
False
|
| 540 |
+
>>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
|
| 541 |
+
True
|
| 542 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
|
| 543 |
+
True
|
| 544 |
+
"""
|
| 545 |
+
|
| 546 |
+
# Determine if prereleases are to be allowed or not.
|
| 547 |
+
if prereleases is None:
|
| 548 |
+
prereleases = self.prereleases
|
| 549 |
+
|
| 550 |
+
# Normalize item to a Version, this allows us to have a shortcut for
|
| 551 |
+
# "2.0" in Specifier(">=2")
|
| 552 |
+
normalized_item = _coerce_version(item)
|
| 553 |
+
|
| 554 |
+
# Determine if we should be supporting prereleases in this specifier
|
| 555 |
+
# or not, if we do not support prereleases than we can short circuit
|
| 556 |
+
# logic if this version is a prereleases.
|
| 557 |
+
if normalized_item.is_prerelease and not prereleases:
|
| 558 |
+
return False
|
| 559 |
+
|
| 560 |
+
# Actually do the comparison to determine if this item is contained
|
| 561 |
+
# within this Specifier or not.
|
| 562 |
+
operator_callable: CallableOperator = self._get_operator(self.operator)
|
| 563 |
+
return operator_callable(normalized_item, self.version)
|
| 564 |
+
|
| 565 |
+
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifier.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(Specifier().contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
        ['1.2.3', '1.3', <Version('1.4')>]
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
        ['1.5a1']
        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        """

        yielded = False
        found_prereleases = []

        # Pass prereleases=True into contains() so prerelease matches are not
        # discarded by the containment check itself; whether they are actually
        # yielded is decided below.
        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = _coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
| 628 |
+
|
| 629 |
+
|
| 630 |
+
def _version_split(version: str) -> list[str]:
|
| 631 |
+
"""Split version into components.
|
| 632 |
+
|
| 633 |
+
The split components are intended for version comparison. The logic does
|
| 634 |
+
not attempt to retain the original version string, so joining the
|
| 635 |
+
components back with :func:`_version_join` may not produce the original
|
| 636 |
+
version string.
|
| 637 |
+
"""
|
| 638 |
+
result: list[str] = []
|
| 639 |
+
|
| 640 |
+
epoch, _, rest = version.rpartition("!")
|
| 641 |
+
result.append(epoch or "0")
|
| 642 |
+
|
| 643 |
+
for item in rest.split("."):
|
| 644 |
+
match = _prefix_regex.search(item)
|
| 645 |
+
if match:
|
| 646 |
+
result.extend(match.groups())
|
| 647 |
+
else:
|
| 648 |
+
result.append(item)
|
| 649 |
+
return result
|
| 650 |
+
|
| 651 |
+
|
| 652 |
+
def _version_join(components: list[str]) -> str:
|
| 653 |
+
"""Join split version components into a version string.
|
| 654 |
+
|
| 655 |
+
This function assumes the input came from :func:`_version_split`, where the
|
| 656 |
+
first component must be the epoch (either empty or numeric), and all other
|
| 657 |
+
components numeric.
|
| 658 |
+
"""
|
| 659 |
+
epoch, *rest = components
|
| 660 |
+
return f"{epoch}!{'.'.join(rest)}"
|
| 661 |
+
|
| 662 |
+
|
| 663 |
+
def _is_not_suffix(segment: str) -> bool:
|
| 664 |
+
return not any(
|
| 665 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
| 666 |
+
)
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
|
| 670 |
+
left_split, right_split = [], []
|
| 671 |
+
|
| 672 |
+
# Get the release segment of our versions
|
| 673 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
| 674 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
| 675 |
+
|
| 676 |
+
# Get the rest of our versions
|
| 677 |
+
left_split.append(left[len(left_split[0]) :])
|
| 678 |
+
right_split.append(right[len(right_split[0]) :])
|
| 679 |
+
|
| 680 |
+
# Insert our padding
|
| 681 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
| 682 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
| 683 |
+
|
| 684 |
+
return (
|
| 685 |
+
list(itertools.chain.from_iterable(left_split)),
|
| 686 |
+
list(itertools.chain.from_iterable(right_split)),
|
| 687 |
+
)
|
| 688 |
+
|
| 689 |
+
|
| 690 |
+
class SpecifierSet(BaseSpecifier):
    """This class abstracts handling of a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list of
    specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    def __init__(
        self,
        specifiers: str | Iterable[Specifier] = "",
        prereleases: bool | None = None,
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            The string representation of a specifier or a comma-separated list of
            specifiers which will be parsed and normalized before use.
            May also be an iterable of ``Specifier`` instances, which will be used
            as is.
        :param prereleases:
            This tells the SpecifierSet if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.

        :raises InvalidSpecifier:
            If the given ``specifiers`` are not parseable, then this exception
            will be raised.
        """

        if isinstance(specifiers, str):
            # Split on `,` to break each individual specifier into its own item, and
            # strip each item to remove leading/trailing whitespace.
            split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

            # Make each individual specifier a Specifier and save in a frozen set
            # for later.
            self._specs = frozenset(map(Specifier, split_specifiers))
        else:
            # Save the supplied specifiers in a frozen set.
            self._specs = frozenset(specifiers)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    @property
    def prereleases(self) -> bool | None:
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set that shows all internal state.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        # Only show the prereleases override when one was set explicitly.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the specifier set that can be round-tripped.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
        '!=1.0.1,>=1.0.0'
        """
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
        """Return a SpecifierSet which is a combination of the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Combine the prerelease overrides: a None on either side defers to
        # the other; conflicting explicit True/False values cannot be merged.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        """Whether or not the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
        False
        """
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """
        Returns an iterator over all the underlying :class:`Specifier` instances
        in this specifier set.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: bool | None = None,
        installed: bool | None = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this SpecifierSet. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.
        :param installed:
            If truthy, a prerelease ``item`` is compared using only its base
            version, i.e. its prerelease/dev segments are ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        # Ensure that our item is a Version instance.
        if not isinstance(item, Version):
            item = Version(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.devabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        if installed and item.is_prerelease:
            item = Version(item.base_version)

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifiers in this set.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
        ['1.3', <Version('1.4')>]
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
        []
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet will filter items based on the presence of prerelease
        versions in the set.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases.
        else:
            filtered: list[UnparsedVersionVar] = []
            found_prereleases: list[UnparsedVersionVar] = []

            for item in iterable:
                parsed_version = _coerce_version(item)

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return iter(found_prereleases)

            return iter(filtered)
|
llava/lib/python3.10/site-packages/pip/_vendor/packaging/utils.py
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
|
| 5 |
+
from __future__ import annotations
|
| 6 |
+
|
| 7 |
+
import functools
|
| 8 |
+
import re
|
| 9 |
+
from typing import NewType, Tuple, Union, cast
|
| 10 |
+
|
| 11 |
+
from .tags import Tag, parse_tag
|
| 12 |
+
from .version import InvalidVersion, Version, _TrimmedRelease
|
| 13 |
+
|
| 14 |
+
# () for "no build tag", otherwise (build number, optional suffix) per PEP 427.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# A project name that has already been normalized per PEP 503.
NormalizedName = NewType("NormalizedName", str)
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class InvalidName(ValueError):
    """Raised for a distribution name that violates the naming rules.

    Users should refer to the packaging user guide for the accepted format.
    """
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
class InvalidWheelFilename(ValueError):
    """Raised for a wheel filename that does not follow PEP 427."""
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
class InvalidSdistFilename(ValueError):
    """Raised for an sdist filename that cannot be parsed.

    Users should refer to the packaging user guide for the accepted format.
    """
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# Runs of "-", "_" and "." collapse to a single dash during normalization (PEP 503).
_canonicalize_regex = re.compile(r"[-_.]+")
# A fully-normalized name: lowercase alphanumerics separated by single dashes.
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """Return the PEP 503 normalized form of *name*.

    :param name: The project name to normalize.
    :param validate: When ``True``, first check *name* against the core
        metadata ``Name`` grammar.
    :raises InvalidName: If ``validate`` is true and *name* is not valid.
    """
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503: collapse runs of "-", "_" and "." into a
    # single dash, then lowercase.
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
def is_normalized_name(name: str) -> bool:
    """Return True if *name* is already in fully-normalized (PEP 503) form."""
    return bool(_normalized_regex.match(name))
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@functools.singledispatch
def canonicalize_version(
    version: Version | str, *, strip_trailing_zero: bool = True
) -> str:
    """
    Return a canonical form of a version as a string.

    >>> canonicalize_version('1.0.1')
    '1.0.1'

    Per PEP 625, versions may have multiple canonical forms, differing
    only by trailing zeros.

    >>> canonicalize_version('1.0.0')
    '1'
    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
    '1.0.0'

    Invalid versions are returned unaltered.

    >>> canonicalize_version('foo bar baz')
    'foo bar baz'
    """
    if strip_trailing_zero:
        # _TrimmedRelease re-renders the version with trailing ".0" release
        # components removed.
        return str(_TrimmedRelease(str(version)))
    return str(version)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
@canonicalize_version.register
def _(version: str, *, strip_trailing_zero: bool = True) -> str:
    # str overload: parse first so the Version-based implementation above is
    # reused for anything PEP 440-parseable.
    try:
        parsed = Version(version)
    except InvalidVersion:
        # Legacy versions cannot be normalized
        return version
    return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def parse_wheel_filename(
    filename: str,
) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
    """Parse a wheel filename into ``(name, version, build tag, tags)``.

    :raises InvalidWheelFilename: If the filename does not follow PEP 427.
    """
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
        )

    # Work on the stem; a wheel stem has 4 or 5 dashes (5 when a build tag
    # is present).
    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename!r}"
        )

    # Split off name, version (and build); the trailing compound tag keeps
    # its internal dashes.
    parts = filename.split("-", dashes - 2)

    # See PEP 427 for the rules on escaping the project name.
    name_part = parts[0]
    has_bad_chars = re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None
    if "__" in name_part or has_bad_chars:
        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename!r}"
        ) from e

    build: BuildTag = ()
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in {filename!r}"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))

    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
    """Parse an sdist filename into ``(name, version)``.

    :raises InvalidSdistFilename: If the filename cannot be parsed.
    """
    for suffix in (".tar.gz", ".zip"):
        if filename.endswith(suffix):
            file_stem = filename[: -len(suffix)]
            break
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename!r}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename!r}"
        ) from e

    return (name, version)
|
llava/lib/python3.10/site-packages/pip/_vendor/packaging/version.py
ADDED
|
@@ -0,0 +1,582 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
| 2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
| 3 |
+
# for complete details.
|
| 4 |
+
"""
|
| 5 |
+
.. testsetup::
|
| 6 |
+
|
| 7 |
+
from pip._vendor.packaging.version import parse, Version
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
from __future__ import annotations
|
| 11 |
+
|
| 12 |
+
import itertools
|
| 13 |
+
import re
|
| 14 |
+
from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
|
| 15 |
+
|
| 16 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
| 17 |
+
|
| 18 |
+
__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
|
| 19 |
+
|
| 20 |
+
LocalType = Tuple[Union[int, str], ...]
|
| 21 |
+
|
| 22 |
+
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
|
| 23 |
+
CmpLocalType = Union[
|
| 24 |
+
NegativeInfinityType,
|
| 25 |
+
Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
|
| 26 |
+
]
|
| 27 |
+
CmpKey = Tuple[
|
| 28 |
+
int,
|
| 29 |
+
Tuple[int, ...],
|
| 30 |
+
CmpPrePostDevType,
|
| 31 |
+
CmpPrePostDevType,
|
| 32 |
+
CmpPrePostDevType,
|
| 33 |
+
CmpLocalType,
|
| 34 |
+
]
|
| 35 |
+
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class _Version(NamedTuple):
    # Parsed components of a PEP 440 version string. Note the field order
    # (``dev`` before ``pre``) differs from display order; construction in
    # ``Version.__init__`` always uses keyword arguments, so only equality
    # and field access depend on this ordering.
    epoch: int
    release: tuple[int, ...]
    dev: tuple[str, int] | None
    pre: tuple[str, int] | None
    post: tuple[str, int] | None
    local: LocalType | None
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def parse(version: str) -> Version:
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    # Thin convenience wrapper; all validation happens in Version.__init__.
    return Version(version)
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    Subclasses :class:`ValueError`, so callers may catch either type.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class _BaseVersion:
|
| 70 |
+
_key: tuple[Any, ...]
|
| 71 |
+
|
| 72 |
+
def __hash__(self) -> int:
|
| 73 |
+
return hash(self._key)
|
| 74 |
+
|
| 75 |
+
# Please keep the duplicated `isinstance` check
|
| 76 |
+
# in the six comparisons hereunder
|
| 77 |
+
# unless you find a way to avoid adding overhead function calls.
|
| 78 |
+
def __lt__(self, other: _BaseVersion) -> bool:
|
| 79 |
+
if not isinstance(other, _BaseVersion):
|
| 80 |
+
return NotImplemented
|
| 81 |
+
|
| 82 |
+
return self._key < other._key
|
| 83 |
+
|
| 84 |
+
def __le__(self, other: _BaseVersion) -> bool:
|
| 85 |
+
if not isinstance(other, _BaseVersion):
|
| 86 |
+
return NotImplemented
|
| 87 |
+
|
| 88 |
+
return self._key <= other._key
|
| 89 |
+
|
| 90 |
+
def __eq__(self, other: object) -> bool:
|
| 91 |
+
if not isinstance(other, _BaseVersion):
|
| 92 |
+
return NotImplemented
|
| 93 |
+
|
| 94 |
+
return self._key == other._key
|
| 95 |
+
|
| 96 |
+
def __ge__(self, other: _BaseVersion) -> bool:
|
| 97 |
+
if not isinstance(other, _BaseVersion):
|
| 98 |
+
return NotImplemented
|
| 99 |
+
|
| 100 |
+
return self._key >= other._key
|
| 101 |
+
|
| 102 |
+
def __gt__(self, other: _BaseVersion) -> bool:
|
| 103 |
+
if not isinstance(other, _BaseVersion):
|
| 104 |
+
return NotImplemented
|
| 105 |
+
|
| 106 |
+
return self._key > other._key
|
| 107 |
+
|
| 108 |
+
def __ne__(self, other: object) -> bool:
|
| 109 |
+
if not isinstance(other, _BaseVersion):
|
| 110 |
+
return NotImplemented
|
| 111 |
+
|
| 112 |
+
return self._key != other._key
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse. Must be compiled with re.VERBOSE and
# re.IGNORECASE (see VERSION_PATTERN docstring below).
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
|
| 159 |
+
|
| 160 |
+
|
| 161 |
+
class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    # Anchored variant of VERSION_PATTERN; leading/trailing whitespace is
    # tolerated around the version itself.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    # Comparison key, computed once at construction and used by _BaseVersion.
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: {version!r}")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be round-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> tuple[str, int] | None:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> int | None:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> int | None:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> str | None:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1!1.2.3dev1+abc").public
        '1!1.2.3.dev1'
        """
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3dev1+abc").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0
|
| 452 |
+
|
| 453 |
+
|
| 454 |
+
class _TrimmedRelease(Version):
    @property
    def release(self) -> tuple[int, ...]:
        """
        Release segment without any trailing zeros.

        >>> _TrimmedRelease('1.0.0').release
        (1,)
        >>> _TrimmedRelease('0.0').release
        (0,)
        """
        rel = super().release
        # Walk back from the end past trailing zeros, but keep at least one
        # component so an all-zero release still renders (e.g. "0.0" -> (0,)).
        end = len(rel)
        while end > 1 and rel[end - 1] == 0:
            end -= 1
        return rel[:end]
|
| 469 |
+
|
| 470 |
+
|
| 471 |
+
def _parse_letter_version(
|
| 472 |
+
letter: str | None, number: str | bytes | SupportsInt | None
|
| 473 |
+
) -> tuple[str, int] | None:
|
| 474 |
+
if letter:
|
| 475 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
| 476 |
+
# not a numeral associated with it.
|
| 477 |
+
if number is None:
|
| 478 |
+
number = 0
|
| 479 |
+
|
| 480 |
+
# We normalize any letters to their lower case form
|
| 481 |
+
letter = letter.lower()
|
| 482 |
+
|
| 483 |
+
# We consider some words to be alternate spellings of other words and
|
| 484 |
+
# in those cases we want to normalize the spellings to our preferred
|
| 485 |
+
# spelling.
|
| 486 |
+
if letter == "alpha":
|
| 487 |
+
letter = "a"
|
| 488 |
+
elif letter == "beta":
|
| 489 |
+
letter = "b"
|
| 490 |
+
elif letter in ["c", "pre", "preview"]:
|
| 491 |
+
letter = "rc"
|
| 492 |
+
elif letter in ["rev", "r"]:
|
| 493 |
+
letter = "post"
|
| 494 |
+
|
| 495 |
+
return letter, int(number)
|
| 496 |
+
|
| 497 |
+
assert not letter
|
| 498 |
+
if number:
|
| 499 |
+
# We assume if we are given a number, but we are not given a letter
|
| 500 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
| 501 |
+
letter = "post"
|
| 502 |
+
|
| 503 |
+
return letter, int(number)
|
| 504 |
+
|
| 505 |
+
return None
|
| 506 |
+
|
| 507 |
+
|
| 508 |
+
_local_version_separators = re.compile(r"[\._-]")
|
| 509 |
+
|
| 510 |
+
|
| 511 |
+
def _parse_local_version(local: str | None) -> LocalType | None:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is None:
        return None
    # Segments may be separated by ".", "_", or "-"; numeric segments become
    # ints, alphanumeric segments are lower-cased.
    segments = re.split(r"[\._-]", local)
    return tuple(int(seg) if seg.isdigit() else seg.lower() for seg in segments)
|
| 521 |
+
|
| 522 |
+
|
| 523 |
+
def _cmpkey(
    epoch: int,
    release: tuple[int, ...],
    pre: tuple[str, int] | None,
    post: tuple[str, int] | None,
    dev: tuple[str, int] | None,
    local: LocalType | None,
) -> CmpKey:
    """Build the total-ordering sort key for a parsed version.

    Absent segments are replaced with Infinity / NegativeInfinity sentinels
    so that plain tuple comparison yields the PEP 440 ordering.
    """
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (192 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/actor_pool_map_operator.cpython-310.pyc
ADDED
|
Binary file (25.7 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/base_physical_operator.cpython-310.pyc
ADDED
|
Binary file (6.74 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/map_operator.cpython-310.pyc
ADDED
|
Binary file (22.7 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/output_splitter.cpython-310.pyc
ADDED
|
Binary file (11.3 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/task_pool_map_operator.cpython-310.pyc
ADDED
|
Binary file (5.58 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/execution/operators/__pycache__/union_operator.cpython-310.pyc
ADDED
|
Binary file (3.84 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/__pycache__/iterator_impl.cpython-310.pyc
ADDED
|
Binary file (2.01 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/iterator_impl.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import TYPE_CHECKING, Iterator, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
from ray.data._internal.execution.interfaces.ref_bundle import RefBundle
|
| 4 |
+
from ray.data._internal.stats import DatasetStats
|
| 5 |
+
from ray.data._internal.util import create_dataset_tag
|
| 6 |
+
from ray.data.iterator import DataIterator
|
| 7 |
+
|
| 8 |
+
if TYPE_CHECKING:
|
| 9 |
+
import pyarrow
|
| 10 |
+
|
| 11 |
+
from ray.data import Dataset
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class DataIteratorImpl(DataIterator):
    """Default DataIterator implementation backed by a Dataset's plan."""

    def __init__(
        self,
        base_dataset: "Dataset",
    ):
        # The dataset whose execution this iterator drives.
        self._base_dataset = base_dataset

    def __repr__(self) -> str:
        return f"DataIterator({self._base_dataset})"

    def _to_ref_bundle_iterator(
        self,
    ) -> Tuple[Iterator[RefBundle], Optional[DatasetStats], bool]:
        """Start plan execution and return (bundle iterator, stats, flag).

        NOTE(review): the final ``False`` flag's meaning is defined by the
        DataIterator base class — confirm against its contract.
        """
        ds = self._base_dataset
        # Holding the executor on the dataset keeps it alive while the
        # returned iterator is being consumed.
        ref_bundles_iterator, stats, executor = ds._plan.execute_to_iterator()
        ds._current_executor = executor
        return ref_bundles_iterator, stats, False

    def stats(self) -> str:
        """Return the execution stats string of the underlying dataset."""
        return self._base_dataset.stats()

    def schema(self) -> Union[type, "pyarrow.lib.Schema"]:
        """Return the schema of the underlying dataset."""
        return self._base_dataset.schema()

    def _get_dataset_tag(self):
        # Tag combines the plan's dataset name with the dataset UUID.
        return create_dataset_tag(
            self._base_dataset._plan._dataset_name, self._base_dataset._uuid
        )
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/iterator/stream_split_iterator.py
ADDED
|
@@ -0,0 +1,285 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import copy
|
| 2 |
+
import logging
|
| 3 |
+
import threading
|
| 4 |
+
import time
|
| 5 |
+
from dataclasses import replace
|
| 6 |
+
from typing import TYPE_CHECKING, Dict, Iterator, List, Optional, Tuple, Union
|
| 7 |
+
|
| 8 |
+
import ray
|
| 9 |
+
from ray.data._internal.execution.interfaces import NodeIdStr, RefBundle
|
| 10 |
+
from ray.data._internal.execution.legacy_compat import execute_to_legacy_bundle_iterator
|
| 11 |
+
from ray.data._internal.execution.operators.output_splitter import OutputSplitter
|
| 12 |
+
from ray.data._internal.execution.streaming_executor import StreamingExecutor
|
| 13 |
+
from ray.data._internal.stats import DatasetStats
|
| 14 |
+
from ray.data._internal.util import create_dataset_tag
|
| 15 |
+
from ray.data.block import Block, BlockMetadata
|
| 16 |
+
from ray.data.iterator import DataIterator
|
| 17 |
+
from ray.types import ObjectRef
|
| 18 |
+
from ray.util.debug import log_once
|
| 19 |
+
from ray.util.scheduling_strategies import NodeAffinitySchedulingStrategy
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
import pyarrow
|
| 23 |
+
|
| 24 |
+
from ray.data import Dataset
|
| 25 |
+
|
| 26 |
+
logger = logging.getLogger(__name__)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
BLOCKED_CLIENT_WARN_TIMEOUT = 30
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class StreamSplitDataIterator(DataIterator):
    """Implements a collection of iterators over a shared data stream."""

    @staticmethod
    def create(
        base_dataset: "Dataset",
        n: int,
        equal: bool,
        locality_hints: Optional[List[NodeIdStr]],
    ) -> List["StreamSplitDataIterator"]:
        """Create a split iterator from the given base Dataset and options.

        See also: `Dataset.streaming_split`.
        """
        # To avoid deadlock, the concurrency on this actor must be set to at least `n`.
        coord_actor = SplitCoordinator.options(
            max_concurrency=n,
            # Hard-pin (soft=False) the coordinator to the node where
            # create() runs, so every split has one fixed place to poll.
            scheduling_strategy=NodeAffinitySchedulingStrategy(
                ray.get_runtime_context().get_node_id(), soft=False
            ),
        ).remote(base_dataset, n, equal, locality_hints)

        # One iterator per output split, all sharing the same coordinator.
        return [
            StreamSplitDataIterator(base_dataset, coord_actor, i, n) for i in range(n)
        ]

    def __init__(
        self,
        base_dataset: "Dataset",
        coord_actor: ray.actor.ActorHandle,
        output_split_idx: int,
        world_size: int,
    ):
        self._base_dataset = base_dataset
        self._coord_actor = coord_actor
        self._output_split_idx = output_split_idx
        self._world_size = world_size
        # Locally recorded iteration stats; merged with the coordinator's
        # remotely recorded execution stats in stats().
        self._iter_stats = DatasetStats(metadata={}, parent=None)

    def _to_ref_bundle_iterator(
        self,
    ) -> Tuple[Iterator[RefBundle], Optional[DatasetStats], bool]:
        """Return a generator of RefBundles pulled from the coordinator."""
        def gen_blocks() -> Iterator[RefBundle]:
            # Register with the coordinator for this split's current epoch.
            cur_epoch = ray.get(
                self._coord_actor.start_epoch.remote(self._output_split_idx)
            )
            future: ObjectRef[
                Optional[ObjectRef[Block]]
            ] = self._coord_actor.get.remote(cur_epoch, self._output_split_idx)
            while True:
                block_ref_and_md: Optional[
                    Tuple[ObjectRef[Block], BlockMetadata]
                ] = ray.get(future)
                if not block_ref_and_md:
                    # Coordinator signals end-of-stream with an empty result.
                    break
                else:
                    # Request the next block before yielding the current one,
                    # overlapping the coordinator round-trip with consumption.
                    future = self._coord_actor.get.remote(
                        cur_epoch, self._output_split_idx
                    )
                    yield RefBundle(blocks=(block_ref_and_md,), owns_blocks=False)

        return gen_blocks(), self._iter_stats, False

    def stats(self) -> str:
        """Implements DataIterator."""
        # Merge the locally recorded iter stats and the remotely recorded
        # stream execution stats.
        stats = ray.get(self._coord_actor.stats.remote())
        summary = stats.to_summary()
        summary.iter_stats = self._iter_stats.to_summary().iter_stats
        summary.iter_stats.streaming_split_coord_time.add(
            stats.streaming_split_coordinator_s.get()
        )
        return summary.to_string()

    def schema(self) -> Union[type, "pyarrow.lib.Schema"]:
        """Implements DataIterator."""
        return self._base_dataset.schema()

    def world_size(self) -> int:
        """Returns the number of splits total."""
        return self._world_size

    def _get_dataset_tag(self):
        # Per-split tag: dataset name + uuid + this split's index.
        return create_dataset_tag(
            self._base_dataset._plan._dataset_name,
            self._base_dataset._uuid,
            self._output_split_idx,
        )
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
@ray.remote(num_cpus=0)
class SplitCoordinator:
    """Coordinator actor for routing blocks to output splits.

    This actor runs a streaming executor locally on its main thread. Clients can
    retrieve results via actor calls running on other threads.
    """

    def __init__(
        self,
        dataset: "Dataset",
        n: int,
        equal: bool,
        locality_hints: Optional[List[NodeIdStr]],
    ):
        """Set up split state and a lazy per-epoch executor generator.

        Args:
            dataset: The dataset to execute and split.
            n: Number of output splits / clients.
            equal: Whether splits must be equalized (passed to OutputSplitter).
            locality_hints: Optional node ids used for output locality.
        """
        # Automatically set locality with output to the specified location hints.
        if locality_hints:
            dataset.context.execution_options.locality_with_output = locality_hints
            logger.info(f"Auto configuring locality_with_output={locality_hints}")

        # Set current DataContext.
        ray.data.DataContext._set_current(dataset.context)

        self._base_dataset = dataset
        self._n = n
        self._equal = equal
        self._locality_hints = locality_hints
        # RLock because `get()` takes the lock in multiple sections and this
        # actor serves concurrent client threads (max_concurrency >= n).
        self._lock = threading.RLock()
        self._executor = None

        # Guarded by self._lock.
        self._next_bundle: Dict[int, RefBundle] = {}
        self._unfinished_clients_in_epoch = n
        self._cur_epoch = -1

        def gen_epochs():
            # Infinite generator: each `next()` starts a fresh streaming
            # execution of the dataset (one per epoch).
            while True:
                executor = StreamingExecutor(
                    # Deep-copy so per-epoch executors don't share mutable
                    # execution options.
                    copy.deepcopy(dataset.context.execution_options),
                    create_dataset_tag(
                        self._base_dataset._name, self._base_dataset._uuid
                    ),
                )
                self._executor = executor

                def add_split_op(dag):
                    # Append the splitter as the final op in the DAG.
                    return OutputSplitter(dag, n, equal, locality_hints)

                output_iterator = execute_to_legacy_bundle_iterator(
                    executor,
                    dataset._plan,
                    dag_rewrite=add_split_op,
                )
                yield output_iterator

        self._next_epoch = gen_epochs()
        self._output_iterator = None
        # Store the error raised from the `gen_epoch` call.
        self._gen_epoch_error: Optional[Exception] = None

    def stats(self) -> DatasetStats:
        """Returns stats from the base dataset."""
        if self._executor:
            return self._executor.get_stats()
        return self._base_dataset._plan.stats()

    def start_epoch(self, split_idx: int) -> str:
        """Called to start an epoch.

        Returns:
            UUID for the epoch, which must be used when accessing results via get().
        """

        # Wait for all clients to arrive at the barrier before starting a new epoch.
        epoch_id = self._barrier(split_idx)
        return epoch_id

    def get(
        self, epoch_id: int, output_split_idx: int
    ) -> Optional[Tuple[ObjectRef[Block], BlockMetadata]]:
        """Blocking get operation.

        This is intended to be called concurrently from multiple clients.

        Args:
            epoch_id: Epoch token returned by `start_epoch`; stale tokens are
                rejected.
            output_split_idx: Which split's next block to return.

        Returns:
            The next (block ref, metadata) pair, or None when the epoch ends.

        Raises:
            ValueError: If `epoch_id` is not the current epoch.
        """
        start_time = time.perf_counter()
        if epoch_id != self._cur_epoch:
            raise ValueError(
                "Invalid iterator: the dataset has moved on to another epoch."
            )

        try:
            # Ensure there is at least one bundle.
            with self._lock:
                if output_split_idx in self._next_bundle:
                    next_bundle = self._next_bundle[output_split_idx]
                else:
                    next_bundle = None

            # Fetch next bundle if needed.
            while next_bundle is None or not next_bundle.blocks:
                # This is a BLOCKING call, so do it outside the lock.
                next_bundle = self._output_iterator.get_next(output_split_idx)

            # Pop one block off the bundle; leftovers are cached below.
            block = next_bundle.blocks[-1]
            next_bundle = replace(next_bundle, blocks=next_bundle.blocks[:-1])

            # Accumulate any remaining blocks in next_bundle map as needed.
            with self._lock:
                self._next_bundle[output_split_idx] = next_bundle
                if not next_bundle.blocks:
                    del self._next_bundle[output_split_idx]

            return block
        except StopIteration:
            # Stream exhausted for this epoch: signal end-of-epoch to client.
            return None
        finally:
            # Record time spent inside the coordinator for stats reporting.
            stats = self.stats()
            if stats and stats.streaming_split_coordinator_s:
                stats.streaming_split_coordinator_s.add(
                    time.perf_counter() - start_time
                )

    def _barrier(self, split_idx: int) -> int:
        """Arrive and block until the start of the given epoch."""

        # Decrement and await all clients to arrive here.
        with self._lock:
            starting_epoch = self._cur_epoch
            self._unfinished_clients_in_epoch -= 1

        # Busy-wait (lock-free reads) until either the epoch advances or all
        # clients have arrived; warn once if blocked too long.
        start_time = time.time()
        while (
            self._cur_epoch == starting_epoch and self._unfinished_clients_in_epoch != 0
        ):
            if time.time() - start_time > BLOCKED_CLIENT_WARN_TIMEOUT:
                if log_once(f"stream_split_blocked_{split_idx}_{starting_epoch}"):
                    logger.warning(
                        f"StreamSplitDataIterator(epoch={starting_epoch}, "
                        f"split={split_idx}) blocked waiting on other clients "
                        f"for more than {BLOCKED_CLIENT_WARN_TIMEOUT}s. All "
                        "clients must read from the DataIterator splits at "
                        "the same time. This warning will not be printed again "
                        "for this epoch."
                    )
            time.sleep(0.1)

        # Advance to the next epoch.
        with self._lock:
            # Only the first client through performs the advance; the epoch
            # check makes this idempotent across the n racing clients.
            if self._cur_epoch == starting_epoch:
                self._cur_epoch += 1
                self._unfinished_clients_in_epoch = self._n
                try:
                    self._output_iterator = next(self._next_epoch)
                except Exception as e:
                    # Defer the error so every client thread re-raises it.
                    self._gen_epoch_error = e

        if self._gen_epoch_error is not None:
            # If there was an error when advancing to the next epoch,
            # re-raise it for all threads.
            raise self._gen_epoch_error

        assert self._output_iterator is not None
        return starting_epoch + 1
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (180 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/aggregate.cpython-310.pyc
ADDED
|
Binary file (2.49 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_all_to_all_op.cpython-310.pyc
ADDED
|
Binary file (2.39 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_arrow_op.cpython-310.pyc
ADDED
|
Binary file (190 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_items_op.cpython-310.pyc
ADDED
|
Binary file (190 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_numpy_op.cpython-310.pyc
ADDED
|
Binary file (190 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_from_pandas_op.cpython-310.pyc
ADDED
|
Binary file (191 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_read_op.cpython-310.pyc
ADDED
|
Binary file (3.63 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_udf_map_op.cpython-310.pyc
ADDED
|
Binary file (15.9 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/plan_write_op.cpython-310.pyc
ADDED
|
Binary file (3.35 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/planner.cpython-310.pyc
ADDED
|
Binary file (5.92 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/random_shuffle.cpython-310.pyc
ADDED
|
Binary file (2.62 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/randomize_blocks.cpython-310.pyc
ADDED
|
Binary file (1.52 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/repartition.cpython-310.pyc
ADDED
|
Binary file (2.53 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/__pycache__/sort.cpython-310.pyc
ADDED
|
Binary file (2.4 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/aggregate.py
ADDED
|
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import List, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
from ray.data._internal.execution.interfaces import (
|
| 4 |
+
AllToAllTransformFn,
|
| 5 |
+
RefBundle,
|
| 6 |
+
TaskContext,
|
| 7 |
+
)
|
| 8 |
+
from ray.data._internal.planner.exchange.aggregate_task_spec import (
|
| 9 |
+
SortAggregateTaskSpec,
|
| 10 |
+
)
|
| 11 |
+
from ray.data._internal.planner.exchange.pull_based_shuffle_task_scheduler import (
|
| 12 |
+
PullBasedShuffleTaskScheduler,
|
| 13 |
+
)
|
| 14 |
+
from ray.data._internal.planner.exchange.push_based_shuffle_task_scheduler import (
|
| 15 |
+
PushBasedShuffleTaskScheduler,
|
| 16 |
+
)
|
| 17 |
+
from ray.data._internal.planner.exchange.sort_task_spec import SortKey, SortTaskSpec
|
| 18 |
+
from ray.data._internal.stats import StatsDict
|
| 19 |
+
from ray.data._internal.util import unify_block_metadata_schema
|
| 20 |
+
from ray.data.aggregate import AggregateFn
|
| 21 |
+
from ray.data.context import DataContext
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
def generate_aggregate_fn(
    key: Optional[Union[str, List[str]]],
    aggs: List[AggregateFn],
    batch_format: str,
    _debug_limit_shuffle_execution_to_num_blocks: Optional[int] = None,
) -> AllToAllTransformFn:
    """Generate function to aggregate blocks by the specified key column or key
    function.

    Args:
        key: Column name(s) to group by, or None for a single global group.
        aggs: Aggregations to apply; must be non-empty.
        batch_format: Batch format passed through to the aggregate task spec.
        _debug_limit_shuffle_execution_to_num_blocks: Debug-only cap on how
            many blocks the shuffle scheduler executes.

    Returns:
        An all-to-all transform fn mapping input RefBundles to aggregated
        output RefBundles plus a stats dict.

    Raises:
        ValueError: If ``aggs`` is empty.
    """
    if not aggs:
        raise ValueError("Aggregate requires at least one aggregation")

    def fn(
        refs: List[RefBundle],
        ctx: TaskContext,
    ) -> Tuple[List[RefBundle], StatsDict]:
        # Flatten the incoming bundles into parallel lists of block refs and
        # their metadata.
        block_refs = []
        block_meta = []
        for bundle in refs:
            block_refs.extend(bundle.block_refs)
            block_meta.extend(bundle.metadata)
        if not block_refs:
            # Nothing to aggregate: empty output, empty stats.
            return (block_refs, {})

        # Validate every aggregation against the unified schema of all blocks.
        schema = unify_block_metadata_schema(block_meta)
        for agg in aggs:
            agg._validate(schema)

        num_mappers = len(block_refs)
        sort_key = SortKey(key)

        if key is None:
            # Global aggregation: everything reduces into one partition.
            num_outputs = 1
            boundaries = []
        else:
            # Use same number of output partitions.
            num_outputs = num_mappers
            sample_bar = ctx.sub_progress_bar_dict[
                SortTaskSpec.SORT_SAMPLE_SUB_PROGRESS_BAR_NAME
            ]
            # Sample boundaries for aggregate key.
            boundaries = SortTaskSpec.sample_boundaries(
                block_refs, sort_key, num_outputs, sample_bar
            )

        agg_spec = SortAggregateTaskSpec(
            boundaries=boundaries,
            key=sort_key,
            aggs=aggs,
            batch_format=batch_format,
        )
        # Pick the shuffle strategy configured in the current DataContext.
        scheduler_cls = (
            PushBasedShuffleTaskScheduler
            if DataContext.get_current().use_push_based_shuffle
            else PullBasedShuffleTaskScheduler
        )
        scheduler = scheduler_cls(agg_spec)

        return scheduler.execute(
            refs,
            num_outputs,
            ctx,
            _debug_limit_execution_to_num_blocks=(
                _debug_limit_shuffle_execution_to_num_blocks
            ),
        )

    return fn
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/exchange/__init__.py
ADDED
|
File without changes
|
minigpt2/lib/python3.10/site-packages/ray/data/_internal/planner/exchange/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (189 Bytes). View file
|
|
|