koichi12 commited on
Commit
1f0374e
·
verified ·
1 Parent(s): c63d08b

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +5 -0
  2. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__init__.py +33 -0
  3. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc +0 -0
  4. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc +0 -0
  5. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-311.pyc +0 -0
  6. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-311.pyc +0 -0
  7. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-311.pyc +0 -0
  8. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-311.pyc +0 -0
  11. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc +0 -0
  12. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc +0 -0
  13. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc +0 -0
  14. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-311.pyc +0 -0
  15. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-311.pyc +0 -0
  16. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/locators.py +1303 -0
  17. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/t64-arm.exe +3 -0
  18. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/t64.exe +3 -0
  19. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/util.py +2025 -0
  20. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/w64-arm.exe +3 -0
  21. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/w64.exe +3 -0
  22. .venv/lib/python3.11/site-packages/pip/_vendor/distro/__init__.py +54 -0
  23. .venv/lib/python3.11/site-packages/pip/_vendor/distro/__main__.py +4 -0
  24. .venv/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-311.pyc +0 -0
  25. .venv/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-311.pyc +0 -0
  26. .venv/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-311.pyc +0 -0
  27. .venv/lib/python3.11/site-packages/pip/_vendor/distro/distro.py +1399 -0
  28. .venv/lib/python3.11/site-packages/pip/_vendor/distro/py.typed +0 -0
  29. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc +3 -0
  30. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__init__.py +608 -0
  31. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc +0 -0
  32. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc +0 -0
  33. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc +0 -0
  34. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc +0 -0
  35. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc +0 -0
  36. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc +0 -0
  37. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc +0 -0
  38. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc +0 -0
  39. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc +0 -0
  40. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-311.pyc +0 -0
  41. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc +0 -0
  42. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/_asyncio.py +94 -0
  43. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/_utils.py +76 -0
  44. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/after.py +51 -0
  45. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/before.py +46 -0
  46. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/before_sleep.py +71 -0
  47. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/nap.py +43 -0
  48. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/py.typed +0 -0
  49. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/retry.py +272 -0
  50. .venv/lib/python3.11/site-packages/pip/_vendor/tenacity/stop.py +103 -0
.gitattributes CHANGED
@@ -97,3 +97,8 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/_inductor/_
97
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cuda_cupti/lib/libpcsamplingutil.so filter=lfs diff=lfs merge=lfs -text
98
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/_inductor/__pycache__/lowering.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
99
  .venv/lib/python3.11/site-packages/pydantic/__pycache__/json_schema.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
97
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cuda_cupti/lib/libpcsamplingutil.so filter=lfs diff=lfs merge=lfs -text
98
  tuning-competition-baseline/.venv/lib/python3.11/site-packages/torch/_inductor/__pycache__/lowering.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
99
  .venv/lib/python3.11/site-packages/pydantic/__pycache__/json_schema.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
100
+ .venv/lib/python3.11/site-packages/pip/_vendor/distlib/w64-arm.exe filter=lfs diff=lfs merge=lfs -text
101
+ .venv/lib/python3.11/site-packages/pip/_vendor/distlib/t64.exe filter=lfs diff=lfs merge=lfs -text
102
+ .venv/lib/python3.11/site-packages/pip/_vendor/distlib/w64.exe filter=lfs diff=lfs merge=lfs -text
103
+ .venv/lib/python3.11/site-packages/pip/_vendor/distlib/t64-arm.exe filter=lfs diff=lfs merge=lfs -text
104
+ .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__init__.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012-2023 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ import logging
8
+
9
+ __version__ = '0.3.8'
10
+
11
+
12
+ class DistlibException(Exception):
13
+ pass
14
+
15
+
16
+ try:
17
+ from logging import NullHandler
18
+ except ImportError: # pragma: no cover
19
+
20
+ class NullHandler(logging.Handler):
21
+
22
+ def handle(self, record):
23
+ pass
24
+
25
+ def emit(self, record):
26
+ pass
27
+
28
+ def createLock(self):
29
+ self.lock = None
30
+
31
+
32
+ logger = logging.getLogger(__name__)
33
+ logger.addHandler(NullHandler())
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (1.46 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/compat.cpython-311.pyc ADDED
Binary file (52.4 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/database.cpython-311.pyc ADDED
Binary file (72.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/index.cpython-311.pyc ADDED
Binary file (26.7 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/locators.cpython-311.pyc ADDED
Binary file (65.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/manifest.cpython-311.pyc ADDED
Binary file (17 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/markers.cpython-311.pyc ADDED
Binary file (8.54 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/metadata.cpython-311.pyc ADDED
Binary file (47.4 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/resources.cpython-311.pyc ADDED
Binary file (19 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/scripts.cpython-311.pyc ADDED
Binary file (21.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/util.cpython-311.pyc ADDED
Binary file (98.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/version.cpython-311.pyc ADDED
Binary file (34.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/__pycache__/wheel.cpython-311.pyc ADDED
Binary file (59.5 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/locators.py ADDED
@@ -0,0 +1,1303 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012-2023 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+
8
+ import gzip
9
+ from io import BytesIO
10
+ import json
11
+ import logging
12
+ import os
13
+ import posixpath
14
+ import re
15
+ try:
16
+ import threading
17
+ except ImportError: # pragma: no cover
18
+ import dummy_threading as threading
19
+ import zlib
20
+
21
+ from . import DistlibException
22
+ from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url,
23
+ queue, quote, unescape, build_opener,
24
+ HTTPRedirectHandler as BaseRedirectHandler, text_type,
25
+ Request, HTTPError, URLError)
26
+ from .database import Distribution, DistributionPath, make_dist
27
+ from .metadata import Metadata, MetadataInvalidError
28
+ from .util import (cached_property, ensure_slash, split_filename, get_project_data,
29
+ parse_requirement, parse_name_and_version, ServerProxy,
30
+ normalize_name)
31
+ from .version import get_scheme, UnsupportedVersionError
32
+ from .wheel import Wheel, is_compatible
33
+
34
+ logger = logging.getLogger(__name__)
35
+
36
+ HASHER_HASH = re.compile(r'^(\w+)=([a-f0-9]+)')
37
+ CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I)
38
+ HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml')
39
+ DEFAULT_INDEX = 'https://pypi.org/pypi'
40
+
41
+
42
+ def get_all_distribution_names(url=None):
43
+ """
44
+ Return all distribution names known by an index.
45
+ :param url: The URL of the index.
46
+ :return: A list of all known distribution names.
47
+ """
48
+ if url is None:
49
+ url = DEFAULT_INDEX
50
+ client = ServerProxy(url, timeout=3.0)
51
+ try:
52
+ return client.list_packages()
53
+ finally:
54
+ client('close')()
55
+
56
+
57
+ class RedirectHandler(BaseRedirectHandler):
58
+ """
59
+ A class to work around a bug in some Python 3.2.x releases.
60
+ """
61
+ # There's a bug in the base version for some 3.2.x
62
+ # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header
63
+ # returns e.g. /abc, it bails because it says the scheme ''
64
+ # is bogus, when actually it should use the request's
65
+ # URL for the scheme. See Python issue #13696.
66
+ def http_error_302(self, req, fp, code, msg, headers):
67
+ # Some servers (incorrectly) return multiple Location headers
68
+ # (so probably same goes for URI). Use first header.
69
+ newurl = None
70
+ for key in ('location', 'uri'):
71
+ if key in headers:
72
+ newurl = headers[key]
73
+ break
74
+ if newurl is None: # pragma: no cover
75
+ return
76
+ urlparts = urlparse(newurl)
77
+ if urlparts.scheme == '':
78
+ newurl = urljoin(req.get_full_url(), newurl)
79
+ if hasattr(headers, 'replace_header'):
80
+ headers.replace_header(key, newurl)
81
+ else:
82
+ headers[key] = newurl
83
+ return BaseRedirectHandler.http_error_302(self, req, fp, code, msg,
84
+ headers)
85
+
86
+ http_error_301 = http_error_303 = http_error_307 = http_error_302
87
+
88
+
89
+ class Locator(object):
90
+ """
91
+ A base class for locators - things that locate distributions.
92
+ """
93
+ source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz')
94
+ binary_extensions = ('.egg', '.exe', '.whl')
95
+ excluded_extensions = ('.pdf',)
96
+
97
+ # A list of tags indicating which wheels you want to match. The default
98
+ # value of None matches against the tags compatible with the running
99
+ # Python. If you want to match other values, set wheel_tags on a locator
100
+ # instance to a list of tuples (pyver, abi, arch) which you want to match.
101
+ wheel_tags = None
102
+
103
+ downloadable_extensions = source_extensions + ('.whl',)
104
+
105
+ def __init__(self, scheme='default'):
106
+ """
107
+ Initialise an instance.
108
+ :param scheme: Because locators look for most recent versions, they
109
+ need to know the version scheme to use. This specifies
110
+ the current PEP-recommended scheme - use ``'legacy'``
111
+ if you need to support existing distributions on PyPI.
112
+ """
113
+ self._cache = {}
114
+ self.scheme = scheme
115
+ # Because of bugs in some of the handlers on some of the platforms,
116
+ # we use our own opener rather than just using urlopen.
117
+ self.opener = build_opener(RedirectHandler())
118
+ # If get_project() is called from locate(), the matcher instance
119
+ # is set from the requirement passed to locate(). See issue #18 for
120
+ # why this can be useful to know.
121
+ self.matcher = None
122
+ self.errors = queue.Queue()
123
+
124
+ def get_errors(self):
125
+ """
126
+ Return any errors which have occurred.
127
+ """
128
+ result = []
129
+ while not self.errors.empty(): # pragma: no cover
130
+ try:
131
+ e = self.errors.get(False)
132
+ result.append(e)
133
+ except self.errors.Empty:
134
+ continue
135
+ self.errors.task_done()
136
+ return result
137
+
138
+ def clear_errors(self):
139
+ """
140
+ Clear any errors which may have been logged.
141
+ """
142
+ # Just get the errors and throw them away
143
+ self.get_errors()
144
+
145
+ def clear_cache(self):
146
+ self._cache.clear()
147
+
148
+ def _get_scheme(self):
149
+ return self._scheme
150
+
151
+ def _set_scheme(self, value):
152
+ self._scheme = value
153
+
154
+ scheme = property(_get_scheme, _set_scheme)
155
+
156
+ def _get_project(self, name):
157
+ """
158
+ For a given project, get a dictionary mapping available versions to Distribution
159
+ instances.
160
+
161
+ This should be implemented in subclasses.
162
+
163
+ If called from a locate() request, self.matcher will be set to a
164
+ matcher for the requirement to satisfy, otherwise it will be None.
165
+ """
166
+ raise NotImplementedError('Please implement in the subclass')
167
+
168
+ def get_distribution_names(self):
169
+ """
170
+ Return all the distribution names known to this locator.
171
+ """
172
+ raise NotImplementedError('Please implement in the subclass')
173
+
174
+ def get_project(self, name):
175
+ """
176
+ For a given project, get a dictionary mapping available versions to Distribution
177
+ instances.
178
+
179
+ This calls _get_project to do all the work, and just implements a caching layer on top.
180
+ """
181
+ if self._cache is None: # pragma: no cover
182
+ result = self._get_project(name)
183
+ elif name in self._cache:
184
+ result = self._cache[name]
185
+ else:
186
+ self.clear_errors()
187
+ result = self._get_project(name)
188
+ self._cache[name] = result
189
+ return result
190
+
191
+ def score_url(self, url):
192
+ """
193
+ Give an url a score which can be used to choose preferred URLs
194
+ for a given project release.
195
+ """
196
+ t = urlparse(url)
197
+ basename = posixpath.basename(t.path)
198
+ compatible = True
199
+ is_wheel = basename.endswith('.whl')
200
+ is_downloadable = basename.endswith(self.downloadable_extensions)
201
+ if is_wheel:
202
+ compatible = is_compatible(Wheel(basename), self.wheel_tags)
203
+ return (t.scheme == 'https', 'pypi.org' in t.netloc,
204
+ is_downloadable, is_wheel, compatible, basename)
205
+
206
+ def prefer_url(self, url1, url2):
207
+ """
208
+ Choose one of two URLs where both are candidates for distribution
209
+ archives for the same version of a distribution (for example,
210
+ .tar.gz vs. zip).
211
+
212
+ The current implementation favours https:// URLs over http://, archives
213
+ from PyPI over those from other locations, wheel compatibility (if a
214
+ wheel) and then the archive name.
215
+ """
216
+ result = url2
217
+ if url1:
218
+ s1 = self.score_url(url1)
219
+ s2 = self.score_url(url2)
220
+ if s1 > s2:
221
+ result = url1
222
+ if result != url2:
223
+ logger.debug('Not replacing %r with %r', url1, url2)
224
+ else:
225
+ logger.debug('Replacing %r with %r', url1, url2)
226
+ return result
227
+
228
+ def split_filename(self, filename, project_name):
229
+ """
230
+ Attempt to split a filename in project name, version and Python version.
231
+ """
232
+ return split_filename(filename, project_name)
233
+
234
+ def convert_url_to_download_info(self, url, project_name):
235
+ """
236
+ See if a URL is a candidate for a download URL for a project (the URL
237
+ has typically been scraped from an HTML page).
238
+
239
+ If it is, a dictionary is returned with keys "name", "version",
240
+ "filename" and "url"; otherwise, None is returned.
241
+ """
242
+ def same_project(name1, name2):
243
+ return normalize_name(name1) == normalize_name(name2)
244
+
245
+ result = None
246
+ scheme, netloc, path, params, query, frag = urlparse(url)
247
+ if frag.lower().startswith('egg='): # pragma: no cover
248
+ logger.debug('%s: version hint in fragment: %r',
249
+ project_name, frag)
250
+ m = HASHER_HASH.match(frag)
251
+ if m:
252
+ algo, digest = m.groups()
253
+ else:
254
+ algo, digest = None, None
255
+ origpath = path
256
+ if path and path[-1] == '/': # pragma: no cover
257
+ path = path[:-1]
258
+ if path.endswith('.whl'):
259
+ try:
260
+ wheel = Wheel(path)
261
+ if not is_compatible(wheel, self.wheel_tags):
262
+ logger.debug('Wheel not compatible: %s', path)
263
+ else:
264
+ if project_name is None:
265
+ include = True
266
+ else:
267
+ include = same_project(wheel.name, project_name)
268
+ if include:
269
+ result = {
270
+ 'name': wheel.name,
271
+ 'version': wheel.version,
272
+ 'filename': wheel.filename,
273
+ 'url': urlunparse((scheme, netloc, origpath,
274
+ params, query, '')),
275
+ 'python-version': ', '.join(
276
+ ['.'.join(list(v[2:])) for v in wheel.pyver]),
277
+ }
278
+ except Exception: # pragma: no cover
279
+ logger.warning('invalid path for wheel: %s', path)
280
+ elif not path.endswith(self.downloadable_extensions): # pragma: no cover
281
+ logger.debug('Not downloadable: %s', path)
282
+ else: # downloadable extension
283
+ path = filename = posixpath.basename(path)
284
+ for ext in self.downloadable_extensions:
285
+ if path.endswith(ext):
286
+ path = path[:-len(ext)]
287
+ t = self.split_filename(path, project_name)
288
+ if not t: # pragma: no cover
289
+ logger.debug('No match for project/version: %s', path)
290
+ else:
291
+ name, version, pyver = t
292
+ if not project_name or same_project(project_name, name):
293
+ result = {
294
+ 'name': name,
295
+ 'version': version,
296
+ 'filename': filename,
297
+ 'url': urlunparse((scheme, netloc, origpath,
298
+ params, query, '')),
299
+ }
300
+ if pyver: # pragma: no cover
301
+ result['python-version'] = pyver
302
+ break
303
+ if result and algo:
304
+ result['%s_digest' % algo] = digest
305
+ return result
306
+
307
+ def _get_digest(self, info):
308
+ """
309
+ Get a digest from a dictionary by looking at a "digests" dictionary
310
+ or keys of the form 'algo_digest'.
311
+
312
+ Returns a 2-tuple (algo, digest) if found, else None. Currently
313
+ looks only for SHA256, then MD5.
314
+ """
315
+ result = None
316
+ if 'digests' in info:
317
+ digests = info['digests']
318
+ for algo in ('sha256', 'md5'):
319
+ if algo in digests:
320
+ result = (algo, digests[algo])
321
+ break
322
+ if not result:
323
+ for algo in ('sha256', 'md5'):
324
+ key = '%s_digest' % algo
325
+ if key in info:
326
+ result = (algo, info[key])
327
+ break
328
+ return result
329
+
330
+ def _update_version_data(self, result, info):
331
+ """
332
+ Update a result dictionary (the final result from _get_project) with a
333
+ dictionary for a specific version, which typically holds information
334
+ gleaned from a filename or URL for an archive for the distribution.
335
+ """
336
+ name = info.pop('name')
337
+ version = info.pop('version')
338
+ if version in result:
339
+ dist = result[version]
340
+ md = dist.metadata
341
+ else:
342
+ dist = make_dist(name, version, scheme=self.scheme)
343
+ md = dist.metadata
344
+ dist.digest = digest = self._get_digest(info)
345
+ url = info['url']
346
+ result['digests'][url] = digest
347
+ if md.source_url != info['url']:
348
+ md.source_url = self.prefer_url(md.source_url, url)
349
+ result['urls'].setdefault(version, set()).add(url)
350
+ dist.locator = self
351
+ result[version] = dist
352
+
353
+ def locate(self, requirement, prereleases=False):
354
+ """
355
+ Find the most recent distribution which matches the given
356
+ requirement.
357
+
358
+ :param requirement: A requirement of the form 'foo (1.0)' or perhaps
359
+ 'foo (>= 1.0, < 2.0, != 1.3)'
360
+ :param prereleases: If ``True``, allow pre-release versions
361
+ to be located. Otherwise, pre-release versions
362
+ are not returned.
363
+ :return: A :class:`Distribution` instance, or ``None`` if no such
364
+ distribution could be located.
365
+ """
366
+ result = None
367
+ r = parse_requirement(requirement)
368
+ if r is None: # pragma: no cover
369
+ raise DistlibException('Not a valid requirement: %r' % requirement)
370
+ scheme = get_scheme(self.scheme)
371
+ self.matcher = matcher = scheme.matcher(r.requirement)
372
+ logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
373
+ versions = self.get_project(r.name)
374
+ if len(versions) > 2: # urls and digests keys are present
375
+ # sometimes, versions are invalid
376
+ slist = []
377
+ vcls = matcher.version_class
378
+ for k in versions:
379
+ if k in ('urls', 'digests'):
380
+ continue
381
+ try:
382
+ if not matcher.match(k):
383
+ pass # logger.debug('%s did not match %r', matcher, k)
384
+ else:
385
+ if prereleases or not vcls(k).is_prerelease:
386
+ slist.append(k)
387
+ except Exception: # pragma: no cover
388
+ logger.warning('error matching %s with %r', matcher, k)
389
+ pass # slist.append(k)
390
+ if len(slist) > 1:
391
+ slist = sorted(slist, key=scheme.key)
392
+ if slist:
393
+ logger.debug('sorted list: %s', slist)
394
+ version = slist[-1]
395
+ result = versions[version]
396
+ if result:
397
+ if r.extras:
398
+ result.extras = r.extras
399
+ result.download_urls = versions.get('urls', {}).get(version, set())
400
+ d = {}
401
+ sd = versions.get('digests', {})
402
+ for url in result.download_urls:
403
+ if url in sd: # pragma: no cover
404
+ d[url] = sd[url]
405
+ result.digests = d
406
+ self.matcher = None
407
+ return result
408
+
409
+
410
+ class PyPIRPCLocator(Locator):
411
+ """
412
+ This locator uses XML-RPC to locate distributions. It therefore
413
+ cannot be used with simple mirrors (that only mirror file content).
414
+ """
415
+ def __init__(self, url, **kwargs):
416
+ """
417
+ Initialise an instance.
418
+
419
+ :param url: The URL to use for XML-RPC.
420
+ :param kwargs: Passed to the superclass constructor.
421
+ """
422
+ super(PyPIRPCLocator, self).__init__(**kwargs)
423
+ self.base_url = url
424
+ self.client = ServerProxy(url, timeout=3.0)
425
+
426
+ def get_distribution_names(self):
427
+ """
428
+ Return all the distribution names known to this locator.
429
+ """
430
+ return set(self.client.list_packages())
431
+
432
+ def _get_project(self, name):
433
+ result = {'urls': {}, 'digests': {}}
434
+ versions = self.client.package_releases(name, True)
435
+ for v in versions:
436
+ urls = self.client.release_urls(name, v)
437
+ data = self.client.release_data(name, v)
438
+ metadata = Metadata(scheme=self.scheme)
439
+ metadata.name = data['name']
440
+ metadata.version = data['version']
441
+ metadata.license = data.get('license')
442
+ metadata.keywords = data.get('keywords', [])
443
+ metadata.summary = data.get('summary')
444
+ dist = Distribution(metadata)
445
+ if urls:
446
+ info = urls[0]
447
+ metadata.source_url = info['url']
448
+ dist.digest = self._get_digest(info)
449
+ dist.locator = self
450
+ result[v] = dist
451
+ for info in urls:
452
+ url = info['url']
453
+ digest = self._get_digest(info)
454
+ result['urls'].setdefault(v, set()).add(url)
455
+ result['digests'][url] = digest
456
+ return result
457
+
458
+
459
+ class PyPIJSONLocator(Locator):
460
+ """
461
+ This locator uses PyPI's JSON interface. It's very limited in functionality
462
+ and probably not worth using.
463
+ """
464
+ def __init__(self, url, **kwargs):
465
+ super(PyPIJSONLocator, self).__init__(**kwargs)
466
+ self.base_url = ensure_slash(url)
467
+
468
+ def get_distribution_names(self):
469
+ """
470
+ Return all the distribution names known to this locator.
471
+ """
472
+ raise NotImplementedError('Not available from this locator')
473
+
474
+ def _get_project(self, name):
475
+ result = {'urls': {}, 'digests': {}}
476
+ url = urljoin(self.base_url, '%s/json' % quote(name))
477
+ try:
478
+ resp = self.opener.open(url)
479
+ data = resp.read().decode() # for now
480
+ d = json.loads(data)
481
+ md = Metadata(scheme=self.scheme)
482
+ data = d['info']
483
+ md.name = data['name']
484
+ md.version = data['version']
485
+ md.license = data.get('license')
486
+ md.keywords = data.get('keywords', [])
487
+ md.summary = data.get('summary')
488
+ dist = Distribution(md)
489
+ dist.locator = self
490
+ # urls = d['urls']
491
+ result[md.version] = dist
492
+ for info in d['urls']:
493
+ url = info['url']
494
+ dist.download_urls.add(url)
495
+ dist.digests[url] = self._get_digest(info)
496
+ result['urls'].setdefault(md.version, set()).add(url)
497
+ result['digests'][url] = self._get_digest(info)
498
+ # Now get other releases
499
+ for version, infos in d['releases'].items():
500
+ if version == md.version:
501
+ continue # already done
502
+ omd = Metadata(scheme=self.scheme)
503
+ omd.name = md.name
504
+ omd.version = version
505
+ odist = Distribution(omd)
506
+ odist.locator = self
507
+ result[version] = odist
508
+ for info in infos:
509
+ url = info['url']
510
+ odist.download_urls.add(url)
511
+ odist.digests[url] = self._get_digest(info)
512
+ result['urls'].setdefault(version, set()).add(url)
513
+ result['digests'][url] = self._get_digest(info)
514
+ # for info in urls:
515
+ # md.source_url = info['url']
516
+ # dist.digest = self._get_digest(info)
517
+ # dist.locator = self
518
+ # for info in urls:
519
+ # url = info['url']
520
+ # result['urls'].setdefault(md.version, set()).add(url)
521
+ # result['digests'][url] = self._get_digest(info)
522
+ except Exception as e:
523
+ self.errors.put(text_type(e))
524
+ logger.exception('JSON fetch failed: %s', e)
525
+ return result
526
+
527
+
528
+ class Page(object):
529
+ """
530
+ This class represents a scraped HTML page.
531
+ """
532
+ # The following slightly hairy-looking regex just looks for the contents of
533
+ # an anchor link, which has an attribute "href" either immediately preceded
534
+ # or immediately followed by a "rel" attribute. The attribute values can be
535
+ # declared with double quotes, single quotes or no quotes - which leads to
536
+ # the length of the expression.
537
+ _href = re.compile("""
538
+ (rel\\s*=\\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\\s\n]*))\\s+)?
539
+ href\\s*=\\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\\s\n]*))
540
+ (\\s+rel\\s*=\\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\\s\n]*)))?
541
+ """, re.I | re.S | re.X)
542
+ _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S)
543
+
544
+ def __init__(self, data, url):
545
+ """
546
+ Initialise an instance with the Unicode page contents and the URL they
547
+ came from.
548
+ """
549
+ self.data = data
550
+ self.base_url = self.url = url
551
+ m = self._base.search(self.data)
552
+ if m:
553
+ self.base_url = m.group(1)
554
+
555
+ _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
556
+
557
+ @cached_property
558
+ def links(self):
559
+ """
560
+ Return the URLs of all the links on a page together with information
561
+ about their "rel" attribute, for determining which ones to treat as
562
+ downloads and which ones to queue for further scraping.
563
+ """
564
+ def clean(url):
565
+ "Tidy up an URL."
566
+ scheme, netloc, path, params, query, frag = urlparse(url)
567
+ return urlunparse((scheme, netloc, quote(path),
568
+ params, query, frag))
569
+
570
+ result = set()
571
+ for match in self._href.finditer(self.data):
572
+ d = match.groupdict('')
573
+ rel = (d['rel1'] or d['rel2'] or d['rel3'] or
574
+ d['rel4'] or d['rel5'] or d['rel6'])
575
+ url = d['url1'] or d['url2'] or d['url3']
576
+ url = urljoin(self.base_url, url)
577
+ url = unescape(url)
578
+ url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
579
+ result.add((url, rel))
580
+ # We sort the result, hoping to bring the most recent versions
581
+ # to the front
582
+ result = sorted(result, key=lambda t: t[0], reverse=True)
583
+ return result
584
+
585
+
586
class SimpleScrapingLocator(Locator):
    """
    A locator which scrapes HTML pages to locate downloads for a distribution.
    This runs multiple threads to do the I/O; performance is at least as good
    as pip's PackageFinder, which works in an analogous fashion.
    """

    # These are used to deal with various Content-Encoding schemes.
    decoders = {
        'deflate': zlib.decompress,
        'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(b)).read(),
        'none': lambda b: b,
    }

    def __init__(self, url, timeout=None, num_workers=10, **kwargs):
        """
        Initialise an instance.
        :param url: The root URL to use for scraping.
        :param timeout: The timeout, in seconds, to be applied to requests.
                        This defaults to ``None`` (no timeout specified).
        :param num_workers: The number of worker threads you want to do I/O,
                            This defaults to 10.
        :param kwargs: Passed to the superclass.
        """
        super(SimpleScrapingLocator, self).__init__(**kwargs)
        self.base_url = ensure_slash(url)
        self.timeout = timeout
        self._page_cache = {}
        self._seen = set()
        self._to_fetch = queue.Queue()
        self._bad_hosts = set()
        self.skip_externals = False
        self.num_workers = num_workers
        self._lock = threading.RLock()
        # See issue #45: we need to be resilient when the locator is used
        # in a thread, e.g. with concurrent.futures. We can't use self._lock
        # as it is for coordinating our internal threads - the ones created
        # in _prepare_threads.
        self._gplock = threading.RLock()
        self.platform_check = False  # See issue #112

    def _prepare_threads(self):
        """
        Threads are created only when get_project is called, and terminate
        before it returns. They are there primarily to parallelise I/O (i.e.
        fetching web pages).
        """
        self._threads = []
        for i in range(self.num_workers):
            t = threading.Thread(target=self._fetch)
            t.daemon = True
            t.start()
            self._threads.append(t)

    def _wait_threads(self):
        """
        Tell all the threads to terminate (by sending a sentinel value) and
        wait for them to do so.
        """
        # Note that you need two loops, since you can't say which
        # thread will get each sentinel
        for t in self._threads:
            self._to_fetch.put(None)    # sentinel
        for t in self._threads:
            t.join()
        self._threads = []

    def _get_project(self, name):
        # Shared mutable state (self.result, self.project_name) is set up
        # under _gplock so concurrent callers don't interleave; worker
        # threads drain self._to_fetch until it is fully processed.
        result = {'urls': {}, 'digests': {}}
        with self._gplock:
            self.result = result
            self.project_name = name
            url = urljoin(self.base_url, '%s/' % quote(name))
            self._seen.clear()
            self._page_cache.clear()
            self._prepare_threads()
            try:
                logger.debug('Queueing %s', url)
                self._to_fetch.put(url)
                self._to_fetch.join()
            finally:
                self._wait_threads()
            del self.result
        return result

    platform_dependent = re.compile(r'\b(linux_(i\d86|x86_64|arm\w+)|'
                                    r'win(32|_amd64)|macosx_?\d+)\b', re.I)

    def _is_platform_dependent(self, url):
        """
        Does an URL refer to a platform-specific download?
        """
        return self.platform_dependent.search(url)

    def _process_download(self, url):
        """
        See if an URL is a suitable download for a project.

        If it is, register information in the result dictionary (for
        _get_project) about the specific version it's for.

        Note that the return value isn't actually used other than as a boolean
        value.
        """
        if self.platform_check and self._is_platform_dependent(url):
            info = None
        else:
            info = self.convert_url_to_download_info(url, self.project_name)
        logger.debug('process_download: %s -> %s', url, info)
        if info:
            with self._lock:    # needed because self.result is shared
                self._update_version_data(self.result, info)
        return info

    def _should_queue(self, link, referrer, rel):
        """
        Determine whether a link URL from a referring page and with a
        particular "rel" attribute should be queued for scraping.
        """
        scheme, netloc, path, _, _, _ = urlparse(link)
        if path.endswith(self.source_extensions + self.binary_extensions +
                         self.excluded_extensions):
            result = False
        elif self.skip_externals and not link.startswith(self.base_url):
            result = False
        elif not referrer.startswith(self.base_url):
            result = False
        elif rel not in ('homepage', 'download'):
            result = False
        elif scheme not in ('http', 'https', 'ftp'):
            result = False
        elif self._is_platform_dependent(link):
            result = False
        else:
            host = netloc.split(':', 1)[0]
            if host.lower() == 'localhost':
                result = False
            else:
                result = True
        logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
                     referrer, result)
        return result

    def _fetch(self):
        """
        Get a URL to fetch from the work queue, get the HTML page, examine its
        links for download candidates and candidates for further scraping.

        This is a handy method to run in a thread.
        """
        while True:
            # A None value is the shutdown sentinel placed by _wait_threads.
            url = self._to_fetch.get()
            try:
                if url:
                    page = self.get_page(url)
                    if page is None:    # e.g. after an error
                        continue
                    for link, rel in page.links:
                        if link not in self._seen:
                            try:
                                self._seen.add(link)
                                if (not self._process_download(link) and
                                        self._should_queue(link, url, rel)):
                                    logger.debug('Queueing %s from %s', link, url)
                                    self._to_fetch.put(link)
                            except MetadataInvalidError:  # e.g. invalid versions
                                pass
            except Exception as e:  # pragma: no cover
                self.errors.put(text_type(e))
            finally:
                # always do this, to avoid hangs :-)
                self._to_fetch.task_done()
            if not url:
                # logger.debug('Sentinel seen, quitting.')
                break

    def get_page(self, url):
        """
        Get the HTML for an URL, possibly from an in-memory cache.

        XXX TODO Note: this cache is never actually cleared. It's assumed that
        the data won't get stale over the lifetime of a locator instance (not
        necessarily true for the default_locator).
        """
        # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
        scheme, netloc, path, _, _, _ = urlparse(url)
        if scheme == 'file' and os.path.isdir(url2pathname(path)):
            url = urljoin(ensure_slash(url), 'index.html')

        if url in self._page_cache:
            result = self._page_cache[url]
            logger.debug('Returning %s from cache: %s', url, result)
        else:
            host = netloc.split(':', 1)[0]
            result = None
            if host in self._bad_hosts:
                logger.debug('Skipping %s due to bad host %s', url, host)
            else:
                req = Request(url, headers={'Accept-encoding': 'identity'})
                try:
                    logger.debug('Fetching %s', url)
                    resp = self.opener.open(req, timeout=self.timeout)
                    logger.debug('Fetched %s', url)
                    headers = resp.info()
                    content_type = headers.get('Content-Type', '')
                    if HTML_CONTENT_TYPE.match(content_type):
                        final_url = resp.geturl()
                        data = resp.read()
                        encoding = headers.get('Content-Encoding')
                        if encoding:
                            decoder = self.decoders[encoding]   # fail if not found
                            data = decoder(data)
                        # Default to utf-8; a charset in the Content-Type
                        # header overrides it, with latin-1 as last resort.
                        encoding = 'utf-8'
                        m = CHARSET.search(content_type)
                        if m:
                            encoding = m.group(1)
                        try:
                            data = data.decode(encoding)
                        except UnicodeError:  # pragma: no cover
                            data = data.decode('latin-1')    # fallback
                        result = Page(data, final_url)
                        self._page_cache[final_url] = result
                except HTTPError as e:
                    if e.code != 404:
                        logger.exception('Fetch failed: %s: %s', url, e)
                except URLError as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                    with self._lock:
                        self._bad_hosts.add(host)
                except Exception as e:  # pragma: no cover
                    logger.exception('Fetch failed: %s: %s', url, e)
                finally:
                    self._page_cache[url] = result   # even if None (failure)
        return result

    _distname_re = re.compile('<a href=[^>]*>([^<]+)<')

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        page = self.get_page(self.base_url)
        if not page:
            raise DistlibException('Unable to get %s' % self.base_url)
        for match in self._distname_re.finditer(page.data):
            result.add(match.group(1))
        return result
834
+
835
+
836
class DirectoryLocator(Locator):
    """
    This class locates distributions in a directory tree.
    """

    def __init__(self, path, **kwargs):
        """
        Initialise an instance.
        :param path: The root of the directory tree to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * recursive - if True (the default), subdirectories are
                         recursed into. If False, only the top-level directory
                         is searched,
        """
        self.recursive = kwargs.pop('recursive', True)
        super(DirectoryLocator, self).__init__(**kwargs)
        path = os.path.abspath(path)
        if not os.path.isdir(path):  # pragma: no cover
            raise DistlibException('Not a directory: %r' % path)
        self.base_dir = path

    def should_include(self, filename, parent):
        """
        Should a filename be considered as a candidate for a distribution
        archive? As well as the filename, the directory which contains it
        is provided, though not used by the current implementation.
        """
        return filename.endswith(self.downloadable_extensions)

    def _iter_file_urls(self):
        """
        Yield a ``file://`` URL for every candidate archive under base_dir,
        honouring the ``recursive`` flag.
        """
        for root, dirs, files in os.walk(self.base_dir):
            for fn in files:
                if self.should_include(fn, root):
                    full = os.path.join(root, fn)
                    yield urlunparse(('file', '',
                                      pathname2url(os.path.abspath(full)),
                                      '', '', ''))
            if not self.recursive:
                break

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        for url in self._iter_file_urls():
            info = self.convert_url_to_download_info(url, name)
            if info:
                self._update_version_data(result, info)
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for url in self._iter_file_urls():
            info = self.convert_url_to_download_info(url, None)
            if info:
                result.add(info['name'])
        return result
900
+
901
+
902
class JSONLocator(Locator):
    """
    This locator uses special extended metadata (not available on PyPI) and is
    the basis of performant dependency resolution in distlib. Other locators
    require archive downloads before dependencies can be determined! As you
    might imagine, that can be slow.
    """

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        raise NotImplementedError('Not available from this locator')

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        data = get_project_data(name)
        if not data:
            # No metadata available - return the empty skeleton.
            return result
        for info in data.get('files', []):
            # Only source distributions are considered here.
            if info['ptype'] != 'sdist' or info['pyversion'] != 'source':
                continue
            # We don't store summary in project metadata as it makes
            # the data bigger for no benefit during dependency
            # resolution
            dist = make_dist(data['name'], info['version'],
                             summary=data.get('summary',
                                              'Placeholder for summary'),
                             scheme=self.scheme)
            md = dist.metadata
            md.source_url = info['url']
            # TODO SHA256 digest
            if 'digest' in info and info['digest']:
                dist.digest = ('md5', info['digest'])
            md.dependencies = info.get('requirements', {})
            dist.exports = info.get('exports', {})
            result[dist.version] = dist
            result['urls'].setdefault(dist.version, set()).add(info['url'])
        return result
939
+
940
+
941
class DistPathLocator(Locator):
    """
    This locator finds installed distributions in a path. It can be useful for
    adding to an :class:`AggregatingLocator`.
    """

    def __init__(self, distpath, **kwargs):
        """
        Initialise an instance.

        :param distpath: A :class:`DistributionPath` instance to search.
        """
        super(DistPathLocator, self).__init__(**kwargs)
        assert isinstance(distpath, DistributionPath)
        self.distpath = distpath

    def _get_project(self, name):
        dist = self.distpath.get_distribution(name)
        if dist is None:
            # Nothing installed under this name.
            return {'urls': {}, 'digests': {}}
        return {
            dist.version: dist,
            'urls': {dist.version: set([dist.source_url])},
            'digests': {dist.version: set([None])},
        }
967
+
968
+
969
class AggregatingLocator(Locator):
    """
    This class allows you to chain and/or merge a list of locators.
    """

    def __init__(self, *locators, **kwargs):
        """
        Initialise an instance.

        :param locators: The list of locators to search.
        :param kwargs: Passed to the superclass constructor,
                       except for:
                       * merge - if False (the default), the first successful
                         search from any of the locators is returned. If True,
                         the results from all locators are merged (this can be
                         slow).
        """
        self.merge = kwargs.pop('merge', False)
        self.locators = locators
        super(AggregatingLocator, self).__init__(**kwargs)

    def clear_cache(self):
        # Clear both our own cache and the caches of all child locators.
        super(AggregatingLocator, self).clear_cache()
        for locator in self.locators:
            locator.clear_cache()

    def _set_scheme(self, value):
        # Setting the scheme propagates it to all child locators so they
        # all use the same version-matching rules.
        self._scheme = value
        for locator in self.locators:
            locator.scheme = value

    # Re-use the base class getter, but override the setter to fan out.
    scheme = property(Locator.scheme.fget, _set_scheme)

    def _get_project(self, name):
        result = {}
        for locator in self.locators:
            d = locator.get_project(name)
            if d:
                if self.merge:
                    # Merge mode: combine the 'urls' and 'digests' maps from
                    # every locator. Snapshot the current maps first, since
                    # result.update(d) may replace them with d's maps.
                    files = result.get('urls', {})
                    digests = result.get('digests', {})
                    # next line could overwrite result['urls'], result['digests']
                    result.update(d)
                    df = result.get('urls')
                    if files and df:
                        for k, v in files.items():
                            if k in df:
                                df[k] |= v
                            else:
                                df[k] = v
                    dd = result.get('digests')
                    if digests and dd:
                        dd.update(digests)
                else:
                    # See issue #18. If any dists are found and we're looking
                    # for specific constraints, we only return something if
                    # a match is found. For example, if a DirectoryLocator
                    # returns just foo (1.0) while we're looking for
                    # foo (>= 2.0), we'll pretend there was nothing there so
                    # that subsequent locators can be queried. Otherwise we
                    # would just return foo (1.0) which would then lead to a
                    # failure to find foo (>= 2.0), because other locators
                    # weren't searched. Note that this only matters when
                    # merge=False.
                    if self.matcher is None:
                        found = True
                    else:
                        found = False
                        for k in d:
                            if self.matcher.match(k):
                                found = True
                                break
                    if found:
                        result = d
                        break
        return result

    def get_distribution_names(self):
        """
        Return all the distribution names known to this locator.
        """
        result = set()
        for locator in self.locators:
            try:
                result |= locator.get_distribution_names()
            except NotImplementedError:
                # Some locators (e.g. JSONLocator) can't enumerate names.
                pass
        return result
1056
+
1057
+
1058
# We use a legacy scheme simply because most of the dists on PyPI use legacy
# versions which don't conform to PEP 440.
# Module-level singleton: shared by everyone who imports this module, so its
# page cache persists for the life of the process.
default_locator = AggregatingLocator(
    # JSONLocator(), # don't use as PEP 426 is withdrawn
    SimpleScrapingLocator('https://pypi.org/simple/',
                          timeout=3.0),
    scheme='legacy')

# Convenience alias: module-level ``locate`` delegates to the default locator.
locate = default_locator.locate
1067
+
1068
+
1069
class DependencyFinder(object):
    """
    Locate dependencies for distributions.
    """

    def __init__(self, locator=None):
        """
        Initialise an instance, using the specified locator
        to locate distributions.
        """
        self.locator = locator or default_locator
        self.scheme = get_scheme(self.locator.scheme)

    def add_distribution(self, dist):
        """
        Add a distribution to the finder. This will update internal information
        about who provides what.
        :param dist: The distribution to add.
        """
        logger.debug('adding distribution %s', dist)
        name = dist.key
        self.dists_by_name[name] = dist
        self.dists[(name, dist.version)] = dist
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            self.provided.setdefault(name, set()).add((version, dist))

    def remove_distribution(self, dist):
        """
        Remove a distribution from the finder. This will update internal
        information about who provides what.
        :param dist: The distribution to remove.
        """
        logger.debug('removing distribution %s', dist)
        name = dist.key
        del self.dists_by_name[name]
        del self.dists[(name, dist.version)]
        for p in dist.provides:
            name, version = parse_name_and_version(p)
            logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
            s = self.provided[name]
            s.remove((version, dist))
            if not s:
                # Drop the key entirely once nothing provides this name.
                del self.provided[name]

    def get_matcher(self, reqt):
        """
        Get a version matcher for a requirement.
        :param reqt: The requirement
        :type reqt: str
        :return: A version matcher (an instance of
                 :class:`distlib.version.Matcher`).
        """
        try:
            matcher = self.scheme.matcher(reqt)
        except UnsupportedVersionError:  # pragma: no cover
            # XXX compat-mode if cannot read the version
            name = reqt.split()[0]
            matcher = self.scheme.matcher(name)
        return matcher

    def find_providers(self, reqt):
        """
        Find the distributions which can fulfill a requirement.

        :param reqt: The requirement.
        :type reqt: str
        :return: A set of distribution which can fulfill the requirement.
        """
        matcher = self.get_matcher(reqt)
        name = matcher.key   # case-insensitive
        result = set()
        provided = self.provided
        if name in provided:
            for version, provider in provided[name]:
                try:
                    match = matcher.match(version)
                except UnsupportedVersionError:
                    match = False

                if match:
                    # NOTE(review): only the first matching provider is
                    # returned (the loop breaks here).
                    result.add(provider)
                    break
        return result

    def try_to_replace(self, provider, other, problems):
        """
        Attempt to replace one provider with another. This is typically used
        when resolving dependencies from multiple sources, e.g. A requires
        (B >= 1.0) while C requires (B >= 1.1).

        For successful replacement, ``provider`` must meet all the requirements
        which ``other`` fulfills.

        :param provider: The provider we are trying to replace with.
        :param other: The provider we're trying to replace.
        :param problems: If False is returned, this will contain what
                         problems prevented replacement. This is currently
                         a tuple of the literal string 'cantreplace',
                         ``provider``, ``other``  and the set of requirements
                         that ``provider`` couldn't fulfill.
        :return: True if we can replace ``other`` with ``provider``, else
                 False.
        """
        rlist = self.reqts[other]
        unmatched = set()
        for s in rlist:
            matcher = self.get_matcher(s)
            if not matcher.match(provider.version):
                unmatched.add(s)
        if unmatched:
            # can't replace other with provider
            problems.add(('cantreplace', provider, other,
                          frozenset(unmatched)))
            result = False
        else:
            # can replace other with provider
            self.remove_distribution(other)
            del self.reqts[other]
            for s in rlist:
                self.reqts.setdefault(provider, set()).add(s)
            self.add_distribution(provider)
            result = True
        return result

    def find(self, requirement, meta_extras=None, prereleases=False):
        """
        Find a distribution and all distributions it depends on.

        :param requirement: The requirement specifying the distribution to
                            find, or a Distribution instance.
        :param meta_extras: A list of meta extras such as :test:, :build: and
                            so on.
        :param prereleases: If ``True``, allow pre-release versions to be
                            returned - otherwise, don't return prereleases
                            unless they're all that's available.

        Return a set of :class:`Distribution` instances and a set of
        problems.

        The distributions returned should be such that they have the
        :attr:`required` attribute set to ``True`` if they were
        from the ``requirement`` passed to ``find()``, and they have the
        :attr:`build_time_dependency` attribute set to ``True`` unless they
        are post-installation dependencies of the ``requirement``.

        The problems should be a tuple consisting of the string
        ``'unsatisfied'`` and the requirement which couldn't be satisfied
        by any distribution known to the locator.
        """

        # Per-call state: reset on every find() invocation.
        self.provided = {}
        self.dists = {}
        self.dists_by_name = {}
        self.reqts = {}

        meta_extras = set(meta_extras or [])
        if ':*:' in meta_extras:
            meta_extras.remove(':*:')
            # :meta: and :run: are implicitly included
            meta_extras |= set([':test:', ':build:', ':dev:'])

        if isinstance(requirement, Distribution):
            dist = odist = requirement
            logger.debug('passed %s as requirement', odist)
        else:
            dist = odist = self.locator.locate(requirement,
                                               prereleases=prereleases)
            if dist is None:
                raise DistlibException('Unable to locate %r' % requirement)
            logger.debug('located %s', odist)
        dist.requested = True
        problems = set()
        todo = set([dist])
        install_dists = set([odist])
        # Breadth-unordered worklist: process each dist once, queueing any
        # newly-located providers of its requirements.
        while todo:
            dist = todo.pop()
            name = dist.key     # case-insensitive
            if name not in self.dists_by_name:
                self.add_distribution(dist)
            else:
                # import pdb; pdb.set_trace()
                other = self.dists_by_name[name]
                if other != dist:
                    self.try_to_replace(dist, other, problems)

            ireqts = dist.run_requires | dist.meta_requires
            sreqts = dist.build_requires
            ereqts = set()
            if meta_extras and dist in install_dists:
                for key in ('test', 'build', 'dev'):
                    e = ':%s:' % key
                    if e in meta_extras:
                        ereqts |= getattr(dist, '%s_requires' % key)
            all_reqts = ireqts | sreqts | ereqts
            for r in all_reqts:
                providers = self.find_providers(r)
                if not providers:
                    logger.debug('No providers found for %r', r)
                    provider = self.locator.locate(r, prereleases=prereleases)
                    # If no provider is found and we didn't consider
                    # prereleases, consider them now.
                    if provider is None and not prereleases:
                        provider = self.locator.locate(r, prereleases=True)
                    if provider is None:
                        logger.debug('Cannot satisfy %r', r)
                        problems.add(('unsatisfied', r))
                    else:
                        n, v = provider.key, provider.version
                        if (n, v) not in self.dists:
                            todo.add(provider)
                        providers.add(provider)
                        if r in ireqts and dist in install_dists:
                            install_dists.add(provider)
                            logger.debug('Adding %s to install_dists',
                                         provider.name_and_version)
                for p in providers:
                    name = p.key
                    if name not in self.dists_by_name:
                        self.reqts.setdefault(p, set()).add(r)
                    else:
                        other = self.dists_by_name[name]
                        if other != p:
                            # see if other can be replaced by p
                            self.try_to_replace(p, other, problems)

        dists = set(self.dists.values())
        for dist in dists:
            dist.build_time_dependency = dist not in install_dists
            if dist.build_time_dependency:
                logger.debug('%s is a build-time dependency only.',
                             dist.name_and_version)
        logger.debug('find done for %s', odist)
        return dists, problems
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/t64-arm.exe ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ebc4c06b7d95e74e315419ee7e88e1d0f71e9e9477538c00a93a9ff8c66a6cfc
3
+ size 182784
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/t64.exe ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:81a618f21cb87db9076134e70388b6e9cb7c2106739011b6a51772d22cae06b7
3
+ size 108032
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/util.py ADDED
@@ -0,0 +1,2025 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # Copyright (C) 2012-2023 The Python Software Foundation.
3
+ # See LICENSE.txt and CONTRIBUTORS.txt.
4
+ #
5
+ import codecs
6
+ from collections import deque
7
+ import contextlib
8
+ import csv
9
+ from glob import iglob as std_iglob
10
+ import io
11
+ import json
12
+ import logging
13
+ import os
14
+ import py_compile
15
+ import re
16
+ import socket
17
+ try:
18
+ import ssl
19
+ except ImportError: # pragma: no cover
20
+ ssl = None
21
+ import subprocess
22
+ import sys
23
+ import tarfile
24
+ import tempfile
25
+ import textwrap
26
+
27
+ try:
28
+ import threading
29
+ except ImportError: # pragma: no cover
30
+ import dummy_threading as threading
31
+ import time
32
+
33
+ from . import DistlibException
34
+ from .compat import (string_types, text_type, shutil, raw_input, StringIO,
35
+ cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
36
+ HTTPHandler, BaseConfigurator, valid_ident,
37
+ Container, configparser, URLError, ZipFile, fsdecode,
38
+ unquote, urlparse)
39
+
40
+ logger = logging.getLogger(__name__)
41
+
42
+ #
43
+ # Requirement parsing code as per PEP 508
44
+ #
45
+
46
+ IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
47
+ VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
48
+ COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
49
+ MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
50
+ OR = re.compile(r'^or\b\s*')
51
+ AND = re.compile(r'^and\b\s*')
52
+ NON_SPACE = re.compile(r'(\S+)\s*')
53
+ STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
54
+
55
+
56
def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).

    Implemented as a small recursive-descent parser: each helper consumes its
    production from the front of the remaining text and returns a
    (parsed value, remaining text) pair.
    """

    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            oq = '\'"'.replace(q, '')  # the "other" quote char, allowed inside the literal
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' %
                                          remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                # while-loop exhausted the input without hitting the closing quote
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip()  # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        # a parenthesised sub-expression, or a var [op var]... comparison chain
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        # one or more marker_expr joined by 'and' (binds tighter than 'or')
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        # one or more marker_and joined by 'or' (lowest precedence)
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)
145
+
146
+
147
def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.

    Returns None for a blank or comment-only line. The returned Container has
    attributes ``name``, ``extras``, ``constraints``, ``marker``, ``url`` and
    ``requirement``.

    :raises SyntaxError: if the requirement is malformed.
    """
    remaining = req.strip()
    if not remaining or remaining.startswith('#'):
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    if remaining and remaining[0] == '[':
        # parse the comma-separated extras list inside [...]
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' %
                                              ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would break things
                        # See issue #148
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' %
                                              ver_remaining)
                if not versions:
                    versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                versions, remaining = get_versions(remaining)
            else:
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' %
                                      remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    if remaining:
        # an environment marker follows after ';'
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()

        mark_expr, remaining = parse_marker(remaining)

    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    # canonical "name op version, ..." form of the requirement
    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(
            ['%s %s' % con for con in versions]))
    return Container(name=distname,
                     extras=extras,
                     constraints=versions,
                     marker=mark_expr,
                     url=uri,
                     requirement=rs)
278
+
279
+
280
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files.

    :param resources_root: Directory under which the glob patterns are applied.
    :param rules: Iterable of (base, suffix, dest) triples. base/suffix are
                  glob patterns; dest is the destination prefix, or None to
                  remove entries matched by a previous rule.
    :return: Dict mapping each matched resource file (relative, /-separated)
             to its destination path.
    """

    def get_rel_path(root, path):
        # normalizes and returns a lstripped-/-separated path
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        # NOTE(review): iglob here is presumably the extended glob helper
        # defined elsewhere in this module (supports '**'), not glob.iglob —
        # confirm against the rest of the file.
        for abs_base in iglob(prefix):
            abs_glob = os.path.join(abs_base, suffix)
            for abs_path in iglob(abs_glob):
                resource_file = get_rel_path(resources_root, abs_path)
                if dest is None:  # remove the entry if it was here
                    destinations.pop(resource_file, None)
                else:
                    rel_path = get_rel_path(abs_base, abs_path)
                    rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations
304
+
305
+
306
def in_venv():
    """Return True if running inside a virtual environment.

    Detects virtualenv-style environments (which set ``sys.real_prefix``) as
    well as PEP 405 venvs (where ``sys.base_prefix`` differs from
    ``sys.prefix``).
    """
    if hasattr(sys, 'real_prefix'):
        # virtualenv venvs
        return True
    # PEP 405 venvs
    return sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
314
+
315
+
316
def get_executable():
    """Return the path of the running Python interpreter as text.

    ``sys.executable`` is returned as-is (deliberately not normcased — see
    issue #143), decoded with fsdecode() only when it is not already a text
    string. The old ``__PYVENV_LAUNCHER__`` handling for OS X is no longer
    needed, as sys.executable now always points at the stub there.
    """
    executable = sys.executable
    if isinstance(executable, text_type):
        return executable
    return fsdecode(executable)
332
+
333
+
334
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """Prompt the user repeatedly until an acceptable answer is given.

    :param prompt: The prompt shown to the user on each attempt.
    :param allowed_chars: Characters accepted as answers; the first character
                          of the input, lower-cased, is compared against them.
    :param error_prompt: Optional message prepended to the prompt after an
                         unacceptable answer.
    :param default: Optional answer assumed when the user enters nothing.
    :return: The accepted (lower-cased) answer character.
    """
    p = prompt
    while True:
        s = raw_input(p)
        p = prompt  # reset to the plain prompt for the next round
        if not s and default:
            s = default
        if s:
            c = s[0].lower()
            if c in allowed_chars:
                break
            if error_prompt:
                p = '%c: %s\n%s' % (c, error_prompt, prompt)
    return c
348
+
349
+
350
def extract_by_key(d, keys):
    """Return a new dict containing only *keys* taken from *d*.

    *keys* may be a whitespace-separated string or a sequence of key names;
    keys absent from *d* are silently ignored.
    """
    if isinstance(keys, string_types):
        keys = keys.split()
    return {key: d[key] for key in keys if key in d}
358
+
359
+
360
def read_exports(stream):
    """Read an exports (entry-points) mapping from *stream*.

    The stream may contain either the legacy JSON metadata form (an
    'extensions' -> 'python.exports' -> 'exports' mapping) or an INI-style
    file; JSON is tried first, with a fallback to configparser.

    :return: Dict mapping category names to {name: ExportEntry} dicts.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        # replace the raw string values with parsed ExportEntry objects
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # not JSON (or not the expected shape) — rewind and parse as INI below
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # configparser API differs between Python versions
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # tolerate uniformly-indented data by dedenting and retrying
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            # entry.dist = self
            entries[name] = entry
    return result
405
+
406
+
407
def write_exports(exports, stream):
    """Write an exports mapping to *stream* in INI format.

    :param exports: Dict mapping category names to {name: ExportEntry} dicts.
    :param stream: A binary stream on Python 3 (it is wrapped in a UTF-8
                   writer), or a text stream on Python 2.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for category, entries in exports.items():
        # TODO check category and entries for valid values
        cp.add_section(category)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(category, entry.name, value)
    cp.write(stream)
424
+
425
+
426
@contextlib.contextmanager
def tempdir():
    """Context manager yielding a fresh temporary directory.

    The directory and its contents are removed when the block exits, even on
    error.
    """
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
433
+
434
+
435
@contextlib.contextmanager
def chdir(d):
    """Context manager that runs its body with *d* as the current working
    directory, restoring the previous one on exit."""
    saved = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(saved)
443
+
444
+
445
@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Context manager that temporarily sets the global socket default
    timeout to *seconds*, restoring the previous value on exit."""
    saved = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(saved)
453
+
454
+
455
class cached_property(object):
    """Descriptor that computes a method's value once and caches it.

    The computed value is stored on the instance under the wrapped function's
    name, so subsequent attribute lookups bypass this descriptor entirely.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls=None):
        # Accessed on the class itself: return the descriptor object.
        if obj is None:
            return self
        value = self.func(obj)
        # Shadow the descriptor with the computed value on the instance
        # (object.__setattr__ sidesteps any overridden __setattr__).
        object.__setattr__(obj, self.func.__name__, value)
        return value
469
+
470
+
471
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    # Nothing to do on Unix-like systems, or for an empty path.
    if os.sep == '/' or not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)
    # Drop any '.' components before rejoining with the native separator.
    parts = [part for part in pathname.split('/') if part != os.curdir]
    if not parts:
        return os.curdir
    return os.path.join(*parts)
496
+
497
+
498
class FileOperator(object):
    """Performs filesystem operations (copy, write, byte-compile, remove)
    with support for a dry-run mode and optional recording of files written
    and directories created, enabling commit/rollback semantics."""

    def __init__(self, dry_run=False):
        # When dry_run is True, operations are logged but not carried out.
        self.dry_run = dry_run
        # Directories already verified/created by ensure_dir.
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        # Reset recording state; recording stays off until a caller sets
        # self.record = True.
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        # Remember *path* as a written file when recording is enabled.
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise DistlibException if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                # refuse to clobber symlinks or non-regular files
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        # Copy the contents of a stream to *outfile*; binary when encoding is
        # None, text (encoded) otherwise.
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        # Write *data* (bytes) to *path*, replacing any existing file.
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            if os.path.exists(path):
                os.remove(path)
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        # Write *data* (text) to *path* using *encoding*.
        self.write_binary_file(path, data.encode(encoding))

    def set_mode(self, bits, mask, files):
        # Set permission *bits* (limited by *mask*) on each of *files*; a
        # no-op on non-POSIX platforms.
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    # Convenience alias: mark files as executable (r-x for all).
    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        # Create *path* and any missing parents, honouring dry-run and
        # recording; paths seen before are skipped via self.ensured.
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self,
                     path,
                     optimize=False,
                     force=False,
                     prefix=None,
                     hashed_invalidation=False):
        # Byte-compile *path* to its cache location (via cache_from_source),
        # unless the cached file is already up to date (or force is set).
        # Returns the cache path.
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    # report source paths relative to *prefix* in errors
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
                compile_kwargs = {}
                if hashed_invalidation and hasattr(py_compile,
                                                   'PycInvalidationMode'):
                    # PEP 552 hash-based pyc invalidation (Python 3.7+)
                    compile_kwargs[
                        'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
                py_compile.compile(path, dpath, diagpath, True,
                                   **compile_kwargs)  # raise error
        self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        # Remove a file, link or directory tree if it exists, honouring
        # dry-run and keeping the recorded state in sync.
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        # Walk up from *path* to the nearest existing ancestor and report
        # whether that ancestor is writable.
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        # Undo recorded changes: delete written files, then remove created
        # directories deepest-first, and reset the recording state.
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)  # should fail if non-empty
        self._init_record()
689
+
690
+
691
def resolve(module_name, dotted_path):
    """Resolve *dotted_path* relative to the module named *module_name*.

    If *dotted_path* is None, the module object itself is returned; otherwise
    each dot-separated component is looked up with getattr, starting from the
    module. The module is imported if not already present in sys.modules.

    :raises ImportError: if the module cannot be imported.
    :raises AttributeError: if a path component cannot be resolved.
    """
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        __import__(module_name)
        # Bug fix: __import__('a.b') returns the top-level package 'a', not
        # the submodule 'a.b'. After a successful import the dotted module is
        # registered in sys.modules, so fetch it from there to ensure the
        # getattr chain below starts from the right module.
        mod = sys.modules[module_name]
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result
704
+
705
+
706
class ExportEntry(object):
    """A parsed export (entry point) of the form ``name = prefix:suffix [flags]``."""

    def __init__(self, name, prefix, suffix, flags):
        self.name = name      # the export's public name
        self.prefix = prefix  # dotted module path
        self.suffix = suffix  # attribute path within the module, or None
        self.flags = flags    # list of flag strings

    @cached_property
    def value(self):
        """The object this entry refers to, resolved lazily on first access."""
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return ((self.name, self.prefix, self.suffix, self.flags) ==
                (other.name, other.prefix, other.suffix, other.flags))

    __hash__ = object.__hash__
732
+
733
+
734
# Matches an export specification of the form
#   name = dotted.callable[:attr] [flag1, flag2=value, ...]
# capturing groups 'name', 'callable' and the optional 'flags' list.
ENTRY_RE = re.compile(
    r'''(?P<name>([^\[]\S*))
    \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
    \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
    ''', re.VERBOSE)
739
+
740
+
741
def get_export_entry(specification):
    """Parse an export specification ('name = prefix[:suffix] [flags]').

    :return: An ExportEntry, or None when *specification* doesn't look like
             an export at all.
    :raises DistlibException: when it looks like an export but is malformed.
    """
    m = ENTRY_RE.search(specification)
    if not m:
        result = None
        # brackets without a matching entry indicate a malformed export
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
    else:
        d = m.groupdict()
        name = d['name']
        path = d['callable']
        colons = path.count(':')
        if colons == 0:
            # bare module path, no attribute part
            prefix, suffix = path, None
        else:
            if colons != 1:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            prefix, suffix = path.split(':')
        flags = d['flags']
        if flags is None:
            # brackets present but the flags group didn't match -> malformed
            if '[' in specification or ']' in specification:
                raise DistlibException("Invalid specification "
                                       "'%s'" % specification)
            flags = []
        else:
            flags = [f.strip() for f in flags.split(',')]
        result = ExportEntry(name, prefix, suffix, flags)
    return result
770
+
771
+
772
def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        # last resort: a fresh temporary directory
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)
811
+
812
+
813
def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, rest = os.path.splitdrive(os.path.abspath(path))
    # str.replace on an empty drive string is a harmless no-op
    return drive.replace(':', '---') + rest.replace(os.sep, '--') + '.cache'
828
+
829
+
830
def ensure_slash(s):
    """Return *s* with a trailing '/' appended if one is not already present."""
    return s if s.endswith('/') else s + '/'
834
+
835
+
836
def parse_credentials(netloc):
    """Split ``user[:password]@host`` credentials out of a netloc.

    :return: (username, password, netloc) where username/password are
             percent-decoded strings or None when absent, and netloc has the
             credential prefix removed.
    """
    username = password = None
    if '@' in netloc:
        creds, netloc = netloc.rsplit('@', 1)
        if ':' in creds:
            username, password = creds.split(':', 1)
        else:
            username = creds
        if username:
            username = unquote(username)
        if password:
            password = unquote(password)
    return username, password, netloc
849
+
850
+
851
def get_process_umask():
    """Return the current process umask without permanently changing it.

    os.umask both sets a new mask and returns the old one, so the value is
    read by setting a temporary mask and immediately restoring the original.
    """
    current = os.umask(0o22)
    os.umask(current)
    return current
855
+
856
+
857
def is_string_sequence(seq):
    """Return True if every element of the non-empty sequence *seq* is a
    string; an empty *seq* trips the assertion below."""
    result = True
    index = None
    for index, item in enumerate(seq):
        if not isinstance(item, string_types):
            result = False
            break
    assert index is not None  # guards against an empty sequence
    return result
866
+
867
+
868
# Patterns for taking apart archive file names: "<name>-<version>" plus an
# optional "-pyX.Y" marker.
PROJECT_NAME_AND_VERSION = re.compile(
    '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
    '([a-z0-9_.+-]+)', re.I)
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')


def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    # Peel off a trailing '-pyX.Y' marker, if present.
    m = PYTHON_VERSION.search(filename)
    if m:
        pyver = m.group(1)
        filename = filename[:m.start()]
    # Prefer splitting at the known project name when one is supplied.
    if project_name and len(filename) > len(project_name) + 1:
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            end = m.end()
            return filename[:end], filename[end + 1:], pyver
    # Fall back to the generic name-version pattern.
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None
897
+
898
+
899
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')


def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    :raises DistlibException: if *p* is not in the expected form.
    """
    m = NAME_VERSION_RE.match(p)
    if not m:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    return groups['name'].strip().lower(), groups['ver']
918
+
919
+
920
def get_extras(requested, available):
    """Compute the effective set of extras from a request.

    *requested* may contain '*' (select every available extra) and '-name'
    entries (deselect a previously-selected extra); a bare '-' is kept
    verbatim. Undeclared extras are still processed but logged as warnings.
    """
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.discard('*')
        result |= available
    for item in requested:
        if item == '-':
            result.add(item)
        elif item.startswith('-'):
            unwanted = item[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            result.discard(unwanted)
        else:
            if item not in available:
                logger.warning('undeclared extra: %s' % item)
            result.add(item)
    return result
941
+
942
+
943
+ #
944
+ # Extended metadata functionality
945
+ #
946
+
947
+
948
def _get_external_data(url):
    """Fetch and decode a JSON document from *url*.

    Best-effort: any failure (network error, non-JSON content type, decode
    error) is logged and an empty dict is returned.
    """
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        if not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            reader = codecs.getreader('utf-8')(resp)
            # data = reader.read().decode('utf-8')
            # result = json.loads(data)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result
967
+
968
+
969
# Base URL of distlib's supplementary per-project metadata JSON service.
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'
970
+
971
+
972
def get_project_data(name):
    """Fetch the external project metadata JSON for *name* (best-effort;
    returns an empty dict on failure)."""
    rel_url = '%s/%s/project.json' % (name[0].upper(), name)
    return _get_external_data(urljoin(_external_data_base_url, rel_url))
977
+
978
+
979
def get_package_data(name, version):
    """Fetch the external package metadata JSON for *name*/*version*
    (best-effort; returns an empty dict on failure)."""
    rel_url = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    return _get_external_data(urljoin(_external_data_base_url, rel_url))
983
+
984
+
985
class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        # warn when group/other permission bits are set on the cache dir
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache.
        """
        not_removed = []
        for name in os.listdir(self.base):
            full = os.path.join(self.base, name)
            try:
                if os.path.islink(full) or os.path.isfile(full):
                    os.remove(full)
                elif os.path.isdir(full):
                    shutil.rmtree(full)
            except Exception:
                not_removed.append(full)
        return not_removed
1027
+
1028
+
1029
class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """

    def __init__(self):
        # event name -> deque of subscriber callables
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        listeners = self._subscribers.setdefault(event, deque())
        if append:
            listeners.append(subscriber)
        else:
            listeners.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        if event not in self._subscribers:
            raise ValueError('No subscribers: %r' % event)
        self._subscribers[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        result = []
        for listener in self.get_subscribers(event):
            try:
                outcome = listener(event, *args, **kwargs)
            except Exception:
                # a failing subscriber must not break the others
                logger.exception('Exception during event publication')
                outcome = None
            result.append(outcome)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event,
                     args, kwargs, result)
        return result
1098
+
1099
+
1100
+ #
1101
+ # Simple sequencing
1102
+ #
1103
+ class Sequencer(object):
1104
+
1105
+ def __init__(self):
1106
+ self._preds = {}
1107
+ self._succs = {}
1108
+ self._nodes = set() # nodes with no preds/succs
1109
+
1110
+ def add_node(self, node):
1111
+ self._nodes.add(node)
1112
+
1113
+ def remove_node(self, node, edges=False):
1114
+ if node in self._nodes:
1115
+ self._nodes.remove(node)
1116
+ if edges:
1117
+ for p in set(self._preds.get(node, ())):
1118
+ self.remove(p, node)
1119
+ for s in set(self._succs.get(node, ())):
1120
+ self.remove(node, s)
1121
+ # Remove empties
1122
+ for k, v in list(self._preds.items()):
1123
+ if not v:
1124
+ del self._preds[k]
1125
+ for k, v in list(self._succs.items()):
1126
+ if not v:
1127
+ del self._succs[k]
1128
+
1129
+ def add(self, pred, succ):
1130
+ assert pred != succ
1131
+ self._preds.setdefault(succ, set()).add(pred)
1132
+ self._succs.setdefault(pred, set()).add(succ)
1133
+
1134
+ def remove(self, pred, succ):
1135
+ assert pred != succ
1136
+ try:
1137
+ preds = self._preds[succ]
1138
+ succs = self._succs[pred]
1139
+ except KeyError: # pragma: no cover
1140
+ raise ValueError('%r not a successor of anything' % succ)
1141
+ try:
1142
+ preds.remove(pred)
1143
+ succs.remove(succ)
1144
+ except KeyError: # pragma: no cover
1145
+ raise ValueError('%r not a successor of %r' % (succ, pred))
1146
+
1147
+ def is_step(self, step):
1148
+ return (step in self._preds or step in self._succs
1149
+ or step in self._nodes)
1150
+
1151
+ def get_steps(self, final):
1152
+ if not self.is_step(final):
1153
+ raise ValueError('Unknown: %r' % final)
1154
+ result = []
1155
+ todo = []
1156
+ seen = set()
1157
+ todo.append(final)
1158
+ while todo:
1159
+ step = todo.pop(0)
1160
+ if step in seen:
1161
+ # if a step was already seen,
1162
+ # move it to the end (so it will appear earlier
1163
+ # when reversed on return) ... but not for the
1164
+ # final step, as that would be confusing for
1165
+ # users
1166
+ if step != final:
1167
+ result.remove(step)
1168
+ result.append(step)
1169
+ else:
1170
+ seen.add(step)
1171
+ result.append(step)
1172
+ preds = self._preds.get(step, ())
1173
+ todo.extend(preds)
1174
+ return reversed(result)
1175
+
1176
+ @property
1177
+ def strong_connections(self):
1178
+ # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
1179
+ index_counter = [0]
1180
+ stack = []
1181
+ lowlinks = {}
1182
+ index = {}
1183
+ result = []
1184
+
1185
+ graph = self._succs
1186
+
1187
+ def strongconnect(node):
1188
+ # set the depth index for this node to the smallest unused index
1189
+ index[node] = index_counter[0]
1190
+ lowlinks[node] = index_counter[0]
1191
+ index_counter[0] += 1
1192
+ stack.append(node)
1193
+
1194
+ # Consider successors
1195
+ try:
1196
+ successors = graph[node]
1197
+ except Exception:
1198
+ successors = []
1199
+ for successor in successors:
1200
+ if successor not in lowlinks:
1201
+ # Successor has not yet been visited
1202
+ strongconnect(successor)
1203
+ lowlinks[node] = min(lowlinks[node], lowlinks[successor])
1204
+ elif successor in stack:
1205
+ # the successor is in the stack and hence in the current
1206
+ # strongly connected component (SCC)
1207
+ lowlinks[node] = min(lowlinks[node], index[successor])
1208
+
1209
+ # If `node` is a root node, pop the stack and generate an SCC
1210
+ if lowlinks[node] == index[node]:
1211
+ connected_component = []
1212
+
1213
+ while True:
1214
+ successor = stack.pop()
1215
+ connected_component.append(successor)
1216
+ if successor == node:
1217
+ break
1218
+ component = tuple(connected_component)
1219
+ # storing the result
1220
+ result.append(component)
1221
+
1222
+ for node in graph:
1223
+ if node not in lowlinks:
1224
+ strongconnect(node)
1225
+
1226
+ return result
1227
+
1228
+ @property
1229
+ def dot(self):
1230
+ result = ['digraph G {']
1231
+ for succ in self._preds:
1232
+ preds = self._preds[succ]
1233
+ for pred in preds:
1234
+ result.append(' %s -> %s;' % (pred, succ))
1235
+ for node in self._nodes:
1236
+ result.append(' %s;' % node)
1237
+ result.append('}')
1238
+ return '\n'.join(result)
1239
+
1240
+
1241
+ #
1242
+ # Unarchiving functionality for zip, tar, tgz, tbz, whl
1243
+ #
1244
+
1245
+ ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz',
1246
+ '.whl')
1247
+
1248
+
1249
+ def unarchive(archive_filename, dest_dir, format=None, check=True):
1250
+
1251
+ def check_path(path):
1252
+ if not isinstance(path, text_type):
1253
+ path = path.decode('utf-8')
1254
+ p = os.path.abspath(os.path.join(dest_dir, path))
1255
+ if not p.startswith(dest_dir) or p[plen] != os.sep:
1256
+ raise ValueError('path outside destination: %r' % p)
1257
+
1258
+ dest_dir = os.path.abspath(dest_dir)
1259
+ plen = len(dest_dir)
1260
+ archive = None
1261
+ if format is None:
1262
+ if archive_filename.endswith(('.zip', '.whl')):
1263
+ format = 'zip'
1264
+ elif archive_filename.endswith(('.tar.gz', '.tgz')):
1265
+ format = 'tgz'
1266
+ mode = 'r:gz'
1267
+ elif archive_filename.endswith(('.tar.bz2', '.tbz')):
1268
+ format = 'tbz'
1269
+ mode = 'r:bz2'
1270
+ elif archive_filename.endswith('.tar'):
1271
+ format = 'tar'
1272
+ mode = 'r'
1273
+ else: # pragma: no cover
1274
+ raise ValueError('Unknown format for %r' % archive_filename)
1275
+ try:
1276
+ if format == 'zip':
1277
+ archive = ZipFile(archive_filename, 'r')
1278
+ if check:
1279
+ names = archive.namelist()
1280
+ for name in names:
1281
+ check_path(name)
1282
+ else:
1283
+ archive = tarfile.open(archive_filename, mode)
1284
+ if check:
1285
+ names = archive.getnames()
1286
+ for name in names:
1287
+ check_path(name)
1288
+ if format != 'zip' and sys.version_info[0] < 3:
1289
+ # See Python issue 17153. If the dest path contains Unicode,
1290
+ # tarfile extraction fails on Python 2.x if a member path name
1291
+ # contains non-ASCII characters - it leads to an implicit
1292
+ # bytes -> unicode conversion using ASCII to decode.
1293
+ for tarinfo in archive.getmembers():
1294
+ if not isinstance(tarinfo.name, text_type):
1295
+ tarinfo.name = tarinfo.name.decode('utf-8')
1296
+
1297
+ # Limit extraction of dangerous items, if this Python
1298
+ # allows it easily. If not, just trust the input.
1299
+ # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
1300
+ def extraction_filter(member, path):
1301
+ """Run tarfile.tar_filter, but raise the expected ValueError"""
1302
+ # This is only called if the current Python has tarfile filters
1303
+ try:
1304
+ return tarfile.tar_filter(member, path)
1305
+ except tarfile.FilterError as exc:
1306
+ raise ValueError(str(exc))
1307
+
1308
+ archive.extraction_filter = extraction_filter
1309
+
1310
+ archive.extractall(dest_dir)
1311
+
1312
+ finally:
1313
+ if archive:
1314
+ archive.close()
1315
+
1316
+
1317
+ def zip_dir(directory):
1318
+ """zip a directory tree into a BytesIO object"""
1319
+ result = io.BytesIO()
1320
+ dlen = len(directory)
1321
+ with ZipFile(result, "w") as zf:
1322
+ for root, dirs, files in os.walk(directory):
1323
+ for name in files:
1324
+ full = os.path.join(root, name)
1325
+ rel = root[dlen:]
1326
+ dest = os.path.join(rel, name)
1327
+ zf.write(full, dest)
1328
+ return result
1329
+
1330
+
1331
+ #
1332
+ # Simple progress bar
1333
+ #
1334
+
1335
+ UNITS = ('', 'K', 'M', 'G', 'T', 'P')
1336
+
1337
+
1338
+ class Progress(object):
1339
+ unknown = 'UNKNOWN'
1340
+
1341
+ def __init__(self, minval=0, maxval=100):
1342
+ assert maxval is None or maxval >= minval
1343
+ self.min = self.cur = minval
1344
+ self.max = maxval
1345
+ self.started = None
1346
+ self.elapsed = 0
1347
+ self.done = False
1348
+
1349
+ def update(self, curval):
1350
+ assert self.min <= curval
1351
+ assert self.max is None or curval <= self.max
1352
+ self.cur = curval
1353
+ now = time.time()
1354
+ if self.started is None:
1355
+ self.started = now
1356
+ else:
1357
+ self.elapsed = now - self.started
1358
+
1359
+ def increment(self, incr):
1360
+ assert incr >= 0
1361
+ self.update(self.cur + incr)
1362
+
1363
+ def start(self):
1364
+ self.update(self.min)
1365
+ return self
1366
+
1367
+ def stop(self):
1368
+ if self.max is not None:
1369
+ self.update(self.max)
1370
+ self.done = True
1371
+
1372
+ @property
1373
+ def maximum(self):
1374
+ return self.unknown if self.max is None else self.max
1375
+
1376
+ @property
1377
+ def percentage(self):
1378
+ if self.done:
1379
+ result = '100 %'
1380
+ elif self.max is None:
1381
+ result = ' ?? %'
1382
+ else:
1383
+ v = 100.0 * (self.cur - self.min) / (self.max - self.min)
1384
+ result = '%3d %%' % v
1385
+ return result
1386
+
1387
+ def format_duration(self, duration):
1388
+ if (duration <= 0) and self.max is None or self.cur == self.min:
1389
+ result = '??:??:??'
1390
+ # elif duration < 1:
1391
+ # result = '--:--:--'
1392
+ else:
1393
+ result = time.strftime('%H:%M:%S', time.gmtime(duration))
1394
+ return result
1395
+
1396
+ @property
1397
+ def ETA(self):
1398
+ if self.done:
1399
+ prefix = 'Done'
1400
+ t = self.elapsed
1401
+ # import pdb; pdb.set_trace()
1402
+ else:
1403
+ prefix = 'ETA '
1404
+ if self.max is None:
1405
+ t = -1
1406
+ elif self.elapsed == 0 or (self.cur == self.min):
1407
+ t = 0
1408
+ else:
1409
+ # import pdb; pdb.set_trace()
1410
+ t = float(self.max - self.min)
1411
+ t /= self.cur - self.min
1412
+ t = (t - 1) * self.elapsed
1413
+ return '%s: %s' % (prefix, self.format_duration(t))
1414
+
1415
+ @property
1416
+ def speed(self):
1417
+ if self.elapsed == 0:
1418
+ result = 0.0
1419
+ else:
1420
+ result = (self.cur - self.min) / self.elapsed
1421
+ for unit in UNITS:
1422
+ if result < 1000:
1423
+ break
1424
+ result /= 1000.0
1425
+ return '%d %sB/s' % (result, unit)
1426
+
1427
+
1428
+ #
1429
+ # Glob functionality
1430
+ #
1431
+
1432
+ RICH_GLOB = re.compile(r'\{([^}]*)\}')
1433
+ _CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
1434
+ _CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
1435
+
1436
+
1437
+ def iglob(path_glob):
1438
+ """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
1439
+ if _CHECK_RECURSIVE_GLOB.search(path_glob):
1440
+ msg = """invalid glob %r: recursive glob "**" must be used alone"""
1441
+ raise ValueError(msg % path_glob)
1442
+ if _CHECK_MISMATCH_SET.search(path_glob):
1443
+ msg = """invalid glob %r: mismatching set marker '{' or '}'"""
1444
+ raise ValueError(msg % path_glob)
1445
+ return _iglob(path_glob)
1446
+
1447
+
1448
+ def _iglob(path_glob):
1449
+ rich_path_glob = RICH_GLOB.split(path_glob, 1)
1450
+ if len(rich_path_glob) > 1:
1451
+ assert len(rich_path_glob) == 3, rich_path_glob
1452
+ prefix, set, suffix = rich_path_glob
1453
+ for item in set.split(','):
1454
+ for path in _iglob(''.join((prefix, item, suffix))):
1455
+ yield path
1456
+ else:
1457
+ if '**' not in path_glob:
1458
+ for item in std_iglob(path_glob):
1459
+ yield item
1460
+ else:
1461
+ prefix, radical = path_glob.split('**', 1)
1462
+ if prefix == '':
1463
+ prefix = '.'
1464
+ if radical == '':
1465
+ radical = '*'
1466
+ else:
1467
+ # we support both
1468
+ radical = radical.lstrip('/')
1469
+ radical = radical.lstrip('\\')
1470
+ for path, dir, files in os.walk(prefix):
1471
+ path = os.path.normpath(path)
1472
+ for fn in _iglob(os.path.join(path, radical)):
1473
+ yield fn
1474
+
1475
+
1476
+ if ssl:
1477
+ from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
1478
+ CertificateError)
1479
+
1480
+ #
1481
+ # HTTPSConnection which verifies certificates/matches domains
1482
+ #
1483
+
1484
+ class HTTPSConnection(httplib.HTTPSConnection):
1485
+ ca_certs = None # set this to the path to the certs file (.pem)
1486
+ check_domain = True # only used if ca_certs is not None
1487
+
1488
+ # noinspection PyPropertyAccess
1489
+ def connect(self):
1490
+ sock = socket.create_connection((self.host, self.port),
1491
+ self.timeout)
1492
+ if getattr(self, '_tunnel_host', False):
1493
+ self.sock = sock
1494
+ self._tunnel()
1495
+
1496
+ context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
1497
+ if hasattr(ssl, 'OP_NO_SSLv2'):
1498
+ context.options |= ssl.OP_NO_SSLv2
1499
+ if getattr(self, 'cert_file', None):
1500
+ context.load_cert_chain(self.cert_file, self.key_file)
1501
+ kwargs = {}
1502
+ if self.ca_certs:
1503
+ context.verify_mode = ssl.CERT_REQUIRED
1504
+ context.load_verify_locations(cafile=self.ca_certs)
1505
+ if getattr(ssl, 'HAS_SNI', False):
1506
+ kwargs['server_hostname'] = self.host
1507
+
1508
+ self.sock = context.wrap_socket(sock, **kwargs)
1509
+ if self.ca_certs and self.check_domain:
1510
+ try:
1511
+ match_hostname(self.sock.getpeercert(), self.host)
1512
+ logger.debug('Host verified: %s', self.host)
1513
+ except CertificateError: # pragma: no cover
1514
+ self.sock.shutdown(socket.SHUT_RDWR)
1515
+ self.sock.close()
1516
+ raise
1517
+
1518
+ class HTTPSHandler(BaseHTTPSHandler):
1519
+
1520
+ def __init__(self, ca_certs, check_domain=True):
1521
+ BaseHTTPSHandler.__init__(self)
1522
+ self.ca_certs = ca_certs
1523
+ self.check_domain = check_domain
1524
+
1525
+ def _conn_maker(self, *args, **kwargs):
1526
+ """
1527
+ This is called to create a connection instance. Normally you'd
1528
+ pass a connection class to do_open, but it doesn't actually check for
1529
+ a class, and just expects a callable. As long as we behave just as a
1530
+ constructor would have, we should be OK. If it ever changes so that
1531
+ we *must* pass a class, we'll create an UnsafeHTTPSConnection class
1532
+ which just sets check_domain to False in the class definition, and
1533
+ choose which one to pass to do_open.
1534
+ """
1535
+ result = HTTPSConnection(*args, **kwargs)
1536
+ if self.ca_certs:
1537
+ result.ca_certs = self.ca_certs
1538
+ result.check_domain = self.check_domain
1539
+ return result
1540
+
1541
+ def https_open(self, req):
1542
+ try:
1543
+ return self.do_open(self._conn_maker, req)
1544
+ except URLError as e:
1545
+ if 'certificate verify failed' in str(e.reason):
1546
+ raise CertificateError(
1547
+ 'Unable to verify server certificate '
1548
+ 'for %s' % req.host)
1549
+ else:
1550
+ raise
1551
+
1552
+ #
1553
+ # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
1554
+ # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
1555
+ # HTML containing a http://xyz link when it should be https://xyz),
1556
+ # you can use the following handler class, which does not allow HTTP traffic.
1557
+ #
1558
+ # It works by inheriting from HTTPHandler - so build_opener won't add a
1559
+ # handler for HTTP itself.
1560
+ #
1561
+ class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
1562
+
1563
+ def http_open(self, req):
1564
+ raise URLError(
1565
+ 'Unexpected HTTP request on what should be a secure '
1566
+ 'connection: %s' % req)
1567
+
1568
+
1569
+ #
1570
+ # XML-RPC with timeouts
1571
+ #
1572
+ class Transport(xmlrpclib.Transport):
1573
+
1574
+ def __init__(self, timeout, use_datetime=0):
1575
+ self.timeout = timeout
1576
+ xmlrpclib.Transport.__init__(self, use_datetime)
1577
+
1578
+ def make_connection(self, host):
1579
+ h, eh, x509 = self.get_host_info(host)
1580
+ if not self._connection or host != self._connection[0]:
1581
+ self._extra_headers = eh
1582
+ self._connection = host, httplib.HTTPConnection(h)
1583
+ return self._connection[1]
1584
+
1585
+
1586
+ if ssl:
1587
+
1588
+ class SafeTransport(xmlrpclib.SafeTransport):
1589
+
1590
+ def __init__(self, timeout, use_datetime=0):
1591
+ self.timeout = timeout
1592
+ xmlrpclib.SafeTransport.__init__(self, use_datetime)
1593
+
1594
+ def make_connection(self, host):
1595
+ h, eh, kwargs = self.get_host_info(host)
1596
+ if not kwargs:
1597
+ kwargs = {}
1598
+ kwargs['timeout'] = self.timeout
1599
+ if not self._connection or host != self._connection[0]:
1600
+ self._extra_headers = eh
1601
+ self._connection = host, httplib.HTTPSConnection(
1602
+ h, None, **kwargs)
1603
+ return self._connection[1]
1604
+
1605
+
1606
+ class ServerProxy(xmlrpclib.ServerProxy):
1607
+
1608
+ def __init__(self, uri, **kwargs):
1609
+ self.timeout = timeout = kwargs.pop('timeout', None)
1610
+ # The above classes only come into play if a timeout
1611
+ # is specified
1612
+ if timeout is not None:
1613
+ # scheme = splittype(uri) # deprecated as of Python 3.8
1614
+ scheme = urlparse(uri)[0]
1615
+ use_datetime = kwargs.get('use_datetime', 0)
1616
+ if scheme == 'https':
1617
+ tcls = SafeTransport
1618
+ else:
1619
+ tcls = Transport
1620
+ kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
1621
+ self.transport = t
1622
+ xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
1623
+
1624
+
1625
+ #
1626
+ # CSV functionality. This is provided because on 2.x, the csv module can't
1627
+ # handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
1628
+ #
1629
+
1630
+
1631
+ def _csv_open(fn, mode, **kwargs):
1632
+ if sys.version_info[0] < 3:
1633
+ mode += 'b'
1634
+ else:
1635
+ kwargs['newline'] = ''
1636
+ # Python 3 determines encoding from locale. Force 'utf-8'
1637
+ # file encoding to match other forced utf-8 encoding
1638
+ kwargs['encoding'] = 'utf-8'
1639
+ return open(fn, mode, **kwargs)
1640
+
1641
+
1642
+ class CSVBase(object):
1643
+ defaults = {
1644
+ 'delimiter': str(','), # The strs are used because we need native
1645
+ 'quotechar': str('"'), # str in the csv API (2.x won't take
1646
+ 'lineterminator': str('\n') # Unicode)
1647
+ }
1648
+
1649
+ def __enter__(self):
1650
+ return self
1651
+
1652
+ def __exit__(self, *exc_info):
1653
+ self.stream.close()
1654
+
1655
+
1656
+ class CSVReader(CSVBase):
1657
+
1658
+ def __init__(self, **kwargs):
1659
+ if 'stream' in kwargs:
1660
+ stream = kwargs['stream']
1661
+ if sys.version_info[0] >= 3:
1662
+ # needs to be a text stream
1663
+ stream = codecs.getreader('utf-8')(stream)
1664
+ self.stream = stream
1665
+ else:
1666
+ self.stream = _csv_open(kwargs['path'], 'r')
1667
+ self.reader = csv.reader(self.stream, **self.defaults)
1668
+
1669
+ def __iter__(self):
1670
+ return self
1671
+
1672
+ def next(self):
1673
+ result = next(self.reader)
1674
+ if sys.version_info[0] < 3:
1675
+ for i, item in enumerate(result):
1676
+ if not isinstance(item, text_type):
1677
+ result[i] = item.decode('utf-8')
1678
+ return result
1679
+
1680
+ __next__ = next
1681
+
1682
+
1683
+ class CSVWriter(CSVBase):
1684
+
1685
+ def __init__(self, fn, **kwargs):
1686
+ self.stream = _csv_open(fn, 'w')
1687
+ self.writer = csv.writer(self.stream, **self.defaults)
1688
+
1689
+ def writerow(self, row):
1690
+ if sys.version_info[0] < 3:
1691
+ r = []
1692
+ for item in row:
1693
+ if isinstance(item, text_type):
1694
+ item = item.encode('utf-8')
1695
+ r.append(item)
1696
+ row = r
1697
+ self.writer.writerow(row)
1698
+
1699
+
1700
+ #
1701
+ # Configurator functionality
1702
+ #
1703
+
1704
+
1705
+ class Configurator(BaseConfigurator):
1706
+
1707
+ value_converters = dict(BaseConfigurator.value_converters)
1708
+ value_converters['inc'] = 'inc_convert'
1709
+
1710
+ def __init__(self, config, base=None):
1711
+ super(Configurator, self).__init__(config)
1712
+ self.base = base or os.getcwd()
1713
+
1714
+ def configure_custom(self, config):
1715
+
1716
+ def convert(o):
1717
+ if isinstance(o, (list, tuple)):
1718
+ result = type(o)([convert(i) for i in o])
1719
+ elif isinstance(o, dict):
1720
+ if '()' in o:
1721
+ result = self.configure_custom(o)
1722
+ else:
1723
+ result = {}
1724
+ for k in o:
1725
+ result[k] = convert(o[k])
1726
+ else:
1727
+ result = self.convert(o)
1728
+ return result
1729
+
1730
+ c = config.pop('()')
1731
+ if not callable(c):
1732
+ c = self.resolve(c)
1733
+ props = config.pop('.', None)
1734
+ # Check for valid identifiers
1735
+ args = config.pop('[]', ())
1736
+ if args:
1737
+ args = tuple([convert(o) for o in args])
1738
+ items = [(k, convert(config[k])) for k in config if valid_ident(k)]
1739
+ kwargs = dict(items)
1740
+ result = c(*args, **kwargs)
1741
+ if props:
1742
+ for n, v in props.items():
1743
+ setattr(result, n, convert(v))
1744
+ return result
1745
+
1746
+ def __getitem__(self, key):
1747
+ result = self.config[key]
1748
+ if isinstance(result, dict) and '()' in result:
1749
+ self.config[key] = result = self.configure_custom(result)
1750
+ return result
1751
+
1752
+ def inc_convert(self, value):
1753
+ """Default converter for the inc:// protocol."""
1754
+ if not os.path.isabs(value):
1755
+ value = os.path.join(self.base, value)
1756
+ with codecs.open(value, 'r', encoding='utf-8') as f:
1757
+ result = json.load(f)
1758
+ return result
1759
+
1760
+
1761
+ class SubprocessMixin(object):
1762
+ """
1763
+ Mixin for running subprocesses and capturing their output
1764
+ """
1765
+
1766
+ def __init__(self, verbose=False, progress=None):
1767
+ self.verbose = verbose
1768
+ self.progress = progress
1769
+
1770
+ def reader(self, stream, context):
1771
+ """
1772
+ Read lines from a subprocess' output stream and either pass to a progress
1773
+ callable (if specified) or write progress information to sys.stderr.
1774
+ """
1775
+ progress = self.progress
1776
+ verbose = self.verbose
1777
+ while True:
1778
+ s = stream.readline()
1779
+ if not s:
1780
+ break
1781
+ if progress is not None:
1782
+ progress(s, context)
1783
+ else:
1784
+ if not verbose:
1785
+ sys.stderr.write('.')
1786
+ else:
1787
+ sys.stderr.write(s.decode('utf-8'))
1788
+ sys.stderr.flush()
1789
+ stream.close()
1790
+
1791
+ def run_command(self, cmd, **kwargs):
1792
+ p = subprocess.Popen(cmd,
1793
+ stdout=subprocess.PIPE,
1794
+ stderr=subprocess.PIPE,
1795
+ **kwargs)
1796
+ t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
1797
+ t1.start()
1798
+ t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
1799
+ t2.start()
1800
+ p.wait()
1801
+ t1.join()
1802
+ t2.join()
1803
+ if self.progress is not None:
1804
+ self.progress('done.', 'main')
1805
+ elif self.verbose:
1806
+ sys.stderr.write('done.\n')
1807
+ return p
1808
+
1809
+
1810
+ def normalize_name(name):
1811
+ """Normalize a python package name a la PEP 503"""
1812
+ # https://www.python.org/dev/peps/pep-0503/#normalized-names
1813
+ return re.sub('[-_.]+', '-', name).lower()
1814
+
1815
+
1816
+ # def _get_pypirc_command():
1817
+ # """
1818
+ # Get the distutils command for interacting with PyPI configurations.
1819
+ # :return: the command.
1820
+ # """
1821
+ # from distutils.core import Distribution
1822
+ # from distutils.config import PyPIRCCommand
1823
+ # d = Distribution()
1824
+ # return PyPIRCCommand(d)
1825
+
1826
+
1827
+ class PyPIRCFile(object):
1828
+
1829
+ DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
1830
+ DEFAULT_REALM = 'pypi'
1831
+
1832
+ def __init__(self, fn=None, url=None):
1833
+ if fn is None:
1834
+ fn = os.path.join(os.path.expanduser('~'), '.pypirc')
1835
+ self.filename = fn
1836
+ self.url = url
1837
+
1838
+ def read(self):
1839
+ result = {}
1840
+
1841
+ if os.path.exists(self.filename):
1842
+ repository = self.url or self.DEFAULT_REPOSITORY
1843
+
1844
+ config = configparser.RawConfigParser()
1845
+ config.read(self.filename)
1846
+ sections = config.sections()
1847
+ if 'distutils' in sections:
1848
+ # let's get the list of servers
1849
+ index_servers = config.get('distutils', 'index-servers')
1850
+ _servers = [
1851
+ server.strip() for server in index_servers.split('\n')
1852
+ if server.strip() != ''
1853
+ ]
1854
+ if _servers == []:
1855
+ # nothing set, let's try to get the default pypi
1856
+ if 'pypi' in sections:
1857
+ _servers = ['pypi']
1858
+ else:
1859
+ for server in _servers:
1860
+ result = {'server': server}
1861
+ result['username'] = config.get(server, 'username')
1862
+
1863
+ # optional params
1864
+ for key, default in (('repository',
1865
+ self.DEFAULT_REPOSITORY),
1866
+ ('realm', self.DEFAULT_REALM),
1867
+ ('password', None)):
1868
+ if config.has_option(server, key):
1869
+ result[key] = config.get(server, key)
1870
+ else:
1871
+ result[key] = default
1872
+
1873
+ # work around people having "repository" for the "pypi"
1874
+ # section of their config set to the HTTP (rather than
1875
+ # HTTPS) URL
1876
+ if (server == 'pypi' and repository
1877
+ in (self.DEFAULT_REPOSITORY, 'pypi')):
1878
+ result['repository'] = self.DEFAULT_REPOSITORY
1879
+ elif (result['server'] != repository
1880
+ and result['repository'] != repository):
1881
+ result = {}
1882
+ elif 'server-login' in sections:
1883
+ # old format
1884
+ server = 'server-login'
1885
+ if config.has_option(server, 'repository'):
1886
+ repository = config.get(server, 'repository')
1887
+ else:
1888
+ repository = self.DEFAULT_REPOSITORY
1889
+ result = {
1890
+ 'username': config.get(server, 'username'),
1891
+ 'password': config.get(server, 'password'),
1892
+ 'repository': repository,
1893
+ 'server': server,
1894
+ 'realm': self.DEFAULT_REALM
1895
+ }
1896
+ return result
1897
+
1898
+ def update(self, username, password):
1899
+ # import pdb; pdb.set_trace()
1900
+ config = configparser.RawConfigParser()
1901
+ fn = self.filename
1902
+ config.read(fn)
1903
+ if not config.has_section('pypi'):
1904
+ config.add_section('pypi')
1905
+ config.set('pypi', 'username', username)
1906
+ config.set('pypi', 'password', password)
1907
+ with open(fn, 'w') as f:
1908
+ config.write(f)
1909
+
1910
+
1911
+ def _load_pypirc(index):
1912
+ """
1913
+ Read the PyPI access configuration as supported by distutils.
1914
+ """
1915
+ return PyPIRCFile(url=index.url).read()
1916
+
1917
+
1918
+ def _store_pypirc(index):
1919
+ PyPIRCFile().update(index.username, index.password)
1920
+
1921
+
1922
+ #
1923
+ # get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
1924
+ # tweaks
1925
+ #
1926
+
1927
+
1928
+ def get_host_platform():
1929
+ """Return a string that identifies the current platform. This is used mainly to
1930
+ distinguish platform-specific build directories and platform-specific built
1931
+ distributions. Typically includes the OS name and version and the
1932
+ architecture (as supplied by 'os.uname()'), although the exact information
1933
+ included depends on the OS; eg. on Linux, the kernel version isn't
1934
+ particularly important.
1935
+
1936
+ Examples of returned values:
1937
+ linux-i586
1938
+ linux-alpha (?)
1939
+ solaris-2.6-sun4u
1940
+
1941
+ Windows will return one of:
1942
+ win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
1943
+ win32 (all others - specifically, sys.platform is returned)
1944
+
1945
+ For other non-POSIX platforms, currently just returns 'sys.platform'.
1946
+
1947
+ """
1948
+ if os.name == 'nt':
1949
+ if 'amd64' in sys.version.lower():
1950
+ return 'win-amd64'
1951
+ if '(arm)' in sys.version.lower():
1952
+ return 'win-arm32'
1953
+ if '(arm64)' in sys.version.lower():
1954
+ return 'win-arm64'
1955
+ return sys.platform
1956
+
1957
+ # Set for cross builds explicitly
1958
+ if "_PYTHON_HOST_PLATFORM" in os.environ:
1959
+ return os.environ["_PYTHON_HOST_PLATFORM"]
1960
+
1961
+ if os.name != 'posix' or not hasattr(os, 'uname'):
1962
+ # XXX what about the architecture? NT is Intel or Alpha,
1963
+ # Mac OS is M68k or PPC, etc.
1964
+ return sys.platform
1965
+
1966
+ # Try to distinguish various flavours of Unix
1967
+
1968
+ (osname, host, release, version, machine) = os.uname()
1969
+
1970
+ # Convert the OS name to lowercase, remove '/' characters, and translate
1971
+ # spaces (for "Power Macintosh")
1972
+ osname = osname.lower().replace('/', '')
1973
+ machine = machine.replace(' ', '_').replace('/', '-')
1974
+
1975
+ if osname[:5] == 'linux':
1976
+ # At least on Linux/Intel, 'machine' is the processor --
1977
+ # i386, etc.
1978
+ # XXX what about Alpha, SPARC, etc?
1979
+ return "%s-%s" % (osname, machine)
1980
+
1981
+ elif osname[:5] == 'sunos':
1982
+ if release[0] >= '5': # SunOS 5 == Solaris 2
1983
+ osname = 'solaris'
1984
+ release = '%d.%s' % (int(release[0]) - 3, release[2:])
1985
+ # We can't use 'platform.architecture()[0]' because a
1986
+ # bootstrap problem. We use a dict to get an error
1987
+ # if some suspicious happens.
1988
+ bitness = {2147483647: '32bit', 9223372036854775807: '64bit'}
1989
+ machine += '.%s' % bitness[sys.maxsize]
1990
+ # fall through to standard osname-release-machine representation
1991
+ elif osname[:3] == 'aix':
1992
+ from _aix_support import aix_platform
1993
+ return aix_platform()
1994
+ elif osname[:6] == 'cygwin':
1995
+ osname = 'cygwin'
1996
+ rel_re = re.compile(r'[\d.]+', re.ASCII)
1997
+ m = rel_re.match(release)
1998
+ if m:
1999
+ release = m.group()
2000
+ elif osname[:6] == 'darwin':
2001
+ import _osx_support
2002
+ try:
2003
+ from distutils import sysconfig
2004
+ except ImportError:
2005
+ import sysconfig
2006
+ osname, release, machine = _osx_support.get_platform_osx(
2007
+ sysconfig.get_config_vars(), osname, release, machine)
2008
+
2009
+ return '%s-%s-%s' % (osname, release, machine)
2010
+
2011
+
2012
+ _TARGET_TO_PLAT = {
2013
+ 'x86': 'win32',
2014
+ 'x64': 'win-amd64',
2015
+ 'arm': 'win-arm32',
2016
+ }
2017
+
2018
+
2019
+ def get_platform():
2020
+ if os.name != 'nt':
2021
+ return get_host_platform()
2022
+ cross_compilation_target = os.environ.get('VSCMD_ARG_TGT_ARCH')
2023
+ if cross_compilation_target not in _TARGET_TO_PLAT:
2024
+ return get_host_platform()
2025
+ return _TARGET_TO_PLAT[cross_compilation_target]
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/w64-arm.exe ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5dc9884a8f458371550e09bd396e5418bf375820a31b9899f6499bf391c7b2e
3
+ size 168448
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/w64.exe ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a319ffaba23a017d7b1e18ba726ba6c54c53d6446db55f92af53c279894f8ad
3
+ size 101888
.venv/lib/python3.11/site-packages/pip/_vendor/distro/__init__.py ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .distro import (
2
+ NORMALIZED_DISTRO_ID,
3
+ NORMALIZED_LSB_ID,
4
+ NORMALIZED_OS_ID,
5
+ LinuxDistribution,
6
+ __version__,
7
+ build_number,
8
+ codename,
9
+ distro_release_attr,
10
+ distro_release_info,
11
+ id,
12
+ info,
13
+ like,
14
+ linux_distribution,
15
+ lsb_release_attr,
16
+ lsb_release_info,
17
+ major_version,
18
+ minor_version,
19
+ name,
20
+ os_release_attr,
21
+ os_release_info,
22
+ uname_attr,
23
+ uname_info,
24
+ version,
25
+ version_parts,
26
+ )
27
+
28
+ __all__ = [
29
+ "NORMALIZED_DISTRO_ID",
30
+ "NORMALIZED_LSB_ID",
31
+ "NORMALIZED_OS_ID",
32
+ "LinuxDistribution",
33
+ "build_number",
34
+ "codename",
35
+ "distro_release_attr",
36
+ "distro_release_info",
37
+ "id",
38
+ "info",
39
+ "like",
40
+ "linux_distribution",
41
+ "lsb_release_attr",
42
+ "lsb_release_info",
43
+ "major_version",
44
+ "minor_version",
45
+ "name",
46
+ "os_release_attr",
47
+ "os_release_info",
48
+ "uname_attr",
49
+ "uname_info",
50
+ "version",
51
+ "version_parts",
52
+ ]
53
+
54
+ __version__ = __version__
.venv/lib/python3.11/site-packages/pip/_vendor/distro/__main__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ from .distro import main
2
+
3
+ if __name__ == "__main__":
4
+ main()
.venv/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (1.19 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/__main__.cpython-311.pyc ADDED
Binary file (324 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distro/__pycache__/distro.cpython-311.pyc ADDED
Binary file (57.7 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distro/distro.py ADDED
@@ -0,0 +1,1399 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+ # Copyright 2015,2016,2017 Nir Cohen
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ """
17
+ The ``distro`` package (``distro`` stands for Linux Distribution) provides
18
+ information about the Linux distribution it runs on, such as a reliable
19
+ machine-readable distro ID, or version information.
20
+
21
+ It is the recommended replacement for Python's original
22
+ :py:func:`platform.linux_distribution` function, but it provides much more
23
+ functionality. An alternative implementation became necessary because Python
24
+ 3.5 deprecated this function, and Python 3.8 removed it altogether. Its
25
+ predecessor function :py:func:`platform.dist` was already deprecated since
26
+ Python 2.6 and removed in Python 3.8. Still, there are many cases in which
27
+ access to OS distribution information is needed. See `Python issue 1322
28
+ <https://bugs.python.org/issue1322>`_ for more information.
29
+ """
30
+
31
+ import argparse
32
+ import json
33
+ import logging
34
+ import os
35
+ import re
36
+ import shlex
37
+ import subprocess
38
+ import sys
39
+ import warnings
40
+ from typing import (
41
+ Any,
42
+ Callable,
43
+ Dict,
44
+ Iterable,
45
+ Optional,
46
+ Sequence,
47
+ TextIO,
48
+ Tuple,
49
+ Type,
50
+ )
51
+
52
+ try:
53
+ from typing import TypedDict
54
+ except ImportError:
55
+ # Python 3.7
56
+ TypedDict = dict
57
+
58
+ __version__ = "1.8.0"
59
+
60
+
61
class VersionDict(TypedDict):
    """Typed shape of the ``version_parts`` mapping in :func:`distro.info`.

    Each field is a string; a field is the empty string when the
    corresponding part of the dot-separated version is not provided by
    the distribution.
    """

    major: str
    minor: str
    build_number: str
65
+
66
+
67
class InfoDict(TypedDict):
    """Typed shape of the dictionary returned by :func:`distro.info`.

    All keys are always present; values come from the accessor function
    of the same name (``id``, ``version``, ``like``, ``codename``), with
    ``version_parts`` holding a :class:`VersionDict`.
    """

    id: str
    version: str
    version_parts: VersionDict
    like: str
    codename: str
73
+
74
+
75
# Locations of the distro data files; overridable via environment
# variables (used e.g. by the test suite).
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
# Base name of the systemd os-release file, looked up in both directories
# above.
_OS_RELEASE_BASENAME = "os-release"

#: Translation table for normalizing the "ID" attribute defined in os-release
#: files, for use by the :func:`distro.id` method.
#:
#: * Key: Value as defined in the os-release file, translated to lower case,
#:   with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_OS_ID = {
    "ol": "oracle",  # Oracle Linux
    "opensuse-leap": "opensuse",  # Newer versions of OpenSuSE report as opensuse-leap
}

#: Translation table for normalizing the "Distributor ID" attribute returned by
#: the lsb_release command, for use by the :func:`distro.id` method.
#:
#: * Key: Value as returned by the lsb_release command, translated to lower
#:   case, with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_LSB_ID = {
    "enterpriseenterpriseas": "oracle",  # Oracle Enterprise Linux 4
    "enterpriseenterpriseserver": "oracle",  # Oracle Linux 5
    "redhatenterpriseworkstation": "rhel",  # RHEL 6, 7 Workstation
    "redhatenterpriseserver": "rhel",  # RHEL 6, 7 Server
    "redhatenterprisecomputenode": "rhel",  # RHEL 6 ComputeNode
}

#: Translation table for normalizing the distro ID derived from the file name
#: of distro release files, for use by the :func:`distro.id` method.
#:
#: * Key: Value as derived from the file name of a distro release file,
#:   translated to lower case, with blanks translated to underscores.
#:
#: * Value: Normalized value.
NORMALIZED_DISTRO_ID = {
    "redhat": "rhel",  # RHEL 6.x, 7.x
}

# Pattern for the content of a distro release file, matched against the
# REVERSED first line: "esaeler" is "release" reversed and "STL " is
# " LTS" reversed, so the optional trailing codename/version parse first.
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
    r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
)

# Pattern for the base file name of a distro release file, e.g.
# "centos-release" or "slackware-version"; group 1 captures the distro id.
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")

# Base file names to be looked up for if _UNIXCONFDIR is not readable.
_DISTRO_RELEASE_BASENAMES = [
    "SuSE-release",
    "arch-release",
    "base-release",
    "centos-release",
    "fedora-release",
    "gentoo-release",
    "mageia-release",
    "mandrake-release",
    "mandriva-release",
    "mandrivalinux-release",
    "manjaro-release",
    "oracle-release",
    "redhat-release",
    "rocky-release",
    "sl-release",
    "slackware-version",
]

# Base file names to be ignored when searching for a distro release file
# (they match the basename pattern but are not distro release files).
_DISTRO_RELEASE_IGNORE_BASENAMES = (
    "debian_version",
    "lsb-release",
    "oem-release",
    _OS_RELEASE_BASENAME,
    "system-release",
    "plesk-release",
    "iredmail-release",
)
155
+
156
+
157
def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]:
    """Return ``(id_name, version, codename)`` for the current distro.

    .. deprecated:: 1.6.0
        Kept only as a compatibility shim for Python's removed
        :py:func:`platform.linux_distribution`.  Use :func:`distro.id`,
        :func:`distro.version` and :func:`distro.name` instead.

    ``id_name`` is the result of :func:`distro.name` when
    *full_distribution_name* is true (the default), otherwise of
    :func:`distro.id`.  Results may differ from the original platform
    function: more data sources are consulted and the distro ID is
    normalized to a reliable machine-readable value.
    """
    warnings.warn(
        "distro.linux_distribution() is deprecated. It should only be used as a "
        "compatibility shim with Python's platform.linux_distribution(). Please use "
        "distro.id(), distro.version() and distro.name() instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return _distro.linux_distribution(full_distribution_name)
198
+
199
+
200
def id() -> str:
    """Return the distro ID as a reliable, machine-readable string.

    The ID is taken from the first available, non-empty source among:

    * the "ID" attribute of the os-release file,
    * the "Distributor ID" attribute of the lsb_release command output,
    * the first part of the distro release file name.

    The value is then lower-cased, has blanks replaced by underscores,
    and is normalized via the ``NORMALIZED_*`` translation tables so
    that popular distributions (e.g. "ubuntu", "debian", "rhel",
    "centos", "fedora", "sles", "opensuse", "amzn", "arch", "gentoo",
    "oracle", "slackware", "freebsd", "rocky", "aix", "guix", ...)
    report a stable value across releases, even when a distro later adds
    an os-release file or the lsb_release command with differing IDs.
    Please report mismatches in the distro issue tracker.
    """
    return _distro.id()
281
+
282
+
283
def name(pretty: bool = False) -> str:
    """Return the human-readable name of the current OS distribution.

    Without *pretty*, the bare name is returned (e.g. "CentOS Linux");
    with *pretty*, version and codename are appended (e.g.
    "CentOS Linux 7.1.1503 (Core)").

    The first available, non-empty value wins among: the os-release file
    ("NAME", or "PRETTY_NAME" when *pretty*), the lsb_release output
    ("Distributor ID", or "Description" when *pretty*), and the distro
    release file's name field (appended, when *pretty*, with its version
    and codename fields if available).
    """
    return _distro.name(pretty)
320
+
321
+
322
def version(pretty: bool = False, best: bool = False) -> str:
    """Return the version of the current OS distribution as a string.

    With *pretty*, a non-empty codename is appended in parentheses
    (e.g. "7.0 (Maipo)").  Rolling-release distros (e.g. Arch Linux) may
    provide no version at all, in which case the empty string is
    returned.

    Sources are examined in this priority order: the os-release
    "VERSION_ID" attribute, the lsb_release "Release" attribute, the
    version field of the distro release file's first line, then version
    numbers parsed out of the os-release "PRETTY_NAME" and the
    lsb_release "Description" when they follow the distro release file
    format.  With *best* false, the first non-empty value in that order
    is returned; with *best* true, the most precise version found across
    all sources is returned (useful e.g. for Debian 8.2 or CentOS 7.1,
    whose sources differ in precision).
    """
    return _distro.version(pretty, best)
368
+
369
+
370
def version_parts(best: bool = False) -> Tuple[str, str, str]:
    """Return the version as a ``(major, minor, build_number)`` tuple.

    The items are the results of :func:`distro.major_version`,
    :func:`distro.minor_version` and :func:`distro.build_number`.
    See :func:`distro.version` for the meaning of *best*.
    """
    return _distro.version_parts(best)
385
+
386
+
387
def major_version(best: bool = False) -> str:
    """Return the first part of the dot-separated version string.

    The empty string is returned when no major version is provided.
    See :func:`distro.version` for the meaning of *best*.
    """
    return _distro.major_version(best)
398
+
399
+
400
def minor_version(best: bool = False) -> str:
    """Return the second part of the dot-separated version string.

    The empty string is returned when no minor version is provided.
    See :func:`distro.version` for the meaning of *best*.
    """
    return _distro.minor_version(best)
411
+
412
+
413
def build_number(best: bool = False) -> str:
    """Return the third part of the dot-separated version string.

    The empty string is returned when no build number is provided.
    See :func:`distro.version` for the meaning of *best*.
    """
    return _distro.build_number(best)
424
+
425
+
426
def like() -> str:
    """Return a space-separated list of IDs of closely related distros.

    These are distributions the current one is related to in terms of
    packaging and programming interfaces, e.g. ones it derives from.
    Sourced solely from the "ID_LIKE" attribute of the os-release file;
    see the os-release man page
    (<http://www.freedesktop.org/software/systemd/man/os-release.html>)
    for details.
    """
    return _distro.like()
441
+
442
+
443
def codename() -> str:
    """Return the release codename, or the empty string if there is none.

    Taken from the first available source among: the codename inside the
    os-release "VERSION" attribute, the lsb_release "Codename"
    attribute, and the distro release file's codename field.

    The value is returned verbatim and is not always a real codename
    (e.g. openSUSE reports "x86_64"); no special handling is applied.
    """
    return _distro.codename()
465
+
466
+
467
def info(pretty: bool = False, best: bool = False) -> InfoDict:
    """Return machine-readable distro information as a dictionary.

    The dictionary structure and keys are always the same, regardless of
    which data sources are available::

        {
            'id': 'rhel',
            'version': '7.0',
            'version_parts': {
                'major': '7',
                'minor': '0',
                'build_number': ''
            },
            'like': 'fedora',
            'codename': 'Maipo'
        }

    Each value is the result of the accessor function of the same name
    (``id``, ``version``, ``major_version``, ``minor_version``,
    ``build_number``, ``like``, ``codename``).  See
    :func:`distro.version` for the meaning of *pretty* and *best*.
    """
    return _distro.info(pretty, best)
509
+
510
+
511
def os_release_info() -> Dict[str, str]:
    """Return the key-value pairs read from the os-release file.

    See the `os-release file`_ documentation for details about the
    individual information items.
    """
    return _distro.os_release_info()
519
+
520
+
521
def lsb_release_info() -> Dict[str, str]:
    """Return the key-value pairs parsed from the lsb_release command.

    See the `lsb_release command output`_ documentation for details
    about the individual information items.
    """
    return _distro.lsb_release_info()
530
+
531
+
532
def distro_release_info() -> Dict[str, str]:
    """Return the key-value pairs parsed from the distro release file.

    See the `distro release file`_ documentation for details about the
    individual information items.
    """
    return _distro.distro_release_info()
540
+
541
+
542
def uname_info() -> Dict[str, str]:
    """
    Return a dictionary containing key-value pairs for the information items
    from the uname command data source of the current OS distribution.
    """
    # Fix for a docstring copy-paste error: this accessor reads the uname
    # data source, not the distro release file.
    return _distro.uname_info()
548
+
549
+
550
def os_release_attr(attribute: str) -> str:
    """Return one item from the os-release file data source.

    :param attribute: Key of the information item.
    :return: Value of the item, or the empty string when it does not
        exist.  See the `os-release file`_ documentation for the
        available items.
    """
    return _distro.os_release_attr(attribute)
567
+
568
+
569
def lsb_release_attr(attribute: str) -> str:
    """Return one item from the lsb_release command output data source.

    :param attribute: Key of the information item.
    :return: Value of the item, or the empty string when it does not
        exist.  See the `lsb_release command output`_ documentation for
        the available items.
    """
    return _distro.lsb_release_attr(attribute)
587
+
588
+
589
def distro_release_attr(attribute: str) -> str:
    """Return one item from the distro release file data source.

    :param attribute: Key of the information item.
    :return: Value of the item, or the empty string when it does not
        exist.  See the `distro release file`_ documentation for the
        available items.
    """
    return _distro.distro_release_attr(attribute)
606
+
607
+
608
def uname_attr(attribute: str) -> str:
    """
    Return a single named information item from the uname command output data
    source of the current OS distribution.

    Parameters:

    * ``attribute`` (string): Key of the information item.

    Returns:

    * (string): Value of the information item, if the item exists.
      The empty string, if the item does not exist.
    """
    # Fix for a docstring copy-paste error: this accessor reads the uname
    # data source, not the distro release file.
    return _distro.uname_attr(attribute)
623
+
624
+
625
try:
    from functools import cached_property
except ImportError:
    # Python < 3.8 has no functools.cached_property; provide a minimal
    # drop-in replacement.
    class cached_property:  # type: ignore
        """Compute-once property: the first access calls the wrapped
        function and stores the result under the function's name in the
        instance ``__dict__``, so later accesses bypass this descriptor.
        """

        def __init__(self, f: Callable[[Any], Any]) -> None:
            self._fname = f.__name__
            self._f = f

        def __get__(self, obj: Any, owner: Type[Any]) -> Any:
            assert obj is not None, f"call {self._fname} on an instance"
            # Cache in the instance dict; the instance attribute shadows
            # this (non-data) descriptor on subsequent lookups.
            value = self._f(obj)
            obj.__dict__[self._fname] = value
            return value
643
+
644
+
645
+ class LinuxDistribution:
646
+ """
647
+ Provides information about a OS distribution.
648
+
649
+ This package creates a private module-global instance of this class with
650
+ default initialization arguments, that is used by the
651
+ `consolidated accessor functions`_ and `single source accessor functions`_.
652
+ By using default initialization arguments, that module-global instance
653
+ returns data about the current OS distribution (i.e. the distro this
654
+ package runs on).
655
+
656
+ Normally, it is not necessary to create additional instances of this class.
657
+ However, in situations where control is needed over the exact data sources
658
+ that are used, instances of this class can be created with a specific
659
+ distro release file, or a specific os-release file, or without invoking the
660
+ lsb_release command.
661
+ """
662
+
663
+ def __init__(
664
+ self,
665
+ include_lsb: Optional[bool] = None,
666
+ os_release_file: str = "",
667
+ distro_release_file: str = "",
668
+ include_uname: Optional[bool] = None,
669
+ root_dir: Optional[str] = None,
670
+ include_oslevel: Optional[bool] = None,
671
+ ) -> None:
672
+ """
673
+ The initialization method of this class gathers information from the
674
+ available data sources, and stores that in private instance attributes.
675
+ Subsequent access to the information items uses these private instance
676
+ attributes, so that the data sources are read only once.
677
+
678
+ Parameters:
679
+
680
+ * ``include_lsb`` (bool): Controls whether the
681
+ `lsb_release command output`_ is included as a data source.
682
+
683
+ If the lsb_release command is not available in the program execution
684
+ path, the data source for the lsb_release command will be empty.
685
+
686
+ * ``os_release_file`` (string): The path name of the
687
+ `os-release file`_ that is to be used as a data source.
688
+
689
+ An empty string (the default) will cause the default path name to
690
+ be used (see `os-release file`_ for details).
691
+
692
+ If the specified or defaulted os-release file does not exist, the
693
+ data source for the os-release file will be empty.
694
+
695
+ * ``distro_release_file`` (string): The path name of the
696
+ `distro release file`_ that is to be used as a data source.
697
+
698
+ An empty string (the default) will cause a default search algorithm
699
+ to be used (see `distro release file`_ for details).
700
+
701
+ If the specified distro release file does not exist, or if no default
702
+ distro release file can be found, the data source for the distro
703
+ release file will be empty.
704
+
705
+ * ``include_uname`` (bool): Controls whether uname command output is
706
+ included as a data source. If the uname command is not available in
707
+ the program execution path the data source for the uname command will
708
+ be empty.
709
+
710
+ * ``root_dir`` (string): The absolute path to the root directory to use
711
+ to find distro-related information files. Note that ``include_*``
712
+ parameters must not be enabled in combination with ``root_dir``.
713
+
714
+ * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command
715
+ output is included as a data source. If the oslevel command is not
716
+ available in the program execution path the data source will be
717
+ empty.
718
+
719
+ Public instance attributes:
720
+
721
+ * ``os_release_file`` (string): The path name of the
722
+ `os-release file`_ that is actually used as a data source. The
723
+ empty string if no distro release file is used as a data source.
724
+
725
+ * ``distro_release_file`` (string): The path name of the
726
+ `distro release file`_ that is actually used as a data source. The
727
+ empty string if no distro release file is used as a data source.
728
+
729
+ * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
730
+ This controls whether the lsb information will be loaded.
731
+
732
+ * ``include_uname`` (bool): The result of the ``include_uname``
733
+ parameter. This controls whether the uname information will
734
+ be loaded.
735
+
736
+ * ``include_oslevel`` (bool): The result of the ``include_oslevel``
737
+ parameter. This controls whether (AIX) oslevel information will be
738
+ loaded.
739
+
740
+ * ``root_dir`` (string): The result of the ``root_dir`` parameter.
741
+ The absolute path to the root directory to use to find distro-related
742
+ information files.
743
+
744
+ Raises:
745
+
746
+ * :py:exc:`ValueError`: Initialization parameters combination is not
747
+ supported.
748
+
749
+ * :py:exc:`OSError`: Some I/O issue with an os-release file or distro
750
+ release file.
751
+
752
+ * :py:exc:`UnicodeError`: A data source has unexpected characters or
753
+ uses an unexpected encoding.
754
+ """
755
+ self.root_dir = root_dir
756
+ self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
757
+ self.usr_lib_dir = (
758
+ os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
759
+ )
760
+
761
+ if os_release_file:
762
+ self.os_release_file = os_release_file
763
+ else:
764
+ etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
765
+ usr_lib_os_release_file = os.path.join(
766
+ self.usr_lib_dir, _OS_RELEASE_BASENAME
767
+ )
768
+
769
+ # NOTE: The idea is to respect order **and** have it set
770
+ # at all times for API backwards compatibility.
771
+ if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
772
+ usr_lib_os_release_file
773
+ ):
774
+ self.os_release_file = etc_dir_os_release_file
775
+ else:
776
+ self.os_release_file = usr_lib_os_release_file
777
+
778
+ self.distro_release_file = distro_release_file or "" # updated later
779
+
780
+ is_root_dir_defined = root_dir is not None
781
+ if is_root_dir_defined and (include_lsb or include_uname or include_oslevel):
782
+ raise ValueError(
783
+ "Including subprocess data sources from specific root_dir is disallowed"
784
+ " to prevent false information"
785
+ )
786
+ self.include_lsb = (
787
+ include_lsb if include_lsb is not None else not is_root_dir_defined
788
+ )
789
+ self.include_uname = (
790
+ include_uname if include_uname is not None else not is_root_dir_defined
791
+ )
792
+ self.include_oslevel = (
793
+ include_oslevel if include_oslevel is not None else not is_root_dir_defined
794
+ )
795
+
796
+ def __repr__(self) -> str:
797
+ """Return repr of all info"""
798
+ return (
799
+ "LinuxDistribution("
800
+ "os_release_file={self.os_release_file!r}, "
801
+ "distro_release_file={self.distro_release_file!r}, "
802
+ "include_lsb={self.include_lsb!r}, "
803
+ "include_uname={self.include_uname!r}, "
804
+ "include_oslevel={self.include_oslevel!r}, "
805
+ "root_dir={self.root_dir!r}, "
806
+ "_os_release_info={self._os_release_info!r}, "
807
+ "_lsb_release_info={self._lsb_release_info!r}, "
808
+ "_distro_release_info={self._distro_release_info!r}, "
809
+ "_uname_info={self._uname_info!r}, "
810
+ "_oslevel_info={self._oslevel_info!r})".format(self=self)
811
+ )
812
+
813
+ def linux_distribution(
814
+ self, full_distribution_name: bool = True
815
+ ) -> Tuple[str, str, str]:
816
+ """
817
+ Return information about the OS distribution that is compatible
818
+ with Python's :func:`platform.linux_distribution`, supporting a subset
819
+ of its parameters.
820
+
821
+ For details, see :func:`distro.linux_distribution`.
822
+ """
823
+ return (
824
+ self.name() if full_distribution_name else self.id(),
825
+ self.version(),
826
+ self._os_release_info.get("release_codename") or self.codename(),
827
+ )
828
+
829
+ def id(self) -> str:
830
+ """Return the distro ID of the OS distribution, as a string.
831
+
832
+ For details, see :func:`distro.id`.
833
+ """
834
+
835
+ def normalize(distro_id: str, table: Dict[str, str]) -> str:
836
+ distro_id = distro_id.lower().replace(" ", "_")
837
+ return table.get(distro_id, distro_id)
838
+
839
+ distro_id = self.os_release_attr("id")
840
+ if distro_id:
841
+ return normalize(distro_id, NORMALIZED_OS_ID)
842
+
843
+ distro_id = self.lsb_release_attr("distributor_id")
844
+ if distro_id:
845
+ return normalize(distro_id, NORMALIZED_LSB_ID)
846
+
847
+ distro_id = self.distro_release_attr("id")
848
+ if distro_id:
849
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
850
+
851
+ distro_id = self.uname_attr("id")
852
+ if distro_id:
853
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
854
+
855
+ return ""
856
+
857
+ def name(self, pretty: bool = False) -> str:
858
+ """
859
+ Return the name of the OS distribution, as a string.
860
+
861
+ For details, see :func:`distro.name`.
862
+ """
863
+ name = (
864
+ self.os_release_attr("name")
865
+ or self.lsb_release_attr("distributor_id")
866
+ or self.distro_release_attr("name")
867
+ or self.uname_attr("name")
868
+ )
869
+ if pretty:
870
+ name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
871
+ "description"
872
+ )
873
+ if not name:
874
+ name = self.distro_release_attr("name") or self.uname_attr("name")
875
+ version = self.version(pretty=True)
876
+ if version:
877
+ name = f"{name} {version}"
878
+ return name or ""
879
+
880
+ def version(self, pretty: bool = False, best: bool = False) -> str:
881
+ """
882
+ Return the version of the OS distribution, as a string.
883
+
884
+ For details, see :func:`distro.version`.
885
+ """
886
+ versions = [
887
+ self.os_release_attr("version_id"),
888
+ self.lsb_release_attr("release"),
889
+ self.distro_release_attr("version_id"),
890
+ self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
891
+ "version_id", ""
892
+ ),
893
+ self._parse_distro_release_content(
894
+ self.lsb_release_attr("description")
895
+ ).get("version_id", ""),
896
+ self.uname_attr("release"),
897
+ ]
898
+ if self.uname_attr("id").startswith("aix"):
899
+ # On AIX platforms, prefer oslevel command output.
900
+ versions.insert(0, self.oslevel_info())
901
+ elif self.id() == "debian" or "debian" in self.like().split():
902
+ # On Debian-like, add debian_version file content to candidates list.
903
+ versions.append(self._debian_version)
904
+ version = ""
905
+ if best:
906
+ # This algorithm uses the last version in priority order that has
907
+ # the best precision. If the versions are not in conflict, that
908
+ # does not matter; otherwise, using the last one instead of the
909
+ # first one might be considered a surprise.
910
+ for v in versions:
911
+ if v.count(".") > version.count(".") or version == "":
912
+ version = v
913
+ else:
914
+ for v in versions:
915
+ if v != "":
916
+ version = v
917
+ break
918
+ if pretty and version and self.codename():
919
+ version = f"{version} ({self.codename()})"
920
+ return version
921
+
922
+ def version_parts(self, best: bool = False) -> Tuple[str, str, str]:
923
+ """
924
+ Return the version of the OS distribution, as a tuple of version
925
+ numbers.
926
+
927
+ For details, see :func:`distro.version_parts`.
928
+ """
929
+ version_str = self.version(best=best)
930
+ if version_str:
931
+ version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
932
+ matches = version_regex.match(version_str)
933
+ if matches:
934
+ major, minor, build_number = matches.groups()
935
+ return major, minor or "", build_number or ""
936
+ return "", "", ""
937
+
938
+ def major_version(self, best: bool = False) -> str:
939
+ """
940
+ Return the major version number of the current distribution.
941
+
942
+ For details, see :func:`distro.major_version`.
943
+ """
944
+ return self.version_parts(best)[0]
945
+
946
+ def minor_version(self, best: bool = False) -> str:
947
+ """
948
+ Return the minor version number of the current distribution.
949
+
950
+ For details, see :func:`distro.minor_version`.
951
+ """
952
+ return self.version_parts(best)[1]
953
+
954
+ def build_number(self, best: bool = False) -> str:
955
+ """
956
+ Return the build number of the current distribution.
957
+
958
+ For details, see :func:`distro.build_number`.
959
+ """
960
+ return self.version_parts(best)[2]
961
+
962
+ def like(self) -> str:
963
+ """
964
+ Return the IDs of distributions that are like the OS distribution.
965
+
966
+ For details, see :func:`distro.like`.
967
+ """
968
+ return self.os_release_attr("id_like") or ""
969
+
970
+ def codename(self) -> str:
971
+ """
972
+ Return the codename of the OS distribution.
973
+
974
+ For details, see :func:`distro.codename`.
975
+ """
976
+ try:
977
+ # Handle os_release specially since distros might purposefully set
978
+ # this to empty string to have no codename
979
+ return self._os_release_info["codename"]
980
+ except KeyError:
981
+ return (
982
+ self.lsb_release_attr("codename")
983
+ or self.distro_release_attr("codename")
984
+ or ""
985
+ )
986
+
987
+ def info(self, pretty: bool = False, best: bool = False) -> InfoDict:
988
+ """
989
+ Return certain machine-readable information about the OS
990
+ distribution.
991
+
992
+ For details, see :func:`distro.info`.
993
+ """
994
+ return dict(
995
+ id=self.id(),
996
+ version=self.version(pretty, best),
997
+ version_parts=dict(
998
+ major=self.major_version(best),
999
+ minor=self.minor_version(best),
1000
+ build_number=self.build_number(best),
1001
+ ),
1002
+ like=self.like(),
1003
+ codename=self.codename(),
1004
+ )
1005
+
1006
+ def os_release_info(self) -> Dict[str, str]:
1007
+ """
1008
+ Return a dictionary containing key-value pairs for the information
1009
+ items from the os-release file data source of the OS distribution.
1010
+
1011
+ For details, see :func:`distro.os_release_info`.
1012
+ """
1013
+ return self._os_release_info
1014
+
1015
+ def lsb_release_info(self) -> Dict[str, str]:
1016
+ """
1017
+ Return a dictionary containing key-value pairs for the information
1018
+ items from the lsb_release command data source of the OS
1019
+ distribution.
1020
+
1021
+ For details, see :func:`distro.lsb_release_info`.
1022
+ """
1023
+ return self._lsb_release_info
1024
+
1025
+ def distro_release_info(self) -> Dict[str, str]:
1026
+ """
1027
+ Return a dictionary containing key-value pairs for the information
1028
+ items from the distro release file data source of the OS
1029
+ distribution.
1030
+
1031
+ For details, see :func:`distro.distro_release_info`.
1032
+ """
1033
+ return self._distro_release_info
1034
+
1035
+ def uname_info(self) -> Dict[str, str]:
1036
+ """
1037
+ Return a dictionary containing key-value pairs for the information
1038
+ items from the uname command data source of the OS distribution.
1039
+
1040
+ For details, see :func:`distro.uname_info`.
1041
+ """
1042
+ return self._uname_info
1043
+
1044
+ def oslevel_info(self) -> str:
1045
+ """
1046
+ Return AIX' oslevel command output.
1047
+ """
1048
+ return self._oslevel_info
1049
+
1050
+ def os_release_attr(self, attribute: str) -> str:
1051
+ """
1052
+ Return a single named information item from the os-release file data
1053
+ source of the OS distribution.
1054
+
1055
+ For details, see :func:`distro.os_release_attr`.
1056
+ """
1057
+ return self._os_release_info.get(attribute, "")
1058
+
1059
+ def lsb_release_attr(self, attribute: str) -> str:
1060
+ """
1061
+ Return a single named information item from the lsb_release command
1062
+ output data source of the OS distribution.
1063
+
1064
+ For details, see :func:`distro.lsb_release_attr`.
1065
+ """
1066
+ return self._lsb_release_info.get(attribute, "")
1067
+
1068
+ def distro_release_attr(self, attribute: str) -> str:
1069
+ """
1070
+ Return a single named information item from the distro release file
1071
+ data source of the OS distribution.
1072
+
1073
+ For details, see :func:`distro.distro_release_attr`.
1074
+ """
1075
+ return self._distro_release_info.get(attribute, "")
1076
+
1077
+ def uname_attr(self, attribute: str) -> str:
1078
+ """
1079
+ Return a single named information item from the uname command
1080
+ output data source of the OS distribution.
1081
+
1082
+ For details, see :func:`distro.uname_attr`.
1083
+ """
1084
+ return self._uname_info.get(attribute, "")
1085
+
1086
+ @cached_property
1087
+ def _os_release_info(self) -> Dict[str, str]:
1088
+ """
1089
+ Get the information items from the specified os-release file.
1090
+
1091
+ Returns:
1092
+ A dictionary containing all information items.
1093
+ """
1094
+ if os.path.isfile(self.os_release_file):
1095
+ with open(self.os_release_file, encoding="utf-8") as release_file:
1096
+ return self._parse_os_release_content(release_file)
1097
+ return {}
1098
+
1099
+ @staticmethod
1100
+ def _parse_os_release_content(lines: TextIO) -> Dict[str, str]:
1101
+ """
1102
+ Parse the lines of an os-release file.
1103
+
1104
+ Parameters:
1105
+
1106
+ * lines: Iterable through the lines in the os-release file.
1107
+ Each line must be a unicode string or a UTF-8 encoded byte
1108
+ string.
1109
+
1110
+ Returns:
1111
+ A dictionary containing all information items.
1112
+ """
1113
+ props = {}
1114
+ lexer = shlex.shlex(lines, posix=True)
1115
+ lexer.whitespace_split = True
1116
+
1117
+ tokens = list(lexer)
1118
+ for token in tokens:
1119
+ # At this point, all shell-like parsing has been done (i.e.
1120
+ # comments processed, quotes and backslash escape sequences
1121
+ # processed, multi-line values assembled, trailing newlines
1122
+ # stripped, etc.), so the tokens are now either:
1123
+ # * variable assignments: var=value
1124
+ # * commands or their arguments (not allowed in os-release)
1125
+ # Ignore any tokens that are not variable assignments
1126
+ if "=" in token:
1127
+ k, v = token.split("=", 1)
1128
+ props[k.lower()] = v
1129
+
1130
+ if "version" in props:
1131
+ # extract release codename (if any) from version attribute
1132
+ match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"])
1133
+ if match:
1134
+ release_codename = match.group(1) or match.group(2)
1135
+ props["codename"] = props["release_codename"] = release_codename
1136
+
1137
+ if "version_codename" in props:
1138
+ # os-release added a version_codename field. Use that in
1139
+ # preference to anything else Note that some distros purposefully
1140
+ # do not have code names. They should be setting
1141
+ # version_codename=""
1142
+ props["codename"] = props["version_codename"]
1143
+ elif "ubuntu_codename" in props:
1144
+ # Same as above but a non-standard field name used on older Ubuntus
1145
+ props["codename"] = props["ubuntu_codename"]
1146
+
1147
+ return props
1148
+
1149
+ @cached_property
1150
+ def _lsb_release_info(self) -> Dict[str, str]:
1151
+ """
1152
+ Get the information items from the lsb_release command output.
1153
+
1154
+ Returns:
1155
+ A dictionary containing all information items.
1156
+ """
1157
+ if not self.include_lsb:
1158
+ return {}
1159
+ try:
1160
+ cmd = ("lsb_release", "-a")
1161
+ stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
1162
+ # Command not found or lsb_release returned error
1163
+ except (OSError, subprocess.CalledProcessError):
1164
+ return {}
1165
+ content = self._to_str(stdout).splitlines()
1166
+ return self._parse_lsb_release_content(content)
1167
+
1168
+ @staticmethod
1169
+ def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]:
1170
+ """
1171
+ Parse the output of the lsb_release command.
1172
+
1173
+ Parameters:
1174
+
1175
+ * lines: Iterable through the lines of the lsb_release output.
1176
+ Each line must be a unicode string or a UTF-8 encoded byte
1177
+ string.
1178
+
1179
+ Returns:
1180
+ A dictionary containing all information items.
1181
+ """
1182
+ props = {}
1183
+ for line in lines:
1184
+ kv = line.strip("\n").split(":", 1)
1185
+ if len(kv) != 2:
1186
+ # Ignore lines without colon.
1187
+ continue
1188
+ k, v = kv
1189
+ props.update({k.replace(" ", "_").lower(): v.strip()})
1190
+ return props
1191
+
1192
+ @cached_property
1193
+ def _uname_info(self) -> Dict[str, str]:
1194
+ if not self.include_uname:
1195
+ return {}
1196
+ try:
1197
+ cmd = ("uname", "-rs")
1198
+ stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL)
1199
+ except OSError:
1200
+ return {}
1201
+ content = self._to_str(stdout).splitlines()
1202
+ return self._parse_uname_content(content)
1203
+
1204
+ @cached_property
1205
+ def _oslevel_info(self) -> str:
1206
+ if not self.include_oslevel:
1207
+ return ""
1208
+ try:
1209
+ stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL)
1210
+ except (OSError, subprocess.CalledProcessError):
1211
+ return ""
1212
+ return self._to_str(stdout).strip()
1213
+
1214
+ @cached_property
1215
+ def _debian_version(self) -> str:
1216
+ try:
1217
+ with open(
1218
+ os.path.join(self.etc_dir, "debian_version"), encoding="ascii"
1219
+ ) as fp:
1220
+ return fp.readline().rstrip()
1221
+ except FileNotFoundError:
1222
+ return ""
1223
+
1224
+ @staticmethod
1225
+ def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]:
1226
+ if not lines:
1227
+ return {}
1228
+ props = {}
1229
+ match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
1230
+ if match:
1231
+ name, version = match.groups()
1232
+
1233
+ # This is to prevent the Linux kernel version from
1234
+ # appearing as the 'best' version on otherwise
1235
+ # identifiable distributions.
1236
+ if name == "Linux":
1237
+ return {}
1238
+ props["id"] = name.lower()
1239
+ props["name"] = name
1240
+ props["release"] = version
1241
+ return props
1242
+
1243
+ @staticmethod
1244
+ def _to_str(bytestring: bytes) -> str:
1245
+ encoding = sys.getfilesystemencoding()
1246
+ return bytestring.decode(encoding)
1247
+
1248
+ @cached_property
1249
+ def _distro_release_info(self) -> Dict[str, str]:
1250
+ """
1251
+ Get the information items from the specified distro release file.
1252
+
1253
+ Returns:
1254
+ A dictionary containing all information items.
1255
+ """
1256
+ if self.distro_release_file:
1257
+ # If it was specified, we use it and parse what we can, even if
1258
+ # its file name or content does not match the expected pattern.
1259
+ distro_info = self._parse_distro_release_file(self.distro_release_file)
1260
+ basename = os.path.basename(self.distro_release_file)
1261
+ # The file name pattern for user-specified distro release files
1262
+ # is somewhat more tolerant (compared to when searching for the
1263
+ # file), because we want to use what was specified as best as
1264
+ # possible.
1265
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
1266
+ else:
1267
+ try:
1268
+ basenames = [
1269
+ basename
1270
+ for basename in os.listdir(self.etc_dir)
1271
+ if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES
1272
+ and os.path.isfile(os.path.join(self.etc_dir, basename))
1273
+ ]
1274
+ # We sort for repeatability in cases where there are multiple
1275
+ # distro specific files; e.g. CentOS, Oracle, Enterprise all
1276
+ # containing `redhat-release` on top of their own.
1277
+ basenames.sort()
1278
+ except OSError:
1279
+ # This may occur when /etc is not readable but we can't be
1280
+ # sure about the *-release files. Check common entries of
1281
+ # /etc for information. If they turn out to not be there the
1282
+ # error is handled in `_parse_distro_release_file()`.
1283
+ basenames = _DISTRO_RELEASE_BASENAMES
1284
+ for basename in basenames:
1285
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
1286
+ if match is None:
1287
+ continue
1288
+ filepath = os.path.join(self.etc_dir, basename)
1289
+ distro_info = self._parse_distro_release_file(filepath)
1290
+ # The name is always present if the pattern matches.
1291
+ if "name" not in distro_info:
1292
+ continue
1293
+ self.distro_release_file = filepath
1294
+ break
1295
+ else: # the loop didn't "break": no candidate.
1296
+ return {}
1297
+
1298
+ if match is not None:
1299
+ distro_info["id"] = match.group(1)
1300
+
1301
+ # CloudLinux < 7: manually enrich info with proper id.
1302
+ if "cloudlinux" in distro_info.get("name", "").lower():
1303
+ distro_info["id"] = "cloudlinux"
1304
+
1305
+ return distro_info
1306
+
1307
+ def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]:
1308
+ """
1309
+ Parse a distro release file.
1310
+
1311
+ Parameters:
1312
+
1313
+ * filepath: Path name of the distro release file.
1314
+
1315
+ Returns:
1316
+ A dictionary containing all information items.
1317
+ """
1318
+ try:
1319
+ with open(filepath, encoding="utf-8") as fp:
1320
+ # Only parse the first line. For instance, on SLES there
1321
+ # are multiple lines. We don't want them...
1322
+ return self._parse_distro_release_content(fp.readline())
1323
+ except OSError:
1324
+ # Ignore not being able to read a specific, seemingly version
1325
+ # related file.
1326
+ # See https://github.com/python-distro/distro/issues/162
1327
+ return {}
1328
+
1329
+ @staticmethod
1330
+ def _parse_distro_release_content(line: str) -> Dict[str, str]:
1331
+ """
1332
+ Parse a line from a distro release file.
1333
+
1334
+ Parameters:
1335
+ * line: Line from the distro release file. Must be a unicode string
1336
+ or a UTF-8 encoded byte string.
1337
+
1338
+ Returns:
1339
+ A dictionary containing all information items.
1340
+ """
1341
+ matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
1342
+ distro_info = {}
1343
+ if matches:
1344
+ # regexp ensures non-None
1345
+ distro_info["name"] = matches.group(3)[::-1]
1346
+ if matches.group(2):
1347
+ distro_info["version_id"] = matches.group(2)[::-1]
1348
+ if matches.group(1):
1349
+ distro_info["codename"] = matches.group(1)[::-1]
1350
+ elif line:
1351
+ distro_info["name"] = line.strip()
1352
+ return distro_info
1353
+
1354
+
1355
+ _distro = LinuxDistribution()
1356
+
1357
+
1358
+ def main() -> None:
1359
+ logger = logging.getLogger(__name__)
1360
+ logger.setLevel(logging.DEBUG)
1361
+ logger.addHandler(logging.StreamHandler(sys.stdout))
1362
+
1363
+ parser = argparse.ArgumentParser(description="OS distro info tool")
1364
+ parser.add_argument(
1365
+ "--json", "-j", help="Output in machine readable format", action="store_true"
1366
+ )
1367
+
1368
+ parser.add_argument(
1369
+ "--root-dir",
1370
+ "-r",
1371
+ type=str,
1372
+ dest="root_dir",
1373
+ help="Path to the root filesystem directory (defaults to /)",
1374
+ )
1375
+
1376
+ args = parser.parse_args()
1377
+
1378
+ if args.root_dir:
1379
+ dist = LinuxDistribution(
1380
+ include_lsb=False,
1381
+ include_uname=False,
1382
+ include_oslevel=False,
1383
+ root_dir=args.root_dir,
1384
+ )
1385
+ else:
1386
+ dist = _distro
1387
+
1388
+ if args.json:
1389
+ logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
1390
+ else:
1391
+ logger.info("Name: %s", dist.name(pretty=True))
1392
+ distribution_version = dist.version(pretty=True)
1393
+ logger.info("Version: %s", distribution_version)
1394
+ distribution_codename = dist.codename()
1395
+ logger.info("Codename: %s", distribution_codename)
1396
+
1397
+
1398
+ if __name__ == "__main__":
1399
+ main()
.venv/lib/python3.11/site-packages/pip/_vendor/distro/py.typed ADDED
File without changes
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/core.cpython-311.pyc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f5162235fdbcdfb73cb019ca21b906eed9b6a4f6512564a1f7138e4865ab4022
3
+ size 295433
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__init__.py ADDED
@@ -0,0 +1,608 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016-2018 Julien Danjou
2
+ # Copyright 2017 Elisey Zanko
3
+ # Copyright 2016 Étienne Bersac
4
+ # Copyright 2016 Joshua Harlow
5
+ # Copyright 2013-2014 Ray Holder
6
+ #
7
+ # Licensed under the Apache License, Version 2.0 (the "License");
8
+ # you may not use this file except in compliance with the License.
9
+ # You may obtain a copy of the License at
10
+ #
11
+ # http://www.apache.org/licenses/LICENSE-2.0
12
+ #
13
+ # Unless required by applicable law or agreed to in writing, software
14
+ # distributed under the License is distributed on an "AS IS" BASIS,
15
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16
+ # See the License for the specific language governing permissions and
17
+ # limitations under the License.
18
+
19
+
20
+ import functools
21
+ import sys
22
+ import threading
23
+ import time
24
+ import typing as t
25
+ import warnings
26
+ from abc import ABC, abstractmethod
27
+ from concurrent import futures
28
+ from inspect import iscoroutinefunction
29
+
30
+ # Import all built-in retry strategies for easier usage.
31
+ from .retry import retry_base # noqa
32
+ from .retry import retry_all # noqa
33
+ from .retry import retry_always # noqa
34
+ from .retry import retry_any # noqa
35
+ from .retry import retry_if_exception # noqa
36
+ from .retry import retry_if_exception_type # noqa
37
+ from .retry import retry_if_exception_cause_type # noqa
38
+ from .retry import retry_if_not_exception_type # noqa
39
+ from .retry import retry_if_not_result # noqa
40
+ from .retry import retry_if_result # noqa
41
+ from .retry import retry_never # noqa
42
+ from .retry import retry_unless_exception_type # noqa
43
+ from .retry import retry_if_exception_message # noqa
44
+ from .retry import retry_if_not_exception_message # noqa
45
+
46
+ # Import all nap strategies for easier usage.
47
+ from .nap import sleep # noqa
48
+ from .nap import sleep_using_event # noqa
49
+
50
+ # Import all built-in stop strategies for easier usage.
51
+ from .stop import stop_after_attempt # noqa
52
+ from .stop import stop_after_delay # noqa
53
+ from .stop import stop_all # noqa
54
+ from .stop import stop_any # noqa
55
+ from .stop import stop_never # noqa
56
+ from .stop import stop_when_event_set # noqa
57
+
58
+ # Import all built-in wait strategies for easier usage.
59
+ from .wait import wait_chain # noqa
60
+ from .wait import wait_combine # noqa
61
+ from .wait import wait_exponential # noqa
62
+ from .wait import wait_fixed # noqa
63
+ from .wait import wait_incrementing # noqa
64
+ from .wait import wait_none # noqa
65
+ from .wait import wait_random # noqa
66
+ from .wait import wait_random_exponential # noqa
67
+ from .wait import wait_random_exponential as wait_full_jitter # noqa
68
+ from .wait import wait_exponential_jitter # noqa
69
+
70
+ # Import all built-in before strategies for easier usage.
71
+ from .before import before_log # noqa
72
+ from .before import before_nothing # noqa
73
+
74
+ # Import all built-in after strategies for easier usage.
75
+ from .after import after_log # noqa
76
+ from .after import after_nothing # noqa
77
+
78
+ # Import all built-in after strategies for easier usage.
79
+ from .before_sleep import before_sleep_log # noqa
80
+ from .before_sleep import before_sleep_nothing # noqa
81
+
82
+ # Replace a conditional import with a hard-coded None so that pip does
83
+ # not attempt to use tornado even if it is present in the environment.
84
+ # If tornado is non-None, tenacity will attempt to execute some code
85
+ # that is sensitive to the version of tornado, which could break pip
86
+ # if an old version is found.
87
+ tornado = None # type: ignore
88
+
89
+ if t.TYPE_CHECKING:
90
+ import types
91
+
92
+ from .retry import RetryBaseT
93
+ from .stop import StopBaseT
94
+ from .wait import WaitBaseT
95
+
96
+
97
+ WrappedFnReturnT = t.TypeVar("WrappedFnReturnT")
98
+ WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Any])
99
+
100
+
101
+ class TryAgain(Exception):
102
+ """Always retry the executed function when raised."""
103
+
104
+
105
+ NO_RESULT = object()
106
+
107
+
108
+ class DoAttempt:
109
+ pass
110
+
111
+
112
+ class DoSleep(float):
113
+ pass
114
+
115
+
116
+ class BaseAction:
117
+ """Base class for representing actions to take by retry object.
118
+
119
+ Concrete implementations must define:
120
+ - __init__: to initialize all necessary fields
121
+ - REPR_FIELDS: class variable specifying attributes to include in repr(self)
122
+ - NAME: for identification in retry object methods and callbacks
123
+ """
124
+
125
+ REPR_FIELDS: t.Sequence[str] = ()
126
+ NAME: t.Optional[str] = None
127
+
128
+ def __repr__(self) -> str:
129
+ state_str = ", ".join(f"{field}={getattr(self, field)!r}" for field in self.REPR_FIELDS)
130
+ return f"{self.__class__.__name__}({state_str})"
131
+
132
+ def __str__(self) -> str:
133
+ return repr(self)
134
+
135
+
136
+ class RetryAction(BaseAction):
137
+ REPR_FIELDS = ("sleep",)
138
+ NAME = "retry"
139
+
140
+ def __init__(self, sleep: t.SupportsFloat) -> None:
141
+ self.sleep = float(sleep)
142
+
143
+
144
+ _unset = object()
145
+
146
+
147
+ def _first_set(first: t.Union[t.Any, object], second: t.Any) -> t.Any:
148
+ return second if first is _unset else first
149
+
150
+
151
+ class RetryError(Exception):
152
+ """Encapsulates the last attempt instance right before giving up."""
153
+
154
+ def __init__(self, last_attempt: "Future") -> None:
155
+ self.last_attempt = last_attempt
156
+ super().__init__(last_attempt)
157
+
158
+ def reraise(self) -> "t.NoReturn":
159
+ if self.last_attempt.failed:
160
+ raise self.last_attempt.result()
161
+ raise self
162
+
163
+ def __str__(self) -> str:
164
+ return f"{self.__class__.__name__}[{self.last_attempt}]"
165
+
166
+
167
+ class AttemptManager:
168
+ """Manage attempt context."""
169
+
170
+ def __init__(self, retry_state: "RetryCallState"):
171
+ self.retry_state = retry_state
172
+
173
+ def __enter__(self) -> None:
174
+ pass
175
+
176
+ def __exit__(
177
+ self,
178
+ exc_type: t.Optional[t.Type[BaseException]],
179
+ exc_value: t.Optional[BaseException],
180
+ traceback: t.Optional["types.TracebackType"],
181
+ ) -> t.Optional[bool]:
182
+ if exc_type is not None and exc_value is not None:
183
+ self.retry_state.set_exception((exc_type, exc_value, traceback))
184
+ return True # Swallow exception.
185
+ else:
186
+ # We don't have the result, actually.
187
+ self.retry_state.set_result(None)
188
+ return None
189
+
190
+
191
+ class BaseRetrying(ABC):
192
+ def __init__(
193
+ self,
194
+ sleep: t.Callable[[t.Union[int, float]], None] = sleep,
195
+ stop: "StopBaseT" = stop_never,
196
+ wait: "WaitBaseT" = wait_none(),
197
+ retry: "RetryBaseT" = retry_if_exception_type(),
198
+ before: t.Callable[["RetryCallState"], None] = before_nothing,
199
+ after: t.Callable[["RetryCallState"], None] = after_nothing,
200
+ before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
201
+ reraise: bool = False,
202
+ retry_error_cls: t.Type[RetryError] = RetryError,
203
+ retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
204
+ ):
205
+ self.sleep = sleep
206
+ self.stop = stop
207
+ self.wait = wait
208
+ self.retry = retry
209
+ self.before = before
210
+ self.after = after
211
+ self.before_sleep = before_sleep
212
+ self.reraise = reraise
213
+ self._local = threading.local()
214
+ self.retry_error_cls = retry_error_cls
215
+ self.retry_error_callback = retry_error_callback
216
+
217
+ def copy(
218
+ self,
219
+ sleep: t.Union[t.Callable[[t.Union[int, float]], None], object] = _unset,
220
+ stop: t.Union["StopBaseT", object] = _unset,
221
+ wait: t.Union["WaitBaseT", object] = _unset,
222
+ retry: t.Union[retry_base, object] = _unset,
223
+ before: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
224
+ after: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
225
+ before_sleep: t.Union[t.Optional[t.Callable[["RetryCallState"], None]], object] = _unset,
226
+ reraise: t.Union[bool, object] = _unset,
227
+ retry_error_cls: t.Union[t.Type[RetryError], object] = _unset,
228
+ retry_error_callback: t.Union[t.Optional[t.Callable[["RetryCallState"], t.Any]], object] = _unset,
229
+ ) -> "BaseRetrying":
230
+ """Copy this object with some parameters changed if needed."""
231
+ return self.__class__(
232
+ sleep=_first_set(sleep, self.sleep),
233
+ stop=_first_set(stop, self.stop),
234
+ wait=_first_set(wait, self.wait),
235
+ retry=_first_set(retry, self.retry),
236
+ before=_first_set(before, self.before),
237
+ after=_first_set(after, self.after),
238
+ before_sleep=_first_set(before_sleep, self.before_sleep),
239
+ reraise=_first_set(reraise, self.reraise),
240
+ retry_error_cls=_first_set(retry_error_cls, self.retry_error_cls),
241
+ retry_error_callback=_first_set(retry_error_callback, self.retry_error_callback),
242
+ )
243
+
244
+ def __repr__(self) -> str:
245
+ return (
246
+ f"<{self.__class__.__name__} object at 0x{id(self):x} ("
247
+ f"stop={self.stop}, "
248
+ f"wait={self.wait}, "
249
+ f"sleep={self.sleep}, "
250
+ f"retry={self.retry}, "
251
+ f"before={self.before}, "
252
+ f"after={self.after})>"
253
+ )
254
+
255
    @property
    def statistics(self) -> t.Dict[str, t.Any]:
        """Return a dictionary of runtime statistics.

        This dictionary will be empty when the controller has never
        run. While it is running, or after it has run, it should have
        (but may not have) useful and/or informational keys and values.

        .. warning:: The keys in this dictionary **should** be somewhat
                     stable (not changing), but their existence **may**
                     change between major releases as new statistics are
                     gathered or removed, so before accessing keys ensure
                     that they actually exist and handle when they do not.

        .. note:: The values in this dictionary are local to the thread
                  running the call (so if multiple threads share the same
                  retrying object - either directly or indirectly - they
                  will each have their own view of statistics they have
                  collected; in the future we may provide a way to
                  aggregate the various statistics from each thread).
        """
        # Stats are stored on a thread-local so concurrent callers do not
        # clobber each other's counters; create the dict lazily (EAFP).
        try:
            return self._local.statistics  # type: ignore[no-any-return]
        except AttributeError:
            self._local.statistics = t.cast(t.Dict[str, t.Any], {})
            return self._local.statistics
282
+
283
    def wraps(self, f: WrappedFn) -> WrappedFn:
        """Wrap a function for retrying.

        :param f: The function to wrap with this retry controller.
        :returns: A wrapper with the same signature; it also exposes
                  ``retry`` (this controller) and ``retry_with`` (a hook to
                  re-wrap with modified parameters).
        """

        @functools.wraps(f)
        def wrapped_f(*args: t.Any, **kw: t.Any) -> t.Any:
            # Delegate to the controller's __call__, which drives the loop.
            return self(f, *args, **kw)

        def retry_with(*args: t.Any, **kwargs: t.Any) -> WrappedFn:
            # Re-wrap using a copy of this controller with changed parameters.
            return self.copy(*args, **kwargs).wraps(f)

        # Expose the controller and the reconfiguration hook on the wrapper.
        wrapped_f.retry = self  # type: ignore[attr-defined]
        wrapped_f.retry_with = retry_with  # type: ignore[attr-defined]

        return wrapped_f  # type: ignore[return-value]
300
+
301
+ def begin(self) -> None:
302
+ self.statistics.clear()
303
+ self.statistics["start_time"] = time.monotonic()
304
+ self.statistics["attempt_number"] = 1
305
+ self.statistics["idle_for"] = 0
306
+
307
    def iter(self, retry_state: "RetryCallState") -> t.Union[DoAttempt, DoSleep, t.Any]:  # noqa
        """Advance the retry state machine by one step.

        :returns: ``DoAttempt`` when the target should be (re)invoked,
                  ``DoSleep`` when the caller must sleep before the next
                  attempt, or the final result when retrying is over.
        :raises: ``self.retry_error_cls`` (or re-raises the last exception
                 when ``reraise`` is set) once the stop strategy triggers.
        """
        fut = retry_state.outcome
        if fut is None:
            # No outcome recorded yet: this is the very first attempt.
            if self.before is not None:
                self.before(retry_state)
            return DoAttempt()

        # TryAgain is an explicit user request to retry regardless of the
        # configured retry strategy.
        is_explicit_retry = fut.failed and isinstance(fut.exception(), TryAgain)
        if not (is_explicit_retry or self.retry(retry_state)):
            # Done retrying: propagate the result (or raise the exception).
            return fut.result()

        if self.after is not None:
            self.after(retry_state)

        self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start
        if self.stop(retry_state):
            # Stop strategy fired: either hand off to the error callback or
            # raise a RetryError wrapping the last outcome.
            if self.retry_error_callback:
                return self.retry_error_callback(retry_state)
            retry_exc = self.retry_error_cls(fut)
            if self.reraise:
                raise retry_exc.reraise()
            raise retry_exc from fut.exception()

        if self.wait:
            sleep = self.wait(retry_state)
        else:
            sleep = 0.0
        retry_state.next_action = RetryAction(sleep)
        retry_state.idle_for += sleep
        self.statistics["idle_for"] += sleep
        self.statistics["attempt_number"] += 1

        if self.before_sleep is not None:
            self.before_sleep(retry_state)

        return DoSleep(sleep)
343
+
344
    def __iter__(self) -> t.Generator[AttemptManager, None, None]:
        """Drive the retry loop as a generator of ``AttemptManager`` contexts.

        Enables the ``for attempt in Retrying(...):`` usage pattern.
        """
        self.begin()

        retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
        while True:
            do = self.iter(retry_state=retry_state)
            if isinstance(do, DoAttempt):
                # Hand control to the caller's `with attempt:` block.
                yield AttemptManager(retry_state=retry_state)
            elif isinstance(do, DoSleep):
                retry_state.prepare_for_next_attempt()
                self.sleep(do)
            else:
                # Any other value is the terminal result: stop iterating.
                break
357
+
358
    @abstractmethod
    def __call__(
        self,
        fn: t.Callable[..., WrappedFnReturnT],
        *args: t.Any,
        **kwargs: t.Any,
    ) -> WrappedFnReturnT:
        """Invoke *fn* under this retry policy; implemented by subclasses."""
        pass
366
+
367
+
368
class Retrying(BaseRetrying):
    """Retrying controller for plain (synchronous) callables."""

    def __call__(
        self,
        fn: t.Callable[..., WrappedFnReturnT],
        *args: t.Any,
        **kwargs: t.Any,
    ) -> WrappedFnReturnT:
        """Call *fn* repeatedly according to the configured retry policy."""
        self.begin()

        retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
        while True:
            do = self.iter(retry_state=retry_state)
            if isinstance(do, DoAttempt):
                try:
                    result = fn(*args, **kwargs)
                except BaseException:  # noqa: B902
                    # Record the full exc_info so the outcome keeps the exception.
                    retry_state.set_exception(sys.exc_info())  # type: ignore[arg-type]
                else:
                    retry_state.set_result(result)
            elif isinstance(do, DoSleep):
                retry_state.prepare_for_next_attempt()
                self.sleep(do)
            else:
                # Anything else is the terminal value produced by iter().
                return do  # type: ignore[no-any-return]
394
+
395
+
396
# concurrent.futures.Future became subscriptable (generic) in Python 3.9.
# Compare the full version tuple rather than only the minor component so the
# check stays correct on any future major-version bump.
if sys.version_info >= (3, 9):
    FutureGenericT = futures.Future[t.Any]
else:
    FutureGenericT = futures.Future
400
+
401
+
402
class Future(FutureGenericT):
    """Encapsulates a (future or past) attempted call to a target function."""

    def __init__(self, attempt_number: int) -> None:
        super().__init__()
        # 1-based index of the attempt that produced this outcome.
        self.attempt_number = attempt_number

    @property
    def failed(self) -> bool:
        """Return whether an exception is being held in this future."""
        return self.exception() is not None

    @classmethod
    def construct(cls, attempt_number: int, value: t.Any, has_exception: bool) -> "Future":
        """Build a completed Future holding either a result or an exception."""
        future = cls(attempt_number)
        setter = future.set_exception if has_exception else future.set_result
        setter(value)
        return future
423
+
424
+
425
class RetryCallState:
    """State related to a single call wrapped with Retrying."""

    def __init__(
        self,
        retry_object: BaseRetrying,
        fn: t.Optional[WrappedFn],
        args: t.Any,
        kwargs: t.Any,
    ) -> None:
        #: Retry call start timestamp (monotonic clock)
        self.start_time = time.monotonic()
        #: Retry manager object
        self.retry_object = retry_object
        #: Function wrapped by this retry call
        self.fn = fn
        #: Arguments of the function wrapped by this retry call
        self.args = args
        #: Keyword arguments of the function wrapped by this retry call
        self.kwargs = kwargs
        #: The number of the current attempt (1-based)
        self.attempt_number: int = 1
        #: Last outcome (result or exception) produced by the function
        self.outcome: t.Optional[Future] = None
        #: Timestamp of the last outcome
        self.outcome_timestamp: t.Optional[float] = None
        #: Accumulated time spent sleeping between retries
        self.idle_for: float = 0.0
        #: Next action as decided by the retry manager
        self.next_action: t.Optional[RetryAction] = None

    @property
    def seconds_since_start(self) -> t.Optional[float]:
        """Elapsed seconds between the start and the latest outcome, if any."""
        if self.outcome_timestamp is None:
            return None
        return self.outcome_timestamp - self.start_time

    def prepare_for_next_attempt(self) -> None:
        """Clear the recorded outcome and advance the attempt counter."""
        self.attempt_number += 1
        self.outcome = None
        self.outcome_timestamp = None
        self.next_action = None

    def set_result(self, val: t.Any) -> None:
        """Record a successful outcome for the current attempt."""
        stamped_at = time.monotonic()
        outcome = Future(self.attempt_number)
        outcome.set_result(val)
        self.outcome = outcome
        self.outcome_timestamp = stamped_at

    def set_exception(
        self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType| None"]
    ) -> None:
        """Record a failed outcome; only the exception value is retained."""
        stamped_at = time.monotonic()
        outcome = Future(self.attempt_number)
        outcome.set_exception(exc_info[1])
        self.outcome = outcome
        self.outcome_timestamp = stamped_at

    def __repr__(self) -> str:
        if self.outcome is None:
            result = "none yet"
        elif self.outcome.failed:
            exc = self.outcome.exception()
            result = f"failed ({exc.__class__.__name__} {exc})"
        else:
            result = f"returned {self.outcome.result()}"

        slept = float(round(self.idle_for, 2))
        return (
            f"<{self.__class__.__name__} {id(self)}: attempt #{self.attempt_number}; "
            f"slept for {slept}; last result: {result}>"
        )
495
+
496
+
497
# Overload 1: bare-decorator usage, i.e. ``@retry`` directly on a function.
@t.overload
def retry(func: WrappedFn) -> WrappedFn:
    ...


# Overload 2: parameterized usage, i.e. ``@retry(...)`` returning a decorator.
@t.overload
def retry(
    sleep: t.Callable[[t.Union[int, float]], None] = sleep,
    stop: "StopBaseT" = stop_never,
    wait: "WaitBaseT" = wait_none(),
    retry: "RetryBaseT" = retry_if_exception_type(),
    before: t.Callable[["RetryCallState"], None] = before_nothing,
    after: t.Callable[["RetryCallState"], None] = after_nothing,
    before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
    reraise: bool = False,
    retry_error_cls: t.Type["RetryError"] = RetryError,
    retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
) -> t.Callable[[WrappedFn], WrappedFn]:
    ...


def retry(*dargs: t.Any, **dkw: t.Any) -> t.Any:
    """Wrap a function with a new `Retrying` object.

    :param dargs: positional arguments passed to Retrying object
    :param dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):
        return retry()(dargs[0])
    else:

        def wrap(f: WrappedFn) -> WrappedFn:
            # Passing a strategy instance where a callable is expected would
            # wrap the strategy itself and retry forever — warn loudly.
            if isinstance(f, retry_base):
                warnings.warn(
                    f"Got retry_base instance ({f.__class__.__name__}) as callable argument, "
                    f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)"
                )
            r: "BaseRetrying"
            # Pick the controller flavour that matches the wrapped callable:
            # asyncio coroutine, tornado coroutine, or plain function.
            if iscoroutinefunction(f):
                r = AsyncRetrying(*dargs, **dkw)
            elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f):
                r = TornadoRetrying(*dargs, **dkw)
            else:
                r = Retrying(*dargs, **dkw)

            return r.wraps(f)

        return wrap
546
+
547
+
548
+ from pip._vendor.tenacity._asyncio import AsyncRetrying # noqa:E402,I100
549
+
550
+ if tornado:
551
+ from pip._vendor.tenacity.tornadoweb import TornadoRetrying
552
+
553
+
554
# Names re-exported as the package's public API (consumed by
# ``from tenacity import *`` and by static analyzers).
__all__ = [
    "retry_base",
    "retry_all",
    "retry_always",
    "retry_any",
    "retry_if_exception",
    "retry_if_exception_type",
    "retry_if_exception_cause_type",
    "retry_if_not_exception_type",
    "retry_if_not_result",
    "retry_if_result",
    "retry_never",
    "retry_unless_exception_type",
    "retry_if_exception_message",
    "retry_if_not_exception_message",
    "sleep",
    "sleep_using_event",
    "stop_after_attempt",
    "stop_after_delay",
    "stop_all",
    "stop_any",
    "stop_never",
    "stop_when_event_set",
    "wait_chain",
    "wait_combine",
    "wait_exponential",
    "wait_fixed",
    "wait_incrementing",
    "wait_none",
    "wait_random",
    "wait_random_exponential",
    "wait_full_jitter",
    "wait_exponential_jitter",
    "before_log",
    "before_nothing",
    "after_log",
    "after_nothing",
    "before_sleep_log",
    "before_sleep_nothing",
    "retry",
    "WrappedFn",
    "TryAgain",
    "NO_RESULT",
    "DoAttempt",
    "DoSleep",
    "BaseAction",
    "RetryAction",
    "RetryError",
    "AttemptManager",
    "BaseRetrying",
    "Retrying",
    "Future",
    "RetryCallState",
    "AsyncRetrying",
]
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (29 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-311.pyc ADDED
Binary file (5.22 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-311.pyc ADDED
Binary file (2.56 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-311.pyc ADDED
Binary file (1.75 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-311.pyc ADDED
Binary file (1.59 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/before_sleep.cpython-311.pyc ADDED
Binary file (2.37 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/nap.cpython-311.pyc ADDED
Binary file (1.56 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/retry.cpython-311.pyc ADDED
Binary file (15.9 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/stop.cpython-311.pyc ADDED
Binary file (6.29 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/tornadoweb.cpython-311.pyc ADDED
Binary file (2.91 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/__pycache__/wait.cpython-311.pyc ADDED
Binary file (13.3 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/_asyncio.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Étienne Bersac
2
+ # Copyright 2016 Julien Danjou
3
+ # Copyright 2016 Joshua Harlow
4
+ # Copyright 2013-2014 Ray Holder
5
+ #
6
+ # Licensed under the Apache License, Version 2.0 (the "License");
7
+ # you may not use this file except in compliance with the License.
8
+ # You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+
18
+ import functools
19
+ import sys
20
+ import typing as t
21
+ from asyncio import sleep
22
+
23
+ from pip._vendor.tenacity import AttemptManager
24
+ from pip._vendor.tenacity import BaseRetrying
25
+ from pip._vendor.tenacity import DoAttempt
26
+ from pip._vendor.tenacity import DoSleep
27
+ from pip._vendor.tenacity import RetryCallState
28
+
29
# Type variables for the wrapped coroutine's return type and for the
# coroutine function itself (bound to awaitable callables).
WrappedFnReturnT = t.TypeVar("WrappedFnReturnT")
WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable[..., t.Awaitable[t.Any]])
31
+
32
+
33
class AsyncRetrying(BaseRetrying):
    """Retrying controller for asyncio coroutine functions."""

    # The sleep strategy must be awaitable here (defaults to asyncio.sleep).
    sleep: t.Callable[[float], t.Awaitable[t.Any]]

    def __init__(self, sleep: t.Callable[[float], t.Awaitable[t.Any]] = sleep, **kwargs: t.Any) -> None:
        super().__init__(**kwargs)
        self.sleep = sleep

    async def __call__(  # type: ignore[override]
        self, fn: WrappedFn, *args: t.Any, **kwargs: t.Any
    ) -> WrappedFnReturnT:
        """Await *fn* repeatedly according to the configured retry policy."""
        self.begin()

        retry_state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
        while True:
            do = self.iter(retry_state=retry_state)
            if isinstance(do, DoAttempt):
                try:
                    result = await fn(*args, **kwargs)
                except BaseException:  # noqa: B902
                    retry_state.set_exception(sys.exc_info())  # type: ignore[arg-type]
                else:
                    retry_state.set_result(result)
            elif isinstance(do, DoSleep):
                retry_state.prepare_for_next_attempt()
                # Await the back-off so other tasks can run meanwhile.
                await self.sleep(do)
            else:
                return do  # type: ignore[no-any-return]

    def __iter__(self) -> t.Generator[AttemptManager, None, None]:
        # Synchronous iteration would block the event loop; force `async for`.
        raise TypeError("AsyncRetrying object is not iterable")

    def __aiter__(self) -> "AsyncRetrying":
        # Reset stats and keep the per-iteration state on the instance so
        # __anext__ can advance it.
        self.begin()
        self._retry_state = RetryCallState(self, fn=None, args=(), kwargs={})
        return self

    async def __anext__(self) -> AttemptManager:
        while True:
            do = self.iter(retry_state=self._retry_state)
            if do is None:
                raise StopAsyncIteration
            elif isinstance(do, DoAttempt):
                return AttemptManager(retry_state=self._retry_state)
            elif isinstance(do, DoSleep):
                self._retry_state.prepare_for_next_attempt()
                await self.sleep(do)
            else:
                # Terminal result: end the `async for` loop.
                raise StopAsyncIteration

    def wraps(self, fn: WrappedFn) -> WrappedFn:
        """Wrap *fn* so the returned wrapper is itself a coroutine function."""
        fn = super().wraps(fn)
        # Ensure wrapper is recognized as a coroutine function.

        @functools.wraps(fn)
        async def async_wrapped(*args: t.Any, **kwargs: t.Any) -> t.Any:
            return await fn(*args, **kwargs)

        # Preserve attributes
        async_wrapped.retry = fn.retry  # type: ignore[attr-defined]
        async_wrapped.retry_with = fn.retry_with  # type: ignore[attr-defined]

        return async_wrapped  # type: ignore[return-value]
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/_utils.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Julien Danjou
2
+ # Copyright 2016 Joshua Harlow
3
+ # Copyright 2013-2014 Ray Holder
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ import sys
18
+ import typing
19
+ from datetime import timedelta
20
+
21
+
22
# sys.maxsize:
# An integer giving the maximum value a variable of type Py_ssize_t can take.
# NOTE: true division makes MAX_WAIT a float; it is halved, presumably to
# leave arithmetic headroom below Py_ssize_t — TODO confirm rationale.
MAX_WAIT = sys.maxsize / 2
25
+
26
+
27
def find_ordinal(pos_num: int) -> str:
    """Return the English ordinal suffix ("st"/"nd"/"rd"/"th") for *pos_num*.

    See: https://en.wikipedia.org/wiki/English_numerals#Ordinal_numbers
    """
    # Numbers whose last two digits fall in 10..20 always take "th"
    # (11th, 12th, 13th, 111th, 212th, ...). The previous recursive
    # implementation only special-cased 4..20 itself, so e.g. 111 wrongly
    # produced "st"; checking modulo 100 first fixes that.
    if 10 <= pos_num % 100 <= 20:
        return "th"
    # Otherwise the suffix depends on the last digit alone.
    return {1: "st", 2: "nd", 3: "rd"}.get(pos_num % 10, "th")
41
+
42
+
43
def to_ordinal(pos_num: int) -> str:
    """Format *pos_num* as an English ordinal string, e.g. ``1`` -> ``"1st"``."""
    return str(pos_num) + find_ordinal(pos_num)
45
+
46
+
47
def get_callback_name(cb: typing.Callable[..., typing.Any]) -> str:
    """Get a callback fully-qualified name.

    If no name can be produced ``repr(cb)`` is called and returned.
    """
    try:
        name = cb.__qualname__
    except AttributeError:
        try:
            name = cb.__name__
        except AttributeError:
            # Nothing name-like on the callable at all.
            return repr(cb)
    try:
        # When running under sphinx it appears this can be none?
        module = cb.__module__
    except AttributeError:
        module = None
    return f"{module}.{name}" if module else name
70
+
71
+
72
# A duration expressed either as a bare number of seconds or a timedelta.
time_unit_type = typing.Union[int, float, timedelta]


def to_seconds(time_unit: time_unit_type) -> float:
    """Normalize *time_unit* (number or ``timedelta``) to float seconds."""
    if isinstance(time_unit, timedelta):
        return float(time_unit.total_seconds())
    return float(time_unit)
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/after.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Julien Danjou
2
+ # Copyright 2016 Joshua Harlow
3
+ # Copyright 2013-2014 Ray Holder
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ import typing
18
+
19
+ from pip._vendor.tenacity import _utils
20
+
21
+ if typing.TYPE_CHECKING:
22
+ import logging
23
+
24
+ from pip._vendor.tenacity import RetryCallState
25
+
26
+
27
def after_nothing(retry_state: "RetryCallState") -> None:
    """After call strategy that does nothing (no-op ``after`` callback)."""
29
+
30
+
31
def after_log(
    logger: "logging.Logger",
    log_level: int,
    sec_format: str = "%0.3f",
) -> typing.Callable[["RetryCallState"], None]:
    """After call strategy that logs to some logger the finished attempt."""

    def log_it(retry_state: "RetryCallState") -> None:
        fn = retry_state.fn
        # NOTE(sileht): fn can't really be None here, but mypy needs the guard.
        fn_name = "<unknown>" if fn is None else _utils.get_callback_name(fn)
        elapsed = sec_format % retry_state.seconds_since_start
        ordinal = _utils.to_ordinal(retry_state.attempt_number)
        logger.log(
            log_level,
            f"Finished call to '{fn_name}' after {elapsed}(s), "
            f"this was the {ordinal} time calling it.",
        )

    return log_it
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/before.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Julien Danjou
2
+ # Copyright 2016 Joshua Harlow
3
+ # Copyright 2013-2014 Ray Holder
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ import typing
18
+
19
+ from pip._vendor.tenacity import _utils
20
+
21
+ if typing.TYPE_CHECKING:
22
+ import logging
23
+
24
+ from pip._vendor.tenacity import RetryCallState
25
+
26
+
27
def before_nothing(retry_state: "RetryCallState") -> None:
    """Before call strategy that does nothing (no-op ``before`` callback)."""
29
+
30
+
31
def before_log(logger: "logging.Logger", log_level: int) -> typing.Callable[["RetryCallState"], None]:
    """Before call strategy that logs to some logger the attempt."""

    def log_it(retry_state: "RetryCallState") -> None:
        fn = retry_state.fn
        # NOTE(sileht): fn can't really be None here, but mypy needs the guard.
        fn_name = "<unknown>" if fn is None else _utils.get_callback_name(fn)
        ordinal = _utils.to_ordinal(retry_state.attempt_number)
        logger.log(
            log_level,
            f"Starting call to '{fn_name}', this is the {ordinal} time calling it.",
        )

    return log_it
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/before_sleep.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Julien Danjou
2
+ # Copyright 2016 Joshua Harlow
3
+ # Copyright 2013-2014 Ray Holder
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ import typing
18
+
19
+ from pip._vendor.tenacity import _utils
20
+
21
+ if typing.TYPE_CHECKING:
22
+ import logging
23
+
24
+ from pip._vendor.tenacity import RetryCallState
25
+
26
+
27
def before_sleep_nothing(retry_state: "RetryCallState") -> None:
    """Before-sleep strategy that does nothing."""
29
+
30
+
31
def before_sleep_log(
    logger: "logging.Logger",
    log_level: int,
    exc_info: bool = False,
) -> typing.Callable[["RetryCallState"], None]:
    """Before call strategy that logs to some logger the attempt.

    :param logger: logger to emit to.
    :param log_level: level to log at.
    :param exc_info: when True, attach the failed outcome's exception to the
                     log record via ``exc_info``.
    """

    def log_it(retry_state: "RetryCallState") -> None:
        local_exc_info: BaseException | bool | None

        if retry_state.outcome is None:
            raise RuntimeError("log_it() called before outcome was set")

        if retry_state.next_action is None:
            raise RuntimeError("log_it() called before next_action was set")

        if retry_state.outcome.failed:
            ex = retry_state.outcome.exception()
            verb, value = "raised", f"{ex.__class__.__name__}: {ex}"

            if exc_info:
                local_exc_info = retry_state.outcome.exception()
            else:
                local_exc_info = False
        else:
            verb, value = "returned", retry_state.outcome.result()
            local_exc_info = False  # exc_info does not apply when no exception

        if retry_state.fn is None:
            # NOTE(sileht): can't really happen, but we must please mypy
            fn_name = "<unknown>"
        else:
            fn_name = _utils.get_callback_name(retry_state.fn)

        logger.log(
            log_level,
            f"Retrying {fn_name} " f"in {retry_state.next_action.sleep} seconds as it {verb} {value}.",
            exc_info=local_exc_info,
        )

    return log_it
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/nap.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016 Étienne Bersac
2
+ # Copyright 2016 Julien Danjou
3
+ # Copyright 2016 Joshua Harlow
4
+ # Copyright 2013-2014 Ray Holder
5
+ #
6
+ # Licensed under the Apache License, Version 2.0 (the "License");
7
+ # you may not use this file except in compliance with the License.
8
+ # You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+
18
+ import time
19
+ import typing
20
+
21
+ if typing.TYPE_CHECKING:
22
+ import threading
23
+
24
+
25
def sleep(seconds: float) -> None:
    """
    Sleep strategy that delays execution for a given number of seconds.

    This is the default strategy, and may be mocked out for unit testing.

    :param seconds: how long to block the current thread.
    """
    time.sleep(seconds)
32
+
33
+
34
class sleep_using_event:
    """Sleep strategy that waits on an event to be set."""

    def __init__(self, event: "threading.Event") -> None:
        # Event that, once set, cuts any in-progress wait short.
        self.event = event

    def __call__(self, timeout: typing.Optional[float]) -> None:
        # NOTE(harlowja): this may *not* actually wait for timeout
        # seconds if the event is set (ie this may eject out early).
        self.event.wait(timeout=timeout)
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/py.typed ADDED
File without changes
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/retry.py ADDED
@@ -0,0 +1,272 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016–2021 Julien Danjou
2
+ # Copyright 2016 Joshua Harlow
3
+ # Copyright 2013-2014 Ray Holder
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ import abc
18
+ import re
19
+ import typing
20
+
21
+ if typing.TYPE_CHECKING:
22
+ from pip._vendor.tenacity import RetryCallState
23
+
24
+
25
class retry_base(abc.ABC):
    """Abstract base class for retry strategies."""

    @abc.abstractmethod
    def __call__(self, retry_state: "RetryCallState") -> bool:
        """Return True when the attempt described by *retry_state* should be retried."""
        pass

    def __and__(self, other: "retry_base") -> "retry_all":
        # `a & b` retries only when both strategies agree.
        return retry_all(self, other)

    def __or__(self, other: "retry_base") -> "retry_any":
        # `a | b` retries when either strategy agrees.
        return retry_any(self, other)


# Either a retry_base strategy or a bare predicate over the call state.
RetryBaseT = typing.Union[retry_base, typing.Callable[["RetryCallState"], bool]]
40
+
41
+
42
class _retry_never(retry_base):
    """Retry strategy that never rejects any result."""

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Never retry, whatever the outcome.
        return False


# Singleton instance used as the "never retry" strategy.
retry_never = _retry_never()
50
+
51
+
52
class _retry_always(retry_base):
    """Retry strategy that always rejects any result."""

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Always retry, whatever the outcome.
        return True


# Singleton instance used as the "always retry" strategy.
retry_always = _retry_always()
60
+
61
+
62
class retry_if_exception(retry_base):
    """Retry strategy that retries if an exception verifies a predicate."""

    def __init__(self, predicate: typing.Callable[[BaseException], bool]) -> None:
        # Callable deciding, given the raised exception, whether to retry.
        self.predicate = predicate

    def __call__(self, retry_state: "RetryCallState") -> bool:
        if retry_state.outcome is None:
            raise RuntimeError("__call__() called before outcome was set")

        # A successful outcome never triggers an exception-based retry.
        if not retry_state.outcome.failed:
            return False

        exception = retry_state.outcome.exception()
        if exception is None:
            raise RuntimeError("outcome failed but the exception is None")
        return self.predicate(exception)
79
+
80
+
81
class retry_if_exception_type(retry_if_exception):
    """Retries if an exception has been raised of one or more types."""

    def __init__(
        self,
        exception_types: typing.Union[
            typing.Type[BaseException],
            typing.Tuple[typing.Type[BaseException], ...],
        ] = Exception,
    ) -> None:
        # Kept as an attribute for introspection; the predicate closes over it.
        self.exception_types = exception_types
        super().__init__(lambda e: isinstance(e, exception_types))
93
+
94
+
95
class retry_if_not_exception_type(retry_if_exception):
    """Retries except an exception has been raised of one or more types."""

    def __init__(
        self,
        exception_types: typing.Union[
            typing.Type[BaseException],
            typing.Tuple[typing.Type[BaseException], ...],
        ] = Exception,
    ) -> None:
        # Kept as an attribute for introspection; the predicate closes over it.
        self.exception_types = exception_types
        super().__init__(lambda e: not isinstance(e, exception_types))
107
+
108
+
109
class retry_unless_exception_type(retry_if_exception):
    """Retries until an exception is raised of one or more types."""

    def __init__(
        self,
        exception_types: typing.Union[
            typing.Type[BaseException],
            typing.Tuple[typing.Type[BaseException], ...],
        ] = Exception,
    ) -> None:
        # Kept as an attribute for introspection; the predicate closes over it.
        self.exception_types = exception_types
        super().__init__(lambda e: not isinstance(e, exception_types))

    def __call__(self, retry_state: "RetryCallState") -> bool:
        if retry_state.outcome is None:
            raise RuntimeError("__call__() called before outcome was set")

        # always retry if no exception was raised
        if not retry_state.outcome.failed:
            return True

        exception = retry_state.outcome.exception()
        if exception is None:
            raise RuntimeError("outcome failed but the exception is None")
        # Retry only while the exception is NOT one of the terminating types.
        return self.predicate(exception)
134
+
135
+
136
class retry_if_exception_cause_type(retry_base):
    """Retries if any of the causes of the raised exception is of one or more types.

    The check on the type of the cause of the exception is done recursively (until finding
    an exception in the chain that has no `__cause__`)
    """

    def __init__(
        self,
        exception_types: typing.Union[
            typing.Type[BaseException],
            typing.Tuple[typing.Type[BaseException], ...],
        ] = Exception,
    ) -> None:
        # Type(s) to look for anywhere in the exception's __cause__ chain.
        self.exception_cause_types = exception_types

    def __call__(self, retry_state: "RetryCallState") -> bool:
        if retry_state.outcome is None:
            raise RuntimeError("__call__ called before outcome was set")

        if retry_state.outcome.failed:
            exc = retry_state.outcome.exception()
            # Walk the explicit cause chain; note the raised exception itself
            # is never tested, only its causes.
            # NOTE(review): a cyclic __cause__ chain would loop forever here.
            while exc is not None:
                if isinstance(exc.__cause__, self.exception_cause_types):
                    return True
                exc = exc.__cause__

        return False
164
+
165
+
166
class retry_if_result(retry_base):
    """Retry when the attempt's result satisfies the given predicate."""

    def __init__(self, predicate: typing.Callable[[typing.Any], bool]) -> None:
        self.predicate = predicate

    def __call__(self, retry_state: "RetryCallState") -> bool:
        if retry_state.outcome is None:
            raise RuntimeError("__call__() called before outcome was set")

        # Exceptions are not handled by this strategy.
        if retry_state.outcome.failed:
            return False
        return self.predicate(retry_state.outcome.result())
180
+
181
+
182
class retry_if_not_result(retry_base):
    """Retry when the attempt's result refutes the given predicate."""

    def __init__(self, predicate: typing.Callable[[typing.Any], bool]) -> None:
        self.predicate = predicate

    def __call__(self, retry_state: "RetryCallState") -> bool:
        if retry_state.outcome is None:
            raise RuntimeError("__call__() called before outcome was set")

        # Exceptions are not handled by this strategy.
        if retry_state.outcome.failed:
            return False
        return not self.predicate(retry_state.outcome.result())
196
+
197
+
198
class retry_if_exception_message(retry_if_exception):
    """Retry when the exception's message equals (or regex-matches) a value.

    Exactly one of *message* and *match* must be supplied.
    """

    def __init__(
        self,
        message: typing.Optional[str] = None,
        match: typing.Optional[str] = None,
    ) -> None:
        if message and match:
            raise TypeError(f"{self.__class__.__name__}() takes either 'message' or 'match', not both")

        if message:

            def predicate(exception: BaseException) -> bool:
                # Exact string equality against str(exception).
                return message == str(exception)

        elif match:
            # Compile once; the predicate reuses the compiled pattern.
            prog = re.compile(match)

            def predicate(exception: BaseException) -> bool:
                return bool(prog.match(str(exception)))

        else:
            raise TypeError(f"{self.__class__.__name__}() missing 1 required argument 'message' or 'match'")

        super().__init__(predicate)
227
+
228
+
229
class retry_if_not_exception_message(retry_if_exception_message):
    """Retry until the exception's message equals (or regex-matches) a value."""

    def __init__(
        self,
        message: typing.Optional[str] = None,
        match: typing.Optional[str] = None,
    ) -> None:
        super().__init__(message, match)
        # Negate whatever predicate the parent class installed.
        original_predicate = self.predicate
        self.predicate = lambda *a, **kw: not original_predicate(*a, **kw)

    def __call__(self, retry_state: "RetryCallState") -> bool:
        if retry_state.outcome is None:
            raise RuntimeError("__call__() called before outcome was set")

        # Successful attempts always retry under this strategy.
        if not retry_state.outcome.failed:
            return True

        exception = retry_state.outcome.exception()
        if exception is None:
            raise RuntimeError("outcome failed but the exception is None")
        return self.predicate(exception)
253
+
254
+
255
class retry_any(retry_base):
    """Retry when at least one of the wrapped conditions wants to retry."""

    def __init__(self, *retries: retry_base) -> None:
        self.retries = retries

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Short-circuits on the first condition that returns True.
        return any(condition(retry_state) for condition in self.retries)
263
+
264
+
265
class retry_all(retry_base):
    """Retry only when every wrapped condition wants to retry."""

    def __init__(self, *retries: retry_base) -> None:
        self.retries = retries

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Short-circuits on the first condition that returns False.
        return all(condition(retry_state) for condition in self.retries)
.venv/lib/python3.11/site-packages/pip/_vendor/tenacity/stop.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2016–2021 Julien Danjou
2
+ # Copyright 2016 Joshua Harlow
3
+ # Copyright 2013-2014 Ray Holder
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+ import abc
17
+ import typing
18
+
19
+ from pip._vendor.tenacity import _utils
20
+
21
+ if typing.TYPE_CHECKING:
22
+ import threading
23
+
24
+ from pip._vendor.tenacity import RetryCallState
25
+
26
+
27
class stop_base(abc.ABC):
    """Abstract base class for stop strategies.

    Subclasses decide, per attempt, whether retrying should stop. Strategies
    compose with ``&`` (all must stop) and ``|`` (any may stop).
    """

    @abc.abstractmethod
    def __call__(self, retry_state: "RetryCallState") -> bool:
        ...

    def __or__(self, other: "stop_base") -> "stop_any":
        return stop_any(self, other)

    def __and__(self, other: "stop_base") -> "stop_all":
        return stop_all(self, other)
39
+
40
+
41
# Public alias: a stop strategy is either a stop_base instance or any bare
# callable taking a RetryCallState and returning bool.
StopBaseT = typing.Union[stop_base, typing.Callable[["RetryCallState"], bool]]
42
+
43
+
44
class stop_any(stop_base):
    """Stop as soon as at least one of the wrapped stop conditions holds."""

    def __init__(self, *stops: stop_base) -> None:
        self.stops = stops

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Explicit loop: bail out on the first condition that says "stop".
        for condition in self.stops:
            if condition(retry_state):
                return True
        return False
52
+
53
+
54
class stop_all(stop_base):
    """Stop only when every wrapped stop condition holds."""

    def __init__(self, *stops: stop_base) -> None:
        self.stops = stops

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Explicit loop: bail out on the first condition that says "keep going".
        for condition in self.stops:
            if not condition(retry_state):
                return False
        return True
62
+
63
+
64
class _stop_never(stop_base):
    """Strategy that never asks to stop (retry forever)."""

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Unconditional: the retry state is ignored entirely.
        return False
69
+
70
+
71
# Module-level singleton exported as the canonical "never stop" strategy.
stop_never = _stop_never()
72
+
73
+
74
class stop_when_event_set(stop_base):
    """Stop once the given ``threading.Event`` becomes set."""

    def __init__(self, event: "threading.Event") -> None:
        self.event = event

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # The retry state is irrelevant; only the event's flag matters.
        return self.event.is_set()
82
+
83
+
84
class stop_after_attempt(stop_base):
    """Stop once the number of attempts reaches ``max_attempt_number``."""

    def __init__(self, max_attempt_number: int) -> None:
        self.max_attempt_number = max_attempt_number

    def __call__(self, retry_state: "RetryCallState") -> bool:
        # Stop when the configured budget has been consumed.
        return self.max_attempt_number <= retry_state.attempt_number
92
+
93
+
94
class stop_after_delay(stop_base):
    """Stop once the elapsed time since the first attempt reaches the limit."""

    def __init__(self, max_delay: _utils.time_unit_type) -> None:
        # Normalised to seconds via the shared _utils.to_seconds helper.
        self.max_delay = _utils.to_seconds(max_delay)

    def __call__(self, retry_state: "RetryCallState") -> bool:
        elapsed = retry_state.seconds_since_start
        if elapsed is None:
            raise RuntimeError("__call__() called but seconds_since_start is not set")
        return elapsed >= self.max_delay