koichi12 commited on
Commit
6ed5a9a
·
verified ·
1 Parent(s): 1adfd7e

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py +6 -0
  2. .venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc +0 -0
  3. .venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc +0 -0
  4. .venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc +0 -0
  5. .venv/lib/python3.11/site-packages/pip/_vendor/__init__.py +121 -0
  6. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/compat.py +1138 -0
  7. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/database.py +1359 -0
  8. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/index.py +508 -0
  9. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/manifest.py +384 -0
  10. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/markers.py +167 -0
  11. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/metadata.py +1068 -0
  12. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/resources.py +358 -0
  13. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/scripts.py +452 -0
  14. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/t32.exe +0 -0
  15. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/version.py +751 -0
  16. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/w32.exe +0 -0
  17. .venv/lib/python3.11/site-packages/pip/_vendor/distlib/wheel.py +1099 -0
  18. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__about__.py +26 -0
  19. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__init__.py +25 -0
  20. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc +0 -0
  21. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc +0 -0
  22. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc +0 -0
  23. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc +0 -0
  24. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc +0 -0
  25. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc +0 -0
  26. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc +0 -0
  27. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc +0 -0
  28. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc +0 -0
  29. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc +0 -0
  30. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc +0 -0
  31. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/_manylinux.py +301 -0
  32. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/_musllinux.py +136 -0
  33. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/_structures.py +61 -0
  34. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/markers.py +304 -0
  35. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/py.typed +0 -0
  36. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/requirements.py +146 -0
  37. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/specifiers.py +802 -0
  38. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/tags.py +487 -0
  39. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/utils.py +136 -0
  40. .venv/lib/python3.11/site-packages/pip/_vendor/packaging/version.py +504 -0
  41. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__init__.py +322 -0
  42. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc +0 -0
  43. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc +0 -0
  44. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc +0 -0
  45. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc +0 -0
  46. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc +0 -0
  47. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc +0 -0
  48. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc +0 -0
  49. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc +0 -0
  50. .venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc +0 -0
.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from ._dists import Distribution
2
+ from ._envs import Environment
3
+
4
+ __all__ = ["NAME", "Distribution", "Environment"]
5
+
6
+ NAME = "importlib"
.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (388 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_compat.cpython-311.pyc ADDED
Binary file (3.56 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_internal/metadata/importlib/__pycache__/_envs.cpython-311.pyc ADDED
Binary file (12.5 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/__init__.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ pip._vendor is for vendoring dependencies of pip to prevent needing pip to
3
+ depend on something external.
4
+
5
+ Files inside of pip._vendor should be considered immutable and should only be
6
+ updated to versions from upstream.
7
+ """
8
+ from __future__ import absolute_import
9
+
10
+ import glob
11
+ import os.path
12
+ import sys
13
+
14
+ # Downstream redistributors which have debundled our dependencies should also
15
+ # patch this value to be true. This will trigger the additional patching
16
+ # to cause things like "six" to be available as pip.
17
+ DEBUNDLED = False
18
+
19
+ # By default, look in this directory for a bunch of .whl files which we will
20
+ # add to the beginning of sys.path before attempting to import anything. This
21
+ # is done to support downstream re-distributors like Debian and Fedora who
22
+ # wish to create their own Wheels for our dependencies to aid in debundling.
23
+ WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
24
+
25
+
26
+ # Define a small helper function to alias our vendored modules to the real ones
27
+ # if the vendored ones do not exist. This idea of this was taken from
28
+ # https://github.com/kennethreitz/requests/pull/2567.
29
+ def vendored(modulename):
30
+ vendored_name = "{0}.{1}".format(__name__, modulename)
31
+
32
+ try:
33
+ __import__(modulename, globals(), locals(), level=0)
34
+ except ImportError:
35
+ # We can just silently allow import failures to pass here. If we
36
+ # got to this point it means that ``import pip._vendor.whatever``
37
+ # failed and so did ``import whatever``. Since we're importing this
38
+ # upfront in an attempt to alias imports, not erroring here will
39
+ # just mean we get a regular import error whenever pip *actually*
40
+ # tries to import one of these modules to use it, which actually
41
+ # gives us a better error message than we would have otherwise
42
+ # gotten.
43
+ pass
44
+ else:
45
+ sys.modules[vendored_name] = sys.modules[modulename]
46
+ base, head = vendored_name.rsplit(".", 1)
47
+ setattr(sys.modules[base], head, sys.modules[modulename])
48
+
49
+
50
+ # If we're operating in a debundled setup, then we want to go ahead and trigger
51
+ # the aliasing of our vendored libraries as well as looking for wheels to add
52
+ # to our sys.path. This will cause all of this code to be a no-op typically
53
+ # however downstream redistributors can enable it in a consistent way across
54
+ # all platforms.
55
+ if DEBUNDLED:
56
+ # Actually look inside of WHEEL_DIR to find .whl files and add them to the
57
+ # front of our sys.path.
58
+ sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
59
+
60
+ # Actually alias all of our vendored dependencies.
61
+ vendored("cachecontrol")
62
+ vendored("certifi")
63
+ vendored("colorama")
64
+ vendored("distlib")
65
+ vendored("distro")
66
+ vendored("six")
67
+ vendored("six.moves")
68
+ vendored("six.moves.urllib")
69
+ vendored("six.moves.urllib.parse")
70
+ vendored("packaging")
71
+ vendored("packaging.version")
72
+ vendored("packaging.specifiers")
73
+ vendored("pep517")
74
+ vendored("pkg_resources")
75
+ vendored("platformdirs")
76
+ vendored("progress")
77
+ vendored("requests")
78
+ vendored("requests.exceptions")
79
+ vendored("requests.packages")
80
+ vendored("requests.packages.urllib3")
81
+ vendored("requests.packages.urllib3._collections")
82
+ vendored("requests.packages.urllib3.connection")
83
+ vendored("requests.packages.urllib3.connectionpool")
84
+ vendored("requests.packages.urllib3.contrib")
85
+ vendored("requests.packages.urllib3.contrib.ntlmpool")
86
+ vendored("requests.packages.urllib3.contrib.pyopenssl")
87
+ vendored("requests.packages.urllib3.exceptions")
88
+ vendored("requests.packages.urllib3.fields")
89
+ vendored("requests.packages.urllib3.filepost")
90
+ vendored("requests.packages.urllib3.packages")
91
+ vendored("requests.packages.urllib3.packages.ordered_dict")
92
+ vendored("requests.packages.urllib3.packages.six")
93
+ vendored("requests.packages.urllib3.packages.ssl_match_hostname")
94
+ vendored("requests.packages.urllib3.packages.ssl_match_hostname."
95
+ "_implementation")
96
+ vendored("requests.packages.urllib3.poolmanager")
97
+ vendored("requests.packages.urllib3.request")
98
+ vendored("requests.packages.urllib3.response")
99
+ vendored("requests.packages.urllib3.util")
100
+ vendored("requests.packages.urllib3.util.connection")
101
+ vendored("requests.packages.urllib3.util.request")
102
+ vendored("requests.packages.urllib3.util.response")
103
+ vendored("requests.packages.urllib3.util.retry")
104
+ vendored("requests.packages.urllib3.util.ssl_")
105
+ vendored("requests.packages.urllib3.util.timeout")
106
+ vendored("requests.packages.urllib3.util.url")
107
+ vendored("resolvelib")
108
+ vendored("rich")
109
+ vendored("rich.console")
110
+ vendored("rich.highlighter")
111
+ vendored("rich.logging")
112
+ vendored("rich.markup")
113
+ vendored("rich.progress")
114
+ vendored("rich.segment")
115
+ vendored("rich.style")
116
+ vendored("rich.text")
117
+ vendored("rich.traceback")
118
+ vendored("tenacity")
119
+ vendored("tomli")
120
+ vendored("truststore")
121
+ vendored("urllib3")
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/compat.py ADDED
@@ -0,0 +1,1138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2013-2017 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ from __future__ import absolute_import
8
+
9
+ import os
10
+ import re
11
+ import shutil
12
+ import sys
13
+
14
+ try:
15
+ import ssl
16
+ except ImportError: # pragma: no cover
17
+ ssl = None
18
+
19
+ if sys.version_info[0] < 3: # pragma: no cover
20
+ from StringIO import StringIO
21
+ string_types = basestring,
22
+ text_type = unicode
23
+ from types import FileType as file_type
24
+ import __builtin__ as builtins
25
+ import ConfigParser as configparser
26
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit
27
+ from urllib import (urlretrieve, quote as _quote, unquote, url2pathname,
28
+ pathname2url, ContentTooShortError, splittype)
29
+
30
+ def quote(s):
31
+ if isinstance(s, unicode):
32
+ s = s.encode('utf-8')
33
+ return _quote(s)
34
+
35
+ import urllib2
36
+ from urllib2 import (Request, urlopen, URLError, HTTPError,
37
+ HTTPBasicAuthHandler, HTTPPasswordMgr, HTTPHandler,
38
+ HTTPRedirectHandler, build_opener)
39
+ if ssl:
40
+ from urllib2 import HTTPSHandler
41
+ import httplib
42
+ import xmlrpclib
43
+ import Queue as queue
44
+ from HTMLParser import HTMLParser
45
+ import htmlentitydefs
46
+ raw_input = raw_input
47
+ from itertools import ifilter as filter
48
+ from itertools import ifilterfalse as filterfalse
49
+
50
+ # Leaving this around for now, in case it needs resurrecting in some way
51
+ # _userprog = None
52
+ # def splituser(host):
53
+ # """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
54
+ # global _userprog
55
+ # if _userprog is None:
56
+ # import re
57
+ # _userprog = re.compile('^(.*)@(.*)$')
58
+
59
+ # match = _userprog.match(host)
60
+ # if match: return match.group(1, 2)
61
+ # return None, host
62
+
63
+ else: # pragma: no cover
64
+ from io import StringIO
65
+ string_types = str,
66
+ text_type = str
67
+ from io import TextIOWrapper as file_type
68
+ import builtins
69
+ import configparser
70
+ from urllib.parse import (urlparse, urlunparse, urljoin, quote, unquote,
71
+ urlsplit, urlunsplit, splittype)
72
+ from urllib.request import (urlopen, urlretrieve, Request, url2pathname,
73
+ pathname2url, HTTPBasicAuthHandler,
74
+ HTTPPasswordMgr, HTTPHandler,
75
+ HTTPRedirectHandler, build_opener)
76
+ if ssl:
77
+ from urllib.request import HTTPSHandler
78
+ from urllib.error import HTTPError, URLError, ContentTooShortError
79
+ import http.client as httplib
80
+ import urllib.request as urllib2
81
+ import xmlrpc.client as xmlrpclib
82
+ import queue
83
+ from html.parser import HTMLParser
84
+ import html.entities as htmlentitydefs
85
+ raw_input = input
86
+ from itertools import filterfalse
87
+ filter = filter
88
+
89
+ try:
90
+ from ssl import match_hostname, CertificateError
91
+ except ImportError: # pragma: no cover
92
+
93
+ class CertificateError(ValueError):
94
+ pass
95
+
96
+ def _dnsname_match(dn, hostname, max_wildcards=1):
97
+ """Matching according to RFC 6125, section 6.4.3
98
+
99
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
100
+ """
101
+ pats = []
102
+ if not dn:
103
+ return False
104
+
105
+ parts = dn.split('.')
106
+ leftmost, remainder = parts[0], parts[1:]
107
+
108
+ wildcards = leftmost.count('*')
109
+ if wildcards > max_wildcards:
110
+ # Issue #17980: avoid denials of service by refusing more
111
+ # than one wildcard per fragment. A survey of established
112
+ # policy among SSL implementations showed it to be a
113
+ # reasonable choice.
114
+ raise CertificateError(
115
+ "too many wildcards in certificate DNS name: " + repr(dn))
116
+
117
+ # speed up common case w/o wildcards
118
+ if not wildcards:
119
+ return dn.lower() == hostname.lower()
120
+
121
+ # RFC 6125, section 6.4.3, subitem 1.
122
+ # The client SHOULD NOT attempt to match a presented identifier in which
123
+ # the wildcard character comprises a label other than the left-most label.
124
+ if leftmost == '*':
125
+ # When '*' is a fragment by itself, it matches a non-empty dotless
126
+ # fragment.
127
+ pats.append('[^.]+')
128
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
129
+ # RFC 6125, section 6.4.3, subitem 3.
130
+ # The client SHOULD NOT attempt to match a presented identifier
131
+ # where the wildcard character is embedded within an A-label or
132
+ # U-label of an internationalized domain name.
133
+ pats.append(re.escape(leftmost))
134
+ else:
135
+ # Otherwise, '*' matches any dotless string, e.g. www*
136
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
137
+
138
+ # add the remaining fragments, ignore any wildcards
139
+ for frag in remainder:
140
+ pats.append(re.escape(frag))
141
+
142
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
143
+ return pat.match(hostname)
144
+
145
+ def match_hostname(cert, hostname):
146
+ """Verify that *cert* (in decoded format as returned by
147
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
148
+ rules are followed, but IP addresses are not accepted for *hostname*.
149
+
150
+ CertificateError is raised on failure. On success, the function
151
+ returns nothing.
152
+ """
153
+ if not cert:
154
+ raise ValueError("empty or no certificate, match_hostname needs a "
155
+ "SSL socket or SSL context with either "
156
+ "CERT_OPTIONAL or CERT_REQUIRED")
157
+ dnsnames = []
158
+ san = cert.get('subjectAltName', ())
159
+ for key, value in san:
160
+ if key == 'DNS':
161
+ if _dnsname_match(value, hostname):
162
+ return
163
+ dnsnames.append(value)
164
+ if not dnsnames:
165
+ # The subject is only checked when there is no dNSName entry
166
+ # in subjectAltName
167
+ for sub in cert.get('subject', ()):
168
+ for key, value in sub:
169
+ # XXX according to RFC 2818, the most specific Common Name
170
+ # must be used.
171
+ if key == 'commonName':
172
+ if _dnsname_match(value, hostname):
173
+ return
174
+ dnsnames.append(value)
175
+ if len(dnsnames) > 1:
176
+ raise CertificateError("hostname %r "
177
+ "doesn't match either of %s" %
178
+ (hostname, ', '.join(map(repr, dnsnames))))
179
+ elif len(dnsnames) == 1:
180
+ raise CertificateError("hostname %r "
181
+ "doesn't match %r" %
182
+ (hostname, dnsnames[0]))
183
+ else:
184
+ raise CertificateError("no appropriate commonName or "
185
+ "subjectAltName fields were found")
186
+
187
+
188
+ try:
189
+ from types import SimpleNamespace as Container
190
+ except ImportError: # pragma: no cover
191
+
192
+ class Container(object):
193
+ """
194
+ A generic container for when multiple values need to be returned
195
+ """
196
+
197
+ def __init__(self, **kwargs):
198
+ self.__dict__.update(kwargs)
199
+
200
+
201
+ try:
202
+ from shutil import which
203
+ except ImportError: # pragma: no cover
204
+ # Implementation from Python 3.3
205
+ def which(cmd, mode=os.F_OK | os.X_OK, path=None):
206
+ """Given a command, mode, and a PATH string, return the path which
207
+ conforms to the given mode on the PATH, or None if there is no such
208
+ file.
209
+
210
+ `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
211
+ of os.environ.get("PATH"), or can be overridden with a custom search
212
+ path.
213
+
214
+ """
215
+
216
+ # Check that a given file can be accessed with the correct mode.
217
+ # Additionally check that `file` is not a directory, as on Windows
218
+ # directories pass the os.access check.
219
+ def _access_check(fn, mode):
220
+ return (os.path.exists(fn) and os.access(fn, mode)
221
+ and not os.path.isdir(fn))
222
+
223
+ # If we're given a path with a directory part, look it up directly rather
224
+ # than referring to PATH directories. This includes checking relative to the
225
+ # current directory, e.g. ./script
226
+ if os.path.dirname(cmd):
227
+ if _access_check(cmd, mode):
228
+ return cmd
229
+ return None
230
+
231
+ if path is None:
232
+ path = os.environ.get("PATH", os.defpath)
233
+ if not path:
234
+ return None
235
+ path = path.split(os.pathsep)
236
+
237
+ if sys.platform == "win32":
238
+ # The current directory takes precedence on Windows.
239
+ if os.curdir not in path:
240
+ path.insert(0, os.curdir)
241
+
242
+ # PATHEXT is necessary to check on Windows.
243
+ pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
244
+ # See if the given file matches any of the expected path extensions.
245
+ # This will allow us to short circuit when given "python.exe".
246
+ # If it does match, only test that one, otherwise we have to try
247
+ # others.
248
+ if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
249
+ files = [cmd]
250
+ else:
251
+ files = [cmd + ext for ext in pathext]
252
+ else:
253
+ # On other platforms you don't have things like PATHEXT to tell you
254
+ # what file suffixes are executable, so just pass on cmd as-is.
255
+ files = [cmd]
256
+
257
+ seen = set()
258
+ for dir in path:
259
+ normdir = os.path.normcase(dir)
260
+ if normdir not in seen:
261
+ seen.add(normdir)
262
+ for thefile in files:
263
+ name = os.path.join(dir, thefile)
264
+ if _access_check(name, mode):
265
+ return name
266
+ return None
267
+
268
+
269
+ # ZipFile is a context manager in 2.7, but not in 2.6
270
+
271
+ from zipfile import ZipFile as BaseZipFile
272
+
273
+ if hasattr(BaseZipFile, '__enter__'): # pragma: no cover
274
+ ZipFile = BaseZipFile
275
+ else: # pragma: no cover
276
+ from zipfile import ZipExtFile as BaseZipExtFile
277
+
278
+ class ZipExtFile(BaseZipExtFile):
279
+
280
+ def __init__(self, base):
281
+ self.__dict__.update(base.__dict__)
282
+
283
+ def __enter__(self):
284
+ return self
285
+
286
+ def __exit__(self, *exc_info):
287
+ self.close()
288
+ # return None, so if an exception occurred, it will propagate
289
+
290
+ class ZipFile(BaseZipFile):
291
+
292
+ def __enter__(self):
293
+ return self
294
+
295
+ def __exit__(self, *exc_info):
296
+ self.close()
297
+ # return None, so if an exception occurred, it will propagate
298
+
299
+ def open(self, *args, **kwargs):
300
+ base = BaseZipFile.open(self, *args, **kwargs)
301
+ return ZipExtFile(base)
302
+
303
+
304
+ try:
305
+ from platform import python_implementation
306
+ except ImportError: # pragma: no cover
307
+
308
+ def python_implementation():
309
+ """Return a string identifying the Python implementation."""
310
+ if 'PyPy' in sys.version:
311
+ return 'PyPy'
312
+ if os.name == 'java':
313
+ return 'Jython'
314
+ if sys.version.startswith('IronPython'):
315
+ return 'IronPython'
316
+ return 'CPython'
317
+
318
+
319
+ import sysconfig
320
+
321
+ try:
322
+ callable = callable
323
+ except NameError: # pragma: no cover
324
+ from collections.abc import Callable
325
+
326
+ def callable(obj):
327
+ return isinstance(obj, Callable)
328
+
329
+
330
+ try:
331
+ fsencode = os.fsencode
332
+ fsdecode = os.fsdecode
333
+ except AttributeError: # pragma: no cover
334
+ # Issue #99: on some systems (e.g. containerised),
335
+ # sys.getfilesystemencoding() returns None, and we need a real value,
336
+ # so fall back to utf-8. From the CPython 2.7 docs relating to Unix and
337
+ # sys.getfilesystemencoding(): the return value is "the user’s preference
338
+ # according to the result of nl_langinfo(CODESET), or None if the
339
+ # nl_langinfo(CODESET) failed."
340
+ _fsencoding = sys.getfilesystemencoding() or 'utf-8'
341
+ if _fsencoding == 'mbcs':
342
+ _fserrors = 'strict'
343
+ else:
344
+ _fserrors = 'surrogateescape'
345
+
346
+ def fsencode(filename):
347
+ if isinstance(filename, bytes):
348
+ return filename
349
+ elif isinstance(filename, text_type):
350
+ return filename.encode(_fsencoding, _fserrors)
351
+ else:
352
+ raise TypeError("expect bytes or str, not %s" %
353
+ type(filename).__name__)
354
+
355
+ def fsdecode(filename):
356
+ if isinstance(filename, text_type):
357
+ return filename
358
+ elif isinstance(filename, bytes):
359
+ return filename.decode(_fsencoding, _fserrors)
360
+ else:
361
+ raise TypeError("expect bytes or str, not %s" %
362
+ type(filename).__name__)
363
+
364
+
365
+ try:
366
+ from tokenize import detect_encoding
367
+ except ImportError: # pragma: no cover
368
+ from codecs import BOM_UTF8, lookup
369
+
370
+ cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
371
+
372
+ def _get_normal_name(orig_enc):
373
+ """Imitates get_normal_name in tokenizer.c."""
374
+ # Only care about the first 12 characters.
375
+ enc = orig_enc[:12].lower().replace("_", "-")
376
+ if enc == "utf-8" or enc.startswith("utf-8-"):
377
+ return "utf-8"
378
+ if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \
379
+ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")):
380
+ return "iso-8859-1"
381
+ return orig_enc
382
+
383
+ def detect_encoding(readline):
384
+ """
385
+ The detect_encoding() function is used to detect the encoding that should
386
+ be used to decode a Python source file. It requires one argument, readline,
387
+ in the same way as the tokenize() generator.
388
+
389
+ It will call readline a maximum of twice, and return the encoding used
390
+ (as a string) and a list of any lines (left as bytes) it has read in.
391
+
392
+ It detects the encoding from the presence of a utf-8 bom or an encoding
393
+ cookie as specified in pep-0263. If both a bom and a cookie are present,
394
+ but disagree, a SyntaxError will be raised. If the encoding cookie is an
395
+ invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
396
+ 'utf-8-sig' is returned.
397
+
398
+ If no encoding is specified, then the default of 'utf-8' will be returned.
399
+ """
400
+ try:
401
+ filename = readline.__self__.name
402
+ except AttributeError:
403
+ filename = None
404
+ bom_found = False
405
+ encoding = None
406
+ default = 'utf-8'
407
+
408
+ def read_or_stop():
409
+ try:
410
+ return readline()
411
+ except StopIteration:
412
+ return b''
413
+
414
+ def find_cookie(line):
415
+ try:
416
+ # Decode as UTF-8. Either the line is an encoding declaration,
417
+ # in which case it should be pure ASCII, or it must be UTF-8
418
+ # per default encoding.
419
+ line_string = line.decode('utf-8')
420
+ except UnicodeDecodeError:
421
+ msg = "invalid or missing encoding declaration"
422
+ if filename is not None:
423
+ msg = '{} for {!r}'.format(msg, filename)
424
+ raise SyntaxError(msg)
425
+
426
+ matches = cookie_re.findall(line_string)
427
+ if not matches:
428
+ return None
429
+ encoding = _get_normal_name(matches[0])
430
+ try:
431
+ codec = lookup(encoding)
432
+ except LookupError:
433
+ # This behaviour mimics the Python interpreter
434
+ if filename is None:
435
+ msg = "unknown encoding: " + encoding
436
+ else:
437
+ msg = "unknown encoding for {!r}: {}".format(
438
+ filename, encoding)
439
+ raise SyntaxError(msg)
440
+
441
+ if bom_found:
442
+ if codec.name != 'utf-8':
443
+ # This behaviour mimics the Python interpreter
444
+ if filename is None:
445
+ msg = 'encoding problem: utf-8'
446
+ else:
447
+ msg = 'encoding problem for {!r}: utf-8'.format(
448
+ filename)
449
+ raise SyntaxError(msg)
450
+ encoding += '-sig'
451
+ return encoding
452
+
453
+ first = read_or_stop()
454
+ if first.startswith(BOM_UTF8):
455
+ bom_found = True
456
+ first = first[3:]
457
+ default = 'utf-8-sig'
458
+ if not first:
459
+ return default, []
460
+
461
+ encoding = find_cookie(first)
462
+ if encoding:
463
+ return encoding, [first]
464
+
465
+ second = read_or_stop()
466
+ if not second:
467
+ return default, [first]
468
+
469
+ encoding = find_cookie(second)
470
+ if encoding:
471
+ return encoding, [first, second]
472
+
473
+ return default, [first, second]
474
+
475
+
476
+ # For converting & <-> &amp; etc.
477
+ try:
478
+ from html import escape
479
+ except ImportError:
480
+ from cgi import escape
481
+ if sys.version_info[:2] < (3, 4):
482
+ unescape = HTMLParser().unescape
483
+ else:
484
+ from html import unescape
485
+
486
+ try:
487
+ from collections import ChainMap
488
+ except ImportError: # pragma: no cover
489
+ from collections import MutableMapping
490
+
491
+ try:
492
+ from reprlib import recursive_repr as _recursive_repr
493
+ except ImportError:
494
+
495
+ def _recursive_repr(fillvalue='...'):
496
+ '''
497
+ Decorator to make a repr function return fillvalue for a recursive
498
+ call
499
+ '''
500
+
501
+ def decorating_function(user_function):
502
+ repr_running = set()
503
+
504
+ def wrapper(self):
505
+ key = id(self), get_ident()
506
+ if key in repr_running:
507
+ return fillvalue
508
+ repr_running.add(key)
509
+ try:
510
+ result = user_function(self)
511
+ finally:
512
+ repr_running.discard(key)
513
+ return result
514
+
515
+ # Can't use functools.wraps() here because of bootstrap issues
516
+ wrapper.__module__ = getattr(user_function, '__module__')
517
+ wrapper.__doc__ = getattr(user_function, '__doc__')
518
+ wrapper.__name__ = getattr(user_function, '__name__')
519
+ wrapper.__annotations__ = getattr(user_function,
520
+ '__annotations__', {})
521
+ return wrapper
522
+
523
+ return decorating_function
524
+
525
+ class ChainMap(MutableMapping):
526
+ '''
527
+ A ChainMap groups multiple dicts (or other mappings) together
528
+ to create a single, updateable view.
529
+
530
+ The underlying mappings are stored in a list. That list is public and can
531
+ accessed or updated using the *maps* attribute. There is no other state.
532
+
533
+ Lookups search the underlying mappings successively until a key is found.
534
+ In contrast, writes, updates, and deletions only operate on the first
535
+ mapping.
536
+ '''
537
+
538
+ def __init__(self, *maps):
539
+ '''Initialize a ChainMap by setting *maps* to the given mappings.
540
+ If no mappings are provided, a single empty dictionary is used.
541
+
542
+ '''
543
+ self.maps = list(maps) or [{}] # always at least one map
544
+
545
+ def __missing__(self, key):
546
+ raise KeyError(key)
547
+
548
+ def __getitem__(self, key):
549
+ for mapping in self.maps:
550
+ try:
551
+ return mapping[
552
+ key] # can't use 'key in mapping' with defaultdict
553
+ except KeyError:
554
+ pass
555
+ return self.__missing__(
556
+ key) # support subclasses that define __missing__
557
+
558
+ def get(self, key, default=None):
559
+ return self[key] if key in self else default
560
+
561
+ def __len__(self):
562
+ return len(set().union(
563
+ *self.maps)) # reuses stored hash values if possible
564
+
565
+ def __iter__(self):
566
+ return iter(set().union(*self.maps))
567
+
568
+ def __contains__(self, key):
569
+ return any(key in m for m in self.maps)
570
+
571
+ def __bool__(self):
572
+ return any(self.maps)
573
+
574
+ @_recursive_repr()
575
+ def __repr__(self):
576
+ return '{0.__class__.__name__}({1})'.format(
577
+ self, ', '.join(map(repr, self.maps)))
578
+
579
+ @classmethod
580
+ def fromkeys(cls, iterable, *args):
581
+ 'Create a ChainMap with a single dict created from the iterable.'
582
+ return cls(dict.fromkeys(iterable, *args))
583
+
584
+ def copy(self):
585
+ 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
586
+ return self.__class__(self.maps[0].copy(), *self.maps[1:])
587
+
588
+ __copy__ = copy
589
+
590
+ def new_child(self): # like Django's Context.push()
591
+ 'New ChainMap with a new dict followed by all previous maps.'
592
+ return self.__class__({}, *self.maps)
593
+
594
+ @property
595
+ def parents(self): # like Django's Context.pop()
596
+ 'New ChainMap from maps[1:].'
597
+ return self.__class__(*self.maps[1:])
598
+
599
+ def __setitem__(self, key, value):
600
+ self.maps[0][key] = value
601
+
602
+ def __delitem__(self, key):
603
+ try:
604
+ del self.maps[0][key]
605
+ except KeyError:
606
+ raise KeyError(
607
+ 'Key not found in the first mapping: {!r}'.format(key))
608
+
609
+ def popitem(self):
610
+ 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.'
611
+ try:
612
+ return self.maps[0].popitem()
613
+ except KeyError:
614
+ raise KeyError('No keys found in the first mapping.')
615
+
616
+ def pop(self, key, *args):
617
+ 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
618
+ try:
619
+ return self.maps[0].pop(key, *args)
620
+ except KeyError:
621
+ raise KeyError(
622
+ 'Key not found in the first mapping: {!r}'.format(key))
623
+
624
+ def clear(self):
625
+ 'Clear maps[0], leaving maps[1:] intact.'
626
+ self.maps[0].clear()
627
+
628
+
629
+ try:
630
+ from importlib.util import cache_from_source # Python >= 3.4
631
+ except ImportError: # pragma: no cover
632
+
633
+ def cache_from_source(path, debug_override=None):
634
+ assert path.endswith('.py')
635
+ if debug_override is None:
636
+ debug_override = __debug__
637
+ if debug_override:
638
+ suffix = 'c'
639
+ else:
640
+ suffix = 'o'
641
+ return path + suffix
642
+
643
+
644
+ try:
645
+ from collections import OrderedDict
646
+ except ImportError: # pragma: no cover
647
+ # {{{ http://code.activestate.com/recipes/576693/ (r9)
648
+ # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
649
+ # Passes Python2.7's test suite and incorporates all the latest updates.
650
+ try:
651
+ from thread import get_ident as _get_ident
652
+ except ImportError:
653
+ from dummy_thread import get_ident as _get_ident
654
+
655
+ try:
656
+ from _abcoll import KeysView, ValuesView, ItemsView
657
+ except ImportError:
658
+ pass
659
+
660
+ class OrderedDict(dict):
661
+ 'Dictionary that remembers insertion order'
662
+
663
+ # An inherited dict maps keys to values.
664
+ # The inherited dict provides __getitem__, __len__, __contains__, and get.
665
+ # The remaining methods are order-aware.
666
+ # Big-O running times for all methods are the same as for regular dictionaries.
667
+
668
+ # The internal self.__map dictionary maps keys to links in a doubly linked list.
669
+ # The circular doubly linked list starts and ends with a sentinel element.
670
+ # The sentinel element never gets deleted (this simplifies the algorithm).
671
+ # Each link is stored as a list of length three: [PREV, NEXT, KEY].
672
+
673
+ def __init__(self, *args, **kwds):
674
+ '''Initialize an ordered dictionary. Signature is the same as for
675
+ regular dictionaries, but keyword arguments are not recommended
676
+ because their insertion order is arbitrary.
677
+
678
+ '''
679
+ if len(args) > 1:
680
+ raise TypeError('expected at most 1 arguments, got %d' %
681
+ len(args))
682
+ try:
683
+ self.__root
684
+ except AttributeError:
685
+ self.__root = root = [] # sentinel node
686
+ root[:] = [root, root, None]
687
+ self.__map = {}
688
+ self.__update(*args, **kwds)
689
+
690
+ def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
691
+ 'od.__setitem__(i, y) <==> od[i]=y'
692
+ # Setting a new item creates a new link which goes at the end of the linked
693
+ # list, and the inherited dictionary is updated with the new key/value pair.
694
+ if key not in self:
695
+ root = self.__root
696
+ last = root[0]
697
+ last[1] = root[0] = self.__map[key] = [last, root, key]
698
+ dict_setitem(self, key, value)
699
+
700
+ def __delitem__(self, key, dict_delitem=dict.__delitem__):
701
+ 'od.__delitem__(y) <==> del od[y]'
702
+ # Deleting an existing item uses self.__map to find the link which is
703
+ # then removed by updating the links in the predecessor and successor nodes.
704
+ dict_delitem(self, key)
705
+ link_prev, link_next, key = self.__map.pop(key)
706
+ link_prev[1] = link_next
707
+ link_next[0] = link_prev
708
+
709
+ def __iter__(self):
710
+ 'od.__iter__() <==> iter(od)'
711
+ root = self.__root
712
+ curr = root[1]
713
+ while curr is not root:
714
+ yield curr[2]
715
+ curr = curr[1]
716
+
717
+ def __reversed__(self):
718
+ 'od.__reversed__() <==> reversed(od)'
719
+ root = self.__root
720
+ curr = root[0]
721
+ while curr is not root:
722
+ yield curr[2]
723
+ curr = curr[0]
724
+
725
+ def clear(self):
726
+ 'od.clear() -> None. Remove all items from od.'
727
+ try:
728
+ for node in self.__map.itervalues():
729
+ del node[:]
730
+ root = self.__root
731
+ root[:] = [root, root, None]
732
+ self.__map.clear()
733
+ except AttributeError:
734
+ pass
735
+ dict.clear(self)
736
+
737
+ def popitem(self, last=True):
738
+ '''od.popitem() -> (k, v), return and remove a (key, value) pair.
739
+ Pairs are returned in LIFO order if last is true or FIFO order if false.
740
+
741
+ '''
742
+ if not self:
743
+ raise KeyError('dictionary is empty')
744
+ root = self.__root
745
+ if last:
746
+ link = root[0]
747
+ link_prev = link[0]
748
+ link_prev[1] = root
749
+ root[0] = link_prev
750
+ else:
751
+ link = root[1]
752
+ link_next = link[1]
753
+ root[1] = link_next
754
+ link_next[0] = root
755
+ key = link[2]
756
+ del self.__map[key]
757
+ value = dict.pop(self, key)
758
+ return key, value
759
+
760
+ # -- the following methods do not depend on the internal structure --
761
+
762
+ def keys(self):
763
+ 'od.keys() -> list of keys in od'
764
+ return list(self)
765
+
766
+ def values(self):
767
+ 'od.values() -> list of values in od'
768
+ return [self[key] for key in self]
769
+
770
+ def items(self):
771
+ 'od.items() -> list of (key, value) pairs in od'
772
+ return [(key, self[key]) for key in self]
773
+
774
+ def iterkeys(self):
775
+ 'od.iterkeys() -> an iterator over the keys in od'
776
+ return iter(self)
777
+
778
+ def itervalues(self):
779
+ 'od.itervalues -> an iterator over the values in od'
780
+ for k in self:
781
+ yield self[k]
782
+
783
+ def iteritems(self):
784
+ 'od.iteritems -> an iterator over the (key, value) items in od'
785
+ for k in self:
786
+ yield (k, self[k])
787
+
788
+ def update(*args, **kwds):
789
+ '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
790
+
791
+ If E is a dict instance, does: for k in E: od[k] = E[k]
792
+ If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
793
+ Or if E is an iterable of items, does: for k, v in E: od[k] = v
794
+ In either case, this is followed by: for k, v in F.items(): od[k] = v
795
+
796
+ '''
797
+ if len(args) > 2:
798
+ raise TypeError('update() takes at most 2 positional '
799
+ 'arguments (%d given)' % (len(args), ))
800
+ elif not args:
801
+ raise TypeError('update() takes at least 1 argument (0 given)')
802
+ self = args[0]
803
+ # Make progressively weaker assumptions about "other"
804
+ other = ()
805
+ if len(args) == 2:
806
+ other = args[1]
807
+ if isinstance(other, dict):
808
+ for key in other:
809
+ self[key] = other[key]
810
+ elif hasattr(other, 'keys'):
811
+ for key in other.keys():
812
+ self[key] = other[key]
813
+ else:
814
+ for key, value in other:
815
+ self[key] = value
816
+ for key, value in kwds.items():
817
+ self[key] = value
818
+
819
+ __update = update # let subclasses override update without breaking __init__
820
+
821
+ __marker = object()
822
+
823
+ def pop(self, key, default=__marker):
824
+ '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
825
+ If key is not found, d is returned if given, otherwise KeyError is raised.
826
+
827
+ '''
828
+ if key in self:
829
+ result = self[key]
830
+ del self[key]
831
+ return result
832
+ if default is self.__marker:
833
+ raise KeyError(key)
834
+ return default
835
+
836
+ def setdefault(self, key, default=None):
837
+ 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
838
+ if key in self:
839
+ return self[key]
840
+ self[key] = default
841
+ return default
842
+
843
+ def __repr__(self, _repr_running=None):
844
+ 'od.__repr__() <==> repr(od)'
845
+ if not _repr_running:
846
+ _repr_running = {}
847
+ call_key = id(self), _get_ident()
848
+ if call_key in _repr_running:
849
+ return '...'
850
+ _repr_running[call_key] = 1
851
+ try:
852
+ if not self:
853
+ return '%s()' % (self.__class__.__name__, )
854
+ return '%s(%r)' % (self.__class__.__name__, self.items())
855
+ finally:
856
+ del _repr_running[call_key]
857
+
858
+ def __reduce__(self):
859
+ 'Return state information for pickling'
860
+ items = [[k, self[k]] for k in self]
861
+ inst_dict = vars(self).copy()
862
+ for k in vars(OrderedDict()):
863
+ inst_dict.pop(k, None)
864
+ if inst_dict:
865
+ return (self.__class__, (items, ), inst_dict)
866
+ return self.__class__, (items, )
867
+
868
+ def copy(self):
869
+ 'od.copy() -> a shallow copy of od'
870
+ return self.__class__(self)
871
+
872
+ @classmethod
873
+ def fromkeys(cls, iterable, value=None):
874
+ '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
875
+ and values equal to v (which defaults to None).
876
+
877
+ '''
878
+ d = cls()
879
+ for key in iterable:
880
+ d[key] = value
881
+ return d
882
+
883
+ def __eq__(self, other):
884
+ '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
885
+ while comparison to a regular mapping is order-insensitive.
886
+
887
+ '''
888
+ if isinstance(other, OrderedDict):
889
+ return len(self) == len(
890
+ other) and self.items() == other.items()
891
+ return dict.__eq__(self, other)
892
+
893
+ def __ne__(self, other):
894
+ return not self == other
895
+
896
+ # -- the following methods are only used in Python 2.7 --
897
+
898
+ def viewkeys(self):
899
+ "od.viewkeys() -> a set-like object providing a view on od's keys"
900
+ return KeysView(self)
901
+
902
+ def viewvalues(self):
903
+ "od.viewvalues() -> an object providing a view on od's values"
904
+ return ValuesView(self)
905
+
906
+ def viewitems(self):
907
+ "od.viewitems() -> a set-like object providing a view on od's items"
908
+ return ItemsView(self)
909
+
910
+
911
+ try:
912
+ from logging.config import BaseConfigurator, valid_ident
913
+ except ImportError: # pragma: no cover
914
+ IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I)
915
+
916
+ def valid_ident(s):
917
+ m = IDENTIFIER.match(s)
918
+ if not m:
919
+ raise ValueError('Not a valid Python identifier: %r' % s)
920
+ return True
921
+
922
+ # The ConvertingXXX classes are wrappers around standard Python containers,
923
+ # and they serve to convert any suitable values in the container. The
924
+ # conversion converts base dicts, lists and tuples to their wrapped
925
+ # equivalents, whereas strings which match a conversion format are converted
926
+ # appropriately.
927
+ #
928
+ # Each wrapper should have a configurator attribute holding the actual
929
+ # configurator to use for conversion.
930
+
931
+ class ConvertingDict(dict):
932
+ """A converting dictionary wrapper."""
933
+
934
+ def __getitem__(self, key):
935
+ value = dict.__getitem__(self, key)
936
+ result = self.configurator.convert(value)
937
+ # If the converted value is different, save for next time
938
+ if value is not result:
939
+ self[key] = result
940
+ if type(result) in (ConvertingDict, ConvertingList,
941
+ ConvertingTuple):
942
+ result.parent = self
943
+ result.key = key
944
+ return result
945
+
946
+ def get(self, key, default=None):
947
+ value = dict.get(self, key, default)
948
+ result = self.configurator.convert(value)
949
+ # If the converted value is different, save for next time
950
+ if value is not result:
951
+ self[key] = result
952
+ if type(result) in (ConvertingDict, ConvertingList,
953
+ ConvertingTuple):
954
+ result.parent = self
955
+ result.key = key
956
+ return result
957
+
958
+ def pop(self, key, default=None):
959
+ value = dict.pop(self, key, default)
960
+ result = self.configurator.convert(value)
961
+ if value is not result:
962
+ if type(result) in (ConvertingDict, ConvertingList,
963
+ ConvertingTuple):
964
+ result.parent = self
965
+ result.key = key
966
+ return result
967
+
968
+ class ConvertingList(list):
969
+ """A converting list wrapper."""
970
+
971
+ def __getitem__(self, key):
972
+ value = list.__getitem__(self, key)
973
+ result = self.configurator.convert(value)
974
+ # If the converted value is different, save for next time
975
+ if value is not result:
976
+ self[key] = result
977
+ if type(result) in (ConvertingDict, ConvertingList,
978
+ ConvertingTuple):
979
+ result.parent = self
980
+ result.key = key
981
+ return result
982
+
983
+ def pop(self, idx=-1):
984
+ value = list.pop(self, idx)
985
+ result = self.configurator.convert(value)
986
+ if value is not result:
987
+ if type(result) in (ConvertingDict, ConvertingList,
988
+ ConvertingTuple):
989
+ result.parent = self
990
+ return result
991
+
992
+ class ConvertingTuple(tuple):
993
+ """A converting tuple wrapper."""
994
+
995
+ def __getitem__(self, key):
996
+ value = tuple.__getitem__(self, key)
997
+ result = self.configurator.convert(value)
998
+ if value is not result:
999
+ if type(result) in (ConvertingDict, ConvertingList,
1000
+ ConvertingTuple):
1001
+ result.parent = self
1002
+ result.key = key
1003
+ return result
1004
+
1005
+ class BaseConfigurator(object):
1006
+ """
1007
+ The configurator base class which defines some useful defaults.
1008
+ """
1009
+
1010
+ CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
1011
+
1012
+ WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
1013
+ DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
1014
+ INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
1015
+ DIGIT_PATTERN = re.compile(r'^\d+$')
1016
+
1017
+ value_converters = {
1018
+ 'ext': 'ext_convert',
1019
+ 'cfg': 'cfg_convert',
1020
+ }
1021
+
1022
+ # We might want to use a different one, e.g. importlib
1023
+ importer = staticmethod(__import__)
1024
+
1025
+ def __init__(self, config):
1026
+ self.config = ConvertingDict(config)
1027
+ self.config.configurator = self
1028
+
1029
+ def resolve(self, s):
1030
+ """
1031
+ Resolve strings to objects using standard import and attribute
1032
+ syntax.
1033
+ """
1034
+ name = s.split('.')
1035
+ used = name.pop(0)
1036
+ try:
1037
+ found = self.importer(used)
1038
+ for frag in name:
1039
+ used += '.' + frag
1040
+ try:
1041
+ found = getattr(found, frag)
1042
+ except AttributeError:
1043
+ self.importer(used)
1044
+ found = getattr(found, frag)
1045
+ return found
1046
+ except ImportError:
1047
+ e, tb = sys.exc_info()[1:]
1048
+ v = ValueError('Cannot resolve %r: %s' % (s, e))
1049
+ v.__cause__, v.__traceback__ = e, tb
1050
+ raise v
1051
+
1052
+ def ext_convert(self, value):
1053
+ """Default converter for the ext:// protocol."""
1054
+ return self.resolve(value)
1055
+
1056
+ def cfg_convert(self, value):
1057
+ """Default converter for the cfg:// protocol."""
1058
+ rest = value
1059
+ m = self.WORD_PATTERN.match(rest)
1060
+ if m is None:
1061
+ raise ValueError("Unable to convert %r" % value)
1062
+ else:
1063
+ rest = rest[m.end():]
1064
+ d = self.config[m.groups()[0]]
1065
+ while rest:
1066
+ m = self.DOT_PATTERN.match(rest)
1067
+ if m:
1068
+ d = d[m.groups()[0]]
1069
+ else:
1070
+ m = self.INDEX_PATTERN.match(rest)
1071
+ if m:
1072
+ idx = m.groups()[0]
1073
+ if not self.DIGIT_PATTERN.match(idx):
1074
+ d = d[idx]
1075
+ else:
1076
+ try:
1077
+ n = int(
1078
+ idx
1079
+ ) # try as number first (most likely)
1080
+ d = d[n]
1081
+ except TypeError:
1082
+ d = d[idx]
1083
+ if m:
1084
+ rest = rest[m.end():]
1085
+ else:
1086
+ raise ValueError('Unable to convert '
1087
+ '%r at %r' % (value, rest))
1088
+ # rest should be empty
1089
+ return d
1090
+
1091
+ def convert(self, value):
1092
+ """
1093
+ Convert values to an appropriate type. dicts, lists and tuples are
1094
+ replaced by their converting alternatives. Strings are checked to
1095
+ see if they have a conversion format and are converted if they do.
1096
+ """
1097
+ if not isinstance(value, ConvertingDict) and isinstance(
1098
+ value, dict):
1099
+ value = ConvertingDict(value)
1100
+ value.configurator = self
1101
+ elif not isinstance(value, ConvertingList) and isinstance(
1102
+ value, list):
1103
+ value = ConvertingList(value)
1104
+ value.configurator = self
1105
+ elif not isinstance(value, ConvertingTuple) and isinstance(value, tuple):
1106
+ value = ConvertingTuple(value)
1107
+ value.configurator = self
1108
+ elif isinstance(value, string_types):
1109
+ m = self.CONVERT_PATTERN.match(value)
1110
+ if m:
1111
+ d = m.groupdict()
1112
+ prefix = d['prefix']
1113
+ converter = self.value_converters.get(prefix, None)
1114
+ if converter:
1115
+ suffix = d['suffix']
1116
+ converter = getattr(self, converter)
1117
+ value = converter(suffix)
1118
+ return value
1119
+
1120
+ def configure_custom(self, config):
1121
+ """Configure an object with a user-supplied factory."""
1122
+ c = config.pop('()')
1123
+ if not callable(c):
1124
+ c = self.resolve(c)
1125
+ props = config.pop('.', None)
1126
+ # Check for valid identifiers
1127
+ kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
1128
+ result = c(**kwargs)
1129
+ if props:
1130
+ for name, value in props.items():
1131
+ setattr(result, name, value)
1132
+ return result
1133
+
1134
+ def as_tuple(self, value):
1135
+ """Utility function which converts lists to tuples."""
1136
+ if isinstance(value, list):
1137
+ value = tuple(value)
1138
+ return value
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/database.py ADDED
@@ -0,0 +1,1359 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012-2023 The Python Software Foundation.
4
+ # See LICENSE.txt and CONTRIBUTORS.txt.
5
+ #
6
+ """PEP 376 implementation."""
7
+
8
+ from __future__ import unicode_literals
9
+
10
+ import base64
11
+ import codecs
12
+ import contextlib
13
+ import hashlib
14
+ import logging
15
+ import os
16
+ import posixpath
17
+ import sys
18
+ import zipimport
19
+
20
+ from . import DistlibException, resources
21
+ from .compat import StringIO
22
+ from .version import get_scheme, UnsupportedVersionError
23
+ from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
24
+ LEGACY_METADATA_FILENAME)
25
+ from .util import (parse_requirement, cached_property, parse_name_and_version,
26
+ read_exports, write_exports, CSVReader, CSVWriter)
27
+
28
+ __all__ = [
29
+ 'Distribution', 'BaseInstalledDistribution', 'InstalledDistribution',
30
+ 'EggInfoDistribution', 'DistributionPath'
31
+ ]
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+ EXPORTS_FILENAME = 'pydist-exports.json'
36
+ COMMANDS_FILENAME = 'pydist-commands.json'
37
+
38
+ DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED',
39
+ 'RESOURCES', EXPORTS_FILENAME, 'SHARED')
40
+
41
+ DISTINFO_EXT = '.dist-info'
42
+
43
+
44
+ class _Cache(object):
45
+ """
46
+ A simple cache mapping names and .dist-info paths to distributions
47
+ """
48
+
49
+ def __init__(self):
50
+ """
51
+ Initialise an instance. There is normally one for each DistributionPath.
52
+ """
53
+ self.name = {}
54
+ self.path = {}
55
+ self.generated = False
56
+
57
+ def clear(self):
58
+ """
59
+ Clear the cache, setting it to its initial state.
60
+ """
61
+ self.name.clear()
62
+ self.path.clear()
63
+ self.generated = False
64
+
65
+ def add(self, dist):
66
+ """
67
+ Add a distribution to the cache.
68
+ :param dist: The distribution to add.
69
+ """
70
+ if dist.path not in self.path:
71
+ self.path[dist.path] = dist
72
+ self.name.setdefault(dist.key, []).append(dist)
73
+
74
+
75
+ class DistributionPath(object):
76
+ """
77
+ Represents a set of distributions installed on a path (typically sys.path).
78
+ """
79
+
80
+ def __init__(self, path=None, include_egg=False):
81
+ """
82
+ Create an instance from a path, optionally including legacy (distutils/
83
+ setuptools/distribute) distributions.
84
+ :param path: The path to use, as a list of directories. If not specified,
85
+ sys.path is used.
86
+ :param include_egg: If True, this instance will look for and return legacy
87
+ distributions as well as those based on PEP 376.
88
+ """
89
+ if path is None:
90
+ path = sys.path
91
+ self.path = path
92
+ self._include_dist = True
93
+ self._include_egg = include_egg
94
+
95
+ self._cache = _Cache()
96
+ self._cache_egg = _Cache()
97
+ self._cache_enabled = True
98
+ self._scheme = get_scheme('default')
99
+
100
+ def _get_cache_enabled(self):
101
+ return self._cache_enabled
102
+
103
+ def _set_cache_enabled(self, value):
104
+ self._cache_enabled = value
105
+
106
+ cache_enabled = property(_get_cache_enabled, _set_cache_enabled)
107
+
108
+ def clear_cache(self):
109
+ """
110
+ Clears the internal cache.
111
+ """
112
+ self._cache.clear()
113
+ self._cache_egg.clear()
114
+
115
+ def _yield_distributions(self):
116
+ """
117
+ Yield .dist-info and/or .egg(-info) distributions.
118
+ """
119
+ # We need to check if we've seen some resources already, because on
120
+ # some Linux systems (e.g. some Debian/Ubuntu variants) there are
121
+ # symlinks which alias other files in the environment.
122
+ seen = set()
123
+ for path in self.path:
124
+ finder = resources.finder_for_path(path)
125
+ if finder is None:
126
+ continue
127
+ r = finder.find('')
128
+ if not r or not r.is_container:
129
+ continue
130
+ rset = sorted(r.resources)
131
+ for entry in rset:
132
+ r = finder.find(entry)
133
+ if not r or r.path in seen:
134
+ continue
135
+ try:
136
+ if self._include_dist and entry.endswith(DISTINFO_EXT):
137
+ possible_filenames = [
138
+ METADATA_FILENAME, WHEEL_METADATA_FILENAME,
139
+ LEGACY_METADATA_FILENAME
140
+ ]
141
+ for metadata_filename in possible_filenames:
142
+ metadata_path = posixpath.join(
143
+ entry, metadata_filename)
144
+ pydist = finder.find(metadata_path)
145
+ if pydist:
146
+ break
147
+ else:
148
+ continue
149
+
150
+ with contextlib.closing(pydist.as_stream()) as stream:
151
+ metadata = Metadata(fileobj=stream,
152
+ scheme='legacy')
153
+ logger.debug('Found %s', r.path)
154
+ seen.add(r.path)
155
+ yield new_dist_class(r.path,
156
+ metadata=metadata,
157
+ env=self)
158
+ elif self._include_egg and entry.endswith(
159
+ ('.egg-info', '.egg')):
160
+ logger.debug('Found %s', r.path)
161
+ seen.add(r.path)
162
+ yield old_dist_class(r.path, self)
163
+ except Exception as e:
164
+ msg = 'Unable to read distribution at %s, perhaps due to bad metadata: %s'
165
+ logger.warning(msg, r.path, e)
166
+ import warnings
167
+ warnings.warn(msg % (r.path, e), stacklevel=2)
168
+
169
+ def _generate_cache(self):
170
+ """
171
+ Scan the path for distributions and populate the cache with
172
+ those that are found.
173
+ """
174
+ gen_dist = not self._cache.generated
175
+ gen_egg = self._include_egg and not self._cache_egg.generated
176
+ if gen_dist or gen_egg:
177
+ for dist in self._yield_distributions():
178
+ if isinstance(dist, InstalledDistribution):
179
+ self._cache.add(dist)
180
+ else:
181
+ self._cache_egg.add(dist)
182
+
183
+ if gen_dist:
184
+ self._cache.generated = True
185
+ if gen_egg:
186
+ self._cache_egg.generated = True
187
+
188
+ @classmethod
189
+ def distinfo_dirname(cls, name, version):
190
+ """
191
+ The *name* and *version* parameters are converted into their
192
+ filename-escaped form, i.e. any ``'-'`` characters are replaced
193
+ with ``'_'`` other than the one in ``'dist-info'`` and the one
194
+ separating the name from the version number.
195
+
196
+ :parameter name: is converted to a standard distribution name by replacing
197
+ any runs of non- alphanumeric characters with a single
198
+ ``'-'``.
199
+ :type name: string
200
+ :parameter version: is converted to a standard version string. Spaces
201
+ become dots, and all other non-alphanumeric characters
202
+ (except dots) become dashes, with runs of multiple
203
+ dashes condensed to a single dash.
204
+ :type version: string
205
+ :returns: directory name
206
+ :rtype: string"""
207
+ name = name.replace('-', '_')
208
+ return '-'.join([name, version]) + DISTINFO_EXT
209
+
210
+ def get_distributions(self):
211
+ """
212
+ Provides an iterator that looks for distributions and returns
213
+ :class:`InstalledDistribution` or
214
+ :class:`EggInfoDistribution` instances for each one of them.
215
+
216
+ :rtype: iterator of :class:`InstalledDistribution` and
217
+ :class:`EggInfoDistribution` instances
218
+ """
219
+ if not self._cache_enabled:
220
+ for dist in self._yield_distributions():
221
+ yield dist
222
+ else:
223
+ self._generate_cache()
224
+
225
+ for dist in self._cache.path.values():
226
+ yield dist
227
+
228
+ if self._include_egg:
229
+ for dist in self._cache_egg.path.values():
230
+ yield dist
231
+
232
+ def get_distribution(self, name):
233
+ """
234
+ Looks for a named distribution on the path.
235
+
236
+ This function only returns the first result found, as no more than one
237
+ value is expected. If nothing is found, ``None`` is returned.
238
+
239
+ :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution`
240
+ or ``None``
241
+ """
242
+ result = None
243
+ name = name.lower()
244
+ if not self._cache_enabled:
245
+ for dist in self._yield_distributions():
246
+ if dist.key == name:
247
+ result = dist
248
+ break
249
+ else:
250
+ self._generate_cache()
251
+
252
+ if name in self._cache.name:
253
+ result = self._cache.name[name][0]
254
+ elif self._include_egg and name in self._cache_egg.name:
255
+ result = self._cache_egg.name[name][0]
256
+ return result
257
+
258
+ def provides_distribution(self, name, version=None):
259
+ """
260
+ Iterates over all distributions to find which distributions provide *name*.
261
+ If a *version* is provided, it will be used to filter the results.
262
+
263
+ This function only returns the first result found, since no more than
264
+ one values are expected. If the directory is not found, returns ``None``.
265
+
266
+ :parameter version: a version specifier that indicates the version
267
+ required, conforming to the format in ``PEP-345``
268
+
269
+ :type name: string
270
+ :type version: string
271
+ """
272
+ matcher = None
273
+ if version is not None:
274
+ try:
275
+ matcher = self._scheme.matcher('%s (%s)' % (name, version))
276
+ except ValueError:
277
+ raise DistlibException('invalid name or version: %r, %r' %
278
+ (name, version))
279
+
280
+ for dist in self.get_distributions():
281
+ # We hit a problem on Travis where enum34 was installed and doesn't
282
+ # have a provides attribute ...
283
+ if not hasattr(dist, 'provides'):
284
+ logger.debug('No "provides": %s', dist)
285
+ else:
286
+ provided = dist.provides
287
+
288
+ for p in provided:
289
+ p_name, p_ver = parse_name_and_version(p)
290
+ if matcher is None:
291
+ if p_name == name:
292
+ yield dist
293
+ break
294
+ else:
295
+ if p_name == name and matcher.match(p_ver):
296
+ yield dist
297
+ break
298
+
299
+ def get_file_path(self, name, relative_path):
300
+ """
301
+ Return the path to a resource file.
302
+ """
303
+ dist = self.get_distribution(name)
304
+ if dist is None:
305
+ raise LookupError('no distribution named %r found' % name)
306
+ return dist.get_resource_path(relative_path)
307
+
308
+ def get_exported_entries(self, category, name=None):
309
+ """
310
+ Return all of the exported entries in a particular category.
311
+
312
+ :param category: The category to search for entries.
313
+ :param name: If specified, only entries with that name are returned.
314
+ """
315
+ for dist in self.get_distributions():
316
+ r = dist.exports
317
+ if category in r:
318
+ d = r[category]
319
+ if name is not None:
320
+ if name in d:
321
+ yield d[name]
322
+ else:
323
+ for v in d.values():
324
+ yield v
325
+
326
+
327
class Distribution(object):
    """
    A base class for distributions, whether installed or from indexes.
    Either way, it must have some metadata, so that's all that's needed
    for construction.
    """

    # True when the distribution is known to be needed only at build time
    # (i.e. not needed after installation).
    build_time_dependency = False

    # Whether the ``REQUESTED`` metadata file is present - in other words,
    # whether the package was installed by user request rather than being
    # pulled in as a dependency.
    requested = False

    def __init__(self, metadata):
        """
        Initialise an instance.
        :param metadata: The instance of :class:`Metadata` describing this
                         distribution.
        """
        self.metadata = metadata
        self.name = metadata.name
        self.key = self.name.lower()  # for case-insensitive comparisons
        self.version = metadata.version
        self.locator = None
        self.digest = None
        self.extras = None  # additional features requested
        self.context = None  # environment marker overrides
        self.download_urls = set()
        self.digests = {}

    @property
    def source_url(self):
        """
        The source archive download URL for this distribution.
        """
        return self.metadata.source_url

    download_url = source_url  # Backward compatibility

    @property
    def name_and_version(self):
        """
        A utility property which displays the name and version in parentheses.
        """
        return '%s (%s)' % (self.name, self.version)

    @property
    def provides(self):
        """
        A set of distribution names and versions provided by this distribution.
        :return: A set of "name (version)" strings.
        """
        entries = self.metadata.provides
        own = '%s (%s)' % (self.name, self.version)
        if own not in entries:
            entries.append(own)
        return entries

    def _get_requirements(self, req_attr):
        # Expand the raw requirement strings held on the metadata object,
        # taking any requested extras and the marker context into account.
        md = self.metadata
        raw = getattr(md, req_attr)
        logger.debug('%s: got requirements %r from metadata: %r', self.name,
                     req_attr, raw)
        return set(md.get_requirements(raw, extras=self.extras,
                                       env=self.context))

    @property
    def run_requires(self):
        return self._get_requirements('run_requires')

    @property
    def meta_requires(self):
        return self._get_requirements('meta_requires')

    @property
    def build_requires(self):
        return self._get_requirements('build_requires')

    @property
    def test_requires(self):
        return self._get_requirements('test_requires')

    @property
    def dev_requires(self):
        return self._get_requirements('dev_requires')

    def matches_requirement(self, req):
        """
        Say if this instance matches (fulfills) a requirement.
        :param req: The requirement to match.
        :rtype req: str
        :return: True if it matches, else False.
        """
        # The requirement may mention extras; parse it so only the plain
        # requirement text is handed to the matcher.
        parsed = parse_requirement(req)
        scheme = get_scheme(self.metadata.scheme)
        try:
            matcher = scheme.matcher(parsed.requirement)
        except UnsupportedVersionError:
            # XXX compat-mode if cannot read the version
            logger.warning('could not read version %r - using name only', req)
            matcher = scheme.matcher(req.split()[0])

        wanted = matcher.key  # case-insensitive

        for provided in self.provides:
            p_name, p_ver = parse_name_and_version(provided)
            if p_name != wanted:
                continue
            try:
                # First name match decides the outcome, whether True or False.
                return matcher.match(p_ver)
            except UnsupportedVersionError:
                pass
        return False

    def __repr__(self):
        """
        Return a textual representation of this instance,
        """
        suffix = ' [%s]' % self.source_url if self.source_url else ''
        return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix)

    def __eq__(self, other):
        """
        See if this distribution is the same as another.
        :param other: The distribution to compare with. To be equal to one
                      another. distributions must have the same type, name,
                      version and source_url.
        :return: True if it is the same, else False.
        """
        if type(other) is not type(self):
            return False
        return (self.name == other.name and self.version == other.version
                and self.source_url == other.source_url)

    def __hash__(self):
        """
        Compute hash in a way which matches the equality test.
        """
        return hash(self.name) + hash(self.version) + hash(self.source_url)
481
+
482
+
483
class BaseInstalledDistribution(Distribution):
    """
    This is the base class for installed distributions (whether PEP 376 or
    legacy).
    """

    # Default hash algorithm name (a key into hashlib), or None to use MD5
    # with no prefix. Subclasses may override (InstalledDistribution uses
    # 'sha256').
    hasher = None

    def __init__(self, metadata, path, env=None):
        """
        Initialise an instance.
        :param metadata: An instance of :class:`Metadata` which describes the
                         distribution. This will normally have been initialised
                         from a metadata file in the ``path``.
        :param path: The path of the ``.dist-info`` or ``.egg-info``
                     directory for the distribution.
        :param env: This is normally the :class:`DistributionPath`
                    instance where this distribution was found.
        """
        super(BaseInstalledDistribution, self).__init__(metadata)
        self.path = path
        self.dist_path = env

    def get_hash(self, data, hasher=None):
        """
        Get the hash of some data, using a particular hash algorithm, if
        specified.

        :param data: The data to be hashed.
        :type data: bytes
        :param hasher: The name of a hash implementation, supported by hashlib,
                       or ``None``. Examples of valid values are ``'sha1'``,
                       ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and
                       ``'sha512'``. If no hasher is specified, the ``hasher``
                       attribute of the :class:`InstalledDistribution` instance
                       is used. If the hasher is determined to be ``None``, MD5
                       is used as the hashing algorithm.
        :returns: The hash of the data. If a hasher was explicitly specified,
                  the returned hash will be prefixed with the specified hasher
                  followed by '='.
        :rtype: str
        """
        if hasher is None:
            hasher = self.hasher
        if hasher is None:
            hasher = hashlib.md5
            prefix = ''
        else:
            # Build the prefix from the algorithm actually being used, before
            # replacing the name with the hashlib constructor. The previous
            # code used self.hasher here, which mislabelled the digest
            # whenever a different algorithm was passed in explicitly (as
            # check_installed_files does when verifying RECORD entries written
            # with another algorithm), so the comparison could never succeed.
            prefix = '%s=' % hasher
            hasher = getattr(hashlib, hasher)
        digest = hasher(data).digest()
        # PEP 376 / PEP 427 style: urlsafe base64, padding stripped.
        digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')
        return '%s%s' % (prefix, digest)
536
+
537
+
538
class InstalledDistribution(BaseInstalledDistribution):
    """
    Created with the *path* of the ``.dist-info`` directory provided to the
    constructor. It reads the metadata contained in ``pydist.json`` when it is
    instantiated, or uses a passed in Metadata instance (useful for when
    dry-run mode is being used).
    """

    # Hash algorithm used for RECORD entries.
    hasher = 'sha256'

    def __init__(self, path, metadata=None, env=None):
        """
        Initialise an instance from a ``.dist-info`` directory.

        :param path: The path of the ``.dist-info`` directory.
        :param metadata: If specified, a :class:`Metadata` instance to use
                         instead of reading one from ``path``.
        :param env: The :class:`DistributionPath` in which the distribution
                    was found; used for metadata caching when enabled.
        :raises ValueError: If no resource finder or metadata file is
                            available for ``path``.
        """
        self.modules = []
        self.finder = finder = resources.finder_for_path(path)
        if finder is None:
            raise ValueError('finder unavailable for %s' % path)
        # Prefer a cached Metadata instance when the environment has one.
        if env and env._cache_enabled and path in env._cache.path:
            metadata = env._cache.path[path].metadata
        elif metadata is None:
            r = finder.find(METADATA_FILENAME)
            # Temporary - for Wheel 0.23 support
            if r is None:
                r = finder.find(WHEEL_METADATA_FILENAME)
            # Temporary - for legacy support
            if r is None:
                r = finder.find(LEGACY_METADATA_FILENAME)
            if r is None:
                raise ValueError('no %s found in %s' %
                                 (METADATA_FILENAME, path))
            with contextlib.closing(r.as_stream()) as stream:
                metadata = Metadata(fileobj=stream, scheme='legacy')

        super(InstalledDistribution, self).__init__(metadata, path, env)

        if env and env._cache_enabled:
            env._cache.add(self)

        # Per PEP 376, a REQUESTED marker file means the install was
        # explicitly requested rather than pulled in as a dependency.
        r = finder.find('REQUESTED')
        self.requested = r is not None
        p = os.path.join(path, 'top_level.txt')
        if os.path.exists(p):
            with open(p, 'rb') as f:
                data = f.read().decode('utf-8')
            self.modules = data.splitlines()

    def __repr__(self):
        return '<InstalledDistribution %r %s at %r>' % (
            self.name, self.version, self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def _get_records(self):
        """
        Get the list of installed files for the distribution
        :return: A list of tuples of path, hash and size. Note that hash and
                 size might be ``None`` for some entries. The path is exactly
                 as stored in the file (which is as in PEP 376).
        """
        results = []
        r = self.get_distinfo_resource('RECORD')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as record_reader:
                # Base location is parent dir of .dist-info dir
                # base_location = os.path.dirname(self.path)
                # base_location = os.path.abspath(base_location)
                for row in record_reader:
                    # Pad short rows so the unpacking below always gets
                    # exactly three values.
                    missing = [None for i in range(len(row), 3)]
                    path, checksum, size = row + missing
                    # if not os.path.isabs(path):
                    #     path = path.replace('/', os.sep)
                    #     path = os.path.join(base_location, path)
                    results.append((path, checksum, size))
        return results

    @cached_property
    def exports(self):
        """
        Return the information exported by this distribution.
        :return: A dictionary of exports, mapping an export category to a dict
                 of :class:`ExportEntry` instances describing the individual
                 export entries, and keyed by name.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            result = self.read_exports()
        return result

    def read_exports(self):
        """
        Read exports data from a file in .ini format.

        :return: A dictionary of exports, mapping an export category to a list
                 of :class:`ExportEntry` instances describing the individual
                 export entries.
        """
        result = {}
        r = self.get_distinfo_resource(EXPORTS_FILENAME)
        if r:
            with contextlib.closing(r.as_stream()) as stream:
                result = read_exports(stream)
        return result

    def write_exports(self, exports):
        """
        Write a dictionary of exports to a file in .ini format.
        :param exports: A dictionary of exports, mapping an export category to
                        a list of :class:`ExportEntry` instances describing the
                        individual export entries.
        """
        rf = self.get_distinfo_file(EXPORTS_FILENAME)
        with open(rf, 'w') as f:
            write_exports(exports, f)

    def get_resource_path(self, relative_path):
        """
        NOTE: This API may change in the future.

        Return the absolute path to a resource file with the given relative
        path.

        :param relative_path: The path, relative to .dist-info, of the resource
                              of interest.
        :return: The absolute path where the resource is to be found.
        :raises KeyError: If no resource with that relative path is recorded.
        """
        r = self.get_distinfo_resource('RESOURCES')
        with contextlib.closing(r.as_stream()) as stream:
            with CSVReader(stream=stream) as resources_reader:
                for relative, destination in resources_reader:
                    if relative == relative_path:
                        return destination
        raise KeyError('no resource file with relative path %r '
                       'is installed' % relative_path)

    def list_installed_files(self):
        """
        Iterates over the ``RECORD`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: iterator of (path, hash, size)
        """
        for result in self._get_records():
            yield result

    def write_installed_files(self, paths, prefix, dry_run=False):
        """
        Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any
        existing ``RECORD`` file is silently overwritten.

        prefix is used to determine when to write absolute paths.

        :return: The path of the written RECORD file, or ``None`` on dry run.
        """
        # Normalise with trailing separators so startswith() comparisons
        # below match whole path components, not name prefixes.
        prefix = os.path.join(prefix, '')
        base = os.path.dirname(self.path)
        base_under_prefix = base.startswith(prefix)
        base = os.path.join(base, '')
        record_path = self.get_distinfo_file('RECORD')
        logger.info('creating %s', record_path)
        if dry_run:
            return None
        with CSVWriter(record_path) as writer:
            for path in paths:
                if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')):
                    # do not put size and hash, as in PEP-376
                    hash_value = size = ''
                else:
                    size = '%d' % os.path.getsize(path)
                    with open(path, 'rb') as fp:
                        hash_value = self.get_hash(fp.read())
                if path.startswith(base) or (base_under_prefix
                                             and path.startswith(prefix)):
                    path = os.path.relpath(path, base)
                writer.writerow((path, hash_value, size))

            # add the RECORD file itself
            if record_path.startswith(base):
                record_path = os.path.relpath(record_path, base)
            writer.writerow((record_path, '', ''))
        return record_path

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        base = os.path.dirname(self.path)
        record_path = self.get_distinfo_file('RECORD')
        for path, hash_value, size in self.list_installed_files():
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            # The RECORD file itself carries no hash/size for itself.
            if path == record_path:
                continue
            if not os.path.exists(path):
                mismatches.append((path, 'exists', True, False))
            elif os.path.isfile(path):
                actual_size = str(os.path.getsize(path))
                if size and actual_size != size:
                    mismatches.append((path, 'size', size, actual_size))
                elif hash_value:
                    # Entries may be prefixed 'algorithm=digest'; extract the
                    # algorithm so the same one is used for comparison.
                    if '=' in hash_value:
                        hasher = hash_value.split('=', 1)[0]
                    else:
                        hasher = None

                    with open(path, 'rb') as f:
                        actual_hash = self.get_hash(f.read(), hasher)
                    if actual_hash != hash_value:
                        mismatches.append(
                            (path, 'hash', hash_value, actual_hash))
        return mismatches

    @cached_property
    def shared_locations(self):
        """
        A dictionary of shared locations whose keys are in the set 'prefix',
        'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'.
        The corresponding value is the absolute path of that category for
        this distribution, and takes into account any paths selected by the
        user at installation time (e.g. via command-line arguments). In the
        case of the 'namespace' key, this would be a list of absolute paths
        for the roots of namespace packages in this distribution.

        The first time this property is accessed, the relevant information is
        read from the SHARED file in the .dist-info directory.
        """
        result = {}
        shared_path = os.path.join(self.path, 'SHARED')
        if os.path.isfile(shared_path):
            with codecs.open(shared_path, 'r', encoding='utf-8') as f:
                lines = f.read().splitlines()
            for line in lines:
                key, value = line.split('=', 1)
                if key == 'namespace':
                    # 'namespace' can occur multiple times; collect a list.
                    result.setdefault(key, []).append(value)
                else:
                    result[key] = value
        return result

    def write_shared_locations(self, paths, dry_run=False):
        """
        Write shared location information to the SHARED file in .dist-info.
        :param paths: A dictionary as described in the documentation for
                      :meth:`shared_locations`.
        :param dry_run: If True, the action is logged but no file is actually
                        written.
        :return: The path of the file written to.
        """
        shared_path = os.path.join(self.path, 'SHARED')
        logger.info('creating %s', shared_path)
        if dry_run:
            return None
        lines = []
        # Only record categories whose directory actually exists.
        for key in ('prefix', 'lib', 'headers', 'scripts', 'data'):
            path = paths[key]
            if os.path.isdir(paths[key]):
                lines.append('%s=%s' % (key, path))
        for ns in paths.get('namespace', ()):
            lines.append('namespace=%s' % ns)

        with codecs.open(shared_path, 'w', encoding='utf-8') as f:
            f.write('\n'.join(lines))
        return shared_path

    def get_distinfo_resource(self, path):
        # Only well-known dist-info filenames may be accessed this way.
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))
        finder = resources.finder_for_path(self.path)
        if finder is None:
            raise DistlibException('Unable to get a finder for %s' % self.path)
        return finder.find(path)

    def get_distinfo_file(self, path):
        """
        Returns a path located under the ``.dist-info`` directory. Returns a
        string representing the path.

        :parameter path: a ``'/'``-separated path relative to the
                         ``.dist-info`` directory or an absolute path;
                         If *path* is an absolute path and doesn't start
                         with the ``.dist-info`` directory path,
                         a :class:`DistlibException` is raised
        :type path: str
        :rtype: str
        """
        # Check if it is an absolute path  # XXX use relpath, add tests
        if path.find(os.sep) >= 0:
            # it's an absolute path?
            distinfo_dirname, path = path.split(os.sep)[-2:]
            if distinfo_dirname != self.path.split(os.sep)[-1]:
                raise DistlibException(
                    'dist-info file %r does not belong to the %r %s '
                    'distribution' % (path, self.name, self.version))

        # The file must be relative
        if path not in DIST_FILES:
            raise DistlibException('invalid path for a dist-info file: '
                                   '%r at %r' % (path, self.path))

        return os.path.join(self.path, path)

    def list_distinfo_files(self):
        """
        Iterates over the ``RECORD`` entries and returns paths for each line if
        the path is pointing to a file located in the ``.dist-info`` directory
        or one of its subdirectories.

        :returns: iterator of paths
        """
        base = os.path.dirname(self.path)
        for path, checksum, size in self._get_records():
            # XXX add separator or use real relpath algo
            if not os.path.isabs(path):
                path = os.path.join(base, path)
            if path.startswith(self.path):
                yield path

    def __eq__(self, other):
        # Identity is determined purely by the on-disk location.
        return (isinstance(other, InstalledDistribution)
                and self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
865
+
866
+
867
class EggInfoDistribution(BaseInstalledDistribution):
    """Created with the *path* of the ``.egg-info`` directory or file provided
    to the constructor. It reads the metadata contained in the file itself, or
    if the given path happens to be a directory, the metadata is read from the
    file ``PKG-INFO`` under that directory."""

    requested = True  # as we have no way of knowing, assume it was
    shared_locations = {}

    def __init__(self, path, env=None):
        """
        Initialise an instance.

        :param path: The path of the ``.egg-info`` directory/file or ``.egg``
                     file/directory for the distribution.
        :param env: The :class:`DistributionPath` in which the distribution
                    was found; used for metadata caching when enabled.
        """

        def set_name_and_version(s, n, v):
            # Keep the name/key/version triple consistent in one place.
            s.name = n
            s.key = n.lower()  # for case-insensitive comparisons
            s.version = v

        self.path = path
        self.dist_path = env
        if env and env._cache_enabled and path in env._cache_egg.path:
            metadata = env._cache_egg.path[path].metadata
            set_name_and_version(self, metadata.name, metadata.version)
        else:
            metadata = self._get_metadata(path)

            # Need to be set before caching
            set_name_and_version(self, metadata.name, metadata.version)

            if env and env._cache_enabled:
                env._cache_egg.add(self)
        super(EggInfoDistribution, self).__init__(metadata, path, env)

    def _get_metadata(self, path):
        # Builds a Metadata instance from the various on-disk layouts an
        # egg can have: unpacked .egg dir, zipped .egg, or .egg-info
        # dir/file. Also populates self.modules from top_level.txt.
        requires = None

        def parse_requires_data(data):
            """Create a list of dependencies from a requires.txt file.

            *data*: the contents of a setuptools-produced requires.txt file.
            """
            reqs = []
            lines = data.splitlines()
            for line in lines:
                line = line.strip()
                # sectioned files have bare newlines (separating sections)
                if not line:  # pragma: no cover
                    continue
                if line.startswith('['):  # pragma: no cover
                    logger.warning(
                        'Unexpected line: quitting requirement scan: %r', line)
                    break
                r = parse_requirement(line)
                if not r:  # pragma: no cover
                    logger.warning('Not recognised as a requirement: %r', line)
                    continue
                if r.extras:  # pragma: no cover
                    logger.warning('extra requirements in requires.txt are '
                                   'not supported')
                if not r.constraints:
                    reqs.append(r.name)
                else:
                    cons = ', '.join('%s%s' % c for c in r.constraints)
                    reqs.append('%s (%s)' % (r.name, cons))
            return reqs

        def parse_requires_path(req_path):
            """Create a list of dependencies from a requires.txt file.

            *req_path*: the path to a setuptools-produced requires.txt file.
            """

            reqs = []
            try:
                with codecs.open(req_path, 'r', 'utf-8') as fp:
                    reqs = parse_requires_data(fp.read())
            except IOError:
                pass
            return reqs

        tl_path = tl_data = None
        if path.endswith('.egg'):
            if os.path.isdir(path):
                # Unpacked egg: metadata lives under EGG-INFO.
                p = os.path.join(path, 'EGG-INFO')
                meta_path = os.path.join(p, 'PKG-INFO')
                metadata = Metadata(path=meta_path, scheme='legacy')
                req_path = os.path.join(p, 'requires.txt')
                tl_path = os.path.join(p, 'top_level.txt')
                requires = parse_requires_path(req_path)
            else:
                # FIXME handle the case where zipfile is not available
                zipf = zipimport.zipimporter(path)
                fileobj = StringIO(
                    zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8'))
                metadata = Metadata(fileobj=fileobj, scheme='legacy')
                try:
                    data = zipf.get_data('EGG-INFO/requires.txt')
                    tl_data = zipf.get_data('EGG-INFO/top_level.txt').decode(
                        'utf-8')
                    requires = parse_requires_data(data.decode('utf-8'))
                except IOError:
                    requires = None
        elif path.endswith('.egg-info'):
            if os.path.isdir(path):
                req_path = os.path.join(path, 'requires.txt')
                requires = parse_requires_path(req_path)
                path = os.path.join(path, 'PKG-INFO')
                # NOTE(review): 'path' was just reassigned to .../PKG-INFO,
                # so this joins top_level.txt under the PKG-INFO file path
                # rather than next to it - looks wrong; verify upstream.
                tl_path = os.path.join(path, 'top_level.txt')
            metadata = Metadata(path=path, scheme='legacy')
        else:
            raise DistlibException('path must end with .egg-info or .egg, '
                                   'got %r' % path)

        if requires:
            metadata.add_requirements(requires)
        # look for top-level modules in top_level.txt, if present
        if tl_data is None:
            if tl_path is not None and os.path.exists(tl_path):
                with open(tl_path, 'rb') as f:
                    tl_data = f.read().decode('utf-8')
            if not tl_data:
                tl_data = []
            else:
                tl_data = tl_data.splitlines()
        self.modules = tl_data
        return metadata

    def __repr__(self):
        return '<EggInfoDistribution %r %s at %r>' % (self.name, self.version,
                                                      self.path)

    def __str__(self):
        return "%s %s" % (self.name, self.version)

    def check_installed_files(self):
        """
        Checks that the hashes and sizes of the files in ``RECORD`` are
        matched by the files themselves. Returns a (possibly empty) list of
        mismatches. Each entry in the mismatch list will be a tuple consisting
        of the path, 'exists', 'size' or 'hash' according to what didn't match
        (existence is checked first, then size, then hash), the expected
        value and the actual value.
        """
        mismatches = []
        record_path = os.path.join(self.path, 'installed-files.txt')
        if os.path.exists(record_path):
            for path, _, _ in self.list_installed_files():
                if path == record_path:
                    continue
                if not os.path.exists(path):
                    mismatches.append((path, 'exists', True, False))
        return mismatches

    def list_installed_files(self):
        """
        Iterates over the ``installed-files.txt`` entries and returns a tuple
        ``(path, hash, size)`` for each line.

        :returns: a list of (path, hash, size)
        """

        def _md5(path):
            # Legacy egg-info records have no stored hashes, so MD5 is
            # computed on the fly for each file.
            f = open(path, 'rb')
            try:
                content = f.read()
            finally:
                f.close()
            return hashlib.md5(content).hexdigest()

        def _size(path):
            return os.stat(path).st_size

        record_path = os.path.join(self.path, 'installed-files.txt')
        result = []
        if os.path.exists(record_path):
            with codecs.open(record_path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    p = os.path.normpath(os.path.join(self.path, line))
                    # "./" is present as a marker between installed files
                    # and installation metadata files
                    if not os.path.exists(p):
                        logger.warning('Non-existent file: %s', p)
                        if p.endswith(('.pyc', '.pyo')):
                            continue
                        # otherwise fall through and fail
                    if not os.path.isdir(p):
                        result.append((p, _md5(p), _size(p)))
            result.append((record_path, None, None))
        return result

    def list_distinfo_files(self, absolute=False):
        """
        Iterates over the ``installed-files.txt`` entries and returns paths for
        each line if the path is pointing to a file located in the
        ``.egg-info`` directory or one of its subdirectories.

        :parameter absolute: If *absolute* is ``True``, each returned path is
                             transformed into a local absolute path. Otherwise
                             the raw value from ``installed-files.txt`` is
                             returned.
        :type absolute: boolean
        :returns: iterator of paths
        """
        record_path = os.path.join(self.path, 'installed-files.txt')
        if os.path.exists(record_path):
            # Entries before the './' marker are installed files; those after
            # it are installation metadata files, which is what we want here.
            skip = True
            with codecs.open(record_path, 'r', encoding='utf-8') as f:
                for line in f:
                    line = line.strip()
                    if line == './':
                        skip = False
                        continue
                    if not skip:
                        p = os.path.normpath(os.path.join(self.path, line))
                        if p.startswith(self.path):
                            if absolute:
                                yield p
                            else:
                                yield line

    def __eq__(self, other):
        # Identity is determined purely by the on-disk location.
        return (isinstance(other, EggInfoDistribution)
                and self.path == other.path)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    __hash__ = object.__hash__
1091
+
1092
+
1093
# Aliases distinguishing the two installed-distribution formats: "new" is
# the PEP 376 .dist-info layout, "old" is the legacy .egg-info layout.
new_dist_class = InstalledDistribution
old_dist_class = EggInfoDistribution
1095
+
1096
+
1097
class DependencyGraph(object):
    """
    Represents a dependency graph between distributions.

    The dependency relationships are stored in an ``adjacency_list`` that maps
    distributions to a list of ``(other, label)`` tuples where ``other``
    is a distribution and the edge is labeled with ``label`` (i.e. the version
    specifier, if such was provided). Also, for more efficient traversal, for
    every distribution ``x``, a list of predecessors is kept in
    ``reverse_list[x]``. An edge from distribution ``a`` to
    distribution ``b`` means that ``a`` depends on ``b``. If any missing
    dependencies are found, they are stored in ``missing``, which is a
    dictionary that maps distributions to a list of requirements that were not
    provided by any other distributions.
    """

    def __init__(self):
        self.adjacency_list = {}
        self.reverse_list = {}
        self.missing = {}

    def add_distribution(self, distribution):
        """Add the *distribution* to the graph.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        """
        self.adjacency_list[distribution] = []
        self.reverse_list[distribution] = []
        # self.missing[distribution] = []

    def add_edge(self, x, y, label=None):
        """Add an edge from distribution *x* to distribution *y* with the given
        *label*.

        :type x: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type y: :class:`distutils2.database.InstalledDistribution` or
                 :class:`distutils2.database.EggInfoDistribution`
        :type label: ``str`` or ``None``
        """
        self.adjacency_list[x].append((y, label))
        # multiple edges are allowed, so be careful
        if x not in self.reverse_list[y]:
            self.reverse_list[y].append(x)

    def add_missing(self, distribution, requirement):
        """
        Add a missing *requirement* for the given *distribution*.

        :type distribution: :class:`distutils2.database.InstalledDistribution`
                            or :class:`distutils2.database.EggInfoDistribution`
        :type requirement: ``str``
        """
        logger.debug('%s missing %r', distribution, requirement)
        self.missing.setdefault(distribution, []).append(requirement)

    def _repr_dist(self, dist):
        # One-line "name version" label used by repr_node and __repr__.
        return '%s %s' % (dist.name, dist.version)

    def repr_node(self, dist, level=1):
        """Prints only a subgraph"""
        output = [self._repr_dist(dist)]
        # Note: 'dist' is reused below as the formatted label of each
        # successor; the original parameter is not needed after this point.
        for other, label in self.adjacency_list[dist]:
            dist = self._repr_dist(other)
            if label is not None:
                dist = '%s [%s]' % (dist, label)
            output.append('    ' * level + str(dist))
            suboutput = self.repr_node(other, level + 1)
            subs = suboutput.split('\n')
            # Drop the first line of the sub-rendering: it repeats the node
            # label already appended above.
            output.extend(subs[1:])
        return '\n'.join(output)

    def to_dot(self, f, skip_disconnected=True):
        """Writes a DOT output for the graph to the provided file *f*.

        If *skip_disconnected* is set to ``True``, then all distributions
        that are not dependent on any other distribution are skipped.

        :type f: has to support ``file``-like operations
        :type skip_disconnected: ``bool``
        """
        disconnected = []

        f.write("digraph dependencies {\n")
        for dist, adjs in self.adjacency_list.items():
            if len(adjs) == 0 and not skip_disconnected:
                disconnected.append(dist)
            for other, label in adjs:
                if label is not None:
                    f.write('"%s" -> "%s" [label="%s"]\n' %
                            (dist.name, other.name, label))
                else:
                    f.write('"%s" -> "%s"\n' % (dist.name, other.name))
        if not skip_disconnected and len(disconnected) > 0:
            f.write('subgraph disconnected {\n')
            f.write('label = "Disconnected"\n')
            f.write('bgcolor = red\n')

            for dist in disconnected:
                f.write('"%s"' % dist.name)
                f.write('\n')
            f.write('}\n')
        f.write('}\n')

    def topological_sort(self):
        """
        Perform a topological sort of the graph.
        :return: A tuple, the first element of which is a topologically sorted
                 list of distributions, and the second element of which is a
                 list of distributions that cannot be sorted because they have
                 circular dependencies and so form a cycle.
        """
        result = []
        # Make a shallow copy of the adjacency list
        alist = {}
        for k, v in self.adjacency_list.items():
            alist[k] = v[:]
        while True:
            # See what we can remove in this run
            to_remove = []
            for k, v in list(alist.items())[:]:
                if not v:
                    to_remove.append(k)
                    del alist[k]
            if not to_remove:
                # What's left in alist (if anything) is a cycle.
                break
            # Remove from the adjacency list of others
            for k, v in alist.items():
                alist[k] = [(d, r) for d, r in v if d not in to_remove]
            logger.debug('Moving to result: %s',
                         ['%s (%s)' % (d.name, d.version) for d in to_remove])
            result.extend(to_remove)
        return result, list(alist.keys())

    def __repr__(self):
        """Representation of the graph"""
        output = []
        for dist, adjs in self.adjacency_list.items():
            output.append(self.repr_node(dist))
        return '\n'.join(output)
1239
+
1240
+
1241
def make_graph(dists, scheme='default'):
    """Makes a dependency graph from the given distributions.

    :parameter dists: a list of distributions
    :type dists: list of :class:`distutils2.database.InstalledDistribution` and
                 :class:`distutils2.database.EggInfoDistribution` instances
    :rtype: a :class:`DependencyGraph` instance
    """
    scheme = get_scheme(scheme)
    graph = DependencyGraph()
    # Maps each provided name to a list of (version, providing dist) pairs.
    provided = {}

    # First pass: register every distribution and record what it provides.
    for dist in dists:
        graph.add_distribution(dist)
        for entry in dist.provides:
            name, version = parse_name_and_version(entry)
            logger.debug('Add to provided: %s, %s, %s', name, version, dist)
            provided.setdefault(name, []).append((version, dist))

    # Second pass: connect each requirement to the first matching provider.
    for dist in dists:
        requires = (dist.run_requires | dist.meta_requires
                    | dist.build_requires | dist.dev_requires)
        for req in requires:
            try:
                matcher = scheme.matcher(req)
            except UnsupportedVersionError:
                # XXX compat-mode if cannot read the version
                logger.warning('could not read version %r - using name only',
                               req)
                matcher = scheme.matcher(req.split()[0])

            key = matcher.key  # case-insensitive

            provider = None
            for version, candidate in provided.get(key, ()):
                try:
                    if matcher.match(version):
                        provider = candidate
                except UnsupportedVersionError:
                    # An unparseable provided version simply doesn't match.
                    pass
                if provider is not None:
                    break
            if provider is None:
                graph.add_missing(dist, req)
            else:
                graph.add_edge(dist, provider, req)
    return graph
1293
+
1294
+
1295
def get_dependent_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    dependent on *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    # Seed the result with *dist* itself purely to stop cycles from
    # revisiting it; it is dropped again before returning.
    dep = [dist]
    todo = graph.reverse_list[dist]  # nodes still to be inspected
    while todo:
        node = todo.pop()
        dep.append(node)
        todo.extend(succ for succ in graph.reverse_list[node]
                    if succ not in dep)

    dep.pop(0)  # remove dist from dep, was there to prevent infinite loops
    return dep
1319
+
1320
+
1321
def get_required_dists(dists, dist):
    """Recursively generate a list of distributions from *dists* that are
    required by *dist*.

    :param dists: a list of distributions
    :param dist: a distribution, member of *dists* for which we are interested
                 in finding the dependencies.
    """
    if dist not in dists:
        raise DistlibException('given distribution %r is not a member '
                               'of the list' % dist.name)
    graph = make_graph(dists)

    required = set()
    # Each todo entry is an (other, label) edge; only the target matters.
    todo = graph.adjacency_list[dist]
    seen = {edge[0] for edge in todo}  # targets already queued
    while todo:
        target = todo.pop()[0]
        required.add(target)
        for edge in graph.adjacency_list[target]:
            nxt = edge[0]
            if nxt not in required and nxt not in seen:
                seen.add(nxt)
                todo.append(edge)
    return required
1348
+
1349
+
1350
def make_dist(name, version, **kwargs):
    """
    A convenience method for making a dist given just a name and version.
    """
    # Pull out the summary (if any); every other keyword goes to Metadata.
    summary = kwargs.pop('summary', 'Placeholder for summary')
    metadata = Metadata(**kwargs)
    metadata.name = name
    metadata.version = version
    # An explicitly-passed falsy summary still gets the placeholder text.
    metadata.summary = summary or 'Placeholder for summary'
    return Distribution(metadata)
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/index.py ADDED
@@ -0,0 +1,508 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2013-2023 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ import hashlib
8
+ import logging
9
+ import os
10
+ import shutil
11
+ import subprocess
12
+ import tempfile
13
+ try:
14
+ from threading import Thread
15
+ except ImportError: # pragma: no cover
16
+ from dummy_threading import Thread
17
+
18
+ from . import DistlibException
19
+ from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
20
+ urlparse, build_opener, string_types)
21
+ from .util import zip_dir, ServerProxy
22
+
23
# Module-level logger for this package-index client.
logger = logging.getLogger(__name__)

# Base URL of the default index (PyPI's legacy upload/XML-RPC endpoint).
DEFAULT_INDEX = 'https://pypi.org/pypi'
# Default authentication realm name, as used in .pypirc configurations.
DEFAULT_REALM = 'pypi'
27
+
28
+
29
class PackageIndex(object):
    """
    This class represents a package index compatible with PyPI, the Python
    Package Index.
    """

    # Fixed MIME multipart boundary used by encode_request() when building
    # POST bodies.
    boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'

    def __init__(self, url=None):
        """
        Initialise an instance.

        :param url: The URL of the index. If not specified, the URL for PyPI is
                    used.
        """
        self.url = url or DEFAULT_INDEX
        self.read_configuration()
        scheme, netloc, path, params, query, frag = urlparse(self.url)
        # Only plain http(s) URLs with no params/query/fragment are valid
        # repository URLs.
        if params or query or frag or scheme not in ('http', 'https'):
            raise DistlibException('invalid repository: %s' % self.url)
        self.password_handler = None
        self.ssl_verifier = None
        self.gpg = None
        self.gpg_home = None
        with open(os.devnull, 'w') as sink:
            # Use gpg by default rather than gpg2, as gpg2 insists on
            # prompting for passwords
            for s in ('gpg', 'gpg2'):
                try:
                    rc = subprocess.check_call([s, '--version'], stdout=sink,
                                               stderr=sink)
                    # NOTE(review): subprocess.check_call raises
                    # CalledProcessError on a non-zero exit (only OSError is
                    # caught below), so rc == 0 always holds when reached.
                    if rc == 0:
                        self.gpg = s
                        break
                except OSError:
                    # Candidate binary not present/executable - try the next.
                    pass

    def _get_pypirc_command(self):
        """
        Get the distutils command for interacting with PyPI configurations.
        :return: the command.
        """
        # Imported lazily to avoid a hard import-time dependency on distutils.
        from .util import _get_pypirc_command as cmd
        return cmd()

    def read_configuration(self):
        """
        Read the PyPI access configuration as supported by distutils. This populates
        ``username``, ``password``, ``realm`` and ``url`` attributes from the
        configuration.
        """
        from .util import _load_pypirc
        cfg = _load_pypirc(self)
        self.username = cfg.get('username')
        self.password = cfg.get('password')
        self.realm = cfg.get('realm', 'pypi')
        # The configured repository URL (if any) overrides the current one.
        self.url = cfg.get('repository', self.url)

    def save_configuration(self):
        """
        Save the PyPI access configuration. You must have set ``username`` and
        ``password`` attributes before calling this method.
        """
        self.check_credentials()
        from .util import _store_pypirc
        _store_pypirc(self)

    def check_credentials(self):
        """
        Check that ``username`` and ``password`` have been set, and raise an
        exception if not.

        As a side effect, installs an HTTP basic-auth handler (used later by
        send_request) carrying those credentials for this index's host.
        """
        if self.username is None or self.password is None:
            raise DistlibException('username and password must be set')
        pm = HTTPPasswordMgr()
        _, netloc, _, _, _, _ = urlparse(self.url)
        pm.add_password(self.realm, netloc, self.username, self.password)
        self.password_handler = HTTPBasicAuthHandler(pm)

    def register(self, metadata):  # pragma: no cover
        """
        Register a distribution on PyPI, using the provided metadata.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the distribution to be
                         registered.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        metadata.validate()
        d = metadata.todict()
        # A 'verify' round-trip first, then the actual 'submit'.
        d[':action'] = 'verify'
        request = self.encode_request(d.items(), [])
        self.send_request(request)
        d[':action'] = 'submit'
        request = self.encode_request(d.items(), [])
        return self.send_request(request)

    def _reader(self, name, stream, outbuf):
        """
        Thread runner for reading lines of from a subprocess into a buffer.

        :param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically a pipe
                       connected to the output stream of a subprocess.
        :param outbuf: The list to append the read lines to.
        """
        while True:
            s = stream.readline()
            if not s:
                # EOF: the child closed its end of the pipe.
                break
            s = s.decode('utf-8').rstrip()
            outbuf.append(s)
            logger.debug('%s: %s' % (name, s))
        stream.close()

    def get_sign_command(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
        """
        Return a suitable command for signing a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The signing command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        if sign_password is not None:
            # Passphrase will be fed to gpg on stdin (fd 0) by run_command.
            cmd.extend(['--batch', '--passphrase-fd', '0'])
        # The signature is written into a fresh temp dir; upload_file removes
        # that directory after the signature has been read.
        td = tempfile.mkdtemp()
        sf = os.path.join(td, os.path.basename(filename) + '.asc')
        cmd.extend(['--detach-sign', '--armor', '--local-user',
                    signer, '--output', sf, filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd, sf

    def run_command(self, cmd, input_data=None):
        """
        Run a command in a child process , passing it any input data specified.

        :param cmd: The command to run.
        :param input_data: If specified, this must be a byte string containing
                           data to be sent to the child process.
        :return: A tuple consisting of the subprocess' exit code, a list of
                 lines read from the subprocess' ``stdout``, and a list of
                 lines read from the subprocess' ``stderr``.
        """
        kwargs = {
            'stdout': subprocess.PIPE,
            'stderr': subprocess.PIPE,
        }
        if input_data is not None:
            kwargs['stdin'] = subprocess.PIPE
        stdout = []
        stderr = []
        p = subprocess.Popen(cmd, **kwargs)
        # We don't use communicate() here because we may need to
        # get clever with interacting with the command
        # (two reader threads drain stdout/stderr concurrently to avoid
        # pipe-buffer deadlock while we write to stdin).
        t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
        t1.start()
        t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
        t2.start()
        if input_data is not None:
            p.stdin.write(input_data)
            p.stdin.close()

        p.wait()
        t1.join()
        t2.join()
        return p.returncode, stdout, stderr

    def sign_file(self, filename, signer, sign_password, keystore=None):  # pragma: no cover
        """
        Sign a file.

        :param filename: The pathname to the file to be signed.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The absolute pathname of the file where the signature is
                 stored.
        """
        cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
                                              keystore)
        # The passphrase goes to gpg via stdin (see --passphrase-fd 0 above).
        rc, stdout, stderr = self.run_command(cmd,
                                              sign_password.encode('utf-8'))
        if rc != 0:
            raise DistlibException('sign command failed with error '
                                   'code %s' % rc)
        return sig_file

    def upload_file(self, metadata, filename, signer=None, sign_password=None,
                    filetype='sdist', pyversion='source', keystore=None):
        """
        Upload a release file to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the file to be uploaded.
        :param filename: The pathname of the file to be uploaded.
        :param signer: The identifier of the signer of the file.
        :param sign_password: The passphrase for the signer's
                              private key used for signing.
        :param filetype: The type of the file being uploaded. This is the
                        distutils command which produced that file, e.g.
                        ``sdist`` or ``bdist_wheel``.
        :param pyversion: The version of Python which the release relates
                          to. For code compatible with any Python, this would
                          be ``source``, otherwise it would be e.g. ``3.2``.
        :param keystore: The path to a directory which contains the keys
                         used in signing. If not specified, the instance's
                         ``gpg_home`` attribute is used instead.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        if not os.path.exists(filename):
            raise DistlibException('not found: %s' % filename)
        metadata.validate()
        d = metadata.todict()
        sig_file = None
        if signer:
            if not self.gpg:
                # Best-effort: proceed unsigned rather than failing.
                logger.warning('no signing program available - not signed')
            else:
                sig_file = self.sign_file(filename, signer, sign_password,
                                          keystore)
        with open(filename, 'rb') as f:
            file_data = f.read()
        # Both digests are sent; the index can verify either.
        md5_digest = hashlib.md5(file_data).hexdigest()
        sha256_digest = hashlib.sha256(file_data).hexdigest()
        d.update({
            ':action': 'file_upload',
            'protocol_version': '1',
            'filetype': filetype,
            'pyversion': pyversion,
            'md5_digest': md5_digest,
            'sha256_digest': sha256_digest,
        })
        files = [('content', os.path.basename(filename), file_data)]
        if sig_file:
            with open(sig_file, 'rb') as f:
                sig_data = f.read()
            files.append(('gpg_signature', os.path.basename(sig_file),
                          sig_data))
            # Remove the temporary directory created by get_sign_command.
            shutil.rmtree(os.path.dirname(sig_file))
        request = self.encode_request(d.items(), files)
        return self.send_request(request)

    def upload_documentation(self, metadata, doc_dir):  # pragma: no cover
        """
        Upload documentation to the index.

        :param metadata: A :class:`Metadata` instance defining at least a name
                         and version number for the documentation to be
                         uploaded.
        :param doc_dir: The pathname of the directory which contains the
                        documentation. This should be the directory that
                        contains the ``index.html`` for the documentation.
        :return: The HTTP response received from PyPI upon submission of the
                 request.
        """
        self.check_credentials()
        if not os.path.isdir(doc_dir):
            raise DistlibException('not a directory: %r' % doc_dir)
        fn = os.path.join(doc_dir, 'index.html')
        if not os.path.exists(fn):
            raise DistlibException('not found: %r' % fn)
        metadata.validate()
        name, version = metadata.name, metadata.version
        # The whole doc tree is zipped in memory and sent as one upload.
        zip_data = zip_dir(doc_dir).getvalue()
        fields = [(':action', 'doc_upload'),
                  ('name', name), ('version', version)]
        files = [('content', name, zip_data)]
        request = self.encode_request(fields, files)
        return self.send_request(request)

    def get_verify_command(self, signature_filename, data_filename,
                           keystore=None):
        """
        Return a suitable command for verifying a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: The verifying command as a list suitable to be
                 passed to :class:`subprocess.Popen`.
        """
        cmd = [self.gpg, '--status-fd', '2', '--no-tty']
        if keystore is None:
            keystore = self.gpg_home
        if keystore:
            cmd.extend(['--homedir', keystore])
        cmd.extend(['--verify', signature_filename, data_filename])
        logger.debug('invoking: %s', ' '.join(cmd))
        return cmd

    def verify_signature(self, signature_filename, data_filename,
                         keystore=None):
        """
        Verify a signature for a file.

        :param signature_filename: The pathname to the file containing the
                                   signature.
        :param data_filename: The pathname to the file containing the
                              signed data.
        :param keystore: The path to a directory which contains the keys
                         used in verification. If not specified, the
                         instance's ``gpg_home`` attribute is used instead.
        :return: True if the signature was verified, else False.
        """
        if not self.gpg:
            raise DistlibException('verification unavailable because gpg '
                                   'unavailable')
        cmd = self.get_verify_command(signature_filename, data_filename,
                                      keystore)
        rc, stdout, stderr = self.run_command(cmd)
        # gpg exits 0 for a good signature and 1 for a bad one; any other
        # code indicates an operational failure rather than a verdict.
        if rc not in (0, 1):
            raise DistlibException('verify command failed with error code %s' % rc)
        return rc == 0

    def download_file(self, url, destfile, digest=None, reporthook=None):
        """
        This is a convenience method for downloading a file from an URL.
        Normally, this will be a file from the index, though currently
        no check is made for this (i.e. a file can be downloaded from
        anywhere).

        The method is just like the :func:`urlretrieve` function in the
        standard library, except that it allows digest computation to be
        done during download and checking that the downloaded data
        matched any expected value.

        :param url: The URL of the file to be downloaded (assumed to be
                    available via an HTTP GET request).
        :param destfile: The pathname where the downloaded file is to be
                         saved.
        :param digest: If specified, this must be a (hasher, value)
                       tuple, where hasher is the algorithm used (e.g.
                       ``'md5'``) and ``value`` is the expected value.
        :param reporthook: The same as for :func:`urlretrieve` in the
                           standard library.
        """
        if digest is None:
            digester = None
            logger.debug('No digest specified')
        else:
            # A bare string digest implies md5; a pair names the algorithm.
            if isinstance(digest, (list, tuple)):
                hasher, digest = digest
            else:
                hasher = 'md5'
            digester = getattr(hashlib, hasher)()
            logger.debug('Digest specified: %s' % digest)
        # The following code is equivalent to urlretrieve.
        # We need to do it this way so that we can compute the
        # digest of the file as we go.
        with open(destfile, 'wb') as dfp:
            # addinfourl is not a context manager on 2.x
            # so we have to use try/finally
            sfp = self.send_request(Request(url))
            try:
                headers = sfp.info()
                blocksize = 8192
                size = -1
                read = 0
                blocknum = 0
                if "content-length" in headers:
                    size = int(headers["Content-Length"])
                if reporthook:
                    reporthook(blocknum, blocksize, size)
                while True:
                    block = sfp.read(blocksize)
                    if not block:
                        break
                    read += len(block)
                    dfp.write(block)
                    if digester:
                        digester.update(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, blocksize, size)
            finally:
                sfp.close()

        # check that we got the whole file, if we can
        if size >= 0 and read < size:
            raise DistlibException(
                'retrieval incomplete: got only %d out of %d bytes'
                % (read, size))
        # if we have a digest, it must match.
        if digester:
            actual = digester.hexdigest()
            if digest != actual:
                raise DistlibException('%s digest mismatch for %s: expected '
                                       '%s, got %s' % (hasher, destfile,
                                                       digest, actual))
            logger.debug('Digest verified: %s', digest)

    def send_request(self, req):
        """
        Send a standard library :class:`Request` to PyPI and return its
        response.

        :param req: The request to send.
        :return: The HTTP response from PyPI (a standard library HTTPResponse).
        """
        # Install whichever auth/SSL handlers have been configured on this
        # instance (see check_credentials and the ssl_verifier attribute).
        handlers = []
        if self.password_handler:
            handlers.append(self.password_handler)
        if self.ssl_verifier:
            handlers.append(self.ssl_verifier)
        opener = build_opener(*handlers)
        return opener.open(req)

    def encode_request(self, fields, files):
        """
        Encode fields and files for posting to an HTTP server.

        :param fields: The fields to send as a list of (fieldname, value)
                       tuples.
        :param files: The files to send as a list of (fieldname, filename,
                      file_bytes) tuple.
        """
        # Adapted from packaging, which in turn was adapted from
        # http://code.activestate.com/recipes/146306

        parts = []
        boundary = self.boundary
        for k, values in fields:
            # A scalar value is treated as a one-element list so repeated
            # fields can be expressed naturally.
            if not isinstance(values, (list, tuple)):
                values = [values]

            for v in values:
                parts.extend((
                    b'--' + boundary,
                    ('Content-Disposition: form-data; name="%s"' %
                     k).encode('utf-8'),
                    b'',
                    v.encode('utf-8')))
        for key, filename, value in files:
            # 'value' is already raw bytes; it is embedded as-is.
            parts.extend((
                b'--' + boundary,
                ('Content-Disposition: form-data; name="%s"; filename="%s"' %
                 (key, filename)).encode('utf-8'),
                b'',
                value))

        parts.extend((b'--' + boundary + b'--', b''))

        body = b'\r\n'.join(parts)
        # NOTE(review): ct is a bytes header value; confirm urllib accepts
        # bytes header values on all supported Python versions.
        ct = b'multipart/form-data; boundary=' + boundary
        headers = {
            'Content-type': ct,
            'Content-length': str(len(body))
        }
        return Request(self.url, body, headers)

    def search(self, terms, operator=None):  # pragma: no cover
        # Query the index's XML-RPC interface. A bare string is treated as
        # a name search; a dict may specify other search fields.
        if isinstance(terms, string_types):
            terms = {'name': terms}
        rpc_proxy = ServerProxy(self.url, timeout=3.0)
        try:
            return rpc_proxy.search(terms, operator or 'and')
        finally:
            # presumably closes the proxy's underlying transport - verify
            # against the ServerProxy implementation in .util
            rpc_proxy('close')()
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/manifest.py ADDED
@@ -0,0 +1,384 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012-2023 Python Software Foundation.
4
+ # See LICENSE.txt and CONTRIBUTORS.txt.
5
+ #
6
+ """
7
+ Class representing the list of files in a distribution.
8
+
9
+ Equivalent to distutils.filelist, but fixes some problems.
10
+ """
11
+ import fnmatch
12
+ import logging
13
+ import os
14
+ import re
15
+ import sys
16
+
17
+ from . import DistlibException
18
+ from .compat import fsdecode
19
+ from .util import convert_path
20
+
21
+
22
+ __all__ = ['Manifest']
23
+
24
+ logger = logging.getLogger(__name__)
25
+
26
+ # a \ followed by some spaces + EOL
27
+ _COLLAPSE_PATTERN = re.compile('\\\\w*\n', re.M)
28
+ _COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S)
29
+
30
+ #
31
+ # Due to the different results returned by fnmatch.translate, we need
32
+ # to do slightly different processing for Python 2.7 and 3.2 ... this needed
33
+ # to be brought in for Python 3.6 onwards.
34
+ #
35
+ _PYTHON_VERSION = sys.version_info[:2]
36
+
37
+
38
+ class Manifest(object):
39
+ """
40
+ A list of files built by exploring the filesystem and filtered by applying various
41
+ patterns to what we find there.
42
+ """
43
+
44
+ def __init__(self, base=None):
45
+ """
46
+ Initialise an instance.
47
+
48
+ :param base: The base directory to explore under.
49
+ """
50
+ self.base = os.path.abspath(os.path.normpath(base or os.getcwd()))
51
+ self.prefix = self.base + os.sep
52
+ self.allfiles = None
53
+ self.files = set()
54
+
55
+ #
56
+ # Public API
57
+ #
58
+
59
+ def findall(self):
60
+ """Find all files under the base and set ``allfiles`` to the absolute
61
+ pathnames of files found.
62
+ """
63
+ from stat import S_ISREG, S_ISDIR, S_ISLNK
64
+
65
+ self.allfiles = allfiles = []
66
+ root = self.base
67
+ stack = [root]
68
+ pop = stack.pop
69
+ push = stack.append
70
+
71
+ while stack:
72
+ root = pop()
73
+ names = os.listdir(root)
74
+
75
+ for name in names:
76
+ fullname = os.path.join(root, name)
77
+
78
+ # Avoid excess stat calls -- just one will do, thank you!
79
+ stat = os.stat(fullname)
80
+ mode = stat.st_mode
81
+ if S_ISREG(mode):
82
+ allfiles.append(fsdecode(fullname))
83
+ elif S_ISDIR(mode) and not S_ISLNK(mode):
84
+ push(fullname)
85
+
86
+ def add(self, item):
87
+ """
88
+ Add a file to the manifest.
89
+
90
+ :param item: The pathname to add. This can be relative to the base.
91
+ """
92
+ if not item.startswith(self.prefix):
93
+ item = os.path.join(self.base, item)
94
+ self.files.add(os.path.normpath(item))
95
+
96
+ def add_many(self, items):
97
+ """
98
+ Add a list of files to the manifest.
99
+
100
+ :param items: The pathnames to add. These can be relative to the base.
101
+ """
102
+ for item in items:
103
+ self.add(item)
104
+
105
+ def sorted(self, wantdirs=False):
106
+ """
107
+ Return sorted files in directory order
108
+ """
109
+
110
+ def add_dir(dirs, d):
111
+ dirs.add(d)
112
+ logger.debug('add_dir added %s', d)
113
+ if d != self.base:
114
+ parent, _ = os.path.split(d)
115
+ assert parent not in ('', '/')
116
+ add_dir(dirs, parent)
117
+
118
+ result = set(self.files) # make a copy!
119
+ if wantdirs:
120
+ dirs = set()
121
+ for f in result:
122
+ add_dir(dirs, os.path.dirname(f))
123
+ result |= dirs
124
+ return [os.path.join(*path_tuple) for path_tuple in
125
+ sorted(os.path.split(path) for path in result)]
126
+
127
+ def clear(self):
128
+ """Clear all collected files."""
129
+ self.files = set()
130
+ self.allfiles = []
131
+
132
    def process_directive(self, directive):
        """
        Process a directive which either adds some files from ``allfiles`` to
        ``files``, or removes some files from ``files``.

        :param directive: The directive to process. This should be in a format
                          compatible with distutils ``MANIFEST.in`` files:

                          http://docs.python.org/distutils/sourcedist.html#commands
        """
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dirpattern).
        action, patterns, thedir, dirpattern = self._parse_directive(directive)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            # Patterns are anchored at the base directory.
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=True):
                    logger.warning('no files found matching %r', pattern)

        elif action == 'exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, anchor=True)

        elif action == 'global-include':
            # Unanchored: patterns may match anywhere in the tree.
            for pattern in patterns:
                if not self._include_pattern(pattern, anchor=False):
                    logger.warning('no files found matching %r '
                                   'anywhere in distribution', pattern)

        elif action == 'global-exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, anchor=False)

        elif action == 'recursive-include':
            # Patterns apply under the named directory (prefix).
            for pattern in patterns:
                if not self._include_pattern(pattern, prefix=thedir):
                    logger.warning('no files found matching %r '
                                   'under directory %r', pattern, thedir)

        elif action == 'recursive-exclude':
            for pattern in patterns:
                self._exclude_pattern(pattern, prefix=thedir)

        elif action == 'graft':
            # pattern=None with a prefix - presumably matches everything
            # under the directory pattern; behaviour depends on
            # _translate_pattern (not shown here).
            if not self._include_pattern(None, prefix=dirpattern):
                logger.warning('no directories found matching %r',
                               dirpattern)

        elif action == 'prune':
            if not self._exclude_pattern(None, prefix=dirpattern):
                logger.warning('no previously-included directories found '
                               'matching %r', dirpattern)
        else:  # pragma: no cover
            # This should never happen, as it should be caught in
            # _parse_template_line
            raise DistlibException(
                'invalid action %r' % action)
195
+
196
+ #
197
+ # Private API
198
+ #
199
+
200
+ def _parse_directive(self, directive):
201
+ """
202
+ Validate a directive.
203
+ :param directive: The directive to validate.
204
+ :return: A tuple of action, patterns, thedir, dir_patterns
205
+ """
206
+ words = directive.split()
207
+ if len(words) == 1 and words[0] not in ('include', 'exclude',
208
+ 'global-include',
209
+ 'global-exclude',
210
+ 'recursive-include',
211
+ 'recursive-exclude',
212
+ 'graft', 'prune'):
213
+ # no action given, let's use the default 'include'
214
+ words.insert(0, 'include')
215
+
216
+ action = words[0]
217
+ patterns = thedir = dir_pattern = None
218
+
219
+ if action in ('include', 'exclude',
220
+ 'global-include', 'global-exclude'):
221
+ if len(words) < 2:
222
+ raise DistlibException(
223
+ '%r expects <pattern1> <pattern2> ...' % action)
224
+
225
+ patterns = [convert_path(word) for word in words[1:]]
226
+
227
+ elif action in ('recursive-include', 'recursive-exclude'):
228
+ if len(words) < 3:
229
+ raise DistlibException(
230
+ '%r expects <dir> <pattern1> <pattern2> ...' % action)
231
+
232
+ thedir = convert_path(words[1])
233
+ patterns = [convert_path(word) for word in words[2:]]
234
+
235
+ elif action in ('graft', 'prune'):
236
+ if len(words) != 2:
237
+ raise DistlibException(
238
+ '%r expects a single <dir_pattern>' % action)
239
+
240
+ dir_pattern = convert_path(words[1])
241
+
242
+ else:
243
+ raise DistlibException('unknown action %r' % action)
244
+
245
+ return action, patterns, thedir, dir_pattern
246
+
247
    def _include_pattern(self, pattern, anchor=True, prefix=None,
                         is_regex=False):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.

        Patterns are not quite the same as implemented by the 'fnmatch'
        module: '*' and '?' match non-special characters, where "special"
        is platform-dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py". If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match. 'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return True if files are found.
        """
        # NOTE: 'pattern' may be None (the 'graft' action passes None with a
        # prefix); _translate_pattern handles that case.
        # XXX docstring lying about what the special chars are?
        found = False
        pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)

        # delayed loading of allfiles list: findall() walks the tree only the
        # first time any include pattern is processed
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.files.add(name)
                found = True
        return found
287
+
288
+ def _exclude_pattern(self, pattern, anchor=True, prefix=None,
289
+ is_regex=False):
290
+ """Remove strings (presumably filenames) from 'files' that match
291
+ 'pattern'.
292
+
293
+ Other parameters are the same as for 'include_pattern()', above.
294
+ The list 'self.files' is modified in place. Return True if files are
295
+ found.
296
+
297
+ This API is public to allow e.g. exclusion of SCM subdirs, e.g. when
298
+ packaging source distributions
299
+ """
300
+ found = False
301
+ pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex)
302
+ for f in list(self.files):
303
+ if pattern_re.search(f):
304
+ self.files.remove(f)
305
+ found = True
306
+ return found
307
+
308
    def _translate_pattern(self, pattern, anchor=True, prefix=None,
                           is_regex=False):
        """Translate a shell-like wildcard pattern to a compiled regular
        expression.

        Return the compiled regex. If 'is_regex' true,
        then 'pattern' is directly compiled to a regex (if it's a string)
        or just returned as-is (assumes it's a regex object).
        """
        if is_regex:
            if isinstance(pattern, str):
                return re.compile(pattern)
            else:
                return pattern

        if _PYTHON_VERSION > (3, 2):
            # On 3.3+, fnmatch.translate wraps its output (e.g. '(?s:...)\\Z');
            # recover the exact wrapper by translating a one-char probe and
            # splitting around it, so it can be stripped/re-applied below.
            # ditch start and end characters
            start, _, end = self._glob_to_re('_').partition('_')

        if pattern:
            pattern_re = self._glob_to_re(pattern)
            if _PYTHON_VERSION > (3, 2):
                assert pattern_re.startswith(start) and pattern_re.endswith(end)
        else:
            # No pattern (graft/prune): match anything under the prefix.
            pattern_re = ''

        # Anchor everything to the manifest's base directory.
        base = re.escape(os.path.join(self.base, ''))
        if prefix is not None:
            # Build base + prefix + '.*' + pattern, stripping the translate()
            # wrappers from the inner pieces and re-wrapping the whole thing.
            # ditch end of pattern character
            if _PYTHON_VERSION <= (3, 2):
                empty_pattern = self._glob_to_re('')
                prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)]
            else:
                prefix_re = self._glob_to_re(prefix)
                assert prefix_re.startswith(start) and prefix_re.endswith(end)
                prefix_re = prefix_re[len(start): len(prefix_re) - len(end)]
            sep = os.sep
            if os.sep == '\\':
                # literal backslash separator must be escaped inside a regex
                sep = r'\\'
            if _PYTHON_VERSION <= (3, 2):
                pattern_re = '^' + base + sep.join((prefix_re,
                                                    '.*' + pattern_re))
            else:
                pattern_re = pattern_re[len(start): len(pattern_re) - len(end)]
                pattern_re = r'%s%s%s%s.*%s%s' % (start, base, prefix_re, sep,
                                                  pattern_re, end)
        else:  # no prefix -- respect anchor flag
            if anchor:
                if _PYTHON_VERSION <= (3, 2):
                    pattern_re = '^' + base + pattern_re
                else:
                    # splice 'base' just inside the wrapper's opening group
                    pattern_re = r'%s%s%s' % (start, base, pattern_re[len(start):])

        return re.compile(pattern_re)
362
+
363
    def _glob_to_re(self, pattern):
        """Translate a shell-like glob pattern to a regular expression.

        Return a string containing the regex. Differs from
        'fnmatch.translate()' in that '*' does not match "special characters"
        (which are platform-specific).
        """
        pattern_re = fnmatch.translate(pattern)

        # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which
        # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix,
        # and by extension they shouldn't match such "special characters" under
        # any OS. So change all non-escaped dots in the RE to match any
        # character except the special characters (currently: just os.sep).
        sep = os.sep
        if os.sep == '\\':
            # we're using a regex to manipulate a regex, so we need
            # to escape the backslash twice
            sep = r'\\\\'
        escaped = r'\1[^%s]' % sep
        # '((?<!\\)(\\\\)*)\.' matches a '.' preceded by an even number of
        # backslashes, i.e. an unescaped '.' metacharacter in the translated
        # regex; each is rewritten to '[^<sep>]'.
        pattern_re = re.sub(r'((?<!\\)(\\\\)*)\.', escaped, pattern_re)
        return pattern_re
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/markers.py ADDED
@@ -0,0 +1,167 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012-2023 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ """
8
+ Parser for the environment markers micro-language defined in PEP 508.
9
+ """
10
+
11
+ # Note: In PEP 345, the micro-language was Python compatible, so the ast
12
+ # module could be used to parse it. However, PEP 508 introduced operators such
13
+ # as ~= and === which aren't in Python, necessitating a different approach.
14
+
15
+ import os
16
+ import re
17
+ import sys
18
+ import platform
19
+
20
+ from .compat import string_types
21
+ from .util import in_venv, parse_marker
22
+ from .version import LegacyVersion as LV
23
+
24
+ __all__ = ['interpret']
25
+
26
+ _VERSION_PATTERN = re.compile(
27
+ r'((\d+(\.\d+)*\w*)|\'(\d+(\.\d+)*\w*)\'|\"(\d+(\.\d+)*\w*)\")')
28
+ _VERSION_MARKERS = {'python_version', 'python_full_version'}
29
+
30
+
31
def _is_version_marker(s):
    """Return True if *s* is the name of a version environment marker."""
    if not isinstance(s, string_types):
        return False
    return s in _VERSION_MARKERS
33
+
34
+
35
def _is_literal(o):
    """Return True if *o* is a non-empty quoted string literal."""
    if isinstance(o, string_types) and o:
        return o.startswith(("'", '"'))
    return False
39
+
40
+
41
def _get_versions(s):
    """Extract every version string found in *s* as a set of LegacyVersion."""
    result = set()
    for m in _VERSION_PATTERN.finditer(s):
        result.add(LV(m.group(1)))
    return result
43
+
44
+
45
class Evaluator(object):
    """
    This class is used to evaluate marker expressions.
    """

    # Binary operators for marker evaluation. Note: '~=' and '<='/'>=' are
    # expressed via '=='/'<'/'>' combinations because operands may be
    # LegacyVersion instances; '~=' in particular is approximated as
    # "equal or greater" rather than full PEP 440 compatible-release
    # semantics — presumably adequate for marker use; verify before reuse.
    operations = {
        '==': lambda x, y: x == y,
        '===': lambda x, y: x == y,
        '~=': lambda x, y: x == y or x > y,
        '!=': lambda x, y: x != y,
        '<': lambda x, y: x < y,
        '<=': lambda x, y: x == y or x < y,
        '>': lambda x, y: x > y,
        '>=': lambda x, y: x == y or x > y,
        'and': lambda x, y: x and y,
        'or': lambda x, y: x or y,
        'in': lambda x, y: x in y,
        'not in': lambda x, y: x not in y,
    }

    def evaluate(self, expr, context):
        """
        Evaluate a marker expression returned by the :func:`parse_requirement`
        function in the specified context.

        *expr* is either a string (a quoted literal or a variable name looked
        up in *context*) or a dict with 'op', 'lhs' and 'rhs' keys, evaluated
        recursively.
        """
        if isinstance(expr, string_types):
            if expr[0] in '\'"':
                # quoted literal: strip the surrounding quotes
                result = expr[1:-1]
            else:
                if expr not in context:
                    raise SyntaxError('unknown variable: %s' % expr)
                result = context[expr]
        else:
            assert isinstance(expr, dict)
            op = expr['op']
            if op not in self.operations:
                raise NotImplementedError('op not implemented: %s' % op)
            elhs = expr['lhs']
            erhs = expr['rhs']
            # literal-vs-literal comparisons are meaningless in a marker
            if _is_literal(expr['lhs']) and _is_literal(expr['rhs']):
                raise SyntaxError('invalid comparison: %s %s %s' %
                                  (elhs, op, erhs))

            lhs = self.evaluate(elhs, context)
            rhs = self.evaluate(erhs, context)
            # Comparisons involving python_version / python_full_version are
            # coerced to LegacyVersion so ordering is numeric, not lexical.
            if ((_is_version_marker(elhs) or _is_version_marker(erhs))
                    and op in ('<', '<=', '>', '>=', '===', '==', '!=', '~=')):
                lhs = LV(lhs)
                rhs = LV(rhs)
            elif _is_version_marker(elhs) and op in ('in', 'not in'):
                # membership test against every version literal in the RHS
                lhs = LV(lhs)
                rhs = _get_versions(rhs)
            result = self.operations[op](lhs, rhs)
        return result
99
+
100
+
101
+ _DIGITS = re.compile(r'\d+\.\d+')
102
+
103
+
104
def default_context():
    """Build the default PEP 508 marker context from the running interpreter
    and platform."""

    def _full_version(info):
        # 'X.Y.Z', plus e.g. 'a1'/'b2'/'c3' appended for non-final releases.
        base = '%s.%s.%s' % (info.major, info.minor, info.micro)
        level = info.releaselevel
        if level == 'final':
            return base
        return base + level[0] + str(info.serial)

    if not hasattr(sys, 'implementation'):
        implementation_name = ''
        implementation_version = '0'
    else:
        implementation_name = sys.implementation.name
        implementation_version = _full_version(sys.implementation.version)

    full_version = platform.python_version()
    short_version = _DIGITS.match(full_version).group(0)
    return {
        'implementation_name': implementation_name,
        'implementation_version': implementation_version,
        'os_name': os.name,
        'platform_machine': platform.machine(),
        'platform_python_implementation': platform.python_implementation(),
        'platform_release': platform.release(),
        'platform_system': platform.system(),
        'platform_version': platform.version(),
        'platform_in_venv': str(in_venv()),
        'python_full_version': full_version,
        'python_version': short_version,
        'sys_platform': sys.platform,
    }
139
+
140
+
141
# Evaluated once at import time; interpret() copies this dict per call, so
# callers can overlay their own values without mutating the shared default.
DEFAULT_CONTEXT = default_context()
del default_context

# Shared module-level evaluator used by interpret(); Evaluator keeps no state.
evaluator = Evaluator()
145
+
146
+
147
def interpret(marker, execution_context=None):
    """
    Interpret a marker and return a result depending on environment.

    :param marker: The marker to interpret.
    :type marker: str
    :param execution_context: The context used for name lookup.
    :type execution_context: mapping
    """
    try:
        parsed, remainder = parse_marker(marker)
    except Exception as e:
        raise SyntaxError('Unable to interpret marker syntax: %s: %s' %
                          (marker, e))
    # Anything left after the parsed expression must be a comment.
    if remainder and not remainder.startswith('#'):
        raise SyntaxError('unexpected trailing data in marker: %s: %s' %
                          (marker, remainder))
    names = dict(DEFAULT_CONTEXT)
    if execution_context:
        names.update(execution_context)
    return evaluator.evaluate(parsed, names)
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/metadata.py ADDED
@@ -0,0 +1,1068 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012 The Python Software Foundation.
4
+ # See LICENSE.txt and CONTRIBUTORS.txt.
5
+ #
6
+ """Implementation of the Metadata for Python packages PEPs.
7
+
8
+ Supports all metadata formats (1.0, 1.1, 1.2, 1.3/2.1 and 2.2).
9
+ """
10
+ from __future__ import unicode_literals
11
+
12
+ import codecs
13
+ from email import message_from_file
14
+ import json
15
+ import logging
16
+ import re
17
+
18
+
19
+ from . import DistlibException, __version__
20
+ from .compat import StringIO, string_types, text_type
21
+ from .markers import interpret
22
+ from .util import extract_by_key, get_extras
23
+ from .version import get_scheme, PEP440_VERSION_RE
24
+
25
+ logger = logging.getLogger(__name__)
26
+
27
+
28
class MetadataMissingError(DistlibException):
    """Raised when a required metadata field is missing."""
30
+
31
+
32
class MetadataConflictError(DistlibException):
    """Raised on an attempt to read or write conflicting metadata fields."""
34
+
35
+
36
class MetadataUnrecognizedVersionError(DistlibException):
    """Raised for an unknown metadata version number."""
38
+
39
+
40
class MetadataInvalidError(DistlibException):
    """Raised when a metadata value is invalid."""
42
+
43
+ # public API of this module
44
+ __all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION']
45
+
46
+ # Encoding used for the PKG-INFO files
47
+ PKG_INFO_ENCODING = 'utf-8'
48
+
49
+ # preferred version. Hopefully will be changed
50
+ # to 1.2 once PEP 345 is supported everywhere
51
+ PKG_INFO_PREFERRED_VERSION = '1.1'
52
+
53
+ _LINE_PREFIX_1_2 = re.compile('\n \\|')
54
+ _LINE_PREFIX_PRE_1_2 = re.compile('\n ')
55
+ _241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
56
+ 'Summary', 'Description',
57
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
58
+ 'License')
59
+
60
+ _314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
61
+ 'Supported-Platform', 'Summary', 'Description',
62
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
63
+ 'License', 'Classifier', 'Download-URL', 'Obsoletes',
64
+ 'Provides', 'Requires')
65
+
66
+ _314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier',
67
+ 'Download-URL')
68
+
69
+ _345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
70
+ 'Supported-Platform', 'Summary', 'Description',
71
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
72
+ 'Maintainer', 'Maintainer-email', 'License',
73
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
74
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
75
+ 'Requires-Python', 'Requires-External')
76
+
77
+ _345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python',
78
+ 'Obsoletes-Dist', 'Requires-External', 'Maintainer',
79
+ 'Maintainer-email', 'Project-URL')
80
+
81
+ _426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform',
82
+ 'Supported-Platform', 'Summary', 'Description',
83
+ 'Keywords', 'Home-page', 'Author', 'Author-email',
84
+ 'Maintainer', 'Maintainer-email', 'License',
85
+ 'Classifier', 'Download-URL', 'Obsoletes-Dist',
86
+ 'Project-URL', 'Provides-Dist', 'Requires-Dist',
87
+ 'Requires-Python', 'Requires-External', 'Private-Version',
88
+ 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension',
89
+ 'Provides-Extra')
90
+
91
+ _426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By',
92
+ 'Setup-Requires-Dist', 'Extension')
93
+
94
+ # See issue #106: Sometimes 'Requires' and 'Provides' occur wrongly in
95
+ # the metadata. Include them in the tuple literal below to allow them
96
+ # (for now).
97
+ # Ditto for Obsoletes - see issue #140.
98
+ _566_FIELDS = _426_FIELDS + ('Description-Content-Type',
99
+ 'Requires', 'Provides', 'Obsoletes')
100
+
101
+ _566_MARKERS = ('Description-Content-Type',)
102
+
103
+ _643_MARKERS = ('Dynamic', 'License-File')
104
+
105
+ _643_FIELDS = _566_FIELDS + _643_MARKERS
106
+
107
+ _ALL_FIELDS = set()
108
+ _ALL_FIELDS.update(_241_FIELDS)
109
+ _ALL_FIELDS.update(_314_FIELDS)
110
+ _ALL_FIELDS.update(_345_FIELDS)
111
+ _ALL_FIELDS.update(_426_FIELDS)
112
+ _ALL_FIELDS.update(_566_FIELDS)
113
+ _ALL_FIELDS.update(_643_FIELDS)
114
+
115
+ EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''')
116
+
117
+
118
def _version2fieldlist(version):
    """Return the tuple of header field names defined for metadata *version*.

    Raises ValueError for the withdrawn 2.0 spec and
    MetadataUnrecognizedVersionError for any other unknown version.
    """
    if version == '2.0':
        # Metadata 2.0 was never finalised; refuse it explicitly.
        raise ValueError('Metadata 2.0 is withdrawn and not supported')
    direct = {
        '1.0': _241_FIELDS,
        '1.1': _314_FIELDS,
        '1.2': _345_FIELDS,
        '2.2': _643_FIELDS,
    }
    if version in direct:
        return direct[version]
    if version in ('1.3', '2.1'):
        # 1.3/2.1 extend 1.2; avoid adding field names already present
        extras = tuple(f for f in _566_FIELDS if f not in _345_FIELDS)
        return _345_FIELDS + extras
    raise MetadataUnrecognizedVersionError(version)
134
+
135
+
136
def _best_version(fields):
    """Detect the best version depending on the fields used.

    Works by elimination: every populated field removes the metadata
    versions whose field list doesn't contain it, then version-specific
    marker fields pick among the survivors.
    """
    def _has_marker(keys, markers):
        return any(marker in keys for marker in markers)

    # Only fields with real values participate; [], 'UNKNOWN' and None are
    # treated as "not set".
    keys = [key for key, value in fields.items() if value not in ([], 'UNKNOWN', None)]
    possible_versions = ['1.0', '1.1', '1.2', '1.3', '2.1', '2.2']  # 2.0 removed

    # first let's try to see if a field is not part of one of the version
    for key in keys:
        if key not in _241_FIELDS and '1.0' in possible_versions:
            possible_versions.remove('1.0')
            logger.debug('Removed 1.0 due to %s', key)
        if key not in _314_FIELDS and '1.1' in possible_versions:
            possible_versions.remove('1.1')
            logger.debug('Removed 1.1 due to %s', key)
        if key not in _345_FIELDS and '1.2' in possible_versions:
            possible_versions.remove('1.2')
            logger.debug('Removed 1.2 due to %s', key)
        if key not in _566_FIELDS and '1.3' in possible_versions:
            possible_versions.remove('1.3')
            logger.debug('Removed 1.3 due to %s', key)
        if key not in _566_FIELDS and '2.1' in possible_versions:
            if key != 'Description':  # In 2.1, description allowed after headers
                possible_versions.remove('2.1')
                logger.debug('Removed 2.1 due to %s', key)
        if key not in _643_FIELDS and '2.2' in possible_versions:
            possible_versions.remove('2.2')
            logger.debug('Removed 2.2 due to %s', key)
        # if key not in _426_FIELDS and '2.0' in possible_versions:
        #     possible_versions.remove('2.0')
        #     logger.debug('Removed 2.0 due to %s', key)

    # possible_version contains qualified versions
    if len(possible_versions) == 1:
        return possible_versions[0]   # found !
    elif len(possible_versions) == 0:
        logger.debug('Out of options - unknown metadata set: %s', fields)
        raise MetadataConflictError('Unknown metadata set')

    # let's see if one unique marker is found
    is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS)
    is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS)
    is_2_1 = '2.1' in possible_versions and _has_marker(keys, _566_MARKERS)
    # is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS)
    is_2_2 = '2.2' in possible_versions and _has_marker(keys, _643_MARKERS)
    # Marker fields from more than one incompatible version is an error.
    if int(is_1_1) + int(is_1_2) + int(is_2_1) + int(is_2_2) > 1:
        raise MetadataConflictError('You used incompatible 1.1/1.2/2.1/2.2 fields')

    # we have the choice, 1.0, or 1.2, 2.1 or 2.2
    # - 1.0 has a broken Summary field but works with all tools
    # - 1.1 is to avoid
    # - 1.2 fixes Summary but has little adoption
    # - 2.1 adds more features
    # - 2.2 is the latest
    if not is_1_1 and not is_1_2 and not is_2_1 and not is_2_2:
        # we couldn't find any specific marker
        if PKG_INFO_PREFERRED_VERSION in possible_versions:
            return PKG_INFO_PREFERRED_VERSION
    if is_1_1:
        return '1.1'
    if is_1_2:
        return '1.2'
    if is_2_1:
        return '2.1'
    # if is_2_2:
    #     return '2.2'

    # fall-through: no unique marker and preferred version eliminated
    return '2.2'
205
+
206
+ # This follows the rules about transforming keys as described in
207
+ # https://www.python.org/dev/peps/pep-0566/#id17
208
+ _ATTR2FIELD = {
209
+ name.lower().replace("-", "_"): name for name in _ALL_FIELDS
210
+ }
211
+ _FIELD2ATTR = {field: attr for attr, field in _ATTR2FIELD.items()}
212
+
213
+ _PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist')
214
+ _VERSIONS_FIELDS = ('Requires-Python',)
215
+ _VERSION_FIELDS = ('Version',)
216
+ _LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes',
217
+ 'Requires', 'Provides', 'Obsoletes-Dist',
218
+ 'Provides-Dist', 'Requires-Dist', 'Requires-External',
219
+ 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist',
220
+ 'Provides-Extra', 'Extension', 'License-File')
221
+ _LISTTUPLEFIELDS = ('Project-URL',)
222
+
223
+ _ELEMENTSFIELD = ('Keywords',)
224
+
225
+ _UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description')
226
+
227
+ _MISSING = object()
228
+
229
+ _FILESAFE = re.compile('[^A-Za-z0-9.]+')
230
+
231
+
232
def _get_name_and_version(name, version, for_filename=False):
    """Return the distribution name with version.

    If for_filename is true, return a filename-escaped form."""
    if not for_filename:
        return '%s-%s' % (name, version)
    # Runs of characters outside [A-Za-z0-9.] collapse to a single '-';
    # spaces inside the version become '.' before escaping.
    safe_name = _FILESAFE.sub('-', name)
    safe_version = _FILESAFE.sub('-', version.replace(' ', '.'))
    return '%s-%s' % (safe_name, safe_version)
243
+
244
+
245
+ class LegacyMetadata(object):
246
+ """The legacy metadata of a release.
247
+
248
+ Supports versions 1.0, 1.1, 1.2, 2.0 and 1.3/2.1 (auto-detected). You can
249
+ instantiate the class with one of these arguments (or none):
250
+ - *path*, the path to a metadata file
251
+ - *fileobj* give a file-like object with metadata as content
252
+ - *mapping* is a dict-like object
253
+ - *scheme* is a version scheme name
254
+ """
255
+ # TODO document the mapping API and UNKNOWN default key
256
+
257
    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        # At most one of path / fileobj / mapping may be given.
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._fields = {}           # field name -> value store
        self.requires_files = []
        self._dependencies = None
        self.scheme = scheme        # version scheme name used for validation
        if path is not None:
            self.read(path)
        elif fileobj is not None:
            self.read_file(fileobj)
        elif mapping is not None:
            self.update(mapping)
            self.set_metadata_version()
272
+
273
    def set_metadata_version(self):
        # Recompute Metadata-Version from the currently populated fields.
        self._fields['Metadata-Version'] = _best_version(self._fields)
275
+
276
    def _write_field(self, fileobj, name, value):
        # Emit one RFC 822-style 'Name: value' header line.
        fileobj.write('%s: %s\n' % (name, value))
278
+
279
    def __getitem__(self, name):
        # Mapping-style access delegates to get() (with its default handling).
        return self.get(name)
281
+
282
    def __setitem__(self, name, value):
        # Mapping-style assignment delegates to set() (with its validation).
        return self.set(name, value)
284
+
285
    def __delitem__(self, name):
        field_name = self._convert_name(name)
        try:
            del self._fields[field_name]
        except KeyError:
            # re-raise with the caller's original (unconverted) key
            raise KeyError(name)
291
+
292
    def __contains__(self, name):
        # True if the key is present either verbatim or in canonical form.
        return (name in self._fields or
                self._convert_name(name) in self._fields)
295
+
296
+ def _convert_name(self, name):
297
+ if name in _ALL_FIELDS:
298
+ return name
299
+ name = name.replace('-', '_').lower()
300
+ return _ATTR2FIELD.get(name, name)
301
+
302
+ def _default_value(self, name):
303
+ if name in _LISTFIELDS or name in _ELEMENTSFIELD:
304
+ return []
305
+ return 'UNKNOWN'
306
+
307
    def _remove_line_prefix(self, value):
        # Continuation lines of a multi-line Description are indented in
        # PKG-INFO: metadata 1.0/1.1 use a plain leading space, later
        # versions use ' |'. Strip the appropriate prefix back out.
        if self.metadata_version in ('1.0', '1.1'):
            return _LINE_PREFIX_PRE_1_2.sub('\n', value)
        else:
            return _LINE_PREFIX_1_2.sub('\n', value)
312
+
313
    def __getattr__(self, name):
        # Attribute access for known metadata attributes (e.g. md.home_page)
        # falls back to item access; anything else is a real AttributeError.
        if name in _ATTR2FIELD:
            return self[name]
        raise AttributeError(name)
317
+
318
+ #
319
+ # Public API
320
+ #
321
+
322
+ # dependencies = property(_get_dependencies, _set_dependencies)
323
+
324
    def get_fullname(self, filesafe=False):
        """Return the distribution name with version.

        If filesafe is true, return a filename-escaped form."""
        return _get_name_and_version(self['Name'], self['Version'], filesafe)
329
+
330
    def is_field(self, name):
        """Return True if name is a valid metadata key."""
        name = self._convert_name(name)
        return name in _ALL_FIELDS
334
+
335
    def is_multi_field(self, name):
        # True if the field may carry multiple values (one header per value).
        name = self._convert_name(name)
        return name in _LISTFIELDS
338
+
339
+ def read(self, filepath):
340
+ """Read the metadata values from a file path."""
341
+ fp = codecs.open(filepath, 'r', encoding='utf-8')
342
+ try:
343
+ self.read_file(fp)
344
+ finally:
345
+ fp.close()
346
+
347
    def read_file(self, fileob):
        """Read the metadata values from a file object."""
        # Parse as an RFC 822-style message; headers are metadata fields.
        msg = message_from_file(fileob)
        self._fields['Metadata-Version'] = msg['metadata-version']

        # When reading, get all the fields we can
        for field in _ALL_FIELDS:
            if field not in msg:
                continue
            if field in _LISTFIELDS:
                # we can have multiple lines
                values = msg.get_all(field)
                if field in _LISTTUPLEFIELDS and values is not None:
                    # e.g. Project-URL: label, url  ->  (label, url)
                    values = [tuple(value.split(',')) for value in values]
                self.set(field, values)
            else:
                # single line
                value = msg[field]
                if value is not None and value != 'UNKNOWN':
                    self.set(field, value)

        # PEP 566 specifies that the body be used for the description, if
        # available
        body = msg.get_payload()
        self["Description"] = body if body else self["Description"]
        # logger.debug('Attempting to set metadata for %s', self)
        # self.set_metadata_version()
374
+
375
+ def write(self, filepath, skip_unknown=False):
376
+ """Write the metadata fields to filepath."""
377
+ fp = codecs.open(filepath, 'w', encoding='utf-8')
378
+ try:
379
+ self.write_file(fp, skip_unknown)
380
+ finally:
381
+ fp.close()
382
+
383
    def write_file(self, fileobject, skip_unknown=False):
        """Write the PKG-INFO format data to a file object."""
        self.set_metadata_version()

        for field in _version2fieldlist(self['Metadata-Version']):
            values = self.get(field)
            if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']):
                continue
            if field in _ELEMENTSFIELD:
                # Keywords: written as one comma-joined header
                self._write_field(fileobject, field, ','.join(values))
                continue
            if field not in _LISTFIELDS:
                if field == 'Description':
                    # re-apply the version-specific continuation-line prefix
                    if self.metadata_version in ('1.0', '1.1'):
                        values = values.replace('\n', '\n ')
                    else:
                        values = values.replace('\n', '\n |')
                # single-valued fields are wrapped for the loop below
                values = [values]

            if field in _LISTTUPLEFIELDS:
                # (label, url) tuples back to 'label,url'
                values = [','.join(value) for value in values]

            for value in values:
                self._write_field(fileobject, field, value)
407
+
408
    def update(self, other=None, **kwargs):
        """Set metadata values from the given iterable `other` and kwargs.

        Behavior is like `dict.update`: If `other` has a ``keys`` method,
        they are looped over and ``self[key]`` is assigned ``other[key]``.
        Else, ``other`` is an iterable of ``(key, value)`` iterables.

        Keys that don't match a metadata field or that have an empty value are
        dropped.
        """
        def _set(key, value):
            # only known attribute-style keys with truthy values are kept
            if key in _ATTR2FIELD and value:
                self.set(self._convert_name(key), value)

        if not other:
            # other is None or empty container
            pass
        elif hasattr(other, 'keys'):
            for k in other.keys():
                _set(k, other[k])
        else:
            for k, v in other:
                _set(k, v)

        if kwargs:
            for k, v in kwargs.items():
                _set(k, v)
435
+
436
+ def set(self, name, value):
437
+ """Control then set a metadata field."""
438
+ name = self._convert_name(name)
439
+
440
+ if ((name in _ELEMENTSFIELD or name == 'Platform') and
441
+ not isinstance(value, (list, tuple))):
442
+ if isinstance(value, string_types):
443
+ value = [v.strip() for v in value.split(',')]
444
+ else:
445
+ value = []
446
+ elif (name in _LISTFIELDS and
447
+ not isinstance(value, (list, tuple))):
448
+ if isinstance(value, string_types):
449
+ value = [value]
450
+ else:
451
+ value = []
452
+
453
+ if logger.isEnabledFor(logging.WARNING):
454
+ project_name = self['Name']
455
+
456
+ scheme = get_scheme(self.scheme)
457
+ if name in _PREDICATE_FIELDS and value is not None:
458
+ for v in value:
459
+ # check that the values are valid
460
+ if not scheme.is_valid_matcher(v.split(';')[0]):
461
+ logger.warning(
462
+ "'%s': '%s' is not valid (field '%s')",
463
+ project_name, v, name)
464
+ # FIXME this rejects UNKNOWN, is that right?
465
+ elif name in _VERSIONS_FIELDS and value is not None:
466
+ if not scheme.is_valid_constraint_list(value):
467
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
468
+ project_name, value, name)
469
+ elif name in _VERSION_FIELDS and value is not None:
470
+ if not scheme.is_valid_version(value):
471
+ logger.warning("'%s': '%s' is not a valid version (field '%s')",
472
+ project_name, value, name)
473
+
474
+ if name in _UNICODEFIELDS:
475
+ if name == 'Description':
476
+ value = self._remove_line_prefix(value)
477
+
478
+ self._fields[name] = value
479
+
480
    def get(self, name, default=_MISSING):
        """Get a metadata field.

        If the field is unset, *default* is returned; when no default is
        given, a type-appropriate default ([] or 'UNKNOWN') is used.
        """
        name = self._convert_name(name)
        if name not in self._fields:
            if default is _MISSING:
                default = self._default_value(name)
            return default
        if name in _UNICODEFIELDS:
            value = self._fields[name]
            return value
        elif name in _LISTFIELDS:
            value = self._fields[name]
            if value is None:
                return []
            res = []
            for val in value:
                if name not in _LISTTUPLEFIELDS:
                    res.append(val)
                else:
                    # That's for Project-URL
                    res.append((val[0], val[1]))
            return res

        elif name in _ELEMENTSFIELD:
            # Keywords stored as a string are split on commas
            value = self._fields[name]
            if isinstance(value, string_types):
                return value.split(',')
        return self._fields[name]
508
+
509
    def check(self, strict=False):
        """Check if the metadata is compliant. If strict is True then raise if
        no Name or Version are provided.

        Returns a (missing, warnings) pair of lists.
        """
        self.set_metadata_version()

        # XXX should check the versions (if the file was loaded)
        missing, warnings = [], []

        for attr in ('Name', 'Version'):  # required by PEP 345
            if attr not in self:
                missing.append(attr)

        if strict and missing != []:
            msg = 'missing required metadata: %s' % ', '.join(missing)
            raise MetadataMissingError(msg)

        # recommended-but-optional fields are reported but never fatal
        for attr in ('Home-page', 'Author'):
            if attr not in self:
                missing.append(attr)

        # checking metadata 1.2 (XXX needs to check 1.1, 1.0)
        if self['Metadata-Version'] != '1.2':
            return missing, warnings

        scheme = get_scheme(self.scheme)

        def are_valid_constraints(value):
            # every requirement (sans environment marker) must parse
            for v in value:
                if not scheme.is_valid_matcher(v.split(';')[0]):
                    return False
            return True

        for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                                   (_VERSIONS_FIELDS,
                                    scheme.is_valid_constraint_list),
                                   (_VERSION_FIELDS,
                                    scheme.is_valid_version)):
            for field in fields:
                value = self.get(field, None)
                if value is not None and not controller(value):
                    warnings.append("Wrong value for '%s': %s" % (field, value))

        return missing, warnings
552
+
553
+ def todict(self, skip_missing=False):
554
+ """Return fields as a dict.
555
+
556
+ Field names will be converted to use the underscore-lowercase style
557
+ instead of hyphen-mixed case (i.e. home_page instead of Home-page).
558
+ This is as per https://www.python.org/dev/peps/pep-0566/#id17.
559
+ """
560
+ self.set_metadata_version()
561
+
562
+ fields = _version2fieldlist(self['Metadata-Version'])
563
+
564
+ data = {}
565
+
566
+ for field_name in fields:
567
+ if not skip_missing or field_name in self._fields:
568
+ key = _FIELD2ATTR[field_name]
569
+ if key != 'project_url':
570
+ data[key] = self[field_name]
571
+ else:
572
+ data[key] = [','.join(u) for u in self[field_name]]
573
+
574
+ return data
575
+
576
+ def add_requirements(self, requirements):
577
+ if self['Metadata-Version'] == '1.1':
578
+ # we can't have 1.1 metadata *and* Setuptools requires
579
+ for field in ('Obsoletes', 'Requires', 'Provides'):
580
+ if field in self:
581
+ del self[field]
582
+ self['Requires-Dist'] += requirements
583
+
584
+ # Mapping API
585
+ # TODO could add iter* variants
586
+
587
+ def keys(self):
588
+ return list(_version2fieldlist(self['Metadata-Version']))
589
+
590
+ def __iter__(self):
591
+ for key in self.keys():
592
+ yield key
593
+
594
+ def values(self):
595
+ return [self[key] for key in self.keys()]
596
+
597
+ def items(self):
598
+ return [(key, self[key]) for key in self.keys()]
599
+
600
+ def __repr__(self):
601
+ return '<%s %s %s>' % (self.__class__.__name__, self.name,
602
+ self.version)
603
+
604
+
605
# Canonical filename for distlib's JSON (PEP 426-style) metadata.
METADATA_FILENAME = 'pydist.json'
# Filename used for JSON metadata inside wheels.
WHEEL_METADATA_FILENAME = 'metadata.json'
# Filename of the traditional key-value (email-header style) metadata.
LEGACY_METADATA_FILENAME = 'METADATA'
608
+
609
+
610
+ class Metadata(object):
611
+ """
612
+ The metadata of a release. This implementation uses 2.1
613
+ metadata where possible. If not possible, it wraps a LegacyMetadata
614
+ instance which handles the key-value metadata format.
615
+ """
616
+
617
+ METADATA_VERSION_MATCHER = re.compile(r'^\d+(\.\d+)*$')
618
+
619
+ NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I)
620
+
621
+ FIELDNAME_MATCHER = re.compile('^[A-Z]([0-9A-Z-]*[0-9A-Z])?$', re.I)
622
+
623
+ VERSION_MATCHER = PEP440_VERSION_RE
624
+
625
+ SUMMARY_MATCHER = re.compile('.{1,2047}')
626
+
627
+ METADATA_VERSION = '2.0'
628
+
629
+ GENERATOR = 'distlib (%s)' % __version__
630
+
631
+ MANDATORY_KEYS = {
632
+ 'name': (),
633
+ 'version': (),
634
+ 'summary': ('legacy',),
635
+ }
636
+
637
+ INDEX_KEYS = ('name version license summary description author '
638
+ 'author_email keywords platform home_page classifiers '
639
+ 'download_url')
640
+
641
+ DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires '
642
+ 'dev_requires provides meta_requires obsoleted_by '
643
+ 'supports_environments')
644
+
645
+ SYNTAX_VALIDATORS = {
646
+ 'metadata_version': (METADATA_VERSION_MATCHER, ()),
647
+ 'name': (NAME_MATCHER, ('legacy',)),
648
+ 'version': (VERSION_MATCHER, ('legacy',)),
649
+ 'summary': (SUMMARY_MATCHER, ('legacy',)),
650
+ 'dynamic': (FIELDNAME_MATCHER, ('legacy',)),
651
+ }
652
+
653
+ __slots__ = ('_legacy', '_data', 'scheme')
654
+
655
    def __init__(self, path=None, fileobj=None, mapping=None,
                 scheme='default'):
        """Initialise from at most one of *path*, *fileobj* or *mapping*.

        Falls back to wrapping a LegacyMetadata instance when the input is
        not recognised as JSON-style metadata of the supported version.
        """
        if [path, fileobj, mapping].count(None) < 2:
            raise TypeError('path, fileobj and mapping are exclusive')
        self._legacy = None
        self._data = None
        self.scheme = scheme
        if mapping is not None:
            try:
                self._validate_mapping(mapping, scheme)
                self._data = mapping
            except MetadataUnrecognizedVersionError:
                # Not our JSON format - treat it as legacy key-value data.
                self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme)
                self.validate()
        else:
            data = None
            if path:
                with open(path, 'rb') as f:
                    data = f.read()
            elif fileobj:
                data = fileobj.read()
            if data is None:
                # Initialised with no args - to be added
                self._data = {
                    'metadata_version': self.METADATA_VERSION,
                    'generator': self.GENERATOR,
                }
            else:
                if not isinstance(data, text_type):
                    data = data.decode('utf-8')
                try:
                    self._data = json.loads(data)
                    self._validate_mapping(self._data, scheme)
                except ValueError:
                    # Note: MetadataUnrecognizedVersionError does not
                    # inherit from ValueError (it's a DistlibException,
                    # which should not inherit from ValueError).
                    # The ValueError comes from the json.load - if that
                    # succeeds and we get a validation error, we want
                    # that to propagate
                    self._legacy = LegacyMetadata(fileobj=StringIO(data),
                                                  scheme=scheme)
                self.validate()
699
+
700
    # Keys that exist under the same name in both legacy and JSON metadata.
    common_keys = set(('name', 'version', 'license', 'keywords', 'summary'))

    none_list = (None, list)
    none_dict = (None, dict)

    # Maps attribute name -> (legacy field name or None, default factory).
    # A None legacy name means the key has no legacy equivalent.
    mapped_keys = {
        'run_requires': ('Requires-Dist', list),
        'build_requires': ('Setup-Requires-Dist', list),
        'dev_requires': none_list,
        'test_requires': none_list,
        'meta_requires': none_list,
        'extras': ('Provides-Extra', list),
        'modules': none_list,
        'namespaces': none_list,
        'exports': none_dict,
        'commands': none_dict,
        'classifiers': ('Classifier', list),
        'source_url': ('Download-URL', None),
        'metadata_version': ('Metadata-Version', None),
    }

    # Temporary helpers, only needed while building mapped_keys above.
    del none_list, none_dict
722
+
723
    def __getattribute__(self, key):
        """Route attribute access.

        Mapped/common metadata keys are looked up in the underlying legacy
        or JSON data; all other names behave as normal attributes.
        """
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, maker = mapped[key]
            if self._legacy:
                if lk is None:
                    # No legacy equivalent: synthesise an empty default.
                    result = None if maker is None else maker()
                else:
                    result = self._legacy.get(lk)
            else:
                value = None if maker is None else maker()
                if key not in ('commands', 'exports', 'modules', 'namespaces',
                               'classifiers'):
                    result = self._data.get(key, value)
                else:
                    # special cases for PEP 459
                    sentinel = object()
                    result = sentinel
                    d = self._data.get('extensions')
                    if d:
                        if key == 'commands':
                            result = d.get('python.commands', value)
                        elif key == 'classifiers':
                            d = d.get('python.details')
                            if d:
                                result = d.get(key, value)
                        else:
                            d = d.get('python.exports')
                            if not d:
                                d = self._data.get('python.exports')
                            if d:
                                result = d.get(key, value)
                    if result is sentinel:
                        result = value
        elif key not in common:
            # Ordinary attribute (scheme, _data, methods, ...).
            result = object.__getattribute__(self, key)
        elif self._legacy:
            result = self._legacy.get(key)
        else:
            result = self._data.get(key)
        return result
765
+
766
+ def _validate_value(self, key, value, scheme=None):
767
+ if key in self.SYNTAX_VALIDATORS:
768
+ pattern, exclusions = self.SYNTAX_VALIDATORS[key]
769
+ if (scheme or self.scheme) not in exclusions:
770
+ m = pattern.match(value)
771
+ if not m:
772
+ raise MetadataInvalidError("'%s' is an invalid value for "
773
+ "the '%s' property" % (value,
774
+ key))
775
+
776
    def __setattr__(self, key, value):
        """Route attribute assignment, mirroring __getattribute__.

        Values are syntax-validated first; mapped/common keys are stored in
        the underlying legacy or JSON data.
        """
        self._validate_value(key, value)
        common = object.__getattribute__(self, 'common_keys')
        mapped = object.__getattribute__(self, 'mapped_keys')
        if key in mapped:
            lk, _ = mapped[key]
            if self._legacy:
                if lk is None:
                    # No legacy equivalent for this key.
                    raise NotImplementedError
                self._legacy[lk] = value
            elif key not in ('commands', 'exports', 'modules', 'namespaces',
                             'classifiers'):
                self._data[key] = value
            else:
                # special cases for PEP 459
                d = self._data.setdefault('extensions', {})
                if key == 'commands':
                    d['python.commands'] = value
                elif key == 'classifiers':
                    d = d.setdefault('python.details', {})
                    d[key] = value
                else:
                    d = d.setdefault('python.exports', {})
                    d[key] = value
        elif key not in common:
            # Ordinary attribute (scheme, _legacy, _data).
            object.__setattr__(self, key, value)
        else:
            if key == 'keywords':
                # Accept a whitespace-separated string as well as a list.
                if isinstance(value, string_types):
                    value = value.strip()
                    if value:
                        value = value.split()
                    else:
                        value = []
            if self._legacy:
                self._legacy[key] = value
            else:
                self._data[key] = value
814
+
815
    @property
    def name_and_version(self):
        # Combined "name (version)" display string (normalisation enabled
        # via the third argument to _get_name_and_version).
        return _get_name_and_version(self.name, self.version, True)
818
+
819
+ @property
820
+ def provides(self):
821
+ if self._legacy:
822
+ result = self._legacy['Provides-Dist']
823
+ else:
824
+ result = self._data.setdefault('provides', [])
825
+ s = '%s (%s)' % (self.name, self.version)
826
+ if s not in result:
827
+ result.append(s)
828
+ return result
829
+
830
+ @provides.setter
831
+ def provides(self, value):
832
+ if self._legacy:
833
+ self._legacy['Provides-Dist'] = value
834
+ else:
835
+ self._data['provides'] = value
836
+
837
    def get_requirements(self, reqts, extras=None, env=None):
        """
        Base method to get dependencies, given a set of extras
        to satisfy and an optional environment context.
        :param reqts: A list of sometimes-wanted dependencies,
                      perhaps dependent on extras and environment.
        :param extras: A list of optional components being requested.
        :param env: An optional environment for marker evaluation.
        """
        if self._legacy:
            # Legacy metadata carries no extra/marker structure; pass through.
            result = reqts
        else:
            result = []
            extras = get_extras(extras or [], self.extras)
            for d in reqts:
                if 'extra' not in d and 'environment' not in d:
                    # unconditional
                    include = True
                else:
                    if 'extra' not in d:
                        # Not extra-dependent - only environment-dependent
                        include = True
                    else:
                        include = d.get('extra') in extras
                    if include:
                        # Not excluded because of extras, check environment
                        marker = d.get('environment')
                        if marker:
                            include = interpret(marker, env)
                if include:
                    result.extend(d['requires'])
            # Pseudo-extras ':build:', ':dev:', ':test:' pull in the
            # corresponding *_requires lists recursively.
            for key in ('build', 'dev', 'test'):
                e = ':%s:' % key
                if e in extras:
                    extras.remove(e)
                    # A recursive call, but it should terminate since 'test'
                    # has been removed from the extras
                    reqts = self._data.get('%s_requires' % key, [])
                    result.extend(self.get_requirements(reqts, extras=extras,
                                                        env=env))
        return result
878
+
879
    @property
    def dictionary(self):
        # Full metadata as a dict; legacy metadata is converted on the fly.
        if self._legacy:
            return self._from_legacy()
        return self._data
884
+
885
    @property
    def dependencies(self):
        # The dependency-related subset of the metadata (see DEPENDENCY_KEYS).
        # Not supported when wrapping legacy metadata.
        if self._legacy:
            raise NotImplementedError
        else:
            return extract_by_key(self._data, self.DEPENDENCY_KEYS)

    @dependencies.setter
    def dependencies(self, value):
        # Merge a dict of dependency fields into the metadata.
        if self._legacy:
            raise NotImplementedError
        else:
            self._data.update(value)
898
+
899
+ def _validate_mapping(self, mapping, scheme):
900
+ if mapping.get('metadata_version') != self.METADATA_VERSION:
901
+ raise MetadataUnrecognizedVersionError()
902
+ missing = []
903
+ for key, exclusions in self.MANDATORY_KEYS.items():
904
+ if key not in mapping:
905
+ if scheme not in exclusions:
906
+ missing.append(key)
907
+ if missing:
908
+ msg = 'Missing metadata items: %s' % ', '.join(missing)
909
+ raise MetadataMissingError(msg)
910
+ for k, v in mapping.items():
911
+ self._validate_value(k, v, scheme)
912
+
913
+ def validate(self):
914
+ if self._legacy:
915
+ missing, warnings = self._legacy.check(True)
916
+ if missing or warnings:
917
+ logger.warning('Metadata: missing: %s, warnings: %s',
918
+ missing, warnings)
919
+ else:
920
+ self._validate_mapping(self._data, self.scheme)
921
+
922
+ def todict(self):
923
+ if self._legacy:
924
+ return self._legacy.todict(True)
925
+ else:
926
+ result = extract_by_key(self._data, self.INDEX_KEYS)
927
+ return result
928
+
929
+ def _from_legacy(self):
930
+ assert self._legacy and not self._data
931
+ result = {
932
+ 'metadata_version': self.METADATA_VERSION,
933
+ 'generator': self.GENERATOR,
934
+ }
935
+ lmd = self._legacy.todict(True) # skip missing ones
936
+ for k in ('name', 'version', 'license', 'summary', 'description',
937
+ 'classifier'):
938
+ if k in lmd:
939
+ if k == 'classifier':
940
+ nk = 'classifiers'
941
+ else:
942
+ nk = k
943
+ result[nk] = lmd[k]
944
+ kw = lmd.get('Keywords', [])
945
+ if kw == ['']:
946
+ kw = []
947
+ result['keywords'] = kw
948
+ keys = (('requires_dist', 'run_requires'),
949
+ ('setup_requires_dist', 'build_requires'))
950
+ for ok, nk in keys:
951
+ if ok in lmd and lmd[ok]:
952
+ result[nk] = [{'requires': lmd[ok]}]
953
+ result['provides'] = self.provides
954
+ author = {}
955
+ maintainer = {}
956
+ return result
957
+
958
    # Maps JSON metadata keys - or tuple paths into nested structures - to
    # the corresponding legacy (key-value) field names. Tuple entries are
    # walked element by element through nested dicts/lists in _to_legacy.
    LEGACY_MAPPING = {
        'name': 'Name',
        'version': 'Version',
        ('extensions', 'python.details', 'license'): 'License',
        'summary': 'Summary',
        'description': 'Description',
        ('extensions', 'python.project', 'project_urls', 'Home'): 'Home-page',
        ('extensions', 'python.project', 'contacts', 0, 'name'): 'Author',
        ('extensions', 'python.project', 'contacts', 0, 'email'): 'Author-email',
        'source_url': 'Download-URL',
        ('extensions', 'python.details', 'classifiers'): 'Classifier',
    }
970
+
971
    def _to_legacy(self):
        """Convert the JSON-style metadata to a LegacyMetadata instance."""
        def process_entries(entries):
            # Flatten requirement entries into 'req;marker' strings, folding
            # any 'extra' and 'environment' conditions into the marker.
            reqts = set()
            for e in entries:
                extra = e.get('extra')
                env = e.get('environment')
                rlist = e['requires']
                for r in rlist:
                    if not env and not extra:
                        reqts.add(r)
                    else:
                        marker = ''
                        if extra:
                            marker = 'extra == "%s"' % extra
                        if env:
                            if marker:
                                marker = '(%s) and %s' % (env, marker)
                            else:
                                marker = env
                        reqts.add(';'.join((r, marker)))
            return reqts

        assert self._data and not self._legacy
        result = LegacyMetadata()
        nmd = self._data
        for nk, ok in self.LEGACY_MAPPING.items():
            if not isinstance(nk, tuple):
                if nk in nmd:
                    result[ok] = nmd[nk]
            else:
                # Tuple keys are paths into nested dicts/lists; missing
                # intermediate keys simply mean the field is absent.
                d = nmd
                found = True
                for k in nk:
                    try:
                        d = d[k]
                    except (KeyError, IndexError):
                        found = False
                        break
                if found:
                    result[ok] = d
        r1 = process_entries(self.run_requires + self.meta_requires)
        r2 = process_entries(self.build_requires + self.dev_requires)
        if self.extras:
            result['Provides-Extra'] = sorted(self.extras)
        result['Requires-Dist'] = sorted(r1)
        result['Setup-Requires-Dist'] = sorted(r2)
        # TODO: any other fields wanted
        return result
1020
+
1021
+ def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
1022
+ if [path, fileobj].count(None) != 1:
1023
+ raise ValueError('Exactly one of path and fileobj is needed')
1024
+ self.validate()
1025
+ if legacy:
1026
+ if self._legacy:
1027
+ legacy_md = self._legacy
1028
+ else:
1029
+ legacy_md = self._to_legacy()
1030
+ if path:
1031
+ legacy_md.write(path, skip_unknown=skip_unknown)
1032
+ else:
1033
+ legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
1034
+ else:
1035
+ if self._legacy:
1036
+ d = self._from_legacy()
1037
+ else:
1038
+ d = self._data
1039
+ if fileobj:
1040
+ json.dump(d, fileobj, ensure_ascii=True, indent=2,
1041
+ sort_keys=True)
1042
+ else:
1043
+ with codecs.open(path, 'w', 'utf-8') as f:
1044
+ json.dump(d, f, ensure_ascii=True, indent=2,
1045
+ sort_keys=True)
1046
+
1047
+ def add_requirements(self, requirements):
1048
+ if self._legacy:
1049
+ self._legacy.add_requirements(requirements)
1050
+ else:
1051
+ run_requires = self._data.setdefault('run_requires', [])
1052
+ always = None
1053
+ for entry in run_requires:
1054
+ if 'environment' not in entry and 'extra' not in entry:
1055
+ always = entry
1056
+ break
1057
+ if always is None:
1058
+ always = { 'requires': requirements }
1059
+ run_requires.insert(0, always)
1060
+ else:
1061
+ rset = set(always['requires']) | set(requirements)
1062
+ always['requires'] = sorted(rset)
1063
+
1064
+ def __repr__(self):
1065
+ name = self.name or '(no name)'
1066
+ version = self.version or 'no version'
1067
+ return '<%s %s %s (%s)>' % (self.__class__.__name__,
1068
+ self.metadata_version, name, version)
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/resources.py ADDED
@@ -0,0 +1,358 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2013-2017 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ from __future__ import unicode_literals
8
+
9
+ import bisect
10
+ import io
11
+ import logging
12
+ import os
13
+ import pkgutil
14
+ import sys
15
+ import types
16
+ import zipimport
17
+
18
+ from . import DistlibException
19
+ from .util import cached_property, get_cache_base, Cache
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
+ cache = None # created when needed
25
+
26
+
27
class ResourceCache(Cache):
    """On-disk cache for resource bytes (e.g. resources inside zip files)."""

    def __init__(self, base=None):
        if base is None:
            # Use native string to avoid issues on 2.x: see Python #20140.
            base = os.path.join(get_cache_base(), str('resource-cache'))
        super(ResourceCache, self).__init__(base)

    def is_stale(self, resource, path):
        """
        Is the cache stale for the given resource?

        :param resource: The :class:`Resource` being cached.
        :param path: The path of the resource in the cache.
        :return: True if the cache is stale.
        """
        # Cache invalidation is a hard problem :-)
        return True

    def get(self, resource):
        """
        Get a resource into the cache,

        :param resource: A :class:`Resource` instance.
        :return: The pathname of the resource in the cache.
        """
        prefix, path = resource.finder.get_cache_info(resource)
        if prefix is None:
            # Already a real filesystem path; nothing to materialise.
            return path
        result = os.path.join(self.base, self.prefix_to_dir(prefix), path)
        dirname = os.path.dirname(result)
        if not os.path.isdir(dirname):
            os.makedirs(dirname)
        # Short-circuit: a missing file is always stale.
        if not os.path.exists(result) or self.is_stale(resource, path):
            # write the bytes of the resource to the cache location
            with open(result, 'wb') as f:
                f.write(resource.bytes)
        return result
69
+
70
+
71
class ResourceBase(object):
    """State shared by :class:`Resource` and :class:`ResourceContainer`."""

    def __init__(self, finder, name):
        # The finder that located this resource, and its resource name.
        self.name = name
        self.finder = finder
75
+
76
+
77
class Resource(ResourceBase):
    """
    A class representing an in-package resource, such as a data file. This is
    not normally instantiated by user code, but rather by a
    :class:`ResourceFinder` which manages the resource.
    """
    is_container = False  # Backwards compatibility

    def as_stream(self):
        """
        Get the resource as a stream.

        This is not a property to make it obvious that it returns a new stream
        each time.
        """
        return self.finder.get_stream(self)

    @cached_property
    def file_path(self):
        # Path to a real file holding this resource's bytes; the module-level
        # cache is created lazily on first use.
        global cache
        if cache is None:
            cache = ResourceCache()
        return cache.get(self)

    @cached_property
    def bytes(self):
        # Raw content of the resource, fetched via the owning finder.
        return self.finder.get_bytes(self)

    @cached_property
    def size(self):
        # Size in bytes, fetched via the owning finder.
        return self.finder.get_size(self)
108
+
109
+
110
class ResourceContainer(ResourceBase):
    # A directory-like resource which holds other resources.
    is_container = True  # Backwards compatibility

    @cached_property
    def resources(self):
        # Names of the resources inside this container, via the finder.
        return self.finder.get_resources(self)
116
+
117
+
118
class ResourceFinder(object):
    """
    Resource finder for file system resources.
    """

    # Bytecode files are never exposed as resources.
    if sys.platform.startswith('java'):
        skipped_extensions = ('.pyc', '.pyo', '.class')
    else:
        skipped_extensions = ('.pyc', '.pyo')

    def __init__(self, module):
        """Create a finder rooted at *module*'s containing directory."""
        self.module = module
        self.loader = getattr(module, '__loader__', None)
        self.base = os.path.dirname(getattr(module, '__file__', ''))

    def _adjust_path(self, path):
        # Resolve symlinks so cached/compared paths are canonical.
        return os.path.realpath(path)

    def _make_path(self, resource_name):
        """Convert a '/'-separated resource name to a filesystem path."""
        # Issue #50: need to preserve type of path on Python 2.x
        # like os.path._get_sep
        if isinstance(resource_name, bytes):  # should only happen on 2.x
            sep = b'/'
        else:
            sep = '/'
        parts = resource_name.split(sep)
        parts.insert(0, self.base)
        result = os.path.join(*parts)
        return self._adjust_path(result)

    def _find(self, path):
        # Existence check; overridden for non-filesystem backends.
        return os.path.exists(path)

    def get_cache_info(self, resource):
        # (prefix, path): a None prefix means the path is directly usable.
        return None, resource.path

    def find(self, resource_name):
        """Return a Resource/ResourceContainer for *resource_name*, or None."""
        path = self._make_path(resource_name)
        if not self._find(path):
            result = None
        else:
            if self._is_directory(path):
                result = ResourceContainer(self, resource_name)
            else:
                result = Resource(self, resource_name)
            result.path = path
        return result

    def get_stream(self, resource):
        """Return a new binary stream over the resource's contents."""
        return open(resource.path, 'rb')

    def get_bytes(self, resource):
        """Return the resource's contents as bytes."""
        with open(resource.path, 'rb') as f:
            return f.read()

    def get_size(self, resource):
        """Return the resource's size in bytes."""
        return os.path.getsize(resource.path)

    def get_resources(self, resource):
        """Return the set of allowed entry names inside a container."""
        def allowed(f):
            return (f != '__pycache__' and not
                    f.endswith(self.skipped_extensions))
        # Set comprehension instead of set([...]) - same result, idiomatic.
        return {f for f in os.listdir(resource.path) if allowed(f)}

    def is_container(self, resource):
        """Return True if *resource* is a directory-like container."""
        return self._is_directory(resource.path)

    _is_directory = staticmethod(os.path.isdir)

    def iterator(self, resource_name):
        """Yield *resource_name* and, breadth-first, all nested resources."""
        resource = self.find(resource_name)
        if resource is not None:
            todo = [resource]
            while todo:
                resource = todo.pop(0)
                yield resource
                if resource.is_container:
                    rname = resource.name
                    for name in resource.resources:
                        if not rname:
                            new_name = name
                        else:
                            new_name = '/'.join([rname, name])
                        child = self.find(new_name)
                        if child.is_container:
                            todo.append(child)
                        else:
                            yield child
206
+
207
+
208
class ZipResourceFinder(ResourceFinder):
    """
    Resource finder for resources in .zip files.
    """
    def __init__(self, module):
        super(ZipResourceFinder, self).__init__(module)
        archive = self.loader.archive
        # Length of the archive path plus one separator; used to strip the
        # archive prefix from absolute paths.
        self.prefix_len = 1 + len(archive)
        # PyPy doesn't have a _files attr on zipimporter, and you can't set one
        if hasattr(self.loader, '_files'):
            self._files = self.loader._files
        else:
            self._files = zipimport._zip_directory_cache[archive]
        # Sorted names enable bisection for prefix/directory queries.
        self.index = sorted(self._files)

    def _adjust_path(self, path):
        # No realpath adjustment inside an archive.
        return path

    def _find(self, path):
        # True if path names a file, or is a directory prefix of some entry.
        path = path[self.prefix_len:]
        if path in self._files:
            result = True
        else:
            if path and path[-1] != os.sep:
                path = path + os.sep
            i = bisect.bisect(self.index, path)
            try:
                result = self.index[i].startswith(path)
            except IndexError:
                result = False
        if not result:
            logger.debug('_find failed: %r %r', path, self.loader.prefix)
        else:
            logger.debug('_find worked: %r %r', path, self.loader.prefix)
        return result

    def get_cache_info(self, resource):
        # Prefix is the archive path, so cached copies are grouped per-zip.
        prefix = self.loader.archive
        path = resource.path[1 + len(prefix):]
        return prefix, path

    def get_bytes(self, resource):
        return self.loader.get_data(resource.path)

    def get_stream(self, resource):
        return io.BytesIO(self.get_bytes(resource))

    def get_size(self, resource):
        # Index 3 of the zipimport directory entry - presumably the file
        # size, per zipimport's internal _files layout; TODO confirm.
        path = resource.path[self.prefix_len:]
        return self._files[path][3]

    def get_resources(self, resource):
        path = resource.path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        plen = len(path)
        result = set()
        # Walk consecutive index entries sharing the directory prefix.
        i = bisect.bisect(self.index, path)
        while i < len(self.index):
            if not self.index[i].startswith(path):
                break
            s = self.index[i][plen:]
            result.add(s.split(os.sep, 1)[0])   # only immediate children
            i += 1
        return result

    def _is_directory(self, path):
        # A directory exists iff some entry sorts immediately after its
        # 'path + sep' prefix and starts with it.
        path = path[self.prefix_len:]
        if path and path[-1] != os.sep:
            path += os.sep
        i = bisect.bisect(self.index, path)
        try:
            result = self.index[i].startswith(path)
        except IndexError:
            result = False
        return result
284
+
285
+
286
# Default mapping from loader type to the ResourceFinder class which knows
# how to handle it. Extended below for frozen importlib loaders, and at
# runtime via register_finder().
_finder_registry = {
    type(None): ResourceFinder,
    zipimport.zipimporter: ZipResourceFinder
}

try:
    # In Python 3.6, _frozen_importlib -> _frozen_importlib_external
    try:
        import _frozen_importlib_external as _fi
    except ImportError:
        import _frozen_importlib as _fi
    _finder_registry[_fi.SourceFileLoader] = ResourceFinder
    _finder_registry[_fi.FileFinder] = ResourceFinder
    # See issue #146
    _finder_registry[_fi.SourcelessFileLoader] = ResourceFinder
    del _fi
except (ImportError, AttributeError):
    pass
304
+
305
+
306
def register_finder(loader, finder_maker):
    # Register a finder factory for the given loader's type.
    _finder_registry[type(loader)] = finder_maker
308
+
309
+
310
# Cache of finder instances, keyed by package name.
_finder_cache = {}


def finder(package):
    """
    Return a resource finder for a package.
    :param package: The name of the package.
    :return: A :class:`ResourceFinder` instance for the package.
    """
    try:
        return _finder_cache[package]
    except KeyError:
        pass
    if package not in sys.modules:
        __import__(package)
    module = sys.modules[package]
    if getattr(module, '__path__', None) is None:
        raise DistlibException('You cannot get a finder for a module, '
                               'only for a package')
    loader = getattr(module, '__loader__', None)
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        raise DistlibException('Unable to locate finder for %r' % package)
    result = finder_maker(module)
    _finder_cache[package] = result
    return result
336
+
337
+
338
# A throwaway module object used to adapt bare paths to the finder API.
_dummy_module = types.ModuleType(str('__dummy__'))


def finder_for_path(path):
    """
    Return a resource finder for a path, which should represent a container.

    :param path: The path.
    :return: A :class:`ResourceFinder` instance for the path.
    """
    # calls any path hooks, gets importer into cache
    pkgutil.get_importer(path)
    loader = sys.path_importer_cache.get(path)
    # Renamed from 'finder' to avoid shadowing the module-level finder().
    finder_maker = _finder_registry.get(type(loader))
    if finder_maker is None:
        return None
    module = _dummy_module
    module.__file__ = os.path.join(path, '')
    module.__loader__ = loader
    return finder_maker(module)
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/scripts.py ADDED
@@ -0,0 +1,452 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2013-2023 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ from io import BytesIO
8
+ import logging
9
+ import os
10
+ import re
11
+ import struct
12
+ import sys
13
+ import time
14
+ from zipfile import ZipInfo
15
+
16
+ from .compat import sysconfig, detect_encoding, ZipFile
17
+ from .resources import finder
18
+ from .util import (FileOperator, get_export_entry, convert_path,
19
+ get_executable, get_platform, in_venv)
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+ _DEFAULT_MANIFEST = '''
24
+ <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
25
+ <assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
26
+ <assemblyIdentity version="1.0.0.0"
27
+ processorArchitecture="X86"
28
+ name="%s"
29
+ type="win32"/>
30
+
31
+ <!-- Identify the application security requirements. -->
32
+ <trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
33
+ <security>
34
+ <requestedPrivileges>
35
+ <requestedExecutionLevel level="asInvoker" uiAccess="false"/>
36
+ </requestedPrivileges>
37
+ </security>
38
+ </trustInfo>
39
+ </assembly>'''.strip()
40
+
41
+ # check if Python is called on the first line with this expression
42
+ FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
43
+ SCRIPT_TEMPLATE = r'''# -*- coding: utf-8 -*-
44
+ import re
45
+ import sys
46
+ from %(module)s import %(import_name)s
47
+ if __name__ == '__main__':
48
+ sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
49
+ sys.exit(%(func)s())
50
+ '''
51
+
52
+
53
+ def enquote_executable(executable):
54
+ if ' ' in executable:
55
+ # make sure we quote only the executable in case of env
56
+ # for example /usr/bin/env "/dir with spaces/bin/jython"
57
+ # instead of "/usr/bin/env /dir with spaces/bin/jython"
58
+ # otherwise whole
59
+ if executable.startswith('/usr/bin/env '):
60
+ env, _executable = executable.split(' ', 1)
61
+ if ' ' in _executable and not _executable.startswith('"'):
62
+ executable = '%s "%s"' % (env, _executable)
63
+ else:
64
+ if not executable.startswith('"'):
65
+ executable = '"%s"' % executable
66
+ return executable
67
+
68
+
69
+ # Keep the old name around (for now), as there is at least one project using it!
70
+ _enquote_executable = enquote_executable
71
+
72
+
73
class ScriptMaker(object):
    """
    A class to copy or create scripts from source scripts or callable
    specifications.
    """
    # Template used when generating a script from an export entry; see
    # _get_script_text and the module-level SCRIPT_TEMPLATE.
    script_template = SCRIPT_TEMPLATE

    executable = None  # for shebangs

    def __init__(self,
                 source_dir,
                 target_dir,
                 add_launchers=True,
                 dry_run=False,
                 fileop=None):
        """
        Initialise an instance.

        :param source_dir: Directory containing source scripts to copy.
        :param target_dir: Directory that generated/copied scripts go into.
        :param add_launchers: Whether to wrap scripts in .exe launchers on
                              Windows.
        :param dry_run: If true, nothing is actually written to disk.
        :param fileop: Optional FileOperator to perform file operations
                       with; a new one is created if not supplied.
        """
        self.source_dir = source_dir
        self.target_dir = target_dir
        self.add_launchers = add_launchers
        self.force = False
        self.clobber = False
        # It only makes sense to set mode bits on POSIX.
        self.set_mode = (os.name == 'posix') or (os.name == 'java'
                                                 and os._name == 'posix')
        self.variants = set(('', 'X.Y'))
        self._fileop = fileop or FileOperator(dry_run)

        self._is_nt = os.name == 'nt' or (os.name == 'java'
                                          and os._name == 'nt')
        self.version_info = sys.version_info

    def _get_alternate_executable(self, executable, options):
        # For GUI scripts on Windows, switch python.exe -> pythonw.exe so no
        # console window is shown.
        if options.get('gui', False) and self._is_nt:  # pragma: no cover
            dn, fn = os.path.split(executable)
            fn = fn.replace('python', 'pythonw')
            executable = os.path.join(dn, fn)
        return executable

    if sys.platform.startswith('java'):  # pragma: no cover
        # The following two helpers are only defined when running on Jython.

        def _is_shell(self, executable):
            """
            Determine if the specified executable is a script
            (contains a #! line)
            """
            try:
                with open(executable) as fp:
                    return fp.read(2) == '#!'
            except (OSError, IOError):
                logger.warning('Failed to open %s', executable)
                return False

        def _fix_jython_executable(self, executable):
            # Route script interpreters through /usr/bin/env, except where the
            # platform already handles them (Linux, or the jython.exe wrapper).
            if self._is_shell(executable):
                # Workaround for Jython is not needed on Linux systems.
                import java

                if java.lang.System.getProperty('os.name') == 'Linux':
                    return executable
            elif executable.lower().endswith('jython.exe'):
                # Use wrapper exe for Jython on Windows
                return executable
            return '/usr/bin/env %s' % executable

    def _build_shebang(self, executable, post_interp):
        """
        Build a shebang line. In the simple case (on Windows, or a shebang line
        which is not too long or contains spaces) use a simple formulation for
        the shebang. Otherwise, use /bin/sh as the executable, with a contrived
        shebang which allows the script to run either under Python or sh, using
        suitable quoting. Thanks to Harald Nordgren for his input.

        See also: http://www.in-ulm.de/~mascheck/various/shebang/#length
                  https://hg.mozilla.org/mozilla-central/file/tip/mach
        """
        if os.name != 'posix':
            simple_shebang = True
        else:
            # Add 3 for '#!' prefix and newline suffix.
            shebang_length = len(executable) + len(post_interp) + 3
            if sys.platform == 'darwin':
                max_shebang_length = 512
            else:
                max_shebang_length = 127
            simple_shebang = ((b' ' not in executable)
                              and (shebang_length <= max_shebang_length))

        if simple_shebang:
            result = b'#!' + executable + post_interp + b'\n'
        else:
            # The '''exec' trick: sh sees an exec of the interpreter, while
            # Python sees a harmless string literal before the real code.
            result = b'#!/bin/sh\n'
            result += b"'''exec' " + executable + post_interp + b' "$0" "$@"\n'
            result += b"' '''"
        return result

    def _get_shebang(self, encoding, post_interp=b'', options=None):
        """
        Compute the shebang (as bytes) to use for a generated script.

        :param encoding: The encoding of the script body; the shebang must
                         be decodable using it (and UTF-8).
        :param post_interp: Extra bytes appended after the interpreter path
                            (e.g. interpreter arguments).
        :param options: Optional dict; 'gui' selects pythonw on Windows.
        :raises ValueError: if the shebang is not decodable as required.
        """
        enquote = True
        if self.executable:
            executable = self.executable
            enquote = False  # assume this will be taken care of
        elif not sysconfig.is_python_build():
            executable = get_executable()
        elif in_venv():  # pragma: no cover
            executable = os.path.join(
                sysconfig.get_path('scripts'),
                'python%s' % sysconfig.get_config_var('EXE'))
        else:  # pragma: no cover
            if os.name == 'nt':
                # for Python builds from source on Windows, no Python executables with
                # a version suffix are created, so we use python.exe
                executable = os.path.join(
                    sysconfig.get_config_var('BINDIR'),
                    'python%s' % (sysconfig.get_config_var('EXE')))
            else:
                executable = os.path.join(
                    sysconfig.get_config_var('BINDIR'),
                    'python%s%s' % (sysconfig.get_config_var('VERSION'),
                                    sysconfig.get_config_var('EXE')))
        if options:
            executable = self._get_alternate_executable(executable, options)

        if sys.platform.startswith('java'):  # pragma: no cover
            executable = self._fix_jython_executable(executable)

        # Normalise case for Windows - COMMENTED OUT
        # executable = os.path.normcase(executable)
        # N.B. The normalising operation above has been commented out: See
        # issue #124. Although paths in Windows are generally case-insensitive,
        # they aren't always. For example, a path containing a ẞ (which is a
        # LATIN CAPITAL LETTER SHARP S - U+1E9E) is normcased to ß (which is a
        # LATIN SMALL LETTER SHARP S' - U+00DF). The two are not considered by
        # Windows as equivalent in path names.

        # If the user didn't specify an executable, it may be necessary to
        # cater for executable paths with spaces (not uncommon on Windows)
        if enquote:
            executable = enquote_executable(executable)
        # Issue #51: don't use fsencode, since we later try to
        # check that the shebang is decodable using utf-8.
        executable = executable.encode('utf-8')
        # in case of IronPython, play safe and enable frames support
        if (sys.platform == 'cli' and '-X:Frames' not in post_interp
                and '-X:FullFrames' not in post_interp):  # pragma: no cover
            post_interp += b' -X:Frames'
        shebang = self._build_shebang(executable, post_interp)
        # Python parser starts to read a script using UTF-8 until
        # it gets a #coding:xxx cookie. The shebang has to be the
        # first line of a file, the #coding:xxx cookie cannot be
        # written before. So the shebang has to be decodable from
        # UTF-8.
        try:
            shebang.decode('utf-8')
        except UnicodeDecodeError:  # pragma: no cover
            raise ValueError('The shebang (%r) is not decodable from utf-8' %
                             shebang)
        # If the script is encoded to a custom encoding (use a
        # #coding:xxx cookie), the shebang has to be decodable from
        # the script encoding too.
        if encoding != 'utf-8':
            try:
                shebang.decode(encoding)
            except UnicodeDecodeError:  # pragma: no cover
                raise ValueError('The shebang (%r) is not decodable '
                                 'from the script encoding (%r)' %
                                 (shebang, encoding))
        return shebang

    def _get_script_text(self, entry):
        # Render the wrapper-script source for an export entry
        # (module:callable) using the class's script template.
        return self.script_template % dict(
            module=entry.prefix,
            import_name=entry.suffix.split('.')[0],
            func=entry.suffix)

    # Windows manifest embedded into generated executables; may be
    # overridden per instance/subclass.
    manifest = _DEFAULT_MANIFEST

    def get_manifest(self, exename):
        """Return the Windows manifest XML for the named executable."""
        base = os.path.basename(exename)
        return self.manifest % base

    def _write_script(self, names, shebang, script_bytes, filenames, ext):
        """
        Write the script under each name in *names* into the target
        directory, appending written paths to *filenames*.

        On Windows (with launchers enabled) the script is embedded as a
        __main__.py in a zip appended to a launcher executable; otherwise
        the shebang is simply prepended to the script body.
        """
        use_launcher = self.add_launchers and self._is_nt
        linesep = os.linesep.encode('utf-8')
        if not shebang.endswith(linesep):
            shebang += linesep
        if not use_launcher:
            script_bytes = shebang + script_bytes
        else:  # pragma: no cover
            # 't' launcher -> console, 'w' launcher -> GUI (no console).
            if ext == 'py':
                launcher = self._get_launcher('t')
            else:
                launcher = self._get_launcher('w')
            stream = BytesIO()
            with ZipFile(stream, 'w') as zf:
                # Honour SOURCE_DATE_EPOCH for reproducible builds.
                source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
                if source_date_epoch:
                    date_time = time.gmtime(int(source_date_epoch))[:6]
                    zinfo = ZipInfo(filename='__main__.py',
                                    date_time=date_time)
                    zf.writestr(zinfo, script_bytes)
                else:
                    zf.writestr('__main__.py', script_bytes)
            zip_data = stream.getvalue()
            # Launcher executable + shebang + zipped script = final .exe.
            script_bytes = launcher + shebang + zip_data
        for name in names:
            outname = os.path.join(self.target_dir, name)
            if use_launcher:  # pragma: no cover
                n, e = os.path.splitext(outname)
                if e.startswith('.py'):
                    outname = n
                outname = '%s.exe' % outname
                try:
                    self._fileop.write_binary_file(outname, script_bytes)
                except Exception:
                    # Failed writing an executable - it might be in use.
                    logger.warning('Failed to write executable - trying to '
                                   'use .deleteme logic')
                    dfname = '%s.deleteme' % outname
                    if os.path.exists(dfname):
                        os.remove(dfname)  # Not allowed to fail here
                    os.rename(outname, dfname)  # nor here
                    self._fileop.write_binary_file(outname, script_bytes)
                    logger.debug('Able to replace executable using '
                                 '.deleteme logic')
                    try:
                        os.remove(dfname)
                    except Exception:
                        pass  # still in use - ignore error
            else:
                if self._is_nt and not outname.endswith(
                        '.' + ext):  # pragma: no cover
                    outname = '%s.%s' % (outname, ext)
                if os.path.exists(outname) and not self.clobber:
                    logger.warning('Skipping existing file %s', outname)
                    continue
                self._fileop.write_binary_file(outname, script_bytes)
                if self.set_mode:
                    self._fileop.set_executable_mode([outname])
                filenames.append(outname)

    # Separator between the base name and the X.Y version suffix variant.
    variant_separator = '-'

    def get_script_filenames(self, name):
        """
        Return the set of script names to generate for *name*, based on
        the configured variants ('' -> name, 'X' -> nameX,
        'X.Y' -> name<sep>X.Y).
        """
        result = set()
        if '' in self.variants:
            result.add(name)
        if 'X' in self.variants:
            result.add('%s%s' % (name, self.version_info[0]))
        if 'X.Y' in self.variants:
            result.add('%s%s%s.%s' %
                       (name, self.variant_separator, self.version_info[0],
                        self.version_info[1]))
        return result

    def _make_script(self, entry, filenames, options=None):
        # Generate a wrapper script for an export entry, writing one file
        # per configured variant; 'interpreter_args' and 'gui' options are
        # honoured.
        post_interp = b''
        if options:
            args = options.get('interpreter_args', [])
            if args:
                args = ' %s' % ' '.join(args)
                post_interp = args.encode('utf-8')
        shebang = self._get_shebang('utf-8', post_interp, options=options)
        script = self._get_script_text(entry).encode('utf-8')
        scriptnames = self.get_script_filenames(entry.name)
        if options and options.get('gui', False):
            ext = 'pyw'
        else:
            ext = 'py'
        self._write_script(scriptnames, shebang, script, filenames, ext)

    def _copy_script(self, script, filenames):
        # Copy an existing script into the target dir, rewriting its shebang
        # (when it has a python one) to point at the configured interpreter.
        adjust = False
        script = os.path.join(self.source_dir, convert_path(script))
        outname = os.path.join(self.target_dir, os.path.basename(script))
        if not self.force and not self._fileop.newer(script, outname):
            logger.debug('not copying %s (up-to-date)', script)
            return

        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'rb')
        except IOError:  # pragma: no cover
            if not self.dry_run:
                raise
            f = None
        else:
            first_line = f.readline()
            if not first_line:  # pragma: no cover
                logger.warning('%s is an empty file (skipping)', script)
                return

            match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
            if match:
                adjust = True
                post_interp = match.group(1) or b''

        if not adjust:
            if f:
                f.close()
            self._fileop.copy_file(script, outname)
            if self.set_mode:
                self._fileop.set_executable_mode([outname])
            filenames.append(outname)
        else:
            logger.info('copying and adjusting %s -> %s', script,
                        self.target_dir)
            if not self._fileop.dry_run:
                encoding, lines = detect_encoding(f.readline)
                f.seek(0)
                shebang = self._get_shebang(encoding, post_interp)
                if b'pythonw' in first_line:  # pragma: no cover
                    ext = 'pyw'
                else:
                    ext = 'py'
                n = os.path.basename(outname)
                self._write_script([n], shebang, f.read(), filenames, ext)
            if f:
                f.close()

    @property
    def dry_run(self):
        # Delegates to the underlying FileOperator.
        return self._fileop.dry_run

    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value

    if os.name == 'nt' or (os.name == 'java'
                           and os._name == 'nt'):  # pragma: no cover
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/

        def _get_launcher(self, kind):
            """
            Return the launcher executable bytes for *kind* ('t' for
            console, 'w' for GUI), matching the current bitness/arch.
            :raises ValueError: if the launcher resource is missing.
            """
            if struct.calcsize('P') == 8:  # 64-bit
                bits = '64'
            else:
                bits = '32'
            platform_suffix = '-arm' if get_platform() == 'win-arm64' else ''
            name = '%s%s%s.exe' % (kind, bits, platform_suffix)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            resource = finder(distlib_package).find(name)
            if not resource:
                msg = ('Unable to find resource %s in package %s' %
                       (name, distlib_package))
                raise ValueError(msg)
            return resource.bytes

    # Public API follows

    def make(self, specification, options=None):
        """
        Make a script.

        :param specification: The specification, which is either a valid export
                              entry specification (to make a script from a
                              callable) or a filename (to make a script by
                              copying from a source location).
        :param options: A dictionary of options controlling script generation.
        :return: A list of all absolute pathnames written to.
        """
        filenames = []
        entry = get_export_entry(specification)
        if entry is None:
            self._copy_script(specification, filenames)
        else:
            self._make_script(entry, filenames, options=options)
        return filenames

    def make_multiple(self, specifications, options=None):
        """
        Take a list of specifications and make scripts from them,
        :param specifications: A list of specifications.
        :return: A list of all absolute pathnames written to,
        """
        filenames = []
        for specification in specifications:
            filenames.extend(self.make(specification, options))
        return filenames
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/t32.exe ADDED
Binary file (97.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/version.py ADDED
@@ -0,0 +1,751 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2012-2023 The Python Software Foundation.
4
+ # See LICENSE.txt and CONTRIBUTORS.txt.
5
+ #
6
+ """
7
+ Implementation of a flexible versioning scheme providing support for PEP-440,
8
+ setuptools-compatible and semantic versioning.
9
+ """
10
+
11
+ import logging
12
+ import re
13
+
14
+ from .compat import string_types
15
+ from .util import parse_requirement
16
+
17
+ __all__ = ['NormalizedVersion', 'NormalizedMatcher',
18
+ 'LegacyVersion', 'LegacyMatcher',
19
+ 'SemanticVersion', 'SemanticMatcher',
20
+ 'UnsupportedVersionError', 'get_scheme']
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
class UnsupportedVersionError(ValueError):
    """This is an unsupported version."""
28
+
29
+
30
class Version(object):
    """
    Abstract base class for version objects.

    Subclasses implement :meth:`parse`, which converts a version string
    into a non-empty tuple of comparable parts; all rich comparisons,
    hashing and string conversion here are defined in terms of that tuple.
    """
    def __init__(self, s):
        # Keep both the stripped source string and its parsed key.
        self._string = s = s.strip()
        self._parts = parts = self.parse(s)
        assert isinstance(parts, tuple)
        assert len(parts) > 0

    def parse(self, s):
        """Parse *s* into a non-empty tuple of comparable parts."""
        raise NotImplementedError('please implement in a subclass')

    def _check_compatible(self, other):
        # Comparison is only defined between instances of the same class.
        if type(self) != type(other):
            raise TypeError('cannot compare %r and %r' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        self._check_compatible(other)
        return self._parts < other._parts

    def __gt__(self, other):
        # Derived from < and == so only those need the compatibility check.
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self._parts)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string

    @property
    def is_prerelease(self):
        """Whether this version denotes a pre-release."""
        raise NotImplementedError('Please implement in subclasses.')
77
+
78
+
79
class Matcher(object):
    """
    Base class for requirement matchers: parses a requirement string such
    as ``name (>= 1.0, < 2.0)`` into a name plus constraint tuples, and
    matches candidate versions against those constraints.
    """
    # Subclasses must set this to the Version subclass used to parse
    # constraint values.
    version_class = None

    # value is either a callable or the name of a method
    _operators = {
        '<': lambda v, c, p: v < c,
        '>': lambda v, c, p: v > c,
        '<=': lambda v, c, p: v == c or v < c,
        '>=': lambda v, c, p: v == c or v > c,
        '==': lambda v, c, p: v == c,
        '===': lambda v, c, p: v == c,
        # by default, compatible => >=.
        '~=': lambda v, c, p: v == c or v > c,
        '!=': lambda v, c, p: v != c,
    }

    # this is a method only to support alternative implementations
    # via overriding
    def parse_requirement(self, s):
        return parse_requirement(s)

    def __init__(self, s):
        """
        Initialise from a requirement string.

        :raises ValueError: if no version class is configured, if *s* is
                            not a valid requirement, or if a '.*' suffix
                            is used with an operator other than ==/!=.
        """
        if self.version_class is None:
            raise ValueError('Please specify a version class')
        self._string = s = s.strip()
        r = self.parse_requirement(s)
        if not r:
            raise ValueError('Not valid: %r' % s)
        self.name = r.name
        self.key = self.name.lower()  # for case-insensitive comparisons
        clist = []
        if r.constraints:
            for op, s in r.constraints:
                if s.endswith('.*'):
                    if op not in ('==', '!='):
                        raise ValueError('\'.*\' not allowed for '
                                         '%r constraints' % op)
                    # Could be a partial version (e.g. for '2.*') which
                    # won't parse as a version, so keep it as a string
                    vn, prefix = s[:-2], True
                    # Just to check that vn is a valid version
                    self.version_class(vn)
                else:
                    # Should parse as a version, so we can create an
                    # instance for the comparison
                    vn, prefix = self.version_class(s), False
                clist.append((op, vn, prefix))
        # Each part is (operator, constraint, is_prefix_match).
        self._parts = tuple(clist)

    def match(self, version):
        """
        Check if the provided version matches the constraints.

        :param version: The version to match against this instance.
        :type version: String or :class:`Version` instance.
        """
        if isinstance(version, string_types):
            version = self.version_class(version)
        # All constraints must be satisfied (logical AND).
        for operator, constraint, prefix in self._parts:
            f = self._operators.get(operator)
            if isinstance(f, string_types):
                # Operator mapped to a method name (see subclasses).
                f = getattr(self, f)
            if not f:
                msg = ('%r not implemented '
                       'for %s' % (operator, self.__class__.__name__))
                raise NotImplementedError(msg)
            if not f(version, constraint, prefix):
                return False
        return True

    @property
    def exact_version(self):
        """The pinned version if the matcher is a single ==/=== constraint,
        else None."""
        result = None
        if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='):
            result = self._parts[0][1]
        return result

    def _check_compatible(self, other):
        # Matchers compare only within the same class and project name.
        if type(self) != type(other) or self.name != other.name:
            raise TypeError('cannot compare %s and %s' % (self, other))

    def __eq__(self, other):
        self._check_compatible(other)
        return self.key == other.key and self._parts == other._parts

    def __ne__(self, other):
        return not self.__eq__(other)

    # See http://docs.python.org/reference/datamodel#object.__hash__
    def __hash__(self):
        return hash(self.key) + hash(self._parts)

    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self._string)

    def __str__(self):
        return self._string
177
+
178
+
179
+ PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|alpha|b|beta|c|rc|pre|preview)(\d+)?)?'
180
+ r'(\.(post|r|rev)(\d+)?)?([._-]?(dev)(\d+)?)?'
181
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$', re.I)
182
+
183
+
184
def _pep_440_key(s):
    """
    Convert a PEP 440 version string into a sortable key tuple
    ``(epoch, nums, pre, post, dev, local)``.

    Each component is normalised so that plain tuple comparison yields
    the ordering mandated by PEP 440 (pre-releases before the final
    release, .post after, .dev before, local versions last).

    :raises UnsupportedVersionError: if *s* does not match
                                     PEP440_VERSION_RE.
    """
    s = s.strip()
    m = PEP440_VERSION_RE.match(s)
    if not m:
        raise UnsupportedVersionError('Not a valid version: %s' % s)
    groups = m.groups()
    nums = tuple(int(v) for v in groups[1].split('.'))
    # Trailing zeroes in the release segment are insignificant
    # (1.2 == 1.2.0 == 1.2.0.0), so drop them for the sort key.
    while len(nums) > 1 and nums[-1] == 0:
        nums = nums[:-1]

    if not groups[0]:
        epoch = 0
    else:
        # groups[0] includes the trailing '!', e.g. '2!'.
        epoch = int(groups[0][:-1])
    # Fixed regex-group slices: (letter, serial) pairs plus local segment.
    pre = groups[4:6]
    post = groups[7:9]
    dev = groups[10:12]
    local = groups[13]
    if pre == (None, None):
        pre = ()
    else:
        # A missing serial defaults to 0 (e.g. '1.0a' -> ('a', 0)).
        if pre[1] is None:
            pre = pre[0], 0
        else:
            pre = pre[0], int(pre[1])
    if post == (None, None):
        post = ()
    else:
        if post[1] is None:
            post = post[0], 0
        else:
            post = post[0], int(post[1])
    if dev == (None, None):
        dev = ()
    else:
        if dev[1] is None:
            dev = dev[0], 0
        else:
            dev = dev[0], int(dev[1])
    if local is None:
        local = ()
    else:
        parts = []
        for part in local.split('.'):
            # to ensure that numeric compares as > lexicographic, avoid
            # comparing them directly, but encode a tuple which ensures
            # correct sorting
            if part.isdigit():
                part = (1, int(part))
            else:
                part = (0, part)
            parts.append(part)
        local = tuple(parts)
    if not pre:
        # either before pre-release, or final release and after
        if not post and dev:
            # before pre-release
            pre = ('a', -1)  # to sort before a0
        else:
            pre = ('z',)  # to sort after all pre-releases
    # now look at the state of post and dev.
    if not post:
        post = ('_',)  # sort before 'a'
    if not dev:
        dev = ('final',)

    return epoch, nums, pre, post, dev, local
251
+
252
+
253
+ _normalized_key = _pep_440_key
254
+
255
+
256
class NormalizedVersion(Version):
    """A rational version.

    Good:
        1.2         # equivalent to "1.2.0"
        1.2.0
        1.2a1
        1.2.3a2
        1.2.3b1
        1.2.3c1
        1.2.3.4
        TODO: fill this out

    Bad:
        1           # minimum two numbers
        1.2a        # release level must have a release serial
        1.2.3b
    """
    def parse(self, s):
        """Parse *s* via _pep_440_key, also recording the full release
        clause (with trailing zeroes) needed for prefix matching."""
        result = _normalized_key(s)
        # _normalized_key loses trailing zeroes in the release
        # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0
        # However, PEP 440 prefix matching needs it: for example,
        # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0).
        m = PEP440_VERSION_RE.match(s)  # must succeed
        groups = m.groups()
        self._release_clause = tuple(int(v) for v in groups[1].split('.'))
        return result

    # Leading letters of part tuples that denote a pre-release.
    PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev'])

    @property
    def is_prerelease(self):
        # NOTE(review): _parts[0] is the integer epoch; a non-zero epoch
        # would make t[0] raise TypeError here (ints aren't subscriptable).
        # Presumably epochs are rare enough that this hasn't surfaced --
        # confirm against upstream distlib before relying on it.
        return any(t[0] in self.PREREL_TAGS for t in self._parts if t)
290
+
291
+
292
+ def _match_prefix(x, y):
293
+ x = str(x)
294
+ y = str(y)
295
+ if x == y:
296
+ return True
297
+ if not x.startswith(y):
298
+ return False
299
+ n = len(y)
300
+ return x[n] == '.'
301
+
302
+
303
class NormalizedMatcher(Matcher):
    """
    Matcher implementing PEP 440 comparison semantics, including '.*'
    prefix matching, local-version handling and '~=' compatible release.
    """
    version_class = NormalizedVersion

    # value is either a callable or the name of a method
    _operators = {
        '~=': '_match_compatible',
        '<': '_match_lt',
        '>': '_match_gt',
        '<=': '_match_le',
        '>=': '_match_ge',
        '==': '_match_eq',
        '===': '_match_arbitrary',
        '!=': '_match_ne',
    }

    def _adjust_local(self, version, constraint, prefix):
        # Strip the local (+...) segment from the candidate version when
        # the constraint doesn't mention one, per PEP 440.  When *prefix*
        # is true the constraint is a plain string, not a version object.
        if prefix:
            strip_local = '+' not in constraint and version._parts[-1]
        else:
            # both constraint and version are
            # NormalizedVersion instances.
            # If constraint does not have a local component,
            # ensure the version doesn't, either.
            strip_local = not constraint._parts[-1] and version._parts[-1]
        if strip_local:
            s = version._string.split('+', 1)[0]
            version = self.version_class(s)
        return version, constraint

    def _match_lt(self, version, constraint, prefix):
        # '<' excludes pre-releases of the constraint's release clause.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version >= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_gt(self, version, constraint, prefix):
        # '>' excludes post-releases of the constraint's release clause.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version <= constraint:
            return False
        release_clause = constraint._release_clause
        pfx = '.'.join([str(i) for i in release_clause])
        return not _match_prefix(version, pfx)

    def _match_le(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version <= constraint

    def _match_ge(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        return version >= constraint

    def _match_eq(self, version, constraint, prefix):
        # '== X.Y.*' becomes a prefix match; plain '==' is exact equality.
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version == constraint)
        else:
            result = _match_prefix(version, constraint)
        return result

    def _match_arbitrary(self, version, constraint, prefix):
        # '===' is simple string equality, no version semantics.
        return str(version) == str(constraint)

    def _match_ne(self, version, constraint, prefix):
        version, constraint = self._adjust_local(version, constraint, prefix)
        if not prefix:
            result = (version != constraint)
        else:
            result = not _match_prefix(version, constraint)
        return result

    def _match_compatible(self, version, constraint, prefix):
        # '~= X.Y.Z' means >= X.Y.Z and == X.Y.* (drop the last component
        # of the release clause and prefix-match against the rest).
        version, constraint = self._adjust_local(version, constraint, prefix)
        if version == constraint:
            return True
        if version < constraint:
            return False
        release_clause = constraint._release_clause
        if len(release_clause) > 1:
            release_clause = release_clause[:-1]
        pfx = '.'.join([str(i) for i in release_clause])
        return _match_prefix(version, pfx)
388
+
389
+
390
# Clean-up passes applied (in order) by _suggest_semantic_version to the
# whole candidate string.
_REPLACEMENTS = (
    (re.compile('[.+-]$'), ''),  # remove trailing puncts
    (re.compile(r'^[.](\d)'), r'0.\1'),  # .N -> 0.N at start
    (re.compile('^[.-]'), ''),  # remove leading puncts
    (re.compile(r'^\((.*)\)$'), r'\1'),  # remove parentheses
    (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'),  # remove leading v(ersion)
    (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'),  # remove leading r(ev)
    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
    (re.compile(r'\b(alfa|apha)\b'), 'alpha'),  # misspelt alpha
    (re.compile(r'\b(pre-alpha|prealpha)\b'),
     'pre.alpha'),  # standardise
    (re.compile(r'\(beta\)$'), 'beta'),  # remove parentheses
)

# Further passes applied only to the suffix (everything after the numeric
# prefix) by _suggest_semantic_version.
_SUFFIX_REPLACEMENTS = (
    (re.compile('^[:~._+-]+'), ''),  # remove leading puncts
    (re.compile('[,*")([\\]]'), ''),  # remove unwanted chars
    (re.compile('[~:+_ -]'), '.'),  # replace illegal chars
    (re.compile('[.]{2,}'), '.'),  # multiple runs of '.'
    (re.compile(r'\.$'), ''),  # trailing '.'
)

# Matches the leading dotted-number portion of a version string.
_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)')
413
+
414
+
415
def _suggest_semantic_version(s):
    """
    Try to suggest a semantic form for a version for which
    _suggest_normalized_version couldn't come up with anything.

    Returns the suggested semver string, or None if the cleaned-up result
    still fails the is_semver check (defined later in this module).
    """
    result = s.strip().lower()
    # Whole-string clean-up passes, in order.
    for pat, repl in _REPLACEMENTS:
        result = pat.sub(repl, result)
    if not result:
        result = '0.0.0'

    # Now look for numeric prefix, and separate it out from
    # the rest.
    m = _NUMERIC_PREFIX.match(result)
    if not m:
        prefix = '0.0.0'
        suffix = result
    else:
        prefix = m.groups()[0].split('.')
        prefix = [int(i) for i in prefix]
        # Pad to the three components semver requires (major.minor.patch).
        while len(prefix) < 3:
            prefix.append(0)
        if len(prefix) == 3:
            suffix = result[m.end():]
        else:
            # More than three numeric parts: fold the extras into the suffix.
            suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():]
            prefix = prefix[:3]
    prefix = '.'.join([str(i) for i in prefix])
    suffix = suffix.strip()
    if suffix:
        # massage the suffix.
        for pat, repl in _SUFFIX_REPLACEMENTS:
            suffix = pat.sub(repl, suffix)

    if not suffix:
        result = prefix
    else:
        # dev builds become a pre-release ('-'), anything else build
        # metadata ('+'), following semver syntax.
        sep = '-' if 'dev' in suffix else '+'
        result = prefix + sep + suffix
    if not is_semver(result):
        result = None
    return result
459
+
460
+
461
def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e. NormalizedVersion
    doesn't like it) then you might be able to get an equivalent (or close)
    rational version from this function.

    This does a number of simple normalizations to the given string, based
    on observation of versions currently in use on PyPI. Given a dump of
    those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    # Fast path: already valid, return unchanged.
    try:
        _normalized_key(s)
        return s  # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'),
                       ('-release', ''), ('.release', ''), ('-stable', ''),
                       ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''),
                       ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    # TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    # 0.4a1.r10 -> 0.4a1.post10
    # 0.9.33-17222 -> 0.9.33.post17222
    # 0.9.33-r17222 -> 0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    # 0.9.33+r17222 -> 0.9.33.dev17222
    # 1.0dev123 -> 1.0.dev123
    # 1.0.git123 -> 1.0.dev123
    # 1.0.bzr123 -> 1.0.dev123
    # 0.1a0dev.123 -> 0.1a0.dev123
    # PyPI stats: ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    # 0.2.pre1 -> 0.2c1
    # 0.2-c1 -> 0.2c1
    # 1.0preview123 -> 1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    # Final validation: only return the rewritten string if it now parses.
    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs
570
+
571
+ #
572
+ # Legacy version processing (distribute-compatible)
573
+ #
574
+
575
+
576
+ _VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I)
577
+ _VERSION_REPLACE = {
578
+ 'pre': 'c',
579
+ 'preview': 'c',
580
+ '-': 'final-',
581
+ 'rc': 'c',
582
+ 'dev': '@',
583
+ '': None,
584
+ '.': None,
585
+ }
586
+
587
+
588
+ def _legacy_key(s):
589
+ def get_parts(s):
590
+ result = []
591
+ for p in _VERSION_PART.split(s.lower()):
592
+ p = _VERSION_REPLACE.get(p, p)
593
+ if p:
594
+ if '0' <= p[:1] <= '9':
595
+ p = p.zfill(8)
596
+ else:
597
+ p = '*' + p
598
+ result.append(p)
599
+ result.append('*final')
600
+ return result
601
+
602
+ result = []
603
+ for p in get_parts(s):
604
+ if p.startswith('*'):
605
+ if p < '*final':
606
+ while result and result[-1] == '*final-':
607
+ result.pop()
608
+ while result and result[-1] == '00000000':
609
+ result.pop()
610
+ result.append(p)
611
+ return tuple(result)
612
+
613
+
614
+ class LegacyVersion(Version):
615
+ def parse(self, s):
616
+ return _legacy_key(s)
617
+
618
+ @property
619
+ def is_prerelease(self):
620
+ result = False
621
+ for x in self._parts:
622
+ if (isinstance(x, string_types) and x.startswith('*') and
623
+ x < '*final'):
624
+ result = True
625
+ break
626
+ return result
627
+
628
+
629
+ class LegacyMatcher(Matcher):
630
+ version_class = LegacyVersion
631
+
632
+ _operators = dict(Matcher._operators)
633
+ _operators['~='] = '_match_compatible'
634
+
635
+ numeric_re = re.compile(r'^(\d+(\.\d+)*)')
636
+
637
+ def _match_compatible(self, version, constraint, prefix):
638
+ if version < constraint:
639
+ return False
640
+ m = self.numeric_re.match(str(constraint))
641
+ if not m:
642
+ logger.warning('Cannot compute compatible match for version %s '
643
+ ' and constraint %s', version, constraint)
644
+ return True
645
+ s = m.groups()[0]
646
+ if '.' in s:
647
+ s = s.rsplit('.', 1)[0]
648
+ return _match_prefix(version, s)
649
+
650
+ #
651
+ # Semantic versioning
652
+ #
653
+
654
+
655
+ _SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)'
656
+ r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?'
657
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I)
658
+
659
+
660
+ def is_semver(s):
661
+ return _SEMVER_RE.match(s)
662
+
663
+
664
+ def _semantic_key(s):
665
+ def make_tuple(s, absent):
666
+ if s is None:
667
+ result = (absent,)
668
+ else:
669
+ parts = s[1:].split('.')
670
+ # We can't compare ints and strings on Python 3, so fudge it
671
+ # by zero-filling numeric values so simulate a numeric comparison
672
+ result = tuple([p.zfill(8) if p.isdigit() else p for p in parts])
673
+ return result
674
+
675
+ m = is_semver(s)
676
+ if not m:
677
+ raise UnsupportedVersionError(s)
678
+ groups = m.groups()
679
+ major, minor, patch = [int(i) for i in groups[:3]]
680
+ # choose the '|' and '*' so that versions sort correctly
681
+ pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*')
682
+ return (major, minor, patch), pre, build
683
+
684
+
685
+ class SemanticVersion(Version):
686
+ def parse(self, s):
687
+ return _semantic_key(s)
688
+
689
+ @property
690
+ def is_prerelease(self):
691
+ return self._parts[1][0] != '|'
692
+
693
+
694
+ class SemanticMatcher(Matcher):
695
+ version_class = SemanticVersion
696
+
697
+
698
+ class VersionScheme(object):
699
+ def __init__(self, key, matcher, suggester=None):
700
+ self.key = key
701
+ self.matcher = matcher
702
+ self.suggester = suggester
703
+
704
+ def is_valid_version(self, s):
705
+ try:
706
+ self.matcher.version_class(s)
707
+ result = True
708
+ except UnsupportedVersionError:
709
+ result = False
710
+ return result
711
+
712
+ def is_valid_matcher(self, s):
713
+ try:
714
+ self.matcher(s)
715
+ result = True
716
+ except UnsupportedVersionError:
717
+ result = False
718
+ return result
719
+
720
+ def is_valid_constraint_list(self, s):
721
+ """
722
+ Used for processing some metadata fields
723
+ """
724
+ # See issue #140. Be tolerant of a single trailing comma.
725
+ if s.endswith(','):
726
+ s = s[:-1]
727
+ return self.is_valid_matcher('dummy_name (%s)' % s)
728
+
729
+ def suggest(self, s):
730
+ if self.suggester is None:
731
+ result = None
732
+ else:
733
+ result = self.suggester(s)
734
+ return result
735
+
736
+
737
+ _SCHEMES = {
738
+ 'normalized': VersionScheme(_normalized_key, NormalizedMatcher,
739
+ _suggest_normalized_version),
740
+ 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s),
741
+ 'semantic': VersionScheme(_semantic_key, SemanticMatcher,
742
+ _suggest_semantic_version),
743
+ }
744
+
745
+ _SCHEMES['default'] = _SCHEMES['normalized']
746
+
747
+
748
+ def get_scheme(name):
749
+ if name not in _SCHEMES:
750
+ raise ValueError('unknown scheme name: %r' % name)
751
+ return _SCHEMES[name]
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/w32.exe ADDED
Binary file (91.6 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/distlib/wheel.py ADDED
@@ -0,0 +1,1099 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ #
3
+ # Copyright (C) 2013-2023 Vinay Sajip.
4
+ # Licensed to the Python Software Foundation under a contributor agreement.
5
+ # See LICENSE.txt and CONTRIBUTORS.txt.
6
+ #
7
+ from __future__ import unicode_literals
8
+
9
+ import base64
10
+ import codecs
11
+ import datetime
12
+ from email import message_from_file
13
+ import hashlib
14
+ import json
15
+ import logging
16
+ import os
17
+ import posixpath
18
+ import re
19
+ import shutil
20
+ import sys
21
+ import tempfile
22
+ import zipfile
23
+
24
+ from . import __version__, DistlibException
25
+ from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
26
+ from .database import InstalledDistribution
27
+ from .metadata import Metadata, WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME
28
+ from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
29
+ cached_property, get_cache_base, read_exports, tempdir,
30
+ get_platform)
31
+ from .version import NormalizedVersion, UnsupportedVersionError
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+ cache = None # created when needed
36
+
37
+ if hasattr(sys, 'pypy_version_info'): # pragma: no cover
38
+ IMP_PREFIX = 'pp'
39
+ elif sys.platform.startswith('java'): # pragma: no cover
40
+ IMP_PREFIX = 'jy'
41
+ elif sys.platform == 'cli': # pragma: no cover
42
+ IMP_PREFIX = 'ip'
43
+ else:
44
+ IMP_PREFIX = 'cp'
45
+
46
+ VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
47
+ if not VER_SUFFIX: # pragma: no cover
48
+ VER_SUFFIX = '%s%s' % sys.version_info[:2]
49
+ PYVER = 'py' + VER_SUFFIX
50
+ IMPVER = IMP_PREFIX + VER_SUFFIX
51
+
52
+ ARCH = get_platform().replace('-', '_').replace('.', '_')
53
+
54
+ ABI = sysconfig.get_config_var('SOABI')
55
+ if ABI and ABI.startswith('cpython-'):
56
+ ABI = ABI.replace('cpython-', 'cp').split('-')[0]
57
+ else:
58
+
59
+ def _derive_abi():
60
+ parts = ['cp', VER_SUFFIX]
61
+ if sysconfig.get_config_var('Py_DEBUG'):
62
+ parts.append('d')
63
+ if IMP_PREFIX == 'cp':
64
+ vi = sys.version_info[:2]
65
+ if vi < (3, 8):
66
+ wpm = sysconfig.get_config_var('WITH_PYMALLOC')
67
+ if wpm is None:
68
+ wpm = True
69
+ if wpm:
70
+ parts.append('m')
71
+ if vi < (3, 3):
72
+ us = sysconfig.get_config_var('Py_UNICODE_SIZE')
73
+ if us == 4 or (us is None and sys.maxunicode == 0x10FFFF):
74
+ parts.append('u')
75
+ return ''.join(parts)
76
+
77
+ ABI = _derive_abi()
78
+ del _derive_abi
79
+
80
+ FILENAME_RE = re.compile(
81
+ r'''
82
+ (?P<nm>[^-]+)
83
+ -(?P<vn>\d+[^-]*)
84
+ (-(?P<bn>\d+[^-]*))?
85
+ -(?P<py>\w+\d+(\.\w+\d+)*)
86
+ -(?P<bi>\w+)
87
+ -(?P<ar>\w+(\.\w+)*)
88
+ \.whl$
89
+ ''', re.IGNORECASE | re.VERBOSE)
90
+
91
+ NAME_VERSION_RE = re.compile(
92
+ r'''
93
+ (?P<nm>[^-]+)
94
+ -(?P<vn>\d+[^-]*)
95
+ (-(?P<bn>\d+[^-]*))?$
96
+ ''', re.IGNORECASE | re.VERBOSE)
97
+
98
+ SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
99
+ SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
100
+ SHEBANG_PYTHON = b'#!python'
101
+ SHEBANG_PYTHONW = b'#!pythonw'
102
+
103
+ if os.sep == '/':
104
+ to_posix = lambda o: o
105
+ else:
106
+ to_posix = lambda o: o.replace(os.sep, '/')
107
+
108
+ if sys.version_info[0] < 3:
109
+ import imp
110
+ else:
111
+ imp = None
112
+ import importlib.machinery
113
+ import importlib.util
114
+
115
+
116
+ def _get_suffixes():
117
+ if imp:
118
+ return [s[0] for s in imp.get_suffixes()]
119
+ else:
120
+ return importlib.machinery.EXTENSION_SUFFIXES
121
+
122
+
123
+ def _load_dynamic(name, path):
124
+ # https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
125
+ if imp:
126
+ return imp.load_dynamic(name, path)
127
+ else:
128
+ spec = importlib.util.spec_from_file_location(name, path)
129
+ module = importlib.util.module_from_spec(spec)
130
+ sys.modules[name] = module
131
+ spec.loader.exec_module(module)
132
+ return module
133
+
134
+
135
+ class Mounter(object):
136
+
137
+ def __init__(self):
138
+ self.impure_wheels = {}
139
+ self.libs = {}
140
+
141
+ def add(self, pathname, extensions):
142
+ self.impure_wheels[pathname] = extensions
143
+ self.libs.update(extensions)
144
+
145
+ def remove(self, pathname):
146
+ extensions = self.impure_wheels.pop(pathname)
147
+ for k, v in extensions:
148
+ if k in self.libs:
149
+ del self.libs[k]
150
+
151
+ def find_module(self, fullname, path=None):
152
+ if fullname in self.libs:
153
+ result = self
154
+ else:
155
+ result = None
156
+ return result
157
+
158
+ def load_module(self, fullname):
159
+ if fullname in sys.modules:
160
+ result = sys.modules[fullname]
161
+ else:
162
+ if fullname not in self.libs:
163
+ raise ImportError('unable to find extension for %s' % fullname)
164
+ result = _load_dynamic(fullname, self.libs[fullname])
165
+ result.__loader__ = self
166
+ parts = fullname.rsplit('.', 1)
167
+ if len(parts) > 1:
168
+ result.__package__ = parts[0]
169
+ return result
170
+
171
+
172
+ _hook = Mounter()
173
+
174
+
175
+ class Wheel(object):
176
+ """
177
+ Class to build and install from Wheel files (PEP 427).
178
+ """
179
+
180
+ wheel_version = (1, 1)
181
+ hash_kind = 'sha256'
182
+
183
+ def __init__(self, filename=None, sign=False, verify=False):
184
+ """
185
+ Initialise an instance using a (valid) filename.
186
+ """
187
+ self.sign = sign
188
+ self.should_verify = verify
189
+ self.buildver = ''
190
+ self.pyver = [PYVER]
191
+ self.abi = ['none']
192
+ self.arch = ['any']
193
+ self.dirname = os.getcwd()
194
+ if filename is None:
195
+ self.name = 'dummy'
196
+ self.version = '0.1'
197
+ self._filename = self.filename
198
+ else:
199
+ m = NAME_VERSION_RE.match(filename)
200
+ if m:
201
+ info = m.groupdict('')
202
+ self.name = info['nm']
203
+ # Reinstate the local version separator
204
+ self.version = info['vn'].replace('_', '-')
205
+ self.buildver = info['bn']
206
+ self._filename = self.filename
207
+ else:
208
+ dirname, filename = os.path.split(filename)
209
+ m = FILENAME_RE.match(filename)
210
+ if not m:
211
+ raise DistlibException('Invalid name or '
212
+ 'filename: %r' % filename)
213
+ if dirname:
214
+ self.dirname = os.path.abspath(dirname)
215
+ self._filename = filename
216
+ info = m.groupdict('')
217
+ self.name = info['nm']
218
+ self.version = info['vn']
219
+ self.buildver = info['bn']
220
+ self.pyver = info['py'].split('.')
221
+ self.abi = info['bi'].split('.')
222
+ self.arch = info['ar'].split('.')
223
+
224
+ @property
225
+ def filename(self):
226
+ """
227
+ Build and return a filename from the various components.
228
+ """
229
+ if self.buildver:
230
+ buildver = '-' + self.buildver
231
+ else:
232
+ buildver = ''
233
+ pyver = '.'.join(self.pyver)
234
+ abi = '.'.join(self.abi)
235
+ arch = '.'.join(self.arch)
236
+ # replace - with _ as a local version separator
237
+ version = self.version.replace('-', '_')
238
+ return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, pyver,
239
+ abi, arch)
240
+
241
+ @property
242
+ def exists(self):
243
+ path = os.path.join(self.dirname, self.filename)
244
+ return os.path.isfile(path)
245
+
246
+ @property
247
+ def tags(self):
248
+ for pyver in self.pyver:
249
+ for abi in self.abi:
250
+ for arch in self.arch:
251
+ yield pyver, abi, arch
252
+
253
+ @cached_property
254
+ def metadata(self):
255
+ pathname = os.path.join(self.dirname, self.filename)
256
+ name_ver = '%s-%s' % (self.name, self.version)
257
+ info_dir = '%s.dist-info' % name_ver
258
+ wrapper = codecs.getreader('utf-8')
259
+ with ZipFile(pathname, 'r') as zf:
260
+ self.get_wheel_metadata(zf)
261
+ # wv = wheel_metadata['Wheel-Version'].split('.', 1)
262
+ # file_version = tuple([int(i) for i in wv])
263
+ # if file_version < (1, 1):
264
+ # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
265
+ # LEGACY_METADATA_FILENAME]
266
+ # else:
267
+ # fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
268
+ fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
269
+ result = None
270
+ for fn in fns:
271
+ try:
272
+ metadata_filename = posixpath.join(info_dir, fn)
273
+ with zf.open(metadata_filename) as bf:
274
+ wf = wrapper(bf)
275
+ result = Metadata(fileobj=wf)
276
+ if result:
277
+ break
278
+ except KeyError:
279
+ pass
280
+ if not result:
281
+ raise ValueError('Invalid wheel, because metadata is '
282
+ 'missing: looked in %s' % ', '.join(fns))
283
+ return result
284
+
285
+ def get_wheel_metadata(self, zf):
286
+ name_ver = '%s-%s' % (self.name, self.version)
287
+ info_dir = '%s.dist-info' % name_ver
288
+ metadata_filename = posixpath.join(info_dir, 'WHEEL')
289
+ with zf.open(metadata_filename) as bf:
290
+ wf = codecs.getreader('utf-8')(bf)
291
+ message = message_from_file(wf)
292
+ return dict(message)
293
+
294
+ @cached_property
295
+ def info(self):
296
+ pathname = os.path.join(self.dirname, self.filename)
297
+ with ZipFile(pathname, 'r') as zf:
298
+ result = self.get_wheel_metadata(zf)
299
+ return result
300
+
301
+ def process_shebang(self, data):
302
+ m = SHEBANG_RE.match(data)
303
+ if m:
304
+ end = m.end()
305
+ shebang, data_after_shebang = data[:end], data[end:]
306
+ # Preserve any arguments after the interpreter
307
+ if b'pythonw' in shebang.lower():
308
+ shebang_python = SHEBANG_PYTHONW
309
+ else:
310
+ shebang_python = SHEBANG_PYTHON
311
+ m = SHEBANG_DETAIL_RE.match(shebang)
312
+ if m:
313
+ args = b' ' + m.groups()[-1]
314
+ else:
315
+ args = b''
316
+ shebang = shebang_python + args
317
+ data = shebang + data_after_shebang
318
+ else:
319
+ cr = data.find(b'\r')
320
+ lf = data.find(b'\n')
321
+ if cr < 0 or cr > lf:
322
+ term = b'\n'
323
+ else:
324
+ if data[cr:cr + 2] == b'\r\n':
325
+ term = b'\r\n'
326
+ else:
327
+ term = b'\r'
328
+ data = SHEBANG_PYTHON + term + data
329
+ return data
330
+
331
+ def get_hash(self, data, hash_kind=None):
332
+ if hash_kind is None:
333
+ hash_kind = self.hash_kind
334
+ try:
335
+ hasher = getattr(hashlib, hash_kind)
336
+ except AttributeError:
337
+ raise DistlibException('Unsupported hash algorithm: %r' %
338
+ hash_kind)
339
+ result = hasher(data).digest()
340
+ result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
341
+ return hash_kind, result
342
+
343
+ def write_record(self, records, record_path, archive_record_path):
344
+ records = list(records) # make a copy, as mutated
345
+ records.append((archive_record_path, '', ''))
346
+ with CSVWriter(record_path) as writer:
347
+ for row in records:
348
+ writer.writerow(row)
349
+
350
+ def write_records(self, info, libdir, archive_paths):
351
+ records = []
352
+ distinfo, info_dir = info
353
+ # hasher = getattr(hashlib, self.hash_kind)
354
+ for ap, p in archive_paths:
355
+ with open(p, 'rb') as f:
356
+ data = f.read()
357
+ digest = '%s=%s' % self.get_hash(data)
358
+ size = os.path.getsize(p)
359
+ records.append((ap, digest, size))
360
+
361
+ p = os.path.join(distinfo, 'RECORD')
362
+ ap = to_posix(os.path.join(info_dir, 'RECORD'))
363
+ self.write_record(records, p, ap)
364
+ archive_paths.append((ap, p))
365
+
366
+ def build_zip(self, pathname, archive_paths):
367
+ with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
368
+ for ap, p in archive_paths:
369
+ logger.debug('Wrote %s to %s in wheel', p, ap)
370
+ zf.write(p, ap)
371
+
372
+ def build(self, paths, tags=None, wheel_version=None):
373
+ """
374
+ Build a wheel from files in specified paths, and use any specified tags
375
+ when determining the name of the wheel.
376
+ """
377
+ if tags is None:
378
+ tags = {}
379
+
380
+ libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
381
+ if libkey == 'platlib':
382
+ is_pure = 'false'
383
+ default_pyver = [IMPVER]
384
+ default_abi = [ABI]
385
+ default_arch = [ARCH]
386
+ else:
387
+ is_pure = 'true'
388
+ default_pyver = [PYVER]
389
+ default_abi = ['none']
390
+ default_arch = ['any']
391
+
392
+ self.pyver = tags.get('pyver', default_pyver)
393
+ self.abi = tags.get('abi', default_abi)
394
+ self.arch = tags.get('arch', default_arch)
395
+
396
+ libdir = paths[libkey]
397
+
398
+ name_ver = '%s-%s' % (self.name, self.version)
399
+ data_dir = '%s.data' % name_ver
400
+ info_dir = '%s.dist-info' % name_ver
401
+
402
+ archive_paths = []
403
+
404
+ # First, stuff which is not in site-packages
405
+ for key in ('data', 'headers', 'scripts'):
406
+ if key not in paths:
407
+ continue
408
+ path = paths[key]
409
+ if os.path.isdir(path):
410
+ for root, dirs, files in os.walk(path):
411
+ for fn in files:
412
+ p = fsdecode(os.path.join(root, fn))
413
+ rp = os.path.relpath(p, path)
414
+ ap = to_posix(os.path.join(data_dir, key, rp))
415
+ archive_paths.append((ap, p))
416
+ if key == 'scripts' and not p.endswith('.exe'):
417
+ with open(p, 'rb') as f:
418
+ data = f.read()
419
+ data = self.process_shebang(data)
420
+ with open(p, 'wb') as f:
421
+ f.write(data)
422
+
423
+ # Now, stuff which is in site-packages, other than the
424
+ # distinfo stuff.
425
+ path = libdir
426
+ distinfo = None
427
+ for root, dirs, files in os.walk(path):
428
+ if root == path:
429
+ # At the top level only, save distinfo for later
430
+ # and skip it for now
431
+ for i, dn in enumerate(dirs):
432
+ dn = fsdecode(dn)
433
+ if dn.endswith('.dist-info'):
434
+ distinfo = os.path.join(root, dn)
435
+ del dirs[i]
436
+ break
437
+ assert distinfo, '.dist-info directory expected, not found'
438
+
439
+ for fn in files:
440
+ # comment out next suite to leave .pyc files in
441
+ if fsdecode(fn).endswith(('.pyc', '.pyo')):
442
+ continue
443
+ p = os.path.join(root, fn)
444
+ rp = to_posix(os.path.relpath(p, path))
445
+ archive_paths.append((rp, p))
446
+
447
+ # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
448
+ files = os.listdir(distinfo)
449
+ for fn in files:
450
+ if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
451
+ p = fsdecode(os.path.join(distinfo, fn))
452
+ ap = to_posix(os.path.join(info_dir, fn))
453
+ archive_paths.append((ap, p))
454
+
455
+ wheel_metadata = [
456
+ 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
457
+ 'Generator: distlib %s' % __version__,
458
+ 'Root-Is-Purelib: %s' % is_pure,
459
+ ]
460
+ for pyver, abi, arch in self.tags:
461
+ wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
462
+ p = os.path.join(distinfo, 'WHEEL')
463
+ with open(p, 'w') as f:
464
+ f.write('\n'.join(wheel_metadata))
465
+ ap = to_posix(os.path.join(info_dir, 'WHEEL'))
466
+ archive_paths.append((ap, p))
467
+
468
+ # sort the entries by archive path. Not needed by any spec, but it
469
+ # keeps the archive listing and RECORD tidier than they would otherwise
470
+ # be. Use the number of path segments to keep directory entries together,
471
+ # and keep the dist-info stuff at the end.
472
+ def sorter(t):
473
+ ap = t[0]
474
+ n = ap.count('/')
475
+ if '.dist-info' in ap:
476
+ n += 10000
477
+ return (n, ap)
478
+
479
+ archive_paths = sorted(archive_paths, key=sorter)
480
+
481
+ # Now, at last, RECORD.
482
+ # Paths in here are archive paths - nothing else makes sense.
483
+ self.write_records((distinfo, info_dir), libdir, archive_paths)
484
+ # Now, ready to build the zip file
485
+ pathname = os.path.join(self.dirname, self.filename)
486
+ self.build_zip(pathname, archive_paths)
487
+ return pathname
488
+
489
+ def skip_entry(self, arcname):
490
+ """
491
+ Determine whether an archive entry should be skipped when verifying
492
+ or installing.
493
+ """
494
+ # The signature file won't be in RECORD,
495
+ # and we don't currently don't do anything with it
496
+ # We also skip directories, as they won't be in RECORD
497
+ # either. See:
498
+ #
499
+ # https://github.com/pypa/wheel/issues/294
500
+ # https://github.com/pypa/wheel/issues/287
501
+ # https://github.com/pypa/wheel/pull/289
502
+ #
503
+ return arcname.endswith(('/', '/RECORD.jws'))
504
+
505
+ def install(self, paths, maker, **kwargs):
506
+ """
507
+ Install a wheel to the specified paths. If kwarg ``warner`` is
508
+ specified, it should be a callable, which will be called with two
509
+ tuples indicating the wheel version of this software and the wheel
510
+ version in the file, if there is a discrepancy in the versions.
511
+ This can be used to issue any warnings to raise any exceptions.
512
+ If kwarg ``lib_only`` is True, only the purelib/platlib files are
513
+ installed, and the headers, scripts, data and dist-info metadata are
514
+ not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
515
+ bytecode will try to use file-hash based invalidation (PEP-552) on
516
+ supported interpreter versions (CPython 2.7+).
517
+
518
+ The return value is a :class:`InstalledDistribution` instance unless
519
+ ``options.lib_only`` is True, in which case the return value is ``None``.
520
+ """
521
+
522
+ dry_run = maker.dry_run
523
+ warner = kwargs.get('warner')
524
+ lib_only = kwargs.get('lib_only', False)
525
+ bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation',
526
+ False)
527
+
528
+ pathname = os.path.join(self.dirname, self.filename)
529
+ name_ver = '%s-%s' % (self.name, self.version)
530
+ data_dir = '%s.data' % name_ver
531
+ info_dir = '%s.dist-info' % name_ver
532
+
533
+ metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
534
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
535
+ record_name = posixpath.join(info_dir, 'RECORD')
536
+
537
+ wrapper = codecs.getreader('utf-8')
538
+
539
+ with ZipFile(pathname, 'r') as zf:
540
+ with zf.open(wheel_metadata_name) as bwf:
541
+ wf = wrapper(bwf)
542
+ message = message_from_file(wf)
543
+ wv = message['Wheel-Version'].split('.', 1)
544
+ file_version = tuple([int(i) for i in wv])
545
+ if (file_version != self.wheel_version) and warner:
546
+ warner(self.wheel_version, file_version)
547
+
548
+ if message['Root-Is-Purelib'] == 'true':
549
+ libdir = paths['purelib']
550
+ else:
551
+ libdir = paths['platlib']
552
+
553
+ records = {}
554
+ with zf.open(record_name) as bf:
555
+ with CSVReader(stream=bf) as reader:
556
+ for row in reader:
557
+ p = row[0]
558
+ records[p] = row
559
+
560
+ data_pfx = posixpath.join(data_dir, '')
561
+ info_pfx = posixpath.join(info_dir, '')
562
+ script_pfx = posixpath.join(data_dir, 'scripts', '')
563
+
564
+ # make a new instance rather than a copy of maker's,
565
+ # as we mutate it
566
+ fileop = FileOperator(dry_run=dry_run)
567
+ fileop.record = True # so we can rollback if needed
568
+
569
+ bc = not sys.dont_write_bytecode # Double negatives. Lovely!
570
+
571
+ outfiles = [] # for RECORD writing
572
+
573
+ # for script copying/shebang processing
574
+ workdir = tempfile.mkdtemp()
575
+ # set target dir later
576
+ # we default add_launchers to False, as the
577
+ # Python Launcher should be used instead
578
+ maker.source_dir = workdir
579
+ maker.target_dir = None
580
+ try:
581
+ for zinfo in zf.infolist():
582
+ arcname = zinfo.filename
583
+ if isinstance(arcname, text_type):
584
+ u_arcname = arcname
585
+ else:
586
+ u_arcname = arcname.decode('utf-8')
587
+ if self.skip_entry(u_arcname):
588
+ continue
589
+ row = records[u_arcname]
590
+ if row[2] and str(zinfo.file_size) != row[2]:
591
+ raise DistlibException('size mismatch for '
592
+ '%s' % u_arcname)
593
+ if row[1]:
594
+ kind, value = row[1].split('=', 1)
595
+ with zf.open(arcname) as bf:
596
+ data = bf.read()
597
+ _, digest = self.get_hash(data, kind)
598
+ if digest != value:
599
+ raise DistlibException('digest mismatch for '
600
+ '%s' % arcname)
601
+
602
+ if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
603
+ logger.debug('lib_only: skipping %s', u_arcname)
604
+ continue
605
+ is_script = (u_arcname.startswith(script_pfx)
606
+ and not u_arcname.endswith('.exe'))
607
+
608
+ if u_arcname.startswith(data_pfx):
609
+ _, where, rp = u_arcname.split('/', 2)
610
+ outfile = os.path.join(paths[where], convert_path(rp))
611
+ else:
612
+ # meant for site-packages.
613
+ if u_arcname in (wheel_metadata_name, record_name):
614
+ continue
615
+ outfile = os.path.join(libdir, convert_path(u_arcname))
616
+ if not is_script:
617
+ with zf.open(arcname) as bf:
618
+ fileop.copy_stream(bf, outfile)
619
+ # Issue #147: permission bits aren't preserved. Using
620
+ # zf.extract(zinfo, libdir) should have worked, but didn't,
621
+ # see https://www.thetopsites.net/article/53834422.shtml
622
+ # So ... manually preserve permission bits as given in zinfo
623
+ if os.name == 'posix':
624
+ # just set the normal permission bits
625
+ os.chmod(outfile,
626
+ (zinfo.external_attr >> 16) & 0x1FF)
627
+ outfiles.append(outfile)
628
+ # Double check the digest of the written file
629
+ if not dry_run and row[1]:
630
+ with open(outfile, 'rb') as bf:
631
+ data = bf.read()
632
+ _, newdigest = self.get_hash(data, kind)
633
+ if newdigest != digest:
634
+ raise DistlibException('digest mismatch '
635
+ 'on write for '
636
+ '%s' % outfile)
637
+ if bc and outfile.endswith('.py'):
638
+ try:
639
+ pyc = fileop.byte_compile(
640
+ outfile,
641
+ hashed_invalidation=bc_hashed_invalidation)
642
+ outfiles.append(pyc)
643
+ except Exception:
644
+ # Don't give up if byte-compilation fails,
645
+ # but log it and perhaps warn the user
646
+ logger.warning('Byte-compilation failed',
647
+ exc_info=True)
648
+ else:
649
+ fn = os.path.basename(convert_path(arcname))
650
+ workname = os.path.join(workdir, fn)
651
+ with zf.open(arcname) as bf:
652
+ fileop.copy_stream(bf, workname)
653
+
654
+ dn, fn = os.path.split(outfile)
655
+ maker.target_dir = dn
656
+ filenames = maker.make(fn)
657
+ fileop.set_executable_mode(filenames)
658
+ outfiles.extend(filenames)
659
+
660
+ if lib_only:
661
+ logger.debug('lib_only: returning None')
662
+ dist = None
663
+ else:
664
+ # Generate scripts
665
+
666
+ # Try to get pydist.json so we can see if there are
667
+ # any commands to generate. If this fails (e.g. because
668
+ # of a legacy wheel), log a warning but don't give up.
669
+ commands = None
670
+ file_version = self.info['Wheel-Version']
671
+ if file_version == '1.0':
672
+ # Use legacy info
673
+ ep = posixpath.join(info_dir, 'entry_points.txt')
674
+ try:
675
+ with zf.open(ep) as bwf:
676
+ epdata = read_exports(bwf)
677
+ commands = {}
678
+ for key in ('console', 'gui'):
679
+ k = '%s_scripts' % key
680
+ if k in epdata:
681
+ commands['wrap_%s' % key] = d = {}
682
+ for v in epdata[k].values():
683
+ s = '%s:%s' % (v.prefix, v.suffix)
684
+ if v.flags:
685
+ s += ' [%s]' % ','.join(v.flags)
686
+ d[v.name] = s
687
+ except Exception:
688
+ logger.warning('Unable to read legacy script '
689
+ 'metadata, so cannot generate '
690
+ 'scripts')
691
+ else:
692
+ try:
693
+ with zf.open(metadata_name) as bwf:
694
+ wf = wrapper(bwf)
695
+ commands = json.load(wf).get('extensions')
696
+ if commands:
697
+ commands = commands.get('python.commands')
698
+ except Exception:
699
+ logger.warning('Unable to read JSON metadata, so '
700
+ 'cannot generate scripts')
701
+ if commands:
702
+ console_scripts = commands.get('wrap_console', {})
703
+ gui_scripts = commands.get('wrap_gui', {})
704
+ if console_scripts or gui_scripts:
705
+ script_dir = paths.get('scripts', '')
706
+ if not os.path.isdir(script_dir):
707
+ raise ValueError('Valid script path not '
708
+ 'specified')
709
+ maker.target_dir = script_dir
710
+ for k, v in console_scripts.items():
711
+ script = '%s = %s' % (k, v)
712
+ filenames = maker.make(script)
713
+ fileop.set_executable_mode(filenames)
714
+
715
+ if gui_scripts:
716
+ options = {'gui': True}
717
+ for k, v in gui_scripts.items():
718
+ script = '%s = %s' % (k, v)
719
+ filenames = maker.make(script, options)
720
+ fileop.set_executable_mode(filenames)
721
+
722
+ p = os.path.join(libdir, info_dir)
723
+ dist = InstalledDistribution(p)
724
+
725
+ # Write SHARED
726
+ paths = dict(paths) # don't change passed in dict
727
+ del paths['purelib']
728
+ del paths['platlib']
729
+ paths['lib'] = libdir
730
+ p = dist.write_shared_locations(paths, dry_run)
731
+ if p:
732
+ outfiles.append(p)
733
+
734
+ # Write RECORD
735
+ dist.write_installed_files(outfiles, paths['prefix'],
736
+ dry_run)
737
+ return dist
738
+ except Exception: # pragma: no cover
739
+ logger.exception('installation failed.')
740
+ fileop.rollback()
741
+ raise
742
+ finally:
743
+ shutil.rmtree(workdir)
744
+
745
+ def _get_dylib_cache(self):
746
+ global cache
747
+ if cache is None:
748
+ # Use native string to avoid issues on 2.x: see Python #20140.
749
+ base = os.path.join(get_cache_base(), str('dylib-cache'),
750
+ '%s.%s' % sys.version_info[:2])
751
+ cache = Cache(base)
752
+ return cache
753
+
754
+ def _get_extensions(self):
755
+ pathname = os.path.join(self.dirname, self.filename)
756
+ name_ver = '%s-%s' % (self.name, self.version)
757
+ info_dir = '%s.dist-info' % name_ver
758
+ arcname = posixpath.join(info_dir, 'EXTENSIONS')
759
+ wrapper = codecs.getreader('utf-8')
760
+ result = []
761
+ with ZipFile(pathname, 'r') as zf:
762
+ try:
763
+ with zf.open(arcname) as bf:
764
+ wf = wrapper(bf)
765
+ extensions = json.load(wf)
766
+ cache = self._get_dylib_cache()
767
+ prefix = cache.prefix_to_dir(pathname)
768
+ cache_base = os.path.join(cache.base, prefix)
769
+ if not os.path.isdir(cache_base):
770
+ os.makedirs(cache_base)
771
+ for name, relpath in extensions.items():
772
+ dest = os.path.join(cache_base, convert_path(relpath))
773
+ if not os.path.exists(dest):
774
+ extract = True
775
+ else:
776
+ file_time = os.stat(dest).st_mtime
777
+ file_time = datetime.datetime.fromtimestamp(
778
+ file_time)
779
+ info = zf.getinfo(relpath)
780
+ wheel_time = datetime.datetime(*info.date_time)
781
+ extract = wheel_time > file_time
782
+ if extract:
783
+ zf.extract(relpath, cache_base)
784
+ result.append((name, dest))
785
+ except KeyError:
786
+ pass
787
+ return result
788
+
789
+ def is_compatible(self):
790
+ """
791
+ Determine if a wheel is compatible with the running system.
792
+ """
793
+ return is_compatible(self)
794
+
795
+ def is_mountable(self):
796
+ """
797
+ Determine if a wheel is asserted as mountable by its metadata.
798
+ """
799
+ return True # for now - metadata details TBD
800
+
801
+ def mount(self, append=False):
802
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
803
+ if not self.is_compatible():
804
+ msg = 'Wheel %s not compatible with this Python.' % pathname
805
+ raise DistlibException(msg)
806
+ if not self.is_mountable():
807
+ msg = 'Wheel %s is marked as not mountable.' % pathname
808
+ raise DistlibException(msg)
809
+ if pathname in sys.path:
810
+ logger.debug('%s already in path', pathname)
811
+ else:
812
+ if append:
813
+ sys.path.append(pathname)
814
+ else:
815
+ sys.path.insert(0, pathname)
816
+ extensions = self._get_extensions()
817
+ if extensions:
818
+ if _hook not in sys.meta_path:
819
+ sys.meta_path.append(_hook)
820
+ _hook.add(pathname, extensions)
821
+
822
+ def unmount(self):
823
+ pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
824
+ if pathname not in sys.path:
825
+ logger.debug('%s not in path', pathname)
826
+ else:
827
+ sys.path.remove(pathname)
828
+ if pathname in _hook.impure_wheels:
829
+ _hook.remove(pathname)
830
+ if not _hook.impure_wheels:
831
+ if _hook in sys.meta_path:
832
+ sys.meta_path.remove(_hook)
833
+
834
+ def verify(self):
835
+ pathname = os.path.join(self.dirname, self.filename)
836
+ name_ver = '%s-%s' % (self.name, self.version)
837
+ # data_dir = '%s.data' % name_ver
838
+ info_dir = '%s.dist-info' % name_ver
839
+
840
+ # metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
841
+ wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
842
+ record_name = posixpath.join(info_dir, 'RECORD')
843
+
844
+ wrapper = codecs.getreader('utf-8')
845
+
846
+ with ZipFile(pathname, 'r') as zf:
847
+ with zf.open(wheel_metadata_name) as bwf:
848
+ wf = wrapper(bwf)
849
+ message_from_file(wf)
850
+ # wv = message['Wheel-Version'].split('.', 1)
851
+ # file_version = tuple([int(i) for i in wv])
852
+ # TODO version verification
853
+
854
+ records = {}
855
+ with zf.open(record_name) as bf:
856
+ with CSVReader(stream=bf) as reader:
857
+ for row in reader:
858
+ p = row[0]
859
+ records[p] = row
860
+
861
+ for zinfo in zf.infolist():
862
+ arcname = zinfo.filename
863
+ if isinstance(arcname, text_type):
864
+ u_arcname = arcname
865
+ else:
866
+ u_arcname = arcname.decode('utf-8')
867
+ # See issue #115: some wheels have .. in their entries, but
868
+ # in the filename ... e.g. __main__..py ! So the check is
869
+ # updated to look for .. in the directory portions
870
+ p = u_arcname.split('/')
871
+ if '..' in p:
872
+ raise DistlibException('invalid entry in '
873
+ 'wheel: %r' % u_arcname)
874
+
875
+ if self.skip_entry(u_arcname):
876
+ continue
877
+ row = records[u_arcname]
878
+ if row[2] and str(zinfo.file_size) != row[2]:
879
+ raise DistlibException('size mismatch for '
880
+ '%s' % u_arcname)
881
+ if row[1]:
882
+ kind, value = row[1].split('=', 1)
883
+ with zf.open(arcname) as bf:
884
+ data = bf.read()
885
+ _, digest = self.get_hash(data, kind)
886
+ if digest != value:
887
+ raise DistlibException('digest mismatch for '
888
+ '%s' % arcname)
889
+
890
+ def update(self, modifier, dest_dir=None, **kwargs):
891
+ """
892
+ Update the contents of a wheel in a generic way. The modifier should
893
+ be a callable which expects a dictionary argument: its keys are
894
+ archive-entry paths, and its values are absolute filesystem paths
895
+ where the contents the corresponding archive entries can be found. The
896
+ modifier is free to change the contents of the files pointed to, add
897
+ new entries and remove entries, before returning. This method will
898
+ extract the entire contents of the wheel to a temporary location, call
899
+ the modifier, and then use the passed (and possibly updated)
900
+ dictionary to write a new wheel. If ``dest_dir`` is specified, the new
901
+ wheel is written there -- otherwise, the original wheel is overwritten.
902
+
903
+ The modifier should return True if it updated the wheel, else False.
904
+ This method returns the same value the modifier returns.
905
+ """
906
+
907
+ def get_version(path_map, info_dir):
908
+ version = path = None
909
+ key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
910
+ if key not in path_map:
911
+ key = '%s/PKG-INFO' % info_dir
912
+ if key in path_map:
913
+ path = path_map[key]
914
+ version = Metadata(path=path).version
915
+ return version, path
916
+
917
+ def update_version(version, path):
918
+ updated = None
919
+ try:
920
+ NormalizedVersion(version)
921
+ i = version.find('-')
922
+ if i < 0:
923
+ updated = '%s+1' % version
924
+ else:
925
+ parts = [int(s) for s in version[i + 1:].split('.')]
926
+ parts[-1] += 1
927
+ updated = '%s+%s' % (version[:i], '.'.join(
928
+ str(i) for i in parts))
929
+ except UnsupportedVersionError:
930
+ logger.debug(
931
+ 'Cannot update non-compliant (PEP-440) '
932
+ 'version %r', version)
933
+ if updated:
934
+ md = Metadata(path=path)
935
+ md.version = updated
936
+ legacy = path.endswith(LEGACY_METADATA_FILENAME)
937
+ md.write(path=path, legacy=legacy)
938
+ logger.debug('Version updated from %r to %r', version, updated)
939
+
940
+ pathname = os.path.join(self.dirname, self.filename)
941
+ name_ver = '%s-%s' % (self.name, self.version)
942
+ info_dir = '%s.dist-info' % name_ver
943
+ record_name = posixpath.join(info_dir, 'RECORD')
944
+ with tempdir() as workdir:
945
+ with ZipFile(pathname, 'r') as zf:
946
+ path_map = {}
947
+ for zinfo in zf.infolist():
948
+ arcname = zinfo.filename
949
+ if isinstance(arcname, text_type):
950
+ u_arcname = arcname
951
+ else:
952
+ u_arcname = arcname.decode('utf-8')
953
+ if u_arcname == record_name:
954
+ continue
955
+ if '..' in u_arcname:
956
+ raise DistlibException('invalid entry in '
957
+ 'wheel: %r' % u_arcname)
958
+ zf.extract(zinfo, workdir)
959
+ path = os.path.join(workdir, convert_path(u_arcname))
960
+ path_map[u_arcname] = path
961
+
962
+ # Remember the version.
963
+ original_version, _ = get_version(path_map, info_dir)
964
+ # Files extracted. Call the modifier.
965
+ modified = modifier(path_map, **kwargs)
966
+ if modified:
967
+ # Something changed - need to build a new wheel.
968
+ current_version, path = get_version(path_map, info_dir)
969
+ if current_version and (current_version == original_version):
970
+ # Add or update local version to signify changes.
971
+ update_version(current_version, path)
972
+ # Decide where the new wheel goes.
973
+ if dest_dir is None:
974
+ fd, newpath = tempfile.mkstemp(suffix='.whl',
975
+ prefix='wheel-update-',
976
+ dir=workdir)
977
+ os.close(fd)
978
+ else:
979
+ if not os.path.isdir(dest_dir):
980
+ raise DistlibException('Not a directory: %r' %
981
+ dest_dir)
982
+ newpath = os.path.join(dest_dir, self.filename)
983
+ archive_paths = list(path_map.items())
984
+ distinfo = os.path.join(workdir, info_dir)
985
+ info = distinfo, info_dir
986
+ self.write_records(info, workdir, archive_paths)
987
+ self.build_zip(newpath, archive_paths)
988
+ if dest_dir is None:
989
+ shutil.copyfile(newpath, pathname)
990
+ return modified
991
+
992
+
993
+ def _get_glibc_version():
994
+ import platform
995
+ ver = platform.libc_ver()
996
+ result = []
997
+ if ver[0] == 'glibc':
998
+ for s in ver[1].split('.'):
999
+ result.append(int(s) if s.isdigit() else 0)
1000
+ result = tuple(result)
1001
+ return result
1002
+
1003
+
1004
+ def compatible_tags():
1005
+ """
1006
+ Return (pyver, abi, arch) tuples compatible with this Python.
1007
+ """
1008
+ versions = [VER_SUFFIX]
1009
+ major = VER_SUFFIX[0]
1010
+ for minor in range(sys.version_info[1] - 1, -1, -1):
1011
+ versions.append(''.join([major, str(minor)]))
1012
+
1013
+ abis = []
1014
+ for suffix in _get_suffixes():
1015
+ if suffix.startswith('.abi'):
1016
+ abis.append(suffix.split('.', 2)[1])
1017
+ abis.sort()
1018
+ if ABI != 'none':
1019
+ abis.insert(0, ABI)
1020
+ abis.append('none')
1021
+ result = []
1022
+
1023
+ arches = [ARCH]
1024
+ if sys.platform == 'darwin':
1025
+ m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
1026
+ if m:
1027
+ name, major, minor, arch = m.groups()
1028
+ minor = int(minor)
1029
+ matches = [arch]
1030
+ if arch in ('i386', 'ppc'):
1031
+ matches.append('fat')
1032
+ if arch in ('i386', 'ppc', 'x86_64'):
1033
+ matches.append('fat3')
1034
+ if arch in ('ppc64', 'x86_64'):
1035
+ matches.append('fat64')
1036
+ if arch in ('i386', 'x86_64'):
1037
+ matches.append('intel')
1038
+ if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
1039
+ matches.append('universal')
1040
+ while minor >= 0:
1041
+ for match in matches:
1042
+ s = '%s_%s_%s_%s' % (name, major, minor, match)
1043
+ if s != ARCH: # already there
1044
+ arches.append(s)
1045
+ minor -= 1
1046
+
1047
+ # Most specific - our Python version, ABI and arch
1048
+ for abi in abis:
1049
+ for arch in arches:
1050
+ result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
1051
+ # manylinux
1052
+ if abi != 'none' and sys.platform.startswith('linux'):
1053
+ arch = arch.replace('linux_', '')
1054
+ parts = _get_glibc_version()
1055
+ if len(parts) == 2:
1056
+ if parts >= (2, 5):
1057
+ result.append((''.join((IMP_PREFIX, versions[0])), abi,
1058
+ 'manylinux1_%s' % arch))
1059
+ if parts >= (2, 12):
1060
+ result.append((''.join((IMP_PREFIX, versions[0])), abi,
1061
+ 'manylinux2010_%s' % arch))
1062
+ if parts >= (2, 17):
1063
+ result.append((''.join((IMP_PREFIX, versions[0])), abi,
1064
+ 'manylinux2014_%s' % arch))
1065
+ result.append(
1066
+ (''.join((IMP_PREFIX, versions[0])), abi,
1067
+ 'manylinux_%s_%s_%s' % (parts[0], parts[1], arch)))
1068
+
1069
+ # where no ABI / arch dependency, but IMP_PREFIX dependency
1070
+ for i, version in enumerate(versions):
1071
+ result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
1072
+ if i == 0:
1073
+ result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
1074
+
1075
+ # no IMP_PREFIX, ABI or arch dependency
1076
+ for i, version in enumerate(versions):
1077
+ result.append((''.join(('py', version)), 'none', 'any'))
1078
+ if i == 0:
1079
+ result.append((''.join(('py', version[0])), 'none', 'any'))
1080
+
1081
+ return set(result)
1082
+
1083
+
1084
+ COMPATIBLE_TAGS = compatible_tags()
1085
+
1086
+ del compatible_tags
1087
+
1088
+
1089
+ def is_compatible(wheel, tags=None):
1090
+ if not isinstance(wheel, Wheel):
1091
+ wheel = Wheel(wheel) # assume it's a filename
1092
+ result = False
1093
+ if tags is None:
1094
+ tags = COMPATIBLE_TAGS
1095
+ for ver, abi, arch in tags:
1096
+ if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
1097
+ result = True
1098
+ break
1099
+ return result
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__about__.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ __all__ = [
6
+ "__title__",
7
+ "__summary__",
8
+ "__uri__",
9
+ "__version__",
10
+ "__author__",
11
+ "__email__",
12
+ "__license__",
13
+ "__copyright__",
14
+ ]
15
+
16
+ __title__ = "packaging"
17
+ __summary__ = "Core utilities for Python packages"
18
+ __uri__ = "https://github.com/pypa/packaging"
19
+
20
+ __version__ = "21.3"
21
+
22
+ __author__ = "Donald Stufft and individual contributors"
23
+ __email__ = "donald@stufft.io"
24
+
25
+ __license__ = "BSD-2-Clause or Apache-2.0"
26
+ __copyright__ = "2014-2019 %s" % __author__
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__init__.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ from .__about__ import (
6
+ __author__,
7
+ __copyright__,
8
+ __email__,
9
+ __license__,
10
+ __summary__,
11
+ __title__,
12
+ __uri__,
13
+ __version__,
14
+ )
15
+
16
+ __all__ = [
17
+ "__title__",
18
+ "__summary__",
19
+ "__uri__",
20
+ "__version__",
21
+ "__author__",
22
+ "__email__",
23
+ "__license__",
24
+ "__copyright__",
25
+ ]
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-311.pyc ADDED
Binary file (635 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (556 Bytes). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-311.pyc ADDED
Binary file (13.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-311.pyc ADDED
Binary file (7.99 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-311.pyc ADDED
Binary file (3.68 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-311.pyc ADDED
Binary file (16.5 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-311.pyc ADDED
Binary file (7.63 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-311.pyc ADDED
Binary file (34.4 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-311.pyc ADDED
Binary file (21.3 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-311.pyc ADDED
Binary file (6.68 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-311.pyc ADDED
Binary file (21.9 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/_manylinux.py ADDED
@@ -0,0 +1,301 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import functools
3
+ import os
4
+ import re
5
+ import struct
6
+ import sys
7
+ import warnings
8
+ from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
9
+
10
+
11
+ # Python does not provide platform information at sufficient granularity to
12
+ # identify the architecture of the running executable in some cases, so we
13
+ # determine it dynamically by reading the information from the running
14
+ # process. This only applies on Linux, which uses the ELF format.
15
+ class _ELFFileHeader:
16
+ # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
17
+ class _InvalidELFFileHeader(ValueError):
18
+ """
19
+ An invalid ELF file header was found.
20
+ """
21
+
22
+ ELF_MAGIC_NUMBER = 0x7F454C46
23
+ ELFCLASS32 = 1
24
+ ELFCLASS64 = 2
25
+ ELFDATA2LSB = 1
26
+ ELFDATA2MSB = 2
27
+ EM_386 = 3
28
+ EM_S390 = 22
29
+ EM_ARM = 40
30
+ EM_X86_64 = 62
31
+ EF_ARM_ABIMASK = 0xFF000000
32
+ EF_ARM_ABI_VER5 = 0x05000000
33
+ EF_ARM_ABI_FLOAT_HARD = 0x00000400
34
+
35
+ def __init__(self, file: IO[bytes]) -> None:
36
+ def unpack(fmt: str) -> int:
37
+ try:
38
+ data = file.read(struct.calcsize(fmt))
39
+ result: Tuple[int, ...] = struct.unpack(fmt, data)
40
+ except struct.error:
41
+ raise _ELFFileHeader._InvalidELFFileHeader()
42
+ return result[0]
43
+
44
+ self.e_ident_magic = unpack(">I")
45
+ if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
46
+ raise _ELFFileHeader._InvalidELFFileHeader()
47
+ self.e_ident_class = unpack("B")
48
+ if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
49
+ raise _ELFFileHeader._InvalidELFFileHeader()
50
+ self.e_ident_data = unpack("B")
51
+ if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
52
+ raise _ELFFileHeader._InvalidELFFileHeader()
53
+ self.e_ident_version = unpack("B")
54
+ self.e_ident_osabi = unpack("B")
55
+ self.e_ident_abiversion = unpack("B")
56
+ self.e_ident_pad = file.read(7)
57
+ format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
58
+ format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
59
+ format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
60
+ format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
61
+ self.e_type = unpack(format_h)
62
+ self.e_machine = unpack(format_h)
63
+ self.e_version = unpack(format_i)
64
+ self.e_entry = unpack(format_p)
65
+ self.e_phoff = unpack(format_p)
66
+ self.e_shoff = unpack(format_p)
67
+ self.e_flags = unpack(format_i)
68
+ self.e_ehsize = unpack(format_h)
69
+ self.e_phentsize = unpack(format_h)
70
+ self.e_phnum = unpack(format_h)
71
+ self.e_shentsize = unpack(format_h)
72
+ self.e_shnum = unpack(format_h)
73
+ self.e_shstrndx = unpack(format_h)
74
+
75
+
76
+ def _get_elf_header() -> Optional[_ELFFileHeader]:
77
+ try:
78
+ with open(sys.executable, "rb") as f:
79
+ elf_header = _ELFFileHeader(f)
80
+ except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
81
+ return None
82
+ return elf_header
83
+
84
+
85
+ def _is_linux_armhf() -> bool:
86
+ # hard-float ABI can be detected from the ELF header of the running
87
+ # process
88
+ # https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
89
+ elf_header = _get_elf_header()
90
+ if elf_header is None:
91
+ return False
92
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
93
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
94
+ result &= elf_header.e_machine == elf_header.EM_ARM
95
+ result &= (
96
+ elf_header.e_flags & elf_header.EF_ARM_ABIMASK
97
+ ) == elf_header.EF_ARM_ABI_VER5
98
+ result &= (
99
+ elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
100
+ ) == elf_header.EF_ARM_ABI_FLOAT_HARD
101
+ return result
102
+
103
+
104
+ def _is_linux_i686() -> bool:
105
+ elf_header = _get_elf_header()
106
+ if elf_header is None:
107
+ return False
108
+ result = elf_header.e_ident_class == elf_header.ELFCLASS32
109
+ result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
110
+ result &= elf_header.e_machine == elf_header.EM_386
111
+ return result
112
+
113
+
114
+ def _have_compatible_abi(arch: str) -> bool:
115
+ if arch == "armv7l":
116
+ return _is_linux_armhf()
117
+ if arch == "i686":
118
+ return _is_linux_i686()
119
+ return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
120
+
121
+
122
+ # If glibc ever changes its major version, we need to know what the last
123
+ # minor version was, so we can build the complete list of all versions.
124
+ # For now, guess what the highest minor version might be, assume it will
125
+ # be 50 for testing. Once this actually happens, update the dictionary
126
+ # with the actual value.
127
+ _LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
128
+
129
+
130
+ class _GLibCVersion(NamedTuple):
131
+ major: int
132
+ minor: int
133
+
134
+
135
+ def _glibc_version_string_confstr() -> Optional[str]:
136
+ """
137
+ Primary implementation of glibc_version_string using os.confstr.
138
+ """
139
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
140
+ # to be broken or missing. This strategy is used in the standard library
141
+ # platform module.
142
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
143
+ try:
144
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
145
+ version_string = os.confstr("CS_GNU_LIBC_VERSION")
146
+ assert version_string is not None
147
+ _, version = version_string.split()
148
+ except (AssertionError, AttributeError, OSError, ValueError):
149
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
150
+ return None
151
+ return version
152
+
153
+
154
+ def _glibc_version_string_ctypes() -> Optional[str]:
155
+ """
156
+ Fallback implementation of glibc_version_string using ctypes.
157
+ """
158
+ try:
159
+ import ctypes
160
+ except ImportError:
161
+ return None
162
+
163
+ # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
164
+ # manpage says, "If filename is NULL, then the returned handle is for the
165
+ # main program". This way we can let the linker do the work to figure out
166
+ # which libc our process is actually using.
167
+ #
168
+ # We must also handle the special case where the executable is not a
169
+ # dynamically linked executable. This can occur when using musl libc,
170
+ # for example. In this situation, dlopen() will error, leading to an
171
+ # OSError. Interestingly, at least in the case of musl, there is no
172
+ # errno set on the OSError. The single string argument used to construct
173
+ # OSError comes from libc itself and is therefore not portable to
174
+ # hard code here. In any case, failure to call dlopen() means we
175
+ # can proceed, so we bail on our attempt.
176
+ try:
177
+ process_namespace = ctypes.CDLL(None)
178
+ except OSError:
179
+ return None
180
+
181
+ try:
182
+ gnu_get_libc_version = process_namespace.gnu_get_libc_version
183
+ except AttributeError:
184
+ # Symbol doesn't exist -> therefore, we are not linked to
185
+ # glibc.
186
+ return None
187
+
188
+ # Call gnu_get_libc_version, which returns a string like "2.5"
189
+ gnu_get_libc_version.restype = ctypes.c_char_p
190
+ version_str: str = gnu_get_libc_version()
191
+ # py2 / py3 compatibility:
192
+ if not isinstance(version_str, str):
193
+ version_str = version_str.decode("ascii")
194
+
195
+ return version_str
196
+
197
+
198
+ def _glibc_version_string() -> Optional[str]:
199
+ """Returns glibc version string, or None if not using glibc."""
200
+ return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
201
+
202
+
203
+ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
204
+ """Parse glibc version.
205
+
206
+ We use a regexp instead of str.split because we want to discard any
207
+ random junk that might come after the minor version -- this might happen
208
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
209
+ uses version strings like "2.20-2014.11"). See gh-3588.
210
+ """
211
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
212
+ if not m:
213
+ warnings.warn(
214
+ "Expected glibc version with 2 components major.minor,"
215
+ " got: %s" % version_str,
216
+ RuntimeWarning,
217
+ )
218
+ return -1, -1
219
+ return int(m.group("major")), int(m.group("minor"))
220
+
221
+
222
+ @functools.lru_cache()
223
+ def _get_glibc_version() -> Tuple[int, int]:
224
+ version_str = _glibc_version_string()
225
+ if version_str is None:
226
+ return (-1, -1)
227
+ return _parse_glibc_version(version_str)
228
+
229
+
230
+ # From PEP 513, PEP 600
231
+ def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
232
+ sys_glibc = _get_glibc_version()
233
+ if sys_glibc < version:
234
+ return False
235
+ # Check for presence of _manylinux module.
236
+ try:
237
+ import _manylinux # noqa
238
+ except ImportError:
239
+ return True
240
+ if hasattr(_manylinux, "manylinux_compatible"):
241
+ result = _manylinux.manylinux_compatible(version[0], version[1], arch)
242
+ if result is not None:
243
+ return bool(result)
244
+ return True
245
+ if version == _GLibCVersion(2, 5):
246
+ if hasattr(_manylinux, "manylinux1_compatible"):
247
+ return bool(_manylinux.manylinux1_compatible)
248
+ if version == _GLibCVersion(2, 12):
249
+ if hasattr(_manylinux, "manylinux2010_compatible"):
250
+ return bool(_manylinux.manylinux2010_compatible)
251
+ if version == _GLibCVersion(2, 17):
252
+ if hasattr(_manylinux, "manylinux2014_compatible"):
253
+ return bool(_manylinux.manylinux2014_compatible)
254
+ return True
255
+
256
+
257
+ _LEGACY_MANYLINUX_MAP = {
258
+ # CentOS 7 w/ glibc 2.17 (PEP 599)
259
+ (2, 17): "manylinux2014",
260
+ # CentOS 6 w/ glibc 2.12 (PEP 571)
261
+ (2, 12): "manylinux2010",
262
+ # CentOS 5 w/ glibc 2.5 (PEP 513)
263
+ (2, 5): "manylinux1",
264
+ }
265
+
266
+
267
+ def platform_tags(linux: str, arch: str) -> Iterator[str]:
268
+ if not _have_compatible_abi(arch):
269
+ return
270
+ # Oldest glibc to be supported regardless of architecture is (2, 17).
271
+ too_old_glibc2 = _GLibCVersion(2, 16)
272
+ if arch in {"x86_64", "i686"}:
273
+ # On x86/i686 also oldest glibc to be supported is (2, 5).
274
+ too_old_glibc2 = _GLibCVersion(2, 4)
275
+ current_glibc = _GLibCVersion(*_get_glibc_version())
276
+ glibc_max_list = [current_glibc]
277
+ # We can assume compatibility across glibc major versions.
278
+ # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
279
+ #
280
+ # Build a list of maximum glibc versions so that we can
281
+ # output the canonical list of all glibc from current_glibc
282
+ # down to too_old_glibc2, including all intermediary versions.
283
+ for glibc_major in range(current_glibc.major - 1, 1, -1):
284
+ glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
285
+ glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
286
+ for glibc_max in glibc_max_list:
287
+ if glibc_max.major == too_old_glibc2.major:
288
+ min_minor = too_old_glibc2.minor
289
+ else:
290
+ # For other glibc major versions oldest supported is (x, 0).
291
+ min_minor = -1
292
+ for glibc_minor in range(glibc_max.minor, min_minor, -1):
293
+ glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
294
+ tag = "manylinux_{}_{}".format(*glibc_version)
295
+ if _is_compatible(tag, arch, glibc_version):
296
+ yield linux.replace("linux", tag)
297
+ # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
298
+ if glibc_version in _LEGACY_MANYLINUX_MAP:
299
+ legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
300
+ if _is_compatible(legacy_tag, arch, glibc_version):
301
+ yield linux.replace("linux", legacy_tag)
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/_musllinux.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PEP 656 support.
2
+
3
+ This module implements logic to detect if the currently running Python is
4
+ linked against musl, and what musl version is used.
5
+ """
6
+
7
+ import contextlib
8
+ import functools
9
+ import operator
10
+ import os
11
+ import re
12
+ import struct
13
+ import subprocess
14
+ import sys
15
+ from typing import IO, Iterator, NamedTuple, Optional, Tuple
16
+
17
+
18
+ def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
19
+ return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
20
+
21
+
22
+ def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
23
+ """Detect musl libc location by parsing the Python executable.
24
+
25
+ Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
26
+ ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
27
+ """
28
+ f.seek(0)
29
+ try:
30
+ ident = _read_unpacked(f, "16B")
31
+ except struct.error:
32
+ return None
33
+ if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
34
+ return None
35
+ f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
36
+
37
+ try:
38
+ # e_fmt: Format for program header.
39
+ # p_fmt: Format for section header.
40
+ # p_idx: Indexes to find p_type, p_offset, and p_filesz.
41
+ e_fmt, p_fmt, p_idx = {
42
+ 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
43
+ 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
44
+ }[ident[4]]
45
+ except KeyError:
46
+ return None
47
+ else:
48
+ p_get = operator.itemgetter(*p_idx)
49
+
50
+ # Find the interpreter section and return its content.
51
+ try:
52
+ _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
53
+ except struct.error:
54
+ return None
55
+ for i in range(e_phnum + 1):
56
+ f.seek(e_phoff + e_phentsize * i)
57
+ try:
58
+ p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
59
+ except struct.error:
60
+ return None
61
+ if p_type != 3: # Not PT_INTERP.
62
+ continue
63
+ f.seek(p_offset)
64
+ interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
65
+ if "musl" not in interpreter:
66
+ return None
67
+ return interpreter
68
+ return None
69
+
70
+
71
+ class _MuslVersion(NamedTuple):
72
+ major: int
73
+ minor: int
74
+
75
+
76
+ def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
77
+ lines = [n for n in (n.strip() for n in output.splitlines()) if n]
78
+ if len(lines) < 2 or lines[0][:4] != "musl":
79
+ return None
80
+ m = re.match(r"Version (\d+)\.(\d+)", lines[1])
81
+ if not m:
82
+ return None
83
+ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
84
+
85
+
86
+ @functools.lru_cache()
87
+ def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
88
+ """Detect currently-running musl runtime version.
89
+
90
+ This is done by checking the specified executable's dynamic linking
91
+ information, and invoking the loader to parse its output for a version
92
+ string. If the loader is musl, the output would be something like::
93
+
94
+ musl libc (x86_64)
95
+ Version 1.2.2
96
+ Dynamic Program Loader
97
+ """
98
+ with contextlib.ExitStack() as stack:
99
+ try:
100
+ f = stack.enter_context(open(executable, "rb"))
101
+ except OSError:
102
+ return None
103
+ ld = _parse_ld_musl_from_elf(f)
104
+ if not ld:
105
+ return None
106
+ proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
107
+ return _parse_musl_version(proc.stderr)
108
+
109
+
110
+ def platform_tags(arch: str) -> Iterator[str]:
111
+ """Generate musllinux tags compatible to the current platform.
112
+
113
+ :param arch: Should be the part of platform tag after the ``linux_``
114
+ prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
115
+ prerequisite for the current platform to be musllinux-compatible.
116
+
117
+ :returns: An iterator of compatible musllinux tags.
118
+ """
119
+ sys_musl = _get_musl_version(sys.executable)
120
+ if sys_musl is None: # Python not dynamically linked against musl.
121
+ return
122
+ for minor in range(sys_musl.minor, -1, -1):
123
+ yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
124
+
125
+
126
+ if __name__ == "__main__": # pragma: no cover
127
+ import sysconfig
128
+
129
+ plat = sysconfig.get_platform()
130
+ assert plat.startswith("linux-"), "not linux"
131
+
132
+ print("plat:", plat)
133
+ print("musl:", _get_musl_version(sys.executable))
134
+ print("tags:", end=" ")
135
+ for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
136
+ print(t, end="\n ")
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/_structures.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+
6
class InfinityType:
    """Sentinel that orders greater than every other object.

    Used as an upper bound in version-comparison keys; compares equal only
    to other instances of this type.
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __eq__(self, other: object) -> bool:
        # Only instances of this sentinel type compare equal.
        return isinstance(other, self.__class__)

    def __hash__(self) -> int:
        return hash(repr(self))

    # Nothing is greater than infinity…
    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    # …and infinity exceeds everything.
    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Module-level singleton; always use this instance.
Infinity = InfinityType()
33
+
34
+
35
+ class NegativeInfinityType:
36
+ def __repr__(self) -> str:
37
+ return "-Infinity"
38
+
39
+ def __hash__(self) -> int:
40
+ return hash(repr(self))
41
+
42
+ def __lt__(self, other: object) -> bool:
43
+ return True
44
+
45
+ def __le__(self, other: object) -> bool:
46
+ return True
47
+
48
+ def __eq__(self, other: object) -> bool:
49
+ return isinstance(other, self.__class__)
50
+
51
+ def __gt__(self, other: object) -> bool:
52
+ return False
53
+
54
+ def __ge__(self, other: object) -> bool:
55
+ return False
56
+
57
+ def __neg__(self: object) -> InfinityType:
58
+ return Infinity
59
+
60
+
61
+ NegativeInfinity = NegativeInfinityType()
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/markers.py ADDED
@@ -0,0 +1,304 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import operator
6
+ import os
7
+ import platform
8
+ import sys
9
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
10
+
11
+ from pip._vendor.pyparsing import ( # noqa: N817
12
+ Forward,
13
+ Group,
14
+ Literal as L,
15
+ ParseException,
16
+ ParseResults,
17
+ QuotedString,
18
+ ZeroOrMore,
19
+ stringEnd,
20
+ stringStart,
21
+ )
22
+
23
+ from .specifiers import InvalidSpecifier, Specifier
24
+
25
+ __all__ = [
26
+ "InvalidMarker",
27
+ "UndefinedComparison",
28
+ "UndefinedEnvironmentName",
29
+ "Marker",
30
+ "default_environment",
31
+ ]
32
+
33
+ Operator = Callable[[str, str], bool]
34
+
35
+
36
class InvalidMarker(ValueError):
    """Raised when a marker string does not conform to PEP 508."""
40
+
41
+
42
class UndefinedComparison(ValueError):
    """Raised when a marker operation is attempted on unsupported values."""
46
+
47
+
48
class UndefinedEnvironmentName(ValueError):
    """Raised when a marker references a name absent from the environment."""
53
+
54
+
55
class Node:
    """Base node of a parsed marker: wraps one raw value from the grammar."""

    def __init__(self, value: Any) -> None:
        self.value = value

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return "<{}('{}')>".format(type(self).__name__, self)

    def serialize(self) -> str:
        # Subclasses decide how a node is rendered back into marker text.
        raise NotImplementedError
67
+
68
+
69
class Variable(Node):
    """A marker variable (e.g. ``python_version``); serializes to its bare name."""

    def serialize(self) -> str:
        return str(self)
72
+
73
+
74
class Value(Node):
    """A quoted literal from a marker; serializes back with double quotes."""

    def serialize(self) -> str:
        return '"{}"'.format(self)
77
+
78
+
79
class Op(Node):
    """A comparison or containment operator; serializes to its own text."""

    def serialize(self) -> str:
        return str(self)
82
+
83
+
84
# --- PEP 508 marker grammar (pyparsing) -------------------------------------

# Every variable name a marker may reference. Longer literals are listed
# before their prefixes so the alternation matches the longest name first
# (e.g. "implementation_version" before "implementation_name").
VARIABLE = (
    L("implementation_version")
    | L("platform_python_implementation")
    | L("implementation_name")
    | L("python_full_version")
    | L("platform_release")
    | L("platform_version")
    | L("platform_machine")
    | L("platform_system")
    | L("python_version")
    | L("sys_platform")
    | L("os_name")
    | L("os.name")  # PEP-345
    | L("sys.platform")  # PEP-345
    | L("platform.version")  # PEP-345
    | L("platform.machine")  # PEP-345
    | L("platform.python_implementation")  # PEP-345
    | L("python_implementation")  # undocumented setuptools legacy
    | L("extra")  # PEP-508
)
# Map deprecated dotted (PEP 345) and setuptools-legacy spellings onto
# their canonical PEP 508 names at parse time.
ALIASES = {
    "os.name": "os_name",
    "sys.platform": "sys_platform",
    "platform.version": "platform_version",
    "platform.machine": "platform_machine",
    "platform.python_implementation": "platform_python_implementation",
    "python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# Version comparison operators; "===" must come before "==" so it wins.
VERSION_CMP = (
    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# String literals may use single or double quotes.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# A single comparison, e.g.: python_version >= "3.6"; parsed into a
# (lhs, op, rhs) tuple of Node instances.
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Comparisons combined with "and"/"or", with optional parenthesized grouping.
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# A complete marker must consume the whole input string.
MARKER = stringStart + MARKER_EXPR + stringEnd
139
+
140
+
141
def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
    """Recursively convert a pyparsing ParseResults tree into plain lists."""
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(child) for child in results]
146
+
147
+
148
def _format_marker(
    marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
) -> str:
    """Render a parsed marker structure back into PEP 508 marker syntax.

    ``first`` is True only for the outermost call so the top level is not
    wrapped in parentheses.
    """
    assert isinstance(marker, (list, tuple, str))

    # A one-element list wrapping another list/tuple (``[[...]]``) carries no
    # grouping of its own; unwrap it so no extraneous parentheses are emitted.
    if isinstance(marker, list) and len(marker) == 1:
        if isinstance(marker[0], (list, tuple)):
            return _format_marker(marker[0])

    if isinstance(marker, str):
        return marker
    if isinstance(marker, tuple):
        # A single comparison: serialize lhs, operator and rhs.
        return " ".join(node.serialize() for node in marker)
    rendered = " ".join(_format_marker(part, first=False) for part in marker)
    return rendered if first else "(" + rendered + ")"
175
+
176
+
177
# Fallback plain-string comparison operators, used by _eval_op when the
# right-hand side is not a valid PEP 440 version specifier.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
187
+
188
+
189
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate ``lhs <op> rhs``, preferring PEP 440 version semantics.

    Raises UndefinedComparison when the operator is not valid for plain
    strings either.
    """
    serialized_op = op.serialize()

    # First try to interpret the comparison as a version specifier
    # (e.g. python_version >= "3.6"); fall back to string operators.
    try:
        spec = Specifier(serialized_op + rhs)
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    fallback: Optional[Operator] = _operators.get(serialized_op)
    if fallback is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return fallback(lhs, rhs)
202
+
203
+
204
+ class Undefined:
205
+ pass
206
+
207
+
208
+ _undefined = Undefined()
209
+
210
+
211
def _get_env(environment: Dict[str, str], name: str) -> str:
    """Return ``environment[name]``, raising UndefinedEnvironmentName if absent."""
    looked_up: Union[str, Undefined] = environment.get(name, _undefined)

    if isinstance(looked_up, Undefined):
        raise UndefinedEnvironmentName(
            f"{name!r} does not exist in evaluation environment."
        )

    return looked_up
220
+
221
+
222
+ def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
223
+ groups: List[List[bool]] = [[]]
224
+
225
+ for marker in markers:
226
+ assert isinstance(marker, (list, tuple, str))
227
+
228
+ if isinstance(marker, list):
229
+ groups[-1].append(_evaluate_markers(marker, environment))
230
+ elif isinstance(marker, tuple):
231
+ lhs, op, rhs = marker
232
+
233
+ if isinstance(lhs, Variable):
234
+ lhs_value = _get_env(environment, lhs.value)
235
+ rhs_value = rhs.value
236
+ else:
237
+ lhs_value = lhs.value
238
+ rhs_value = _get_env(environment, rhs.value)
239
+
240
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
241
+ else:
242
+ assert marker in ["and", "or"]
243
+ if marker == "or":
244
+ groups.append([])
245
+
246
+ return any(all(item) for item in groups)
247
+
248
+
249
def format_full_version(info: "sys._version_info") -> str:
    """Format a ``sys.implementation.version``-style struct as a version string.

    Non-final release levels append the level's first letter plus the serial
    (e.g. ``3.12.0c1`` for release candidate 1).
    """
    version = f"{info.major}.{info.minor}.{info.micro}"
    level = info.releaselevel
    if level != "final":
        version += level[0] + str(info.serial)
    return version
255
+
256
+
257
def default_environment() -> Dict[str, str]:
    """Return the PEP 508 marker environment for the running interpreter."""
    implementation = sys.implementation
    return {
        "implementation_name": implementation.name,
        "implementation_version": format_full_version(implementation.version),
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # Marker "python_version" is only major.minor per PEP 508.
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
273
+
274
+
275
class Marker:
    """A parsed PEP 508 environment marker (e.g. ``python_version >= "3.6"``)."""

    def __init__(self, marker: str) -> None:
        # Parse eagerly; malformed input is reported as InvalidMarker with
        # a snippet of the text around the failure position.
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            raise InvalidMarker(
                f"Invalid marker: {marker!r}, parse error at "
                f"{marker[e.loc : e.loc + 8]!r}"
            )

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/py.typed ADDED
File without changes
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/requirements.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import re
6
+ import string
7
+ import urllib.parse
8
+ from typing import List, Optional as TOptional, Set
9
+
10
+ from pip._vendor.pyparsing import ( # noqa
11
+ Combine,
12
+ Literal as L,
13
+ Optional,
14
+ ParseException,
15
+ Regex,
16
+ Word,
17
+ ZeroOrMore,
18
+ originalTextFor,
19
+ stringEnd,
20
+ stringStart,
21
+ )
22
+
23
+ from .markers import MARKER_EXPR, Marker
24
+ from .specifiers import LegacySpecifier, Specifier, SpecifierSet
25
+
26
+
27
class InvalidRequirement(ValueError):
    """Raised when a requirement string does not conform to PEP 508."""
31
+
32
+
33
# --- PEP 508 requirement grammar (pyparsing) ---------------------------------

ALPHANUM = Word(string.ascii_letters + string.digits)

LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

# Project/extra names: alphanumeric runs optionally joined by "-", "_" or
# ".", never starting or ending with punctuation.
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

# "name @ url" form: the URL is any run of non-space characters.
URI = Regex(r"[^ ]+")("url")
URL = AT + URI

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Individual version clauses reuse the specifier regexes; PEP 440 and
# legacy clauses are tried in parallel ("^" = longest match wins).
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# Re-wrap the marker grammar so the original marker source text can be
# recovered and re-parsed into a Marker object.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
85
+
86
+
87
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    # If so how do we do that? Do we need to test against the _name_ of
    # the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
            )

        self.name: str = req.name
        if req.url:
            # A URL requirement must be either a file: URL that round-trips
            # through urlparse unchanged, or a URL with both scheme and host.
            parsed_url = urllib.parse.urlparse(req.url)
            if parsed_url.scheme == "file":
                if urllib.parse.urlunparse(parsed_url) != req.url:
                    raise InvalidRequirement("Invalid URL given")
            elif not (parsed_url.scheme and parsed_url.netloc) or (
                not parsed_url.scheme and not parsed_url.netloc
            ):
                raise InvalidRequirement(f"Invalid URL: {req.url}")
            self.url: TOptional[str] = req.url
        else:
            self.url = None
        self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
        self.marker: TOptional[Marker] = req.marker if req.marker else None

    def __str__(self) -> str:
        # Reassemble the canonical PEP 508 form:
        # name[extras]specifier @ url ; marker
        parts: List[str] = [self.name]

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            parts.append(f"[{formatted_extras}]")

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append(f"@ {self.url}")
            if self.marker:
                # Separate the URL from the marker so the ";" is unambiguous.
                parts.append(" ")

        if self.marker:
            parts.append(f"; {self.marker}")

        return "".join(parts)

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/specifiers.py ADDED
@@ -0,0 +1,802 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import abc
6
+ import functools
7
+ import itertools
8
+ import re
9
+ import warnings
10
+ from typing import (
11
+ Callable,
12
+ Dict,
13
+ Iterable,
14
+ Iterator,
15
+ List,
16
+ Optional,
17
+ Pattern,
18
+ Set,
19
+ Tuple,
20
+ TypeVar,
21
+ Union,
22
+ )
23
+
24
+ from .utils import canonicalize_version
25
+ from .version import LegacyVersion, Version, parse
26
+
27
+ ParsedVersion = Union[Version, LegacyVersion]
28
+ UnparsedVersion = Union[Version, LegacyVersion, str]
29
+ VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
30
+ CallableOperator = Callable[[ParsedVersion, str], bool]
31
+
32
+
33
class InvalidSpecifier(ValueError):
    """Raised when a version specifier does not conform to PEP 440."""
37
+
38
+
39
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface for specifier objects.

    Concrete subclasses provide string form, hashing/equality, a prerelease
    policy, containment testing, and filtering of version iterables.
    """

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractproperty
    def prereleases(self) -> Optional[bool]:
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
+
89
+
90
class _IndividualSpecifier(BaseSpecifier):
    """Shared implementation for single-clause specifiers.

    Subclasses supply ``_regex`` (parsing "<operator><version>") and
    ``_operators`` (mapping operator strings to ``_compare_*`` method
    name suffixes).
    """

    _operators: Dict[str, str] = {}
    _regex: Pattern[str]

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        # Normalized (operator, version) pair with surrounding whitespace
        # stripped from both components.
        self._spec: Tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self) -> str:
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> Tuple[str, str]:
        # Canonicalized form used for hashing and equality so that
        # equivalent spellings of the same version compare equal.
        return self._spec[0], canonicalize_version(self._spec[1])

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        # Strings are parsed into a specifier of the same class first.
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableOperator:
        # Dispatch "==" -> self._compare_equal etc. via the _operators map.
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self) -> str:
        # The operator component, e.g. ">=".
        return self._spec[0]

    @property
    def version(self) -> str:
        # The version component, e.g. "1.0".
        return self._spec[1]

    @property
    def prereleases(self) -> Optional[bool]:
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: str) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        normalized_item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:

        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
+ yield version
225
+
226
+
227
class LegacySpecifier(_IndividualSpecifier):
    """A deprecated specifier using setuptools-style (non-PEP 440) ordering.

    Construction emits a DeprecationWarning; comparisons delegate to
    LegacyVersion's ordering.
    """

    _regex_str = r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    # Operator string -> _compare_* method-name suffix.
    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        super().__init__(spec, prereleases)

        # Legacy (non-PEP 440) specifiers are deprecated alongside
        # LegacyVersion itself.
        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(
        self, prospective: LegacyVersion, spec: str
    ) -> bool:
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective > self._coerce_version(spec)
285
+
286
+
287
def _require_version_compare(
    fn: Callable[["Specifier", ParsedVersion, str], bool]
) -> Callable[["Specifier", ParsedVersion, str], bool]:
    """Guard a Specifier comparison so non-PEP 440 versions never match."""

    @functools.wraps(fn)
    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
        # LegacyVersion (or anything else) cannot satisfy a PEP 440 clause.
        if isinstance(prospective, Version):
            return fn(self, prospective, spec)
        return False

    return wrapped
297
+
298
+
299
+ class Specifier(_IndividualSpecifier):
300
+
301
+ _regex_str = r"""
302
+ (?P<operator>(~=|==|!=|<=|>=|<|>|===))
303
+ (?P<version>
304
+ (?:
305
+ # The identity operators allow for an escape hatch that will
306
+ # do an exact string match of the version you wish to install.
307
+ # This will not be parsed by PEP 440 and we cannot determine
308
+ # any semantic meaning from it. This operator is discouraged
309
+ # but included entirely as an escape hatch.
310
+ (?<====) # Only match for the identity operator
311
+ \s*
312
+ [^\s]* # We just match everything, except for whitespace
313
+ # since we are only testing for strict identity.
314
+ )
315
+ |
316
+ (?:
317
+ # The (non)equality operators allow for wild card and local
318
+ # versions to be specified so we have to define these two
319
+ # operators separately to enable that.
320
+ (?<===|!=) # Only match for equals and not equals
321
+
322
+ \s*
323
+ v?
324
+ (?:[0-9]+!)? # epoch
325
+ [0-9]+(?:\.[0-9]+)* # release
326
+ (?: # pre release
327
+ [-_\.]?
328
+ (a|b|c|rc|alpha|beta|pre|preview)
329
+ [-_\.]?
330
+ [0-9]*
331
+ )?
332
+ (?: # post release
333
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
334
+ )?
335
+
336
+ # You cannot use a wild card and a dev or local version
337
+ # together so group them with a | and make them optional.
338
+ (?:
339
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
340
+ (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
341
+ |
342
+ \.\* # Wild card syntax of .*
343
+ )?
344
+ )
345
+ |
346
+ (?:
347
+ # The compatible operator requires at least two digits in the
348
+ # release segment.
349
+ (?<=~=) # Only match for the compatible operator
350
+
351
+ \s*
352
+ v?
353
+ (?:[0-9]+!)? # epoch
354
+ [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
355
+ (?: # pre release
356
+ [-_\.]?
357
+ (a|b|c|rc|alpha|beta|pre|preview)
358
+ [-_\.]?
359
+ [0-9]*
360
+ )?
361
+ (?: # post release
362
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
363
+ )?
364
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
365
+ )
366
+ |
367
+ (?:
368
+ # All other operators only allow a sub set of what the
369
+ # (non)equality operators do. Specifically they do not allow
370
+ # local versions to be specified nor do they allow the prefix
371
+ # matching wild cards.
372
+ (?<!==|!=|~=) # We have special cases for these
373
+ # operators so we want to make sure they
374
+ # don't match here.
375
+
376
+ \s*
377
+ v?
378
+ (?:[0-9]+!)? # epoch
379
+ [0-9]+(?:\.[0-9]+)* # release
380
+ (?: # pre release
381
+ [-_\.]?
382
+ (a|b|c|rc|alpha|beta|pre|preview)
383
+ [-_\.]?
384
+ [0-9]*
385
+ )?
386
+ (?: # post release
387
+ (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
388
+ )?
389
+ (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
390
+ )
391
+ )
392
+ """
393
+
394
+ _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
395
+
396
+ _operators = {
397
+ "~=": "compatible",
398
+ "==": "equal",
399
+ "!=": "not_equal",
400
+ "<=": "less_than_equal",
401
+ ">=": "greater_than_equal",
402
+ "<": "less_than",
403
+ ">": "greater_than",
404
+ "===": "arbitrary",
405
+ }
406
+
407
    @_require_version_compare
    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
        """Implement ``~=``: true when *prospective* is ``>= spec`` and matches
        the ``== prefix.*`` derived from *spec*."""

        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = ".".join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )
428
+
429
    @_require_version_compare
    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """Implement ``==``, including the ``1.2.*`` prefix-matching form."""

        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            split_spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            split_prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = split_prospective[: len(split_spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            padded_spec, padded_prospective = _pad_version(
                split_spec, shortened_prospective
            )

            return padded_prospective == padded_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version
468
+
469
    @_require_version_compare
    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """Implement ``!=`` as the negation of ``==``."""
        return not self._compare_equal(prospective, spec)
472
+
473
    @_require_version_compare
    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """Implement ``<=``; the prospective version's local label is ignored."""

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)
480
+
481
    @_require_version_compare
    def _compare_greater_than_equal(
        self, prospective: ParsedVersion, spec: str
    ) -> bool:
        """Implement ``>=``; the prospective version's local label is ignored."""

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)
490
+
491
    @_require_version_compare
    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
        """Implement ``<`` with the PEP 440 pre-release exclusion rule."""

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True
516
+
517
    @_require_version_compare
    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
        """Implement ``>`` with the PEP 440 post-release and local-version
        exclusion rules."""

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True
548
+
549
    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        """Implement ``===``: case-insensitive string equality with no version
        semantics at all."""
        return str(prospective).lower() == str(spec).lower()
551
+
552
    @property
    def prereleases(self) -> bool:
        """Whether this single specifier should admit pre-release versions."""

        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False
576
+
577
    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        # Explicitly force (True) or forbid (False) pre-release matching,
        # overriding the inferred behaviour of the property above.
        self._prereleases = value
580
+
581
+
582
# Matches a release number directly followed by a pre-release marker,
# e.g. "0rc1" -> groups ("0", "rc1").
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")


def _version_split(version: str) -> List[str]:
    """Split *version* on dots, additionally separating a release number from
    a directly attached pre-release suffix (e.g. ``"2.0rc1"`` -> ``["2", "0",
    "rc1"]``)."""
    pieces: List[str] = []
    for chunk in version.split("."):
        matched = _prefix_regex.search(chunk)
        if matched:
            pieces.extend(matched.groups())
        else:
            pieces.append(chunk)
    return pieces
594
+
595
+
596
def _is_not_suffix(segment: str) -> bool:
    """Return True unless *segment* looks like a dev/pre/post suffix part."""
    # str.startswith accepts a tuple: one call covers all suffix markers.
    return not segment.startswith(("dev", "a", "b", "rc", "post"))
600
+
601
+
602
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
    """Zero-pad the shorter release segment so the two split versions can be
    compared element-wise; suffix segments are left untouched."""
    # The leading run of purely numeric components is the release segment.
    left_release = list(itertools.takewhile(str.isdigit, left))
    right_release = list(itertools.takewhile(str.isdigit, right))

    # Everything after the release segment (pre/post/dev/local parts).
    left_rest = left[len(left_release):]
    right_rest = right[len(right_release):]

    # Pad whichever release segment is shorter with "0" components.
    left_pad = ["0"] * max(0, len(right_release) - len(left_release))
    right_pad = ["0"] * max(0, len(left_release) - len(right_release))

    return (
        left_release + left_pad + left_rest,
        right_release + right_pad + right_rest,
    )
618
+
619
+
620
class SpecifierSet(BaseSpecifier):
    """A set of individual specifier clauses (e.g. ``">=1.0,<2.0"``) that are
    combined with logical AND."""

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Parse a comma-separated *specifiers* string.

        ``prereleases`` explicitly allows (True) or blocks (False)
        pre-releases; ``None`` defers to the individual specifiers.
        """
        # Split on , to break each individual specifier into it's own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parsed each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed: Set[_IndividualSpecifier] = set()
        for specifier in split_specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self) -> str:
        # Only surface the prereleases override when one was set explicitly.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        # Sorted so the string form is deterministic regardless of set order.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Intersect two sets (``a & b``); their prerelease overrides must be
        compatible, otherwise ``ValueError`` is raised."""
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        # Strings and single specifiers are coerced to a SpecifierSet before
        # comparing; only the specifier clauses matter, not the overrides.
        if isinstance(other, (str, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        return len(self._specs)

    def __iter__(self) -> Iterator[_IndividualSpecifier]:
        return iter(self._specs)

    @property
    def prereleases(self) -> Optional[bool]:
        """Whether pre-releases are allowed: the explicit override if set,
        otherwise inferred from the individual specifiers (None when empty)."""

        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: UnparsedVersion) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return True when *item* satisfies every specifier in the set."""

        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Filter *iterable* down to the versions that satisfy this set."""

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered: List[VersionTypeVar] = []
            found_prereleases: List[VersionTypeVar] = []

            item: UnparsedVersion
            parsed_version: Union[Version, LegacyVersion]

            for item in iterable:
                # Ensure that we some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/tags.py ADDED
@@ -0,0 +1,487 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import logging
6
+ import platform
7
+ import sys
8
+ import sysconfig
9
+ from importlib.machinery import EXTENSION_SUFFIXES
10
+ from typing import (
11
+ Dict,
12
+ FrozenSet,
13
+ Iterable,
14
+ Iterator,
15
+ List,
16
+ Optional,
17
+ Sequence,
18
+ Tuple,
19
+ Union,
20
+ cast,
21
+ )
22
+
23
+ from . import _manylinux, _musllinux
24
+
25
logger = logging.getLogger(__name__)

# A (major, minor, ...) interpreter version tuple, e.g. (3, 11).
PythonVersion = Sequence[int]
# A (major, minor) macOS release, e.g. (10, 15) or (11, 0).
MacVersion = Tuple[int, int]

# Long interpreter implementation names mapped to the short forms used in
# wheel tags (e.g. "cpython" -> "cp").
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when this interpreter uses 32-bit pointers (even on a 64-bit OS).
_32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
40
+
41
+
42
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Sets of tags are hashed and compared very frequently (e.g. when a
        # page of candidate links is scanned against the supported tag set),
        # so the hash is computed once up front rather than on every lookup.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # Compare the precomputed hashes first as a cheap rejection test,
        # then confirm field by field.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
94
+
95
+
96
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    interpreters, abis, platforms = tag.split("-")
    # Each of the three fields may itself be a "."-compressed list; expand
    # the full cross product.
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
110
+
111
+
112
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    """Look up *name* via sysconfig; optionally log a debug note when unset."""
    value = sysconfig.get_config_var(name)
    if value is not None:
        return value
    if warn:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return None
119
+
120
+
121
def _normalize_string(string: str) -> str:
    """Make *string* usable inside a wheel tag: ``.`` and ``-`` become ``_``."""
    return string.translate(str.maketrans({".": "_", "-": "_"}))
123
+
124
+
125
def _abi3_applies(python_version: PythonVersion) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2.
    """
    # A major-only version (e.g. (3,)) cannot be matched against abi3.
    if len(python_version) < 2:
        return False
    return tuple(python_version) >= (3, 2)
132
+
133
+
134
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Return the ABI tag strings for a CPython build of *py_version*.

    The fully-flagged ``cp<ver><d><m><u>`` tag is inserted first; on >=3.8
    debug builds a plain ``cp<ver>`` is also included.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    if py_version < (3, 8):
        # The pymalloc ("m") and wide-unicode ("u") flags only existed before
        # 3.8 / 3.3 respectively.
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
168
+
169
+
170
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABItag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    # abi3 wheels built against older CPython minor versions (down to 3.2)
    # are also usable, in descending-version order.
    if _abi3_applies(python_version):
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
225
+
226
+
227
def _generic_abi() -> Iterator[str]:
    """Yield the ABI tag derived from the SOABI config value, if defined."""
    abi = sysconfig.get_config_var("SOABI")
    if abi:
        yield _normalize_string(abi)
231
+
232
+
233
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    # Default to the running interpreter's short name + version, e.g. "pp39".
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    platforms = list(platforms or platform_tags())
    abis = list(abis)
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
261
+
262
+
263
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    if len(py_version) > 1:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if len(py_version) > 1:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((py_version[0], minor))}"
276
+
277
+
278
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    # Platform-specific tags first, then the platform-agnostic "any" tags.
    for version in _py_interpreter_range(python_version):
        for platform_ in platforms:
            yield Tag(version, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for version in _py_interpreter_range(python_version):
        yield Tag(version, "none", "any")
301
+
302
+
303
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map *arch* to its 32-bit variant when running a 32-bit interpreter."""
    if not is_32bit:
        return arch
    return "ppc" if arch.startswith("ppc") else "i386"
311
+
312
+
313
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """List the wheel binary-format names usable for *cpu_arch* on macOS
    *version*, most specific first; empty when the combination is invalid."""
    formats = [cpu_arch]

    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats += ["intel", "fat64", "fat32"]
    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats += ["intel", "fat32", "fat"]
    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if not (10, 4) <= version <= (10, 5):
            return []
        formats += ["fat64"]
    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        formats += ["fat32", "fat"]

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
343
+
344
+
345
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    # Fix: the original had no-op ``else: version = version`` /
    # ``else: arch = arch`` branches; defaults are filled in only when needed.
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
415
+
416
+
417
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux first, then musllinux, then the
    plain ``linux_<arch>`` tag."""
    linux = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        # Map 64-bit platform names to their 32-bit equivalents when the
        # interpreter itself is a 32-bit build.
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv7l"
    _, arch = linux.split("_", 1)
    yield from _manylinux.platform_tags(linux, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux
428
+
429
+
430
def _generic_platforms() -> Iterator[str]:
    """Yield the single platform tag derived from sysconfig's platform string."""
    yield _normalize_string(sysconfig.get_platform())
432
+
433
+
434
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    # Dispatch on OS: macOS and Linux have specialised tag generators.
    if platform.system() == "Darwin":
        return mac_platforms()
    elif platform.system() == "Linux":
        return _linux_platforms()
    else:
        return _generic_platforms()
444
+
445
+
446
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.
    """
    # Use the short wheel-tag form (e.g. "cp" for CPython) when known,
    # otherwise fall back to the full implementation name.
    name = sys.implementation.name
    return INTERPRETER_SHORT_NAMES.get(name) or name
452
+
453
+
454
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    # Prefer the build-time "py_version_nodot" config value (e.g. "311");
    # fall back to composing it from sys.version_info.
    version = _get_config_var("py_version_nodot", warn=warn)
    if version:
        version = str(version)
    else:
        version = _version_nodot(sys.version_info[:2])
    return version
464
+
465
+
466
def _version_nodot(version: PythonVersion) -> str:
    """Render a version tuple without separators, e.g. ``(3, 11)`` -> ``"311"``."""
    return "".join(str(part) for part in version)
468
+
469
+
470
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """

    interp_name = interpreter_name()
    # CPython gets the specialised tag generator; everything else uses the
    # generic one.
    if interp_name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # PyPy additionally advertises the "pp3" compatible-interpreter tag.
    if interp_name == "pp":
        yield from compatible_tags(interpreter="pp3")
    else:
        yield from compatible_tags()
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/utils.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import re
6
+ from typing import FrozenSet, NewType, Tuple, Union, cast
7
+
8
+ from .tags import Tag, parse_tag
9
+ from .version import InvalidVersion, Version
10
+
11
# Wheel build tag: either an empty tuple (no build) or
# (build number, remaining suffix) per PEP 427.
BuildTag = Union[Tuple[()], Tuple[int, str]]
# Marker type for names that have been through canonicalize_name().
NormalizedName = NewType("NormalizedName", str)
13
+
14
+
15
class InvalidWheelFilename(ValueError):
    """Raised for a wheel filename that does not follow PEP 427."""
19
+
20
+
21
class InvalidSdistFilename(ValueError):
    """Raised for an sdist filename that does not follow the packaging user guide."""
25
+
26
+
27
# PEP 503: runs of "-", "_", "." all normalize to a single "-".
_canonicalize_regex = re.compile(r"[-_.]+")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
30
+
31
+
32
def canonicalize_name(name: str) -> NormalizedName:
    """Normalize a project name per PEP 503 (lowercase, separators -> "-")."""
    collapsed = _canonicalize_regex.sub("-", name)
    return cast(NormalizedName, collapsed.lower())
36
+
37
+
38
def canonicalize_version(version: Union[Version, str]) -> str:
    """
    Return the canonical string form of *version*.

    Very similar to Version.__str__, but trailing zero components of the
    release segment are stripped. Strings that are not valid PEP 440
    versions are returned unchanged.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    rendered = []

    # Epoch
    if parsed.epoch != 0:
        rendered.append(f"{parsed.epoch}!")

    # Release segment with trailing ".0" components removed
    release = ".".join(str(segment) for segment in parsed.release)
    rendered.append(re.sub(r"(\.0)+$", "", release))

    # Pre-release
    if parsed.pre is not None:
        rendered.append("".join(str(piece) for piece in parsed.pre))

    # Post-release
    if parsed.post is not None:
        rendered.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        rendered.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        rendered.append(f"+{parsed.local}")

    return "".join(rendered)
79
+
80
+
81
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """
    Parse a wheel filename into (normalized name, version, build tag, tags).

    Raises InvalidWheelFilename when the filename violates PEP 427.
    """
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            # Include the offending filename in the message (it had been
            # replaced by a literal "(unknown)" placeholder).
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Split off name/version/(build); the trailing three tag parts stay joined.
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)
    version = Version(parts[1])
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
115
+
116
+
117
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """
    Parse an sdist filename into (normalized name, version).

    Raises InvalidSdistFilename when the extension is not '.tar.gz'/'.zip'
    or the stem lacks a name-version separator.
    """
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            # Include the offending filename in the message (it had been
            # replaced by a literal "(unknown)" placeholder).
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)
    version = Version(version_part)
    return (name, version)
.venv/lib/python3.11/site-packages/pip/_vendor/packaging/version.py ADDED
@@ -0,0 +1,504 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import collections
6
+ import itertools
7
+ import re
8
+ import warnings
9
+ from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
10
+
11
+ from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
12
+
13
# Public API of this module.
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]

# Type aliases describing the shapes of the comparison keys built by
# _cmpkey()/_legacy_cmpkey().
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
SubLocalType = Union[InfiniteTypes, int, str]
LocalType = Union[
    NegativeInfinityType,
    Tuple[
        Union[
            SubLocalType,
            Tuple[SubLocalType, str],
            Tuple[NegativeInfinityType, SubLocalType],
        ],
        ...,
    ],
]
CmpKey = Tuple[
    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
VersionComparisonMethod = Callable[
    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]

# Raw parsed fields of a PEP 440 version as extracted by Version.__init__.
_Version = collections.namedtuple(
    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
40
+
41
+
42
def parse(version: str) -> Union["LegacyVersion", "Version"]:
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.

    Note: constructing the LegacyVersion fallback emits a DeprecationWarning.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)
52
+
53
+
54
class InvalidVersion(ValueError):
    """Raised when a version string does not comply with PEP 440."""
58
+
59
+
60
class _BaseVersion:
    """Shared rich-comparison behaviour; subclasses must populate ``_key``."""

    # Precomputed sort key; all comparisons and hashing delegate to it.
    _key: Union[CmpKey, LegacyCmpKey]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key
104
+
105
+
106
class LegacyVersion(_BaseVersion):
    """A non-PEP 440 version, ordered via the historical setuptools scheme.

    Deprecated: constructing one emits a DeprecationWarning.
    """

    def __init__(self, version: str) -> None:
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def __str__(self) -> str:
        return self._version

    def __repr__(self) -> str:
        return f"<LegacyVersion('{self}')>"

    @property
    def public(self) -> str:
        # Legacy versions have no local segment, so the whole string is public.
        return self._version

    @property
    def base_version(self) -> str:
        return self._version

    @property
    def epoch(self) -> int:
        # -1 sorts every LegacyVersion before all PEP 440 versions (epoch >= 0).
        return -1

    # PEP 440 segments do not exist for legacy versions, hence all None/False.
    @property
    def release(self) -> None:
        return None

    @property
    def pre(self) -> None:
        return None

    @property
    def post(self) -> None:
        return None

    @property
    def dev(self) -> None:
        return None

    @property
    def local(self) -> None:
        return None

    @property
    def is_prerelease(self) -> bool:
        return False

    @property
    def is_postrelease(self) -> bool:
        return False

    @property
    def is_devrelease(self) -> bool:
        return False
166
+
167
+
168
# Splits a legacy version into numeric runs, alpha runs, and "."/"-" separators.
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)

# Spelling aliases used by the historical setuptools ordering scheme;
# "@" sorts dev releases first, "final-" marks the release boundary.
_legacy_version_replacement_map = {
    "pre": "c",
    "preview": "c",
    "-": "final-",
    "rc": "c",
    "dev": "@",
}
177
+
178
+
179
def _parse_version_parts(s: str) -> Iterator[str]:
    """Yield normalized, sortable components of a legacy version string."""
    for piece in _legacy_version_component_re.split(s):
        piece = _legacy_version_replacement_map.get(piece, piece)

        # Drop empty split artifacts and "." separators outright.
        if not piece or piece == ".":
            continue

        if piece[:1] in "0123456789":
            # Zero-pad so numeric components compare correctly as strings.
            yield piece.zfill(8)
        else:
            # Prefix alpha components so they sort before any number.
            yield "*" + piece

    # ensure that alpha/beta/candidate are before final
    yield "*final"
194
+
195
+
196
def _legacy_cmpkey(version: str) -> LegacyCmpKey:
    """Build the comparison key for a non-PEP 440 (legacy) version string."""

    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts: List[str] = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)

    return epoch, tuple(parts)
221
+
222
+
223
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
# NOTE: must be compiled with re.VERBOSE | re.IGNORECASE (see Version._regex).
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""
255
+
256
+
257
class Version(_BaseVersion):
    """A PEP 440 compliant version; comparisons use the precomputed ``_key``."""

    # Anchored, whitespace-tolerant form of the module-level VERSION_PATTERN.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version: str) -> None:
        """Parse *version*; raises InvalidVersion if it is not PEP 440."""

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """Render the normalized PEP 440 string form."""
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        _epoch: int = self._version.epoch
        return _epoch

    @property
    def release(self) -> Tuple[int, ...]:
        _release: Tuple[int, ...] = self._version.release
        return _release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        _pre: Optional[Tuple[str, int]] = self._version.pre
        return _pre

    @property
    def post(self) -> Optional[int]:
        # Only the numeric part; the letter is always normalized to "post".
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        # Everything before the "+"-prefixed local segment.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        # Epoch + release only: no pre/post/dev/local segments.
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        return self.dev is not None

    @property
    def major(self) -> int:
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        return self.release[2] if len(self.release) >= 3 else 0
391
+
392
+
393
+ def _parse_letter_version(
394
+ letter: str, number: Union[str, bytes, SupportsInt]
395
+ ) -> Optional[Tuple[str, int]]:
396
+
397
+ if letter:
398
+ # We consider there to be an implicit 0 in a pre-release if there is
399
+ # not a numeral associated with it.
400
+ if number is None:
401
+ number = 0
402
+
403
+ # We normalize any letters to their lower case form
404
+ letter = letter.lower()
405
+
406
+ # We consider some words to be alternate spellings of other words and
407
+ # in those cases we want to normalize the spellings to our preferred
408
+ # spelling.
409
+ if letter == "alpha":
410
+ letter = "a"
411
+ elif letter == "beta":
412
+ letter = "b"
413
+ elif letter in ["c", "pre", "preview"]:
414
+ letter = "rc"
415
+ elif letter in ["rev", "r"]:
416
+ letter = "post"
417
+
418
+ return letter, int(number)
419
+ if not letter and number:
420
+ # We assume if we are given a number, but we are not given a letter
421
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
422
+ letter = "post"
423
+
424
+ return letter, int(number)
425
+
426
+ return None
427
+
428
+
429
+ _local_version_separators = re.compile(r"[\._-]")
430
+
431
+
432
+ def _parse_local_version(local: str) -> Optional[LocalType]:
433
+ """
434
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
435
+ """
436
+ if local is not None:
437
+ return tuple(
438
+ part.lower() if not part.isdigit() else int(part)
439
+ for part in _local_version_separators.split(local)
440
+ )
441
+ return None
442
+
443
+
444
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[Tuple[SubLocalType]],
) -> CmpKey:
    """Build the tuple used for ordering PEP 440 versions."""

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: PrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: PrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: PrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: LocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__init__.py ADDED
@@ -0,0 +1,322 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # module pyparsing.py
2
+ #
3
+ # Copyright (c) 2003-2022 Paul T. McGuire
4
+ #
5
+ # Permission is hereby granted, free of charge, to any person obtaining
6
+ # a copy of this software and associated documentation files (the
7
+ # "Software"), to deal in the Software without restriction, including
8
+ # without limitation the rights to use, copy, modify, merge, publish,
9
+ # distribute, sublicense, and/or sell copies of the Software, and to
10
+ # permit persons to whom the Software is furnished to do so, subject to
11
+ # the following conditions:
12
+ #
13
+ # The above copyright notice and this permission notice shall be
14
+ # included in all copies or substantial portions of the Software.
15
+ #
16
+ # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
+ # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
+ # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
19
+ # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
20
+ # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21
+ # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22
+ # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23
+ #
24
+
25
+ __doc__ = """
26
+ pyparsing module - Classes and methods to define and execute parsing grammars
27
+ =============================================================================
28
+
29
+ The pyparsing module is an alternative approach to creating and
30
+ executing simple grammars, vs. the traditional lex/yacc approach, or the
31
+ use of regular expressions. With pyparsing, you don't need to learn
32
+ a new syntax for defining grammars or matching expressions - the parsing
33
+ module provides a library of classes that you use to construct the
34
+ grammar directly in Python.
35
+
36
+ Here is a program to parse "Hello, World!" (or any greeting of the form
37
+ ``"<salutation>, <addressee>!"``), built up using :class:`Word`,
38
+ :class:`Literal`, and :class:`And` elements
39
+ (the :meth:`'+'<ParserElement.__add__>` operators create :class:`And` expressions,
40
+ and the strings are auto-converted to :class:`Literal` expressions)::
41
+
42
+ from pip._vendor.pyparsing import Word, alphas
43
+
44
+ # define grammar of a greeting
45
+ greet = Word(alphas) + "," + Word(alphas) + "!"
46
+
47
+ hello = "Hello, World!"
48
+ print(hello, "->", greet.parse_string(hello))
49
+
50
+ The program outputs the following::
51
+
52
+ Hello, World! -> ['Hello', ',', 'World', '!']
53
+
54
+ The Python representation of the grammar is quite readable, owing to the
55
+ self-explanatory class names, and the use of :class:`'+'<And>`,
56
+ :class:`'|'<MatchFirst>`, :class:`'^'<Or>` and :class:`'&'<Each>` operators.
57
+
58
+ The :class:`ParseResults` object returned from
59
+ :class:`ParserElement.parse_string` can be
60
+ accessed as a nested list, a dictionary, or an object with named
61
+ attributes.
62
+
63
+ The pyparsing module handles some of the problems that are typically
64
+ vexing when writing text parsers:
65
+
66
+ - extra or missing whitespace (the above program will also handle
67
+ "Hello,World!", "Hello , World !", etc.)
68
+ - quoted strings
69
+ - embedded comments
70
+
71
+
72
+ Getting Started -
73
+ -----------------
74
+ Visit the classes :class:`ParserElement` and :class:`ParseResults` to
75
+ see the base classes that most other pyparsing
76
+ classes inherit from. Use the docstrings for examples of how to:
77
+
78
+ - construct literal match expressions from :class:`Literal` and
79
+ :class:`CaselessLiteral` classes
80
+ - construct character word-group expressions using the :class:`Word`
81
+ class
82
+ - see how to create repetitive expressions using :class:`ZeroOrMore`
83
+ and :class:`OneOrMore` classes
84
+ - use :class:`'+'<And>`, :class:`'|'<MatchFirst>`, :class:`'^'<Or>`,
85
+ and :class:`'&'<Each>` operators to combine simple expressions into
86
+ more complex ones
87
+ - associate names with your parsed results using
88
+ :class:`ParserElement.set_results_name`
89
+ - access the parsed data, which is returned as a :class:`ParseResults`
90
+ object
91
+ - find some helpful expression short-cuts like :class:`DelimitedList`
92
+ and :class:`one_of`
93
+ - find more useful common expressions in the :class:`pyparsing_common`
94
+ namespace class
95
+ """
96
+ from typing import NamedTuple
97
+
98
+
99
class version_info(NamedTuple):
    """Structured pyparsing version, mirroring sys.version_info's shape."""

    major: int
    minor: int
    micro: int
    releaselevel: str
    serial: int

    @property
    def __version__(self):
        # "X.Y.Z" for final releases; otherwise append the release-level
        # initial and serial ("rc" for candidates, e.g. "3.1.0rc2").
        base = f"{self.major}.{self.minor}.{self.micro}"
        if self.releaselevel == "final":
            return base
        level_initial = self.releaselevel[0]
        prefix = "r" if level_initial == "c" else ""
        return f"{base}{prefix}{level_initial}{self.serial}"

    def __str__(self):
        return f"{__name__} {self.__version__} / {__version_time__}"

    def __repr__(self):
        return f"{__name__}.{type(self).__name__}({', '.join('{}={!r}'.format(*nv) for nv in zip(self._fields, self))})"
121
+
122
+
123
# Package version metadata, derived from the version_info class above.
__version_info__ = version_info(3, 1, 0, "final", 1)
__version_time__ = "18 Jun 2023 14:05 UTC"
__version__ = __version_info__.__version__
__versionTime__ = __version_time__
__author__ = "Paul McGuire <ptmcg.gm+pyparsing@gmail.com>"

# Re-export the public API from the package's submodules.
from .util import *
from .exceptions import *
from .actions import *
from .core import __diag__, __compat__
from .results import *
from .core import *  # type: ignore[misc, assignment]
from .core import _builtin_exprs as core_builtin_exprs
from .helpers import *  # type: ignore[misc, assignment]
from .helpers import _builtin_exprs as helper_builtin_exprs

from .unicode import unicode_set, UnicodeRangeList, pyparsing_unicode as unicode
from .testing import pyparsing_test as testing
from .common import (
    pyparsing_common as common,
    _builtin_exprs as common_builtin_exprs,
)

# define backward compat synonyms
# (guarded so a star-import above that already provided them wins)
if "pyparsing_unicode" not in globals():
    pyparsing_unicode = unicode  # type: ignore[misc]
if "pyparsing_common" not in globals():
    pyparsing_common = common  # type: ignore[misc]
if "pyparsing_test" not in globals():
    pyparsing_test = testing  # type: ignore[misc]

# Collect every built-in expression so they can be auto-named together.
core_builtin_exprs += common_builtin_exprs + helper_builtin_exprs
155
+
156
+
157
+ __all__ = [
158
+ "__version__",
159
+ "__version_time__",
160
+ "__author__",
161
+ "__compat__",
162
+ "__diag__",
163
+ "And",
164
+ "AtLineStart",
165
+ "AtStringStart",
166
+ "CaselessKeyword",
167
+ "CaselessLiteral",
168
+ "CharsNotIn",
169
+ "CloseMatch",
170
+ "Combine",
171
+ "DelimitedList",
172
+ "Dict",
173
+ "Each",
174
+ "Empty",
175
+ "FollowedBy",
176
+ "Forward",
177
+ "GoToColumn",
178
+ "Group",
179
+ "IndentedBlock",
180
+ "Keyword",
181
+ "LineEnd",
182
+ "LineStart",
183
+ "Literal",
184
+ "Located",
185
+ "PrecededBy",
186
+ "MatchFirst",
187
+ "NoMatch",
188
+ "NotAny",
189
+ "OneOrMore",
190
+ "OnlyOnce",
191
+ "OpAssoc",
192
+ "Opt",
193
+ "Optional",
194
+ "Or",
195
+ "ParseBaseException",
196
+ "ParseElementEnhance",
197
+ "ParseException",
198
+ "ParseExpression",
199
+ "ParseFatalException",
200
+ "ParseResults",
201
+ "ParseSyntaxException",
202
+ "ParserElement",
203
+ "PositionToken",
204
+ "QuotedString",
205
+ "RecursiveGrammarException",
206
+ "Regex",
207
+ "SkipTo",
208
+ "StringEnd",
209
+ "StringStart",
210
+ "Suppress",
211
+ "Token",
212
+ "TokenConverter",
213
+ "White",
214
+ "Word",
215
+ "WordEnd",
216
+ "WordStart",
217
+ "ZeroOrMore",
218
+ "Char",
219
+ "alphanums",
220
+ "alphas",
221
+ "alphas8bit",
222
+ "any_close_tag",
223
+ "any_open_tag",
224
+ "autoname_elements",
225
+ "c_style_comment",
226
+ "col",
227
+ "common_html_entity",
228
+ "condition_as_parse_action",
229
+ "counted_array",
230
+ "cpp_style_comment",
231
+ "dbl_quoted_string",
232
+ "dbl_slash_comment",
233
+ "delimited_list",
234
+ "dict_of",
235
+ "empty",
236
+ "hexnums",
237
+ "html_comment",
238
+ "identchars",
239
+ "identbodychars",
240
+ "infix_notation",
241
+ "java_style_comment",
242
+ "line",
243
+ "line_end",
244
+ "line_start",
245
+ "lineno",
246
+ "make_html_tags",
247
+ "make_xml_tags",
248
+ "match_only_at_col",
249
+ "match_previous_expr",
250
+ "match_previous_literal",
251
+ "nested_expr",
252
+ "null_debug_action",
253
+ "nums",
254
+ "one_of",
255
+ "original_text_for",
256
+ "printables",
257
+ "punc8bit",
258
+ "pyparsing_common",
259
+ "pyparsing_test",
260
+ "pyparsing_unicode",
261
+ "python_style_comment",
262
+ "quoted_string",
263
+ "remove_quotes",
264
+ "replace_with",
265
+ "replace_html_entity",
266
+ "rest_of_line",
267
+ "sgl_quoted_string",
268
+ "srange",
269
+ "string_end",
270
+ "string_start",
271
+ "token_map",
272
+ "trace_parse_action",
273
+ "ungroup",
274
+ "unicode_set",
275
+ "unicode_string",
276
+ "with_attribute",
277
+ "with_class",
278
+ # pre-PEP8 compatibility names
279
+ "__versionTime__",
280
+ "anyCloseTag",
281
+ "anyOpenTag",
282
+ "cStyleComment",
283
+ "commonHTMLEntity",
284
+ "conditionAsParseAction",
285
+ "countedArray",
286
+ "cppStyleComment",
287
+ "dblQuotedString",
288
+ "dblSlashComment",
289
+ "delimitedList",
290
+ "dictOf",
291
+ "htmlComment",
292
+ "indentedBlock",
293
+ "infixNotation",
294
+ "javaStyleComment",
295
+ "lineEnd",
296
+ "lineStart",
297
+ "locatedExpr",
298
+ "makeHTMLTags",
299
+ "makeXMLTags",
300
+ "matchOnlyAtCol",
301
+ "matchPreviousExpr",
302
+ "matchPreviousLiteral",
303
+ "nestedExpr",
304
+ "nullDebugAction",
305
+ "oneOf",
306
+ "opAssoc",
307
+ "originalTextFor",
308
+ "pythonStyleComment",
309
+ "quotedString",
310
+ "removeQuotes",
311
+ "replaceHTMLEntity",
312
+ "replaceWith",
313
+ "restOfLine",
314
+ "sglQuotedString",
315
+ "stringEnd",
316
+ "stringStart",
317
+ "tokenMap",
318
+ "traceParseAction",
319
+ "unicodeString",
320
+ "withAttribute",
321
+ "withClass",
322
+ ]
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (8.22 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/actions.cpython-311.pyc ADDED
Binary file (9.11 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/common.cpython-311.pyc ADDED
Binary file (14.9 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/exceptions.cpython-311.pyc ADDED
Binary file (13.7 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/helpers.cpython-311.pyc ADDED
Binary file (54.1 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/results.cpython-311.pyc ADDED
Binary file (37.8 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/testing.cpython-311.pyc ADDED
Binary file (19.5 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/unicode.cpython-311.pyc ADDED
Binary file (15.2 kB). View file
 
.venv/lib/python3.11/site-packages/pip/_vendor/pyparsing/__pycache__/util.cpython-311.pyc ADDED
Binary file (16.8 kB). View file