Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .gitattributes +1 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_collections.py +355 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_version.py +2 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/connection.py +572 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/connectionpool.py +1140 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +36 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc +0 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py +519 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py +397 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/appengine.py +314 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py +130 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py +518 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/securetransport.py +920 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/socks.py +216 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/filepost.py +98 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/request.py +191 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/response.py +879 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__init__.py +49 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/ssl_.py +504 -0
- llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/ssl_match_hostname.py +159 -0
- minigpt2/lib/python3.10/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/__init__.py +100 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/decoders/__init__.py +14 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/decoders/__init__.pyi +270 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/decoders/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/implementations/__init__.py +6 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/implementations/__pycache__/__init__.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/implementations/__pycache__/sentencepiece_unigram.cpython-310.pyc +0 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/implementations/base_tokenizer.py +418 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/implementations/byte_level_bpe.py +122 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/implementations/sentencepiece_bpe.py +102 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/models/__init__.py +8 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/models/__init__.pyi +562 -0
- minigpt2/lib/python3.10/site-packages/tokenizers/models/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -1358,3 +1358,4 @@ minigpt2/lib/python3.10/site-packages/Crypto/PublicKey/_ed25519.abi3.so filter=l
|
|
| 1358 |
llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1359 |
minigpt2/lib/python3.10/site-packages/Crypto/Math/_modexp.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1360 |
minigpt2/lib/python3.10/site-packages/decord/libdecord.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 1358 |
llava/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/_emoji_codes.cpython-310.pyc filter=lfs diff=lfs merge=lfs -text
|
| 1359 |
minigpt2/lib/python3.10/site-packages/Crypto/Math/_modexp.abi3.so filter=lfs diff=lfs merge=lfs -text
|
| 1360 |
minigpt2/lib/python3.10/site-packages/decord/libdecord.so filter=lfs diff=lfs merge=lfs -text
|
| 1361 |
+
minigpt2/lib/python3.10/site-packages/tokenizers/tokenizers.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.91 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/cmdline.cpython-310.pyc
ADDED
|
Binary file (15.5 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-310.pyc
ADDED
|
Binary file (1.87 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/sphinxext.cpython-310.pyc
ADDED
|
Binary file (7.75 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/token.cpython-310.pyc
ADDED
|
Binary file (4.68 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_collections.py
ADDED
|
@@ -0,0 +1,355 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
try:
|
| 4 |
+
from collections.abc import Mapping, MutableMapping
|
| 5 |
+
except ImportError:
|
| 6 |
+
from collections import Mapping, MutableMapping
|
| 7 |
+
try:
|
| 8 |
+
from threading import RLock
|
| 9 |
+
except ImportError: # Platform-specific: No threads available
|
| 10 |
+
|
| 11 |
+
class RLock:
|
| 12 |
+
def __enter__(self):
|
| 13 |
+
pass
|
| 14 |
+
|
| 15 |
+
def __exit__(self, exc_type, exc_value, traceback):
|
| 16 |
+
pass
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
from collections import OrderedDict
|
| 20 |
+
|
| 21 |
+
from .exceptions import InvalidHeader
|
| 22 |
+
from .packages import six
|
| 23 |
+
from .packages.six import iterkeys, itervalues
|
| 24 |
+
|
| 25 |
+
__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
_Null = object()
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class RecentlyUsedContainer(MutableMapping):
|
| 32 |
+
"""
|
| 33 |
+
Provides a thread-safe dict-like container which maintains up to
|
| 34 |
+
``maxsize`` keys while throwing away the least-recently-used keys beyond
|
| 35 |
+
``maxsize``.
|
| 36 |
+
|
| 37 |
+
:param maxsize:
|
| 38 |
+
Maximum number of recent elements to retain.
|
| 39 |
+
|
| 40 |
+
:param dispose_func:
|
| 41 |
+
Every time an item is evicted from the container,
|
| 42 |
+
``dispose_func(value)`` is called. Callback which will get called
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
ContainerCls = OrderedDict
|
| 46 |
+
|
| 47 |
+
def __init__(self, maxsize=10, dispose_func=None):
|
| 48 |
+
self._maxsize = maxsize
|
| 49 |
+
self.dispose_func = dispose_func
|
| 50 |
+
|
| 51 |
+
self._container = self.ContainerCls()
|
| 52 |
+
self.lock = RLock()
|
| 53 |
+
|
| 54 |
+
def __getitem__(self, key):
|
| 55 |
+
# Re-insert the item, moving it to the end of the eviction line.
|
| 56 |
+
with self.lock:
|
| 57 |
+
item = self._container.pop(key)
|
| 58 |
+
self._container[key] = item
|
| 59 |
+
return item
|
| 60 |
+
|
| 61 |
+
def __setitem__(self, key, value):
|
| 62 |
+
evicted_value = _Null
|
| 63 |
+
with self.lock:
|
| 64 |
+
# Possibly evict the existing value of 'key'
|
| 65 |
+
evicted_value = self._container.get(key, _Null)
|
| 66 |
+
self._container[key] = value
|
| 67 |
+
|
| 68 |
+
# If we didn't evict an existing value, we might have to evict the
|
| 69 |
+
# least recently used item from the beginning of the container.
|
| 70 |
+
if len(self._container) > self._maxsize:
|
| 71 |
+
_key, evicted_value = self._container.popitem(last=False)
|
| 72 |
+
|
| 73 |
+
if self.dispose_func and evicted_value is not _Null:
|
| 74 |
+
self.dispose_func(evicted_value)
|
| 75 |
+
|
| 76 |
+
def __delitem__(self, key):
|
| 77 |
+
with self.lock:
|
| 78 |
+
value = self._container.pop(key)
|
| 79 |
+
|
| 80 |
+
if self.dispose_func:
|
| 81 |
+
self.dispose_func(value)
|
| 82 |
+
|
| 83 |
+
def __len__(self):
|
| 84 |
+
with self.lock:
|
| 85 |
+
return len(self._container)
|
| 86 |
+
|
| 87 |
+
def __iter__(self):
|
| 88 |
+
raise NotImplementedError(
|
| 89 |
+
"Iteration over this class is unlikely to be threadsafe."
|
| 90 |
+
)
|
| 91 |
+
|
| 92 |
+
def clear(self):
|
| 93 |
+
with self.lock:
|
| 94 |
+
# Copy pointers to all values, then wipe the mapping
|
| 95 |
+
values = list(itervalues(self._container))
|
| 96 |
+
self._container.clear()
|
| 97 |
+
|
| 98 |
+
if self.dispose_func:
|
| 99 |
+
for value in values:
|
| 100 |
+
self.dispose_func(value)
|
| 101 |
+
|
| 102 |
+
def keys(self):
|
| 103 |
+
with self.lock:
|
| 104 |
+
return list(iterkeys(self._container))
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class HTTPHeaderDict(MutableMapping):
|
| 108 |
+
"""
|
| 109 |
+
:param headers:
|
| 110 |
+
An iterable of field-value pairs. Must not contain multiple field names
|
| 111 |
+
when compared case-insensitively.
|
| 112 |
+
|
| 113 |
+
:param kwargs:
|
| 114 |
+
Additional field-value pairs to pass in to ``dict.update``.
|
| 115 |
+
|
| 116 |
+
A ``dict`` like container for storing HTTP Headers.
|
| 117 |
+
|
| 118 |
+
Field names are stored and compared case-insensitively in compliance with
|
| 119 |
+
RFC 7230. Iteration provides the first case-sensitive key seen for each
|
| 120 |
+
case-insensitive pair.
|
| 121 |
+
|
| 122 |
+
Using ``__setitem__`` syntax overwrites fields that compare equal
|
| 123 |
+
case-insensitively in order to maintain ``dict``'s api. For fields that
|
| 124 |
+
compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
|
| 125 |
+
in a loop.
|
| 126 |
+
|
| 127 |
+
If multiple fields that are equal case-insensitively are passed to the
|
| 128 |
+
constructor or ``.update``, the behavior is undefined and some will be
|
| 129 |
+
lost.
|
| 130 |
+
|
| 131 |
+
>>> headers = HTTPHeaderDict()
|
| 132 |
+
>>> headers.add('Set-Cookie', 'foo=bar')
|
| 133 |
+
>>> headers.add('set-cookie', 'baz=quxx')
|
| 134 |
+
>>> headers['content-length'] = '7'
|
| 135 |
+
>>> headers['SET-cookie']
|
| 136 |
+
'foo=bar, baz=quxx'
|
| 137 |
+
>>> headers['Content-Length']
|
| 138 |
+
'7'
|
| 139 |
+
"""
|
| 140 |
+
|
| 141 |
+
def __init__(self, headers=None, **kwargs):
|
| 142 |
+
super(HTTPHeaderDict, self).__init__()
|
| 143 |
+
self._container = OrderedDict()
|
| 144 |
+
if headers is not None:
|
| 145 |
+
if isinstance(headers, HTTPHeaderDict):
|
| 146 |
+
self._copy_from(headers)
|
| 147 |
+
else:
|
| 148 |
+
self.extend(headers)
|
| 149 |
+
if kwargs:
|
| 150 |
+
self.extend(kwargs)
|
| 151 |
+
|
| 152 |
+
def __setitem__(self, key, val):
|
| 153 |
+
self._container[key.lower()] = [key, val]
|
| 154 |
+
return self._container[key.lower()]
|
| 155 |
+
|
| 156 |
+
def __getitem__(self, key):
|
| 157 |
+
val = self._container[key.lower()]
|
| 158 |
+
return ", ".join(val[1:])
|
| 159 |
+
|
| 160 |
+
def __delitem__(self, key):
|
| 161 |
+
del self._container[key.lower()]
|
| 162 |
+
|
| 163 |
+
def __contains__(self, key):
|
| 164 |
+
return key.lower() in self._container
|
| 165 |
+
|
| 166 |
+
def __eq__(self, other):
|
| 167 |
+
if not isinstance(other, Mapping) and not hasattr(other, "keys"):
|
| 168 |
+
return False
|
| 169 |
+
if not isinstance(other, type(self)):
|
| 170 |
+
other = type(self)(other)
|
| 171 |
+
return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
|
| 172 |
+
(k.lower(), v) for k, v in other.itermerged()
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
def __ne__(self, other):
|
| 176 |
+
return not self.__eq__(other)
|
| 177 |
+
|
| 178 |
+
if six.PY2: # Python 2
|
| 179 |
+
iterkeys = MutableMapping.iterkeys
|
| 180 |
+
itervalues = MutableMapping.itervalues
|
| 181 |
+
|
| 182 |
+
__marker = object()
|
| 183 |
+
|
| 184 |
+
def __len__(self):
|
| 185 |
+
return len(self._container)
|
| 186 |
+
|
| 187 |
+
def __iter__(self):
|
| 188 |
+
# Only provide the originally cased names
|
| 189 |
+
for vals in self._container.values():
|
| 190 |
+
yield vals[0]
|
| 191 |
+
|
| 192 |
+
def pop(self, key, default=__marker):
|
| 193 |
+
"""D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
|
| 194 |
+
If key is not found, d is returned if given, otherwise KeyError is raised.
|
| 195 |
+
"""
|
| 196 |
+
# Using the MutableMapping function directly fails due to the private marker.
|
| 197 |
+
# Using ordinary dict.pop would expose the internal structures.
|
| 198 |
+
# So let's reinvent the wheel.
|
| 199 |
+
try:
|
| 200 |
+
value = self[key]
|
| 201 |
+
except KeyError:
|
| 202 |
+
if default is self.__marker:
|
| 203 |
+
raise
|
| 204 |
+
return default
|
| 205 |
+
else:
|
| 206 |
+
del self[key]
|
| 207 |
+
return value
|
| 208 |
+
|
| 209 |
+
def discard(self, key):
|
| 210 |
+
try:
|
| 211 |
+
del self[key]
|
| 212 |
+
except KeyError:
|
| 213 |
+
pass
|
| 214 |
+
|
| 215 |
+
def add(self, key, val):
|
| 216 |
+
"""Adds a (name, value) pair, doesn't overwrite the value if it already
|
| 217 |
+
exists.
|
| 218 |
+
|
| 219 |
+
>>> headers = HTTPHeaderDict(foo='bar')
|
| 220 |
+
>>> headers.add('Foo', 'baz')
|
| 221 |
+
>>> headers['foo']
|
| 222 |
+
'bar, baz'
|
| 223 |
+
"""
|
| 224 |
+
key_lower = key.lower()
|
| 225 |
+
new_vals = [key, val]
|
| 226 |
+
# Keep the common case aka no item present as fast as possible
|
| 227 |
+
vals = self._container.setdefault(key_lower, new_vals)
|
| 228 |
+
if new_vals is not vals:
|
| 229 |
+
vals.append(val)
|
| 230 |
+
|
| 231 |
+
def extend(self, *args, **kwargs):
|
| 232 |
+
"""Generic import function for any type of header-like object.
|
| 233 |
+
Adapted version of MutableMapping.update in order to insert items
|
| 234 |
+
with self.add instead of self.__setitem__
|
| 235 |
+
"""
|
| 236 |
+
if len(args) > 1:
|
| 237 |
+
raise TypeError(
|
| 238 |
+
"extend() takes at most 1 positional "
|
| 239 |
+
"arguments ({0} given)".format(len(args))
|
| 240 |
+
)
|
| 241 |
+
other = args[0] if len(args) >= 1 else ()
|
| 242 |
+
|
| 243 |
+
if isinstance(other, HTTPHeaderDict):
|
| 244 |
+
for key, val in other.iteritems():
|
| 245 |
+
self.add(key, val)
|
| 246 |
+
elif isinstance(other, Mapping):
|
| 247 |
+
for key in other:
|
| 248 |
+
self.add(key, other[key])
|
| 249 |
+
elif hasattr(other, "keys"):
|
| 250 |
+
for key in other.keys():
|
| 251 |
+
self.add(key, other[key])
|
| 252 |
+
else:
|
| 253 |
+
for key, value in other:
|
| 254 |
+
self.add(key, value)
|
| 255 |
+
|
| 256 |
+
for key, value in kwargs.items():
|
| 257 |
+
self.add(key, value)
|
| 258 |
+
|
| 259 |
+
def getlist(self, key, default=__marker):
|
| 260 |
+
"""Returns a list of all the values for the named field. Returns an
|
| 261 |
+
empty list if the key doesn't exist."""
|
| 262 |
+
try:
|
| 263 |
+
vals = self._container[key.lower()]
|
| 264 |
+
except KeyError:
|
| 265 |
+
if default is self.__marker:
|
| 266 |
+
return []
|
| 267 |
+
return default
|
| 268 |
+
else:
|
| 269 |
+
return vals[1:]
|
| 270 |
+
|
| 271 |
+
def _prepare_for_method_change(self):
|
| 272 |
+
"""
|
| 273 |
+
Remove content-specific header fields before changing the request
|
| 274 |
+
method to GET or HEAD according to RFC 9110, Section 15.4.
|
| 275 |
+
"""
|
| 276 |
+
content_specific_headers = [
|
| 277 |
+
"Content-Encoding",
|
| 278 |
+
"Content-Language",
|
| 279 |
+
"Content-Location",
|
| 280 |
+
"Content-Type",
|
| 281 |
+
"Content-Length",
|
| 282 |
+
"Digest",
|
| 283 |
+
"Last-Modified",
|
| 284 |
+
]
|
| 285 |
+
for header in content_specific_headers:
|
| 286 |
+
self.discard(header)
|
| 287 |
+
return self
|
| 288 |
+
|
| 289 |
+
# Backwards compatibility for httplib
|
| 290 |
+
getheaders = getlist
|
| 291 |
+
getallmatchingheaders = getlist
|
| 292 |
+
iget = getlist
|
| 293 |
+
|
| 294 |
+
# Backwards compatibility for http.cookiejar
|
| 295 |
+
get_all = getlist
|
| 296 |
+
|
| 297 |
+
def __repr__(self):
|
| 298 |
+
return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
|
| 299 |
+
|
| 300 |
+
def _copy_from(self, other):
|
| 301 |
+
for key in other:
|
| 302 |
+
val = other.getlist(key)
|
| 303 |
+
if isinstance(val, list):
|
| 304 |
+
# Don't need to convert tuples
|
| 305 |
+
val = list(val)
|
| 306 |
+
self._container[key.lower()] = [key] + val
|
| 307 |
+
|
| 308 |
+
def copy(self):
|
| 309 |
+
clone = type(self)()
|
| 310 |
+
clone._copy_from(self)
|
| 311 |
+
return clone
|
| 312 |
+
|
| 313 |
+
def iteritems(self):
|
| 314 |
+
"""Iterate over all header lines, including duplicate ones."""
|
| 315 |
+
for key in self:
|
| 316 |
+
vals = self._container[key.lower()]
|
| 317 |
+
for val in vals[1:]:
|
| 318 |
+
yield vals[0], val
|
| 319 |
+
|
| 320 |
+
def itermerged(self):
|
| 321 |
+
"""Iterate over all headers, merging duplicate ones together."""
|
| 322 |
+
for key in self:
|
| 323 |
+
val = self._container[key.lower()]
|
| 324 |
+
yield val[0], ", ".join(val[1:])
|
| 325 |
+
|
| 326 |
+
def items(self):
|
| 327 |
+
return list(self.iteritems())
|
| 328 |
+
|
| 329 |
+
@classmethod
|
| 330 |
+
def from_httplib(cls, message): # Python 2
|
| 331 |
+
"""Read headers from a Python 2 httplib message object."""
|
| 332 |
+
# python2.7 does not expose a proper API for exporting multiheaders
|
| 333 |
+
# efficiently. This function re-reads raw lines from the message
|
| 334 |
+
# object and extracts the multiheaders properly.
|
| 335 |
+
obs_fold_continued_leaders = (" ", "\t")
|
| 336 |
+
headers = []
|
| 337 |
+
|
| 338 |
+
for line in message.headers:
|
| 339 |
+
if line.startswith(obs_fold_continued_leaders):
|
| 340 |
+
if not headers:
|
| 341 |
+
# We received a header line that starts with OWS as described
|
| 342 |
+
# in RFC-7230 S3.2.4. This indicates a multiline header, but
|
| 343 |
+
# there exists no previous header to which we can attach it.
|
| 344 |
+
raise InvalidHeader(
|
| 345 |
+
"Header continuation with no previous header: %s" % line
|
| 346 |
+
)
|
| 347 |
+
else:
|
| 348 |
+
key, value = headers[-1]
|
| 349 |
+
headers[-1] = (key, value + " " + line.strip())
|
| 350 |
+
continue
|
| 351 |
+
|
| 352 |
+
key, value = line.split(":", 1)
|
| 353 |
+
headers.append((key, value.strip()))
|
| 354 |
+
|
| 355 |
+
return cls(headers)
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_version.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is protected via CODEOWNERS
|
| 2 |
+
__version__ = "1.26.20"
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/connection.py
ADDED
|
@@ -0,0 +1,572 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import datetime
|
| 4 |
+
import logging
|
| 5 |
+
import os
|
| 6 |
+
import re
|
| 7 |
+
import socket
|
| 8 |
+
import warnings
|
| 9 |
+
from socket import error as SocketError
|
| 10 |
+
from socket import timeout as SocketTimeout
|
| 11 |
+
|
| 12 |
+
from .packages import six
|
| 13 |
+
from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
|
| 14 |
+
from .packages.six.moves.http_client import HTTPException # noqa: F401
|
| 15 |
+
from .util.proxy import create_proxy_ssl_context
|
| 16 |
+
|
| 17 |
+
try: # Compiled with SSL?
|
| 18 |
+
import ssl
|
| 19 |
+
|
| 20 |
+
BaseSSLError = ssl.SSLError
|
| 21 |
+
except (ImportError, AttributeError): # Platform-specific: No SSL.
|
| 22 |
+
ssl = None
|
| 23 |
+
|
| 24 |
+
class BaseSSLError(BaseException):
|
| 25 |
+
pass
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
try:
|
| 29 |
+
# Python 3: not a no-op, we're adding this to the namespace so it can be imported.
|
| 30 |
+
ConnectionError = ConnectionError
|
| 31 |
+
except NameError:
|
| 32 |
+
# Python 2
|
| 33 |
+
class ConnectionError(Exception):
|
| 34 |
+
pass
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
try: # Python 3:
|
| 38 |
+
# Not a no-op, we're adding this to the namespace so it can be imported.
|
| 39 |
+
BrokenPipeError = BrokenPipeError
|
| 40 |
+
except NameError: # Python 2:
|
| 41 |
+
|
| 42 |
+
class BrokenPipeError(Exception):
|
| 43 |
+
pass
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
|
| 47 |
+
from ._version import __version__
|
| 48 |
+
from .exceptions import (
|
| 49 |
+
ConnectTimeoutError,
|
| 50 |
+
NewConnectionError,
|
| 51 |
+
SubjectAltNameWarning,
|
| 52 |
+
SystemTimeWarning,
|
| 53 |
+
)
|
| 54 |
+
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
|
| 55 |
+
from .util.ssl_ import (
|
| 56 |
+
assert_fingerprint,
|
| 57 |
+
create_urllib3_context,
|
| 58 |
+
is_ipaddress,
|
| 59 |
+
resolve_cert_reqs,
|
| 60 |
+
resolve_ssl_version,
|
| 61 |
+
ssl_wrap_socket,
|
| 62 |
+
)
|
| 63 |
+
from .util.ssl_match_hostname import CertificateError, match_hostname
|
| 64 |
+
|
| 65 |
+
log = logging.getLogger(__name__)
|
| 66 |
+
|
| 67 |
+
port_by_scheme = {"http": 80, "https": 443}
|
| 68 |
+
|
| 69 |
+
# When it comes time to update this value as a part of regular maintenance
|
| 70 |
+
# (ie test_recent_date is failing) update it to ~6 months before the current date.
|
| 71 |
+
RECENT_DATE = datetime.date(2024, 1, 1)
|
| 72 |
+
|
| 73 |
+
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class HTTPConnection(_HTTPConnection, object):
|
| 77 |
+
"""
|
| 78 |
+
Based on :class:`http.client.HTTPConnection` but provides an extra constructor
|
| 79 |
+
backwards-compatibility layer between older and newer Pythons.
|
| 80 |
+
|
| 81 |
+
Additional keyword parameters are used to configure attributes of the connection.
|
| 82 |
+
Accepted parameters include:
|
| 83 |
+
|
| 84 |
+
- ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
|
| 85 |
+
- ``source_address``: Set the source address for the current connection.
|
| 86 |
+
- ``socket_options``: Set specific options on the underlying socket. If not specified, then
|
| 87 |
+
defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
|
| 88 |
+
Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
|
| 89 |
+
|
| 90 |
+
For example, if you wish to enable TCP Keep Alive in addition to the defaults,
|
| 91 |
+
you might pass:
|
| 92 |
+
|
| 93 |
+
.. code-block:: python
|
| 94 |
+
|
| 95 |
+
HTTPConnection.default_socket_options + [
|
| 96 |
+
(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
|
| 97 |
+
]
|
| 98 |
+
|
| 99 |
+
Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
|
| 100 |
+
"""
|
| 101 |
+
|
| 102 |
+
default_port = port_by_scheme["http"]
|
| 103 |
+
|
| 104 |
+
#: Disable Nagle's algorithm by default.
|
| 105 |
+
#: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
|
| 106 |
+
default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
|
| 107 |
+
|
| 108 |
+
#: Whether this connection verifies the host's certificate.
|
| 109 |
+
is_verified = False
|
| 110 |
+
|
| 111 |
+
#: Whether this proxy connection (if used) verifies the proxy host's
|
| 112 |
+
#: certificate.
|
| 113 |
+
proxy_is_verified = None
|
| 114 |
+
|
| 115 |
+
def __init__(self, *args, **kw):
|
| 116 |
+
if not six.PY2:
|
| 117 |
+
kw.pop("strict", None)
|
| 118 |
+
|
| 119 |
+
# Pre-set source_address.
|
| 120 |
+
self.source_address = kw.get("source_address")
|
| 121 |
+
|
| 122 |
+
#: The socket options provided by the user. If no options are
|
| 123 |
+
#: provided, we use the default options.
|
| 124 |
+
self.socket_options = kw.pop("socket_options", self.default_socket_options)
|
| 125 |
+
|
| 126 |
+
# Proxy options provided by the user.
|
| 127 |
+
self.proxy = kw.pop("proxy", None)
|
| 128 |
+
self.proxy_config = kw.pop("proxy_config", None)
|
| 129 |
+
|
| 130 |
+
_HTTPConnection.__init__(self, *args, **kw)
|
| 131 |
+
|
| 132 |
+
@property
|
| 133 |
+
def host(self):
|
| 134 |
+
"""
|
| 135 |
+
Getter method to remove any trailing dots that indicate the hostname is an FQDN.
|
| 136 |
+
|
| 137 |
+
In general, SSL certificates don't include the trailing dot indicating a
|
| 138 |
+
fully-qualified domain name, and thus, they don't validate properly when
|
| 139 |
+
checked against a domain name that includes the dot. In addition, some
|
| 140 |
+
servers may not expect to receive the trailing dot when provided.
|
| 141 |
+
|
| 142 |
+
However, the hostname with trailing dot is critical to DNS resolution; doing a
|
| 143 |
+
lookup with the trailing dot will properly only resolve the appropriate FQDN,
|
| 144 |
+
whereas a lookup without a trailing dot will search the system's search domain
|
| 145 |
+
list. Thus, it's important to keep the original host around for use only in
|
| 146 |
+
those cases where it's appropriate (i.e., when doing DNS lookup to establish the
|
| 147 |
+
actual TCP connection across which we're going to send HTTP requests).
|
| 148 |
+
"""
|
| 149 |
+
return self._dns_host.rstrip(".")
|
| 150 |
+
|
| 151 |
+
@host.setter
def host(self, value):
    """
    Setter for the `host` property.

    We assume that only urllib3 uses the _dns_host attribute; httplib itself
    only uses `host`, and it seems reasonable that other libraries follow suit.
    """
    # Store the value verbatim (trailing dot included) so DNS resolution can
    # use the exact FQDN; the getter strips the dot for TLS/header purposes.
    self._dns_host = value
|
| 161 |
+
def _new_conn(self):
    """Establish a socket connection and set nodelay settings on it.

    :return: New socket connection.
    :raises ConnectTimeoutError: if the connect attempt timed out.
    :raises NewConnectionError: for any other socket-level failure.
    """
    extra_kw = {}
    if self.source_address:
        extra_kw["source_address"] = self.source_address
    if self.socket_options:
        extra_kw["socket_options"] = self.socket_options

    try:
        # Note: resolve with the dotted ``_dns_host``, not ``self.host``.
        return connection.create_connection(
            (self._dns_host, self.port), self.timeout, **extra_kw
        )
    except SocketTimeout:
        raise ConnectTimeoutError(
            self,
            "Connection to %s timed out. (connect timeout=%s)"
            % (self.host, self.timeout),
        )
    except SocketError as e:
        raise NewConnectionError(
            self, "Failed to establish a new connection: %s" % e
        )
|
| 192 |
+
def _is_using_tunnel(self):
|
| 193 |
+
# Google App Engine's httplib does not define _tunnel_host
|
| 194 |
+
return getattr(self, "_tunnel_host", None)
|
| 195 |
+
|
| 196 |
+
def _prepare_conn(self, conn):
|
| 197 |
+
self.sock = conn
|
| 198 |
+
if self._is_using_tunnel():
|
| 199 |
+
# TODO: Fix tunnel so it doesn't depend on self.sock state.
|
| 200 |
+
self._tunnel()
|
| 201 |
+
# Mark this connection as not reusable
|
| 202 |
+
self.auto_open = 0
|
| 203 |
+
|
| 204 |
+
def connect(self):
    """Open the TCP socket and prepare it for use (including proxy
    tunneling when configured)."""
    conn = self._new_conn()
    self._prepare_conn(conn)
|
| 208 |
+
def putrequest(self, method, url, *args, **kwargs):
    """ """
    # Empty docstring on purpose: the indentation of CPython's own
    # implementation is broken and we don't want this override to appear
    # in our generated documentation.
    bad = _CONTAINS_CONTROL_CHAR_RE.search(method)
    if bad is not None:
        # Reject request smuggling via control characters in the method.
        raise ValueError(
            "Method cannot contain non-token characters %r (found at least %r)"
            % (method, bad.group())
        )

    return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
|
| 221 |
+
def putheader(self, header, *values):
    """ """
    # Empty docstring on purpose: keep this override out of the docs.
    wants_skip = any(isinstance(v, str) and v == SKIP_HEADER for v in values)
    if not wants_skip:
        _HTTPConnection.putheader(self, header, *values)
        return
    if six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
        # SKIP_HEADER is only honoured for a known whitelist of headers.
        raise ValueError(
            "urllib3.util.SKIP_HEADER only supports '%s'"
            % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
        )
|
| 231 |
+
def request(self, method, url, body=None, headers=None):
    """Send an HTTP request, refreshing the socket timeout and filling in
    a default ``User-Agent`` header when the caller did not supply one."""
    # A re-used connection keeps its old socket; re-apply the timeout so
    # this request honours the current setting.
    sock = getattr(self, "sock", None)
    if sock is not None:
        sock.settimeout(self.timeout)

    # Copy so the dict passed into .request() is never mutated.
    headers = {} if headers is None else headers.copy()
    lowered = (six.ensure_str(k.lower()) for k in headers)
    if "user-agent" not in lowered:
        headers["User-Agent"] = _get_default_user_agent()
    super(HTTPConnection, self).request(method, url, body=body, headers=headers)
|
| 246 |
+
def request_chunked(self, method, url, body=None, headers=None):
    """
    Alternative to the common request method, which sends the
    body with chunked encoding and not as one block.

    ``body`` may be a string/bytes value or an iterable of chunks; each
    chunk is framed per the HTTP/1.1 chunked transfer coding.
    """
    headers = headers or {}
    header_keys = set([six.ensure_str(k.lower()) for k in headers])
    # Let caller-supplied Accept-Encoding / Host headers win over httplib's.
    skip_accept_encoding = "accept-encoding" in header_keys
    skip_host = "host" in header_keys
    self.putrequest(
        method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
    )
    if "user-agent" not in header_keys:
        self.putheader("User-Agent", _get_default_user_agent())
    for header, value in headers.items():
        self.putheader(header, value)
    if "transfer-encoding" not in header_keys:
        self.putheader("Transfer-Encoding", "chunked")
    self.endheaders()

    if body is not None:
        stringish_types = six.string_types + (bytes,)
        if isinstance(body, stringish_types):
            # Treat a single string/bytes body as one chunk.
            body = (body,)
        for chunk in body:
            if not chunk:
                # Empty chunks would terminate the body prematurely; skip.
                continue
            if not isinstance(chunk, bytes):
                chunk = chunk.encode("utf8")
            # Frame as: <hex length>\r\n<data>\r\n
            len_str = hex(len(chunk))[2:]
            to_send = bytearray(len_str.encode())
            to_send += b"\r\n"
            to_send += chunk
            to_send += b"\r\n"
            self.send(to_send)

    # After the if clause, to always have a closed body (zero-length
    # terminating chunk).
    self.send(b"0\r\n\r\n")
|
| 285 |
+
|
| 286 |
+
class HTTPSConnection(HTTPConnection):
    """
    Many of the parameters to this constructor are passed to the underlying SSL
    socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
    """

    default_port = port_by_scheme["https"]

    # TLS verification/configuration defaults; instances override these via
    # the constructor or :meth:`set_cert`.
    cert_reqs = None
    ca_certs = None
    ca_cert_dir = None
    ca_cert_data = None
    ssl_version = None
    assert_fingerprint = None
    # True when TLS must be spoken to the proxy itself before tunneling TLS
    # to the origin (TLS-in-TLS).
    tls_in_tls_required = False

    def __init__(
        self,
        host,
        port=None,
        key_file=None,
        cert_file=None,
        key_password=None,
        strict=None,
        timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
        ssl_context=None,
        server_hostname=None,
        **kw
    ):

        HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)

        self.key_file = key_file
        self.cert_file = cert_file
        self.key_password = key_password
        self.ssl_context = ssl_context
        self.server_hostname = server_hostname

        # Required property for Google AppEngine 1.9.0 which otherwise causes
        # HTTPS requests to go out as HTTP. (See Issue #356)
        self._protocol = "https"

    def set_cert(
        self,
        key_file=None,
        cert_file=None,
        cert_reqs=None,
        key_password=None,
        ca_certs=None,
        assert_hostname=None,
        assert_fingerprint=None,
        ca_cert_dir=None,
        ca_cert_data=None,
    ):
        """
        Configure client certificate and server-verification settings.

        This method should only be called once, before the connection is used.
        """
        # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
        # have an SSLContext object in which case we'll use its verify_mode.
        if cert_reqs is None:
            if self.ssl_context is not None:
                cert_reqs = self.ssl_context.verify_mode
            else:
                cert_reqs = resolve_cert_reqs(None)

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        # Expand "~" so user-relative CA paths work.
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
        self.ca_cert_data = ca_cert_data

    def connect(self):
        """Open a TCP connection, negotiate TLS and verify the peer."""
        # Add certificate verification
        self.sock = conn = self._new_conn()
        hostname = self.host
        tls_in_tls = False

        if self._is_using_tunnel():
            if self.tls_in_tls_required:
                # Wrap the proxy leg in TLS before sending CONNECT through it.
                self.sock = conn = self._connect_tls_proxy(hostname, conn)
                tls_in_tls = True

            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host

        server_hostname = hostname
        if self.server_hostname is not None:
            server_hostname = self.server_hostname

        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn(
                (
                    "System time is way off (before {0}). This will probably "
                    "lead to SSL verification errors"
                ).format(RECENT_DATE),
                SystemTimeWarning,
            )

        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        default_ssl_context = False
        if self.ssl_context is None:
            default_ssl_context = True
            self.ssl_context = create_urllib3_context(
                ssl_version=resolve_ssl_version(self.ssl_version),
                cert_reqs=resolve_cert_reqs(self.cert_reqs),
            )

        context = self.ssl_context
        context.verify_mode = resolve_cert_reqs(self.cert_reqs)

        # Try to load OS default certs if none are given.
        # Works well on Windows (requires Python3.4+)
        if (
            not self.ca_certs
            and not self.ca_cert_dir
            and not self.ca_cert_data
            and default_ssl_context
            and hasattr(context, "load_default_certs")
        ):
            context.load_default_certs()

        self.sock = ssl_wrap_socket(
            sock=conn,
            keyfile=self.key_file,
            certfile=self.cert_file,
            key_password=self.key_password,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=server_hostname,
            ssl_context=context,
            tls_in_tls=tls_in_tls,
        )

        # If we're using all defaults and the connection
        # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
        # for the host.
        if (
            default_ssl_context
            and self.ssl_version is None
            and hasattr(self.sock, "version")
            and self.sock.version() in {"TLSv1", "TLSv1.1"}
        ):  # Defensive:
            warnings.warn(
                "Negotiating TLSv1/TLSv1.1 by default is deprecated "
                "and will be disabled in urllib3 v2.0.0. Connecting to "
                "'%s' with '%s' can be enabled by explicitly opting-in "
                "with 'ssl_version'" % (self.host, self.sock.version()),
                DeprecationWarning,
            )

        if self.assert_fingerprint:
            # Pinned-fingerprint verification takes priority over hostname
            # matching.
            assert_fingerprint(
                self.sock.getpeercert(binary_form=True), self.assert_fingerprint
            )
        elif (
            context.verify_mode != ssl.CERT_NONE
            and not getattr(context, "check_hostname", False)
            and self.assert_hostname is not False
        ):
            # While urllib3 attempts to always turn off hostname matching from
            # the TLS library, this cannot always be done. So we check whether
            # the TLS Library still thinks it's matching hostnames.
            cert = self.sock.getpeercert()
            if not cert.get("subjectAltName", ()):
                warnings.warn(
                    (
                        "Certificate for {0} has no `subjectAltName`, falling back to check for a "
                        "`commonName` for now. This feature is being removed by major browsers and "
                        "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
                        "for details.)".format(hostname)
                    ),
                    SubjectAltNameWarning,
                )
            _match_hostname(cert, self.assert_hostname or server_hostname)

        self.is_verified = (
            context.verify_mode == ssl.CERT_REQUIRED
            or self.assert_fingerprint is not None
        )

    def _connect_tls_proxy(self, hostname, conn):
        """
        Establish a TLS connection to the proxy using the provided SSL context.
        """
        proxy_config = self.proxy_config
        ssl_context = proxy_config.ssl_context
        if ssl_context:
            # If the user provided a proxy context, we assume CA and client
            # certificates have already been set
            return ssl_wrap_socket(
                sock=conn,
                server_hostname=hostname,
                ssl_context=ssl_context,
            )

        ssl_context = create_proxy_ssl_context(
            self.ssl_version,
            self.cert_reqs,
            self.ca_certs,
            self.ca_cert_dir,
            self.ca_cert_data,
        )

        # If no cert was provided, use only the default options for server
        # certificate validation
        socket = ssl_wrap_socket(
            sock=conn,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=hostname,
            ssl_context=ssl_context,
        )

        if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
            ssl_context, "check_hostname", False
        ):
            # While urllib3 attempts to always turn off hostname matching from
            # the TLS library, this cannot always be done. So we check whether
            # the TLS Library still thinks it's matching hostnames.
            cert = socket.getpeercert()
            if not cert.get("subjectAltName", ()):
                warnings.warn(
                    (
                        "Certificate for {0} has no `subjectAltName`, falling back to check for a "
                        "`commonName` for now. This feature is being removed by major browsers and "
                        "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
                        "for details.)".format(hostname)
                    ),
                    SubjectAltNameWarning,
                )
            _match_hostname(cert, hostname)

        self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
        return socket
|
| 536 |
+
def _match_hostname(cert, asserted_hostname):
    """Check *cert* against *asserted_hostname*, logging a warning and
    re-raising with the peer certificate attached on mismatch.

    :raises CertificateError: when the certificate does not match; the
        exception carries the peer certificate as ``_peer_cert``.
    """
    # Our upstream implementation of ssl.match_hostname() only applies this
    # normalization (stripping brackets and a leading 'u') to IP addresses,
    # so it doesn't match DNS SANs — we do the same thing.
    candidate = asserted_hostname.strip("u[]")
    if is_ipaddress(candidate):
        asserted_hostname = candidate

    try:
        match_hostname(cert, asserted_hostname)
    except CertificateError as e:
        log.warning(
            "Certificate did not match expected hostname: %s. Certificate: %s",
            asserted_hostname,
            cert,
        )
        # Add cert to exception and reraise so client code can inspect
        # the cert when catching the exception, if they want to.
        e._peer_cert = cert
        raise
| 557 |
+
|
| 558 |
+
def _get_default_user_agent():
    """Return the default ``User-Agent`` string for this urllib3 release."""
    return "python-urllib3/%s" % (__version__,)
|
| 561 |
+
|
| 562 |
+
class DummyConnection(object):
    """Used to detect a failed ConnectionCls import.

    Bound to ``HTTPSConnection`` below when the :mod:`ssl` module is
    unavailable.
    """

    pass
|
| 567 |
+
|
| 568 |
+
# Without the ssl module there can be no working HTTPS connection class;
# substitute the sentinel so imports still succeed.
if not ssl:
    HTTPSConnection = DummyConnection  # noqa: F811


# Backwards-compatibility alias for the historical class name.
VerifiedHTTPSConnection = HTTPSConnection
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/connectionpool.py
ADDED
|
@@ -0,0 +1,1140 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import errno
|
| 4 |
+
import logging
|
| 5 |
+
import re
|
| 6 |
+
import socket
|
| 7 |
+
import sys
|
| 8 |
+
import warnings
|
| 9 |
+
from socket import error as SocketError
|
| 10 |
+
from socket import timeout as SocketTimeout
|
| 11 |
+
|
| 12 |
+
from ._collections import HTTPHeaderDict
|
| 13 |
+
from .connection import (
|
| 14 |
+
BaseSSLError,
|
| 15 |
+
BrokenPipeError,
|
| 16 |
+
DummyConnection,
|
| 17 |
+
HTTPConnection,
|
| 18 |
+
HTTPException,
|
| 19 |
+
HTTPSConnection,
|
| 20 |
+
VerifiedHTTPSConnection,
|
| 21 |
+
port_by_scheme,
|
| 22 |
+
)
|
| 23 |
+
from .exceptions import (
|
| 24 |
+
ClosedPoolError,
|
| 25 |
+
EmptyPoolError,
|
| 26 |
+
HeaderParsingError,
|
| 27 |
+
HostChangedError,
|
| 28 |
+
InsecureRequestWarning,
|
| 29 |
+
LocationValueError,
|
| 30 |
+
MaxRetryError,
|
| 31 |
+
NewConnectionError,
|
| 32 |
+
ProtocolError,
|
| 33 |
+
ProxyError,
|
| 34 |
+
ReadTimeoutError,
|
| 35 |
+
SSLError,
|
| 36 |
+
TimeoutError,
|
| 37 |
+
)
|
| 38 |
+
from .packages import six
|
| 39 |
+
from .packages.six.moves import queue
|
| 40 |
+
from .request import RequestMethods
|
| 41 |
+
from .response import HTTPResponse
|
| 42 |
+
from .util.connection import is_connection_dropped
|
| 43 |
+
from .util.proxy import connection_requires_http_tunnel
|
| 44 |
+
from .util.queue import LifoQueue
|
| 45 |
+
from .util.request import set_file_position
|
| 46 |
+
from .util.response import assert_header_parsing
|
| 47 |
+
from .util.retry import Retry
|
| 48 |
+
from .util.ssl_match_hostname import CertificateError
|
| 49 |
+
from .util.timeout import Timeout
|
| 50 |
+
from .util.url import Url, _encode_target
|
| 51 |
+
from .util.url import _normalize_host as normalize_host
|
| 52 |
+
from .util.url import get_host, parse_url
|
| 53 |
+
|
| 54 |
+
try: # Platform-specific: Python 3
|
| 55 |
+
import weakref
|
| 56 |
+
|
| 57 |
+
weakref_finalize = weakref.finalize
|
| 58 |
+
except AttributeError: # Platform-specific: Python 2
|
| 59 |
+
from .packages.backports.weakref_finalize import weakref_finalize
|
| 60 |
+
|
| 61 |
+
xrange = six.moves.xrange
|
| 62 |
+
|
| 63 |
+
log = logging.getLogger(__name__)
|
| 64 |
+
|
| 65 |
+
_Default = object()
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# Pool objects
|
| 69 |
+
# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
       ConnectionPool.urlopen() does not normalize or percent-encode target URIs
       which is useful if your target server doesn't support percent-encoded
       target URIs.
    """

    #: URL scheme this pool serves; concrete subclasses set it.
    scheme = None
    #: Queue implementation used to hold pooled connections.
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        # Normalized host used for establishing connections.
        self.host = _normalize_host(host, scheme=self.scheme)
        # Raw lower-cased host, kept separately for proxy use.
        self._proxy_host = host.lower()
        self.port = port

    def __str__(self):
        return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.

        No-op here; subclasses override with real cleanup.
        """
        pass
|
| 108 |
+
|
| 109 |
+
# Errnos that indicate a non-blocking socket simply had no data ready.
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
+
|
| 112 |
+
|
| 113 |
+
class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
| 114 |
+
"""
|
| 115 |
+
Thread-safe connection pool for one host.
|
| 116 |
+
|
| 117 |
+
:param host:
|
| 118 |
+
Host used for this HTTP Connection (e.g. "localhost"), passed into
|
| 119 |
+
:class:`http.client.HTTPConnection`.
|
| 120 |
+
|
| 121 |
+
:param port:
|
| 122 |
+
Port used for this HTTP Connection (None is equivalent to 80), passed
|
| 123 |
+
into :class:`http.client.HTTPConnection`.
|
| 124 |
+
|
| 125 |
+
:param strict:
|
| 126 |
+
Causes BadStatusLine to be raised if the status line can't be parsed
|
| 127 |
+
as a valid HTTP/1.0 or 1.1 status line, passed into
|
| 128 |
+
:class:`http.client.HTTPConnection`.
|
| 129 |
+
|
| 130 |
+
.. note::
|
| 131 |
+
Only works in Python 2. This parameter is ignored in Python 3.
|
| 132 |
+
|
| 133 |
+
:param timeout:
|
| 134 |
+
Socket timeout in seconds for each individual connection. This can
|
| 135 |
+
be a float or integer, which sets the timeout for the HTTP request,
|
| 136 |
+
or an instance of :class:`urllib3.util.Timeout` which gives you more
|
| 137 |
+
fine-grained control over request timeouts. After the constructor has
|
| 138 |
+
been parsed, this is always a `urllib3.util.Timeout` object.
|
| 139 |
+
|
| 140 |
+
:param maxsize:
|
| 141 |
+
Number of connections to save that can be reused. More than 1 is useful
|
| 142 |
+
in multithreaded situations. If ``block`` is set to False, more
|
| 143 |
+
connections will be created but they will not be saved once they've
|
| 144 |
+
been used.
|
| 145 |
+
|
| 146 |
+
:param block:
|
| 147 |
+
If set to True, no more than ``maxsize`` connections will be used at
|
| 148 |
+
a time. When no free connections are available, the call will block
|
| 149 |
+
until a connection has been released. This is a useful side effect for
|
| 150 |
+
particular multithreaded situations where one does not want to use more
|
| 151 |
+
than maxsize connections per host to prevent flooding.
|
| 152 |
+
|
| 153 |
+
:param headers:
|
| 154 |
+
Headers to include with all requests, unless other headers are given
|
| 155 |
+
explicitly.
|
| 156 |
+
|
| 157 |
+
:param retries:
|
| 158 |
+
Retry configuration to use by default with requests in this pool.
|
| 159 |
+
|
| 160 |
+
:param _proxy:
|
| 161 |
+
Parsed proxy URL, should not be used directly, instead, see
|
| 162 |
+
:class:`urllib3.ProxyManager`
|
| 163 |
+
|
| 164 |
+
:param _proxy_headers:
|
| 165 |
+
A dictionary with proxy headers, should not be used directly,
|
| 166 |
+
instead, see :class:`urllib3.ProxyManager`
|
| 167 |
+
|
| 168 |
+
:param \\**conn_kw:
|
| 169 |
+
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
|
| 170 |
+
:class:`urllib3.connection.HTTPSConnection` instances.
|
| 171 |
+
"""
|
| 172 |
+
|
| 173 |
+
scheme = "http"
|
| 174 |
+
ConnectionCls = HTTPConnection
|
| 175 |
+
ResponseCls = HTTPResponse
|
| 176 |
+
|
| 177 |
+
def __init__(
    self,
    host,
    port=None,
    strict=False,
    timeout=Timeout.DEFAULT_TIMEOUT,
    maxsize=1,
    block=False,
    headers=None,
    retries=None,
    _proxy=None,
    _proxy_headers=None,
    _proxy_config=None,
    **conn_kw
):
    """Initialize the pool; see the class docstring for parameter meanings."""
    ConnectionPool.__init__(self, host, port)
    RequestMethods.__init__(self, headers)

    self.strict = strict

    # Coerce plain int/float timeouts into a Timeout object so the rest of
    # the pool deals with a single interface.
    if not isinstance(timeout, Timeout):
        timeout = Timeout.from_float(timeout)

    if retries is None:
        retries = Retry.DEFAULT

    self.timeout = timeout
    self.retries = retries

    self.pool = self.QueueCls(maxsize)
    self.block = block

    self.proxy = _proxy
    self.proxy_headers = _proxy_headers or {}
    self.proxy_config = _proxy_config

    # Fill the queue up so that doing get() on it will block properly
    for _ in xrange(maxsize):
        self.pool.put(None)

    # These are mostly for testing and debugging purposes.
    self.num_connections = 0
    self.num_requests = 0
    self.conn_kw = conn_kw

    if self.proxy:
        # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
        # We cannot know if the user has added default socket options, so we cannot replace the
        # list.
        self.conn_kw.setdefault("socket_options", [])

        self.conn_kw["proxy"] = self.proxy
        self.conn_kw["proxy_config"] = self.proxy_config

    # Do not pass 'self' as callback to 'finalize'.
    # Then the 'finalize' would keep an endless living (leak) to self.
    # By just passing a reference to the pool allows the garbage collector
    # to free self if nobody else has a reference to it.
    pool = self.pool

    # Close all the HTTPConnections in the pool before the
    # HTTPConnectionPool object is garbage collected.
    weakref_finalize(self, _close_pool_connections, pool)
|
| 241 |
+
def _new_conn(self):
    """
    Create and return a brand-new :class:`HTTPConnection` for this pool's
    host and port, counting it in ``num_connections``.
    """
    self.num_connections += 1
    log.debug(
        "Starting new HTTP connection (%d): %s:%s",
        self.num_connections,
        self.host,
        self.port or "80",
    )

    # Build the connection directly from the pool's stored settings.
    return self.ConnectionCls(
        host=self.host,
        port=self.port,
        timeout=self.timeout.connect_timeout,
        strict=self.strict,
        **self.conn_kw
    )
|
| 261 |
+
|
| 262 |
+
def _get_conn(self, timeout=None):
    """
    Obtain a connection, preferring an idle pooled one.

    If the pool holds no idle connection and :prop:`.block` is ``False``,
    a brand-new connection is created instead.

    :param timeout:
        Seconds to wait before giving up and raising
        :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
        :prop:`.block` is ``True``.
    """
    conn = None
    try:
        conn = self.pool.get(block=self.block, timeout=timeout)
    except AttributeError:  # self.pool is None
        raise ClosedPoolError(self, "Pool is closed.")
    except queue.Empty:
        if self.block:
            raise EmptyPoolError(
                self,
                "Pool reached maximum size and no more connections are allowed.",
            )
        # Oh well, we'll create a new connection then

    # A persistent connection may have been dropped by the peer while it
    # sat idle in the queue; detect that and discard the stale socket.
    if conn and is_connection_dropped(conn):
        log.debug("Resetting dropped connection: %s", self.host)
        conn.close()
        if getattr(conn, "auto_open", 1) == 0:
            # http.client._tunnel() has mutated this proxied connection;
            # reusing it would attempt to bypass the proxy, so drop it.
            conn = None

    return conn or self._new_conn()
|
| 300 |
+
|
| 301 |
+
def _put_conn(self, conn):
    """
    Return a connection to the pool for later reuse.

    :param conn:
        Connection object for the current host and port as returned by
        :meth:`._new_conn` or :meth:`._get_conn`.

    If the pool is already full the connection is closed and discarded
    (raise ``maxsize`` if this happens often). If the pool has been
    closed, the connection is likewise closed and discarded.
    """
    try:
        self.pool.put(conn, block=False)
        return  # Everything is dandy, done.
    except AttributeError:
        # self.pool is None -- the pool was closed; fall through and
        # close the connection below.
        pass
    except queue.Full:
        # This should never happen if self.block == True
        log.warning(
            "Connection pool is full, discarding connection: %s. Connection pool size: %s",
            self.host,
            self.pool.qsize(),
        )

    # The connection never made it back into the pool; release its socket.
    if conn:
        conn.close()
|
| 331 |
+
|
| 332 |
+
def _validate_conn(self, conn):
    """
    Hook invoked right before a request is made, once the socket exists.

    Plain HTTP needs no extra validation; the HTTPS pool overrides this
    to force an early connect and emit certificate warnings.
    """
|
| 337 |
+
|
| 338 |
+
def _prepare_proxy(self, conn):
    # Plain HTTP connections need no CONNECT tunnel; the HTTPS pool
    # overrides this with real tunnel setup.
    pass
|
| 341 |
+
|
| 342 |
+
def _get_timeout(self, timeout):
    """Normalize *timeout* into a fresh :class:`urllib3.util.Timeout`."""
    if timeout is _Default:
        return self.timeout.clone()
    if isinstance(timeout, Timeout):
        return timeout.clone()
    # Backwards compatibility: the caller handed us a bare int/float,
    # which applies to both connect and read.
    return Timeout.from_float(timeout)
|
| 353 |
+
|
| 354 |
+
def _raise_timeout(self, err, url, timeout_value):
    """Re-raise *err* as a :class:`ReadTimeoutError` if it is a timeout; otherwise return."""
    message = "Read timed out. (read timeout=%s)" % timeout_value

    if isinstance(err, SocketTimeout):
        raise ReadTimeoutError(self, url, message)

    # See the above comment about EAGAIN in Python 3. In Python 2 we have
    # to specifically catch it and throw the timeout error
    if hasattr(err, "errno") and err.errno in _blocking_errnos:
        raise ReadTimeoutError(self, url, message)

    # Catch possible read timeouts thrown as SSL errors. If not the
    # case, rethrow the original. We need to do this because of:
    # http://bugs.python.org/issue10272
    err_text = str(err)
    if "timed out" in err_text or "did not complete (read)" in err_text:  # Python < 2.7.4
        raise ReadTimeoutError(self, url, message)
|
| 378 |
+
|
| 379 |
+
def _make_request(
    self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
):
    """
    Perform a request on a given urllib connection object taken from our
    pool.

    :param conn:
        a connection from one of our connection pools

    :param timeout:
        Socket timeout in seconds for the request. This can be a
        float or integer, which will set the same timeout value for
        the socket connect and the socket read, or an instance of
        :class:`urllib3.util.Timeout`, which gives you more fine-grained
        control over your timeouts.

    :param chunked:
        If True, the body is sent with chunked transfer encoding via
        ``conn.request_chunked``.

    Returns the raw ``http.client`` response object (not yet wrapped in
    urllib3's response class).
    """
    # Per-pool request counter (mostly for testing/debugging).
    self.num_requests += 1

    timeout_obj = self._get_timeout(timeout)
    timeout_obj.start_connect()
    conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

    # Trigger any extra validation we need to do.
    try:
        self._validate_conn(conn)
    except (SocketTimeout, BaseSSLError) as e:
        # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
        self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
        raise

    # conn.request() calls http.client.*.request, not the method in
    # urllib3.request. It also calls makefile (recv) on the socket.
    try:
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)

    # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
    # legitimately able to close the connection after sending a valid response.
    # With this behaviour, the received response is still readable.
    except BrokenPipeError:
        # Python 3
        pass
    except IOError as e:
        # Python 2 and macOS/Linux
        # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE/ECONNRESET are needed on macOS
        # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
        if e.errno not in {
            errno.EPIPE,
            errno.ESHUTDOWN,
            errno.EPROTOTYPE,
            errno.ECONNRESET,
        }:
            raise

    # Reset the timeout for the recv() on the socket
    read_timeout = timeout_obj.read_timeout

    # App Engine doesn't have a sock attr
    if getattr(conn, "sock", None):
        # In Python 3 socket.py will catch EAGAIN and return None when you
        # try and read into the file pointer created by http.client, which
        # instead raises a BadStatusLine exception. Instead of catching
        # the exception and assuming all BadStatusLine exceptions are read
        # timeouts, check for a zero timeout before making the request.
        if read_timeout == 0:
            raise ReadTimeoutError(
                self, url, "Read timed out. (read timeout=%s)" % read_timeout
            )
        if read_timeout is Timeout.DEFAULT_TIMEOUT:
            conn.sock.settimeout(socket.getdefaulttimeout())
        else:  # None or a value
            conn.sock.settimeout(read_timeout)

    # Receive the response from the server
    try:
        try:
            # Python 2.7, use buffering of HTTP responses
            httplib_response = conn.getresponse(buffering=True)
        except TypeError:
            # Python 3
            try:
                httplib_response = conn.getresponse()
            except BaseException as e:
                # Remove the TypeError from the exception chain in
                # Python 3 (including for exceptions like SystemExit).
                # Otherwise it looks like a bug in the code.
                six.raise_from(e, None)
    except (SocketTimeout, BaseSSLError, SocketError) as e:
        self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
        raise

    # AppEngine doesn't have a version attr.
    http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
    log.debug(
        '%s://%s:%s "%s %s %s" %s %s',
        self.scheme,
        self.host,
        self.port,
        method,
        url,
        http_version,
        httplib_response.status,
        httplib_response.length,
    )

    # Defective headers are logged but tolerated -- the response is still
    # returned to the caller.
    try:
        assert_header_parsing(httplib_response.msg)
    except (HeaderParsingError, TypeError) as hpe:  # Platform-specific: Python 3
        log.warning(
            "Failed to parse headers (url=%s): %s",
            self._absolute_url(url),
            hpe,
            exc_info=True,
        )

    return httplib_response
|
| 498 |
+
|
| 499 |
+
def _absolute_url(self, path):
    """Build the absolute URL string for *path* from this pool's scheme/host/port."""
    absolute = Url(scheme=self.scheme, host=self.host, port=self.port, path=path)
    return absolute.url
|
| 501 |
+
|
| 502 |
+
def close(self):
    """
    Close all pooled connections and disable the pool.
    """
    # Atomically detach the queue so no new connections can be checked
    # out, then drain and close whatever was left in it.
    old_pool, self.pool = self.pool, None
    if old_pool is not None:
        _close_pool_connections(old_pool)
|
| 513 |
+
|
| 514 |
+
def is_same_host(self, url):
    """
    Return ``True`` when ``url`` targets the same scheme/host/port as
    this connection pool.
    """
    # Relative URLs are trivially on the same host.
    if url.startswith("/"):
        return True

    # TODO: Add optional support for socket.gethostbyname checking.
    scheme, host, port = get_host(url)
    if host is not None:
        host = _normalize_host(host, scheme=scheme)

    # Use the scheme's default port so that an explicit-vs-implicit
    # default port still compares equal.
    default_port = port_by_scheme.get(scheme)
    if self.port and not port:
        port = default_port
    elif not self.port and port == default_port:
        port = None

    return (scheme, host, port) == (self.scheme, self.host, self.port)
|
| 534 |
+
|
| 535 |
+
def urlopen(
    self,
    method,
    url,
    body=None,
    headers=None,
    retries=None,
    redirect=True,
    assert_same_host=True,
    timeout=_Default,
    pool_timeout=None,
    release_conn=None,
    chunked=False,
    body_pos=None,
    **response_kw
):
    """
    Get a connection from the pool and perform an HTTP request. This is the
    lowest level call for making a request, so you'll need to specify all
    the raw details.

    .. note::

       More commonly, it's appropriate to use a convenience method provided
       by :class:`.RequestMethods`, such as :meth:`request`.

    .. note::

       `release_conn` will only behave as expected if
       `preload_content=False` because we want to make
       `preload_content=False` the default behaviour someday soon without
       breaking backwards compatibility.

    :param method:
        HTTP request method (such as GET, POST, PUT, etc.)

    :param url:
        The URL to perform the request on.

    :param body:
        Data to send in the request body, either :class:`str`, :class:`bytes`,
        an iterable of :class:`str`/:class:`bytes`, or a file-like object.

    :param headers:
        Dictionary of custom headers to send, such as User-Agent,
        If-None-Match, etc. If None, pool headers are used. If provided,
        these headers completely replace any pool-specific headers.

    :param retries:
        Configure the number of retries to allow before raising a
        :class:`~urllib3.exceptions.MaxRetryError` exception.

        Pass ``None`` to retry until you receive a response. Pass a
        :class:`~urllib3.util.retry.Retry` object for fine-grained control
        over different types of retries.
        Pass an integer number to retry connection errors that many times,
        but no other types of errors. Pass zero to never retry.

        If ``False``, then retries are disabled and any exception is raised
        immediately. Also, instead of raising a MaxRetryError on redirects,
        the redirect response will be returned.

    :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

    :param redirect:
        If True, automatically handle redirects (status codes 301, 302,
        303, 307, 308). Each redirect counts as a retry. Disabling retries
        will disable redirect, too.

    :param assert_same_host:
        If ``True``, will make sure that the host of the pool requests is
        consistent else will raise HostChangedError. When ``False``, you can
        use the pool on an HTTP proxy and request foreign hosts.

    :param timeout:
        If specified, overrides the default timeout for this one
        request. It may be a float (in seconds) or an instance of
        :class:`urllib3.util.Timeout`.

    :param pool_timeout:
        If set and the pool is set to block=True, then this method will
        block for ``pool_timeout`` seconds and raise EmptyPoolError if no
        connection is available within the time period.

    :param release_conn:
        If False, then the urlopen call will not release the connection
        back into the pool once a response is received (but will release if
        you read the entire contents of the response such as when
        `preload_content=True`). This is useful if you're not preloading
        the response's content immediately. You will need to call
        ``r.release_conn()`` on the response ``r`` to return the connection
        back into the pool. If None, it takes the value of
        ``response_kw.get('preload_content', True)``.

    :param chunked:
        If True, urllib3 will send the body using chunked transfer
        encoding. Otherwise, urllib3 will send the body using the standard
        content-length form. Defaults to False.

    :param int body_pos:
        Position to seek to in file-like body in the event of a retry or
        redirect. Typically this won't need to be set because urllib3 will
        auto-populate the value when needed.

    :param \\**response_kw:
        Additional parameters are passed to
        :meth:`urllib3.response.HTTPResponse.from_httplib`
    """

    parsed_url = parse_url(url)
    destination_scheme = parsed_url.scheme

    if headers is None:
        headers = self.headers

    # Normalize retries: None/int/False all become a Retry instance here.
    if not isinstance(retries, Retry):
        retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

    if release_conn is None:
        release_conn = response_kw.get("preload_content", True)

    # Check host
    if assert_same_host and not self.is_same_host(url):
        raise HostChangedError(self, url, retries)

    # Ensure that the URL we're connecting to is properly encoded
    if url.startswith("/"):
        url = six.ensure_str(_encode_target(url))
    else:
        url = six.ensure_str(parsed_url.url)

    conn = None

    # Track whether `conn` needs to be released before
    # returning/raising/recursing. Update this variable if necessary, and
    # leave `release_conn` constant throughout the function. That way, if
    # the function recurses, the original value of `release_conn` will be
    # passed down into the recursive call, and its value will be respected.
    #
    # See issue #651 [1] for details.
    #
    # [1] <https://github.com/urllib3/urllib3/issues/651>
    release_this_conn = release_conn

    http_tunnel_required = connection_requires_http_tunnel(
        self.proxy, self.proxy_config, destination_scheme
    )

    # Merge the proxy headers. Only done when not using HTTP CONNECT. We
    # have to copy the headers dict so we can safely change it without those
    # changes being reflected in anyone else's copy.
    if not http_tunnel_required:
        headers = headers.copy()
        headers.update(self.proxy_headers)

    # Must keep the exception bound to a separate variable or else Python 3
    # complains about UnboundLocalError.
    err = None

    # Keep track of whether we cleanly exited the except block. This
    # ensures we do proper cleanup in finally.
    clean_exit = False

    # Rewind body position, if needed. Record current position
    # for future rewinds in the event of a redirect/retry.
    body_pos = set_file_position(body, body_pos)

    try:
        # Request a connection from the queue.
        timeout_obj = self._get_timeout(timeout)
        conn = self._get_conn(timeout=pool_timeout)

        conn.timeout = timeout_obj.connect_timeout

        is_new_proxy_conn = self.proxy is not None and not getattr(
            conn, "sock", None
        )
        if is_new_proxy_conn and http_tunnel_required:
            self._prepare_proxy(conn)

        # Make the request on the httplib connection object.
        httplib_response = self._make_request(
            conn,
            method,
            url,
            timeout=timeout_obj,
            body=body,
            headers=headers,
            chunked=chunked,
        )

        # If we're going to release the connection in ``finally:``, then
        # the response doesn't need to know about the connection. Otherwise
        # it will also try to release it and we'll have a double-release
        # mess.
        response_conn = conn if not release_conn else None

        # Pass method to Response for length checking
        response_kw["request_method"] = method

        # Import httplib's response into our own wrapper object
        response = self.ResponseCls.from_httplib(
            httplib_response,
            pool=self,
            connection=response_conn,
            retries=retries,
            **response_kw
        )

        # Everything went great!
        clean_exit = True

    except EmptyPoolError:
        # Didn't get a connection from the pool, no need to clean up
        clean_exit = True
        release_this_conn = False
        raise

    except (
        TimeoutError,
        HTTPException,
        SocketError,
        ProtocolError,
        BaseSSLError,
        SSLError,
        CertificateError,
    ) as e:
        # Discard the connection for these exceptions. It will be
        # replaced during the next _get_conn() call.
        clean_exit = False

        def _is_ssl_error_message_from_http_proxy(ssl_error):
            # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
            # SSLErrors are kinda all over the place when it comes to the message,
            # so we try to cover our bases here!
            message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
            return (
                "wrong version number" in message
                or "unknown protocol" in message
                or "record layer failure" in message
            )

        # Try to detect a common user error with proxies which is to
        # set an HTTP proxy to be HTTPS when it should be 'http://'
        # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
        # Instead we add a nice error message and point to a URL.
        if (
            isinstance(e, BaseSSLError)
            and self.proxy
            and _is_ssl_error_message_from_http_proxy(e)
            and conn.proxy
            and conn.proxy.scheme == "https"
        ):
            e = ProxyError(
                "Your proxy appears to only use HTTP and not HTTPS, "
                "try changing your proxy URL to be HTTP. See: "
                "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                "#https-proxy-error-http-proxy",
                SSLError(e),
            )
        elif isinstance(e, (BaseSSLError, CertificateError)):
            e = SSLError(e)
        elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
            e = ProxyError("Cannot connect to proxy.", e)
        elif isinstance(e, (SocketError, HTTPException)):
            e = ProtocolError("Connection aborted.", e)

        # increment() raises MaxRetryError when the budget is exhausted,
        # which propagates out of this except block.
        retries = retries.increment(
            method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
        )
        retries.sleep()

        # Keep track of the error for the retry warning.
        err = e

    finally:
        if not clean_exit:
            # We hit some kind of exception, handled or otherwise. We need
            # to throw the connection away unless explicitly told not to.
            # Close the connection, set the variable to None, and make sure
            # we put the None back in the pool to avoid leaking it.
            conn = conn and conn.close()
            release_this_conn = True

        if release_this_conn:
            # Put the connection back to be reused. If the connection is
            # expired then it will be None, which will get replaced with a
            # fresh connection during _get_conn.
            self._put_conn(conn)

    if not conn:
        # Try again
        log.warning(
            "Retrying (%r) after connection broken by '%r': %s", retries, err, url
        )
        return self.urlopen(
            method,
            url,
            body,
            headers,
            retries,
            redirect,
            assert_same_host,
            timeout=timeout,
            pool_timeout=pool_timeout,
            release_conn=release_conn,
            chunked=chunked,
            body_pos=body_pos,
            **response_kw
        )

    # Handle redirect?
    redirect_location = redirect and response.get_redirect_location()
    if redirect_location:
        if response.status == 303:
            # Change the method according to RFC 9110, Section 15.4.4.
            method = "GET"
            # And lose the body not to transfer anything sensitive.
            body = None
            headers = HTTPHeaderDict(headers)._prepare_for_method_change()

        try:
            retries = retries.increment(method, url, response=response, _pool=self)
        except MaxRetryError:
            if retries.raise_on_redirect:
                response.drain_conn()
                raise
            return response

        response.drain_conn()
        retries.sleep_for_retry(response)
        log.debug("Redirecting %s -> %s", url, redirect_location)
        return self.urlopen(
            method,
            redirect_location,
            body,
            headers,
            retries=retries,
            redirect=redirect,
            assert_same_host=assert_same_host,
            timeout=timeout,
            pool_timeout=pool_timeout,
            release_conn=release_conn,
            chunked=chunked,
            body_pos=body_pos,
            **response_kw
        )

    # Check if we should retry the HTTP response.
    has_retry_after = bool(response.headers.get("Retry-After"))
    if retries.is_retry(method, response.status, has_retry_after):
        try:
            retries = retries.increment(method, url, response=response, _pool=self)
        except MaxRetryError:
            if retries.raise_on_status:
                response.drain_conn()
                raise
            return response

        response.drain_conn()
        retries.sleep(response)
        log.debug("Retry: %s", url)
        return self.urlopen(
            method,
            url,
            body,
            headers,
            retries=retries,
            redirect=redirect,
            assert_same_host=assert_same_host,
            timeout=timeout,
            pool_timeout=pool_timeout,
            release_conn=release_conn,
            chunked=chunked,
            body_pos=body_pos,
            **response_kw
        )

    return response
|
| 914 |
+
|
| 915 |
+
|
| 916 |
+
class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    ConnectionCls = HTTPSConnection

    def __init__(
        self,
        host,
        port=None,
        strict=False,
        timeout=Timeout.DEFAULT_TIMEOUT,
        maxsize=1,
        block=False,
        headers=None,
        retries=None,
        _proxy=None,
        _proxy_headers=None,
        key_file=None,
        cert_file=None,
        cert_reqs=None,
        key_password=None,
        ca_certs=None,
        ssl_version=None,
        assert_hostname=None,
        assert_fingerprint=None,
        ca_cert_dir=None,
        **conn_kw
    ):

        HTTPConnectionPool.__init__(
            self,
            host,
            port,
            strict,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw
        )

        # TLS configuration is stored on the pool and applied to each new
        # connection in _prepare_conn() / _new_conn().
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """

        # Only VerifiedHTTPSConnection accepts certificate settings; other
        # connection classes are returned untouched.
        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(
                key_file=self.key_file,
                key_password=self.key_password,
                cert_file=self.cert_file,
                cert_reqs=self.cert_reqs,
                ca_certs=self.ca_certs,
                ca_cert_dir=self.ca_cert_dir,
                assert_hostname=self.assert_hostname,
                assert_fingerprint=self.assert_fingerprint,
            )
            conn.ssl_version = self.ssl_version
        return conn

    def _prepare_proxy(self, conn):
        """
        Establishes a tunnel connection through HTTP CONNECT.

        Tunnel connection is established early because otherwise httplib would
        improperly set Host: header to proxy's IP:port.
        """

        conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)

        # An HTTPS proxy means TLS-within-TLS once the tunnel is up.
        if self.proxy.scheme == "https":
            conn.tls_in_tls_required = True

        conn.connect()

    def _new_conn(self):
        """
        Return a fresh :class:`http.client.HTTPSConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        # When a proxy is configured, the TCP connection targets the proxy,
        # not the final destination.
        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        conn = self.ConnectionCls(
            host=actual_host,
            port=actual_port,
            timeout=self.timeout.connect_timeout,
            strict=self.strict,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            **self.conn_kw
        )

        return self._prepare_conn(conn)

    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if not getattr(conn, "sock", None):  # AppEngine might not have `.sock`
            conn.connect()

        # Warn (but do not fail) when certificate verification is disabled.
        if not conn.is_verified:
            warnings.warn(
                (
                    "Unverified HTTPS request is being made to host '%s'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#ssl-warnings" % conn.host
                ),
                InsecureRequestWarning,
            )

        if getattr(conn, "proxy_is_verified", None) is False:
            warnings.warn(
                (
                    "Unverified HTTPS connection done to an HTTPS proxy. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
                    "#ssl-warnings"
                ),
                InsecureRequestWarning,
            )
|
| 1084 |
+
|
| 1085 |
+
|
| 1086 |
+
def connection_from_url(url, **kw):
|
| 1087 |
+
"""
|
| 1088 |
+
Given a url, return an :class:`.ConnectionPool` instance of its host.
|
| 1089 |
+
|
| 1090 |
+
This is a shortcut for not having to parse out the scheme, host, and port
|
| 1091 |
+
of the url before creating an :class:`.ConnectionPool` instance.
|
| 1092 |
+
|
| 1093 |
+
:param url:
|
| 1094 |
+
Absolute URL string that must include the scheme. Port is optional.
|
| 1095 |
+
|
| 1096 |
+
:param \\**kw:
|
| 1097 |
+
Passes additional parameters to the constructor of the appropriate
|
| 1098 |
+
:class:`.ConnectionPool`. Useful for specifying things like
|
| 1099 |
+
timeout, maxsize, headers, etc.
|
| 1100 |
+
|
| 1101 |
+
Example::
|
| 1102 |
+
|
| 1103 |
+
>>> conn = connection_from_url('http://google.com/')
|
| 1104 |
+
>>> r = conn.request('GET', '/')
|
| 1105 |
+
"""
|
| 1106 |
+
scheme, host, port = get_host(url)
|
| 1107 |
+
port = port or port_by_scheme.get(scheme, 80)
|
| 1108 |
+
if scheme == "https":
|
| 1109 |
+
return HTTPSConnectionPool(host, port=port, **kw)
|
| 1110 |
+
else:
|
| 1111 |
+
return HTTPConnectionPool(host, port=port, **kw)
|
| 1112 |
+
|
| 1113 |
+
|
| 1114 |
+
def _normalize_host(host, scheme):
|
| 1115 |
+
"""
|
| 1116 |
+
Normalize hosts for comparisons and use with sockets.
|
| 1117 |
+
"""
|
| 1118 |
+
|
| 1119 |
+
host = normalize_host(host, scheme)
|
| 1120 |
+
|
| 1121 |
+
# httplib doesn't like it when we include brackets in IPv6 addresses
|
| 1122 |
+
# Specifically, if we include brackets but also pass the port then
|
| 1123 |
+
# httplib crazily doubles up the square brackets on the Host header.
|
| 1124 |
+
# Instead, we need to make sure we never pass ``None`` as the port.
|
| 1125 |
+
# However, for backward compatibility reasons we can't actually
|
| 1126 |
+
# *assert* that. See http://bugs.python.org/issue28539
|
| 1127 |
+
if host.startswith("[") and host.endswith("]"):
|
| 1128 |
+
host = host[1:-1]
|
| 1129 |
+
return host
|
| 1130 |
+
|
| 1131 |
+
|
| 1132 |
+
def _close_pool_connections(pool):
|
| 1133 |
+
"""Drains a queue of connections and closes each one."""
|
| 1134 |
+
try:
|
| 1135 |
+
while True:
|
| 1136 |
+
conn = pool.get(block=False)
|
| 1137 |
+
if conn:
|
| 1138 |
+
conn.close()
|
| 1139 |
+
except queue.Empty:
|
| 1140 |
+
pass # Done.
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py
ADDED
|
File without changes
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (178 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc
ADDED
|
Binary file (1.36 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc
ADDED
|
Binary file (8.18 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc
ADDED
|
Binary file (3.6 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc
ADDED
|
Binary file (15.8 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc
ADDED
|
Binary file (21.9 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc
ADDED
|
Binary file (5.58 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module provides means to detect the App Engine environment.
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
import os
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def is_appengine():
|
| 9 |
+
return is_local_appengine() or is_prod_appengine()
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
def is_appengine_sandbox():
|
| 13 |
+
"""Reports if the app is running in the first generation sandbox.
|
| 14 |
+
|
| 15 |
+
The second generation runtimes are technically still in a sandbox, but it
|
| 16 |
+
is much less restrictive, so generally you shouldn't need to check for it.
|
| 17 |
+
see https://cloud.google.com/appengine/docs/standard/runtimes
|
| 18 |
+
"""
|
| 19 |
+
return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def is_local_appengine():
|
| 23 |
+
return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
|
| 24 |
+
"SERVER_SOFTWARE", ""
|
| 25 |
+
).startswith("Development/")
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def is_prod_appengine():
|
| 29 |
+
return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
|
| 30 |
+
"SERVER_SOFTWARE", ""
|
| 31 |
+
).startswith("Google App Engine/")
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def is_prod_appengine_mvms():
|
| 35 |
+
"""Deprecated."""
|
| 36 |
+
return False
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py
ADDED
|
File without changes
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (195 Bytes). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc
ADDED
|
Binary file (10.7 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc
ADDED
|
Binary file (9.08 kB). View file
|
|
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py
ADDED
|
@@ -0,0 +1,519 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module uses ctypes to bind a whole bunch of functions and constants from
|
| 3 |
+
SecureTransport. The goal here is to provide the low-level API to
|
| 4 |
+
SecureTransport. These are essentially the C-level functions and constants, and
|
| 5 |
+
they're pretty gross to work with.
|
| 6 |
+
|
| 7 |
+
This code is a bastardised version of the code found in Will Bond's oscrypto
|
| 8 |
+
library. An enormous debt is owed to him for blazing this trail for us. For
|
| 9 |
+
that reason, this code should be considered to be covered both by urllib3's
|
| 10 |
+
license and by oscrypto's:
|
| 11 |
+
|
| 12 |
+
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
| 13 |
+
|
| 14 |
+
Permission is hereby granted, free of charge, to any person obtaining a
|
| 15 |
+
copy of this software and associated documentation files (the "Software"),
|
| 16 |
+
to deal in the Software without restriction, including without limitation
|
| 17 |
+
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
| 18 |
+
and/or sell copies of the Software, and to permit persons to whom the
|
| 19 |
+
Software is furnished to do so, subject to the following conditions:
|
| 20 |
+
|
| 21 |
+
The above copyright notice and this permission notice shall be included in
|
| 22 |
+
all copies or substantial portions of the Software.
|
| 23 |
+
|
| 24 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 25 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 26 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 27 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 28 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
| 29 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
| 30 |
+
DEALINGS IN THE SOFTWARE.
|
| 31 |
+
"""
|
| 32 |
+
from __future__ import absolute_import
|
| 33 |
+
|
| 34 |
+
import platform
|
| 35 |
+
from ctypes import (
|
| 36 |
+
CDLL,
|
| 37 |
+
CFUNCTYPE,
|
| 38 |
+
POINTER,
|
| 39 |
+
c_bool,
|
| 40 |
+
c_byte,
|
| 41 |
+
c_char_p,
|
| 42 |
+
c_int32,
|
| 43 |
+
c_long,
|
| 44 |
+
c_size_t,
|
| 45 |
+
c_uint32,
|
| 46 |
+
c_ulong,
|
| 47 |
+
c_void_p,
|
| 48 |
+
)
|
| 49 |
+
from ctypes.util import find_library
|
| 50 |
+
|
| 51 |
+
from ...packages.six import raise_from
|
| 52 |
+
|
| 53 |
+
if platform.system() != "Darwin":
|
| 54 |
+
raise ImportError("Only macOS is supported")
|
| 55 |
+
|
| 56 |
+
version = platform.mac_ver()[0]
|
| 57 |
+
version_info = tuple(map(int, version.split(".")))
|
| 58 |
+
if version_info < (10, 8):
|
| 59 |
+
raise OSError(
|
| 60 |
+
"Only OS X 10.8 and newer are supported, not %s.%s"
|
| 61 |
+
% (version_info[0], version_info[1])
|
| 62 |
+
)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def load_cdll(name, macos10_16_path):
|
| 66 |
+
"""Loads a CDLL by name, falling back to known path on 10.16+"""
|
| 67 |
+
try:
|
| 68 |
+
# Big Sur is technically 11 but we use 10.16 due to the Big Sur
|
| 69 |
+
# beta being labeled as 10.16.
|
| 70 |
+
if version_info >= (10, 16):
|
| 71 |
+
path = macos10_16_path
|
| 72 |
+
else:
|
| 73 |
+
path = find_library(name)
|
| 74 |
+
if not path:
|
| 75 |
+
raise OSError # Caught and reraised as 'ImportError'
|
| 76 |
+
return CDLL(path, use_errno=True)
|
| 77 |
+
except OSError:
|
| 78 |
+
raise_from(ImportError("The library %s failed to load" % name), None)
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
Security = load_cdll(
|
| 82 |
+
"Security", "/System/Library/Frameworks/Security.framework/Security"
|
| 83 |
+
)
|
| 84 |
+
CoreFoundation = load_cdll(
|
| 85 |
+
"CoreFoundation",
|
| 86 |
+
"/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
Boolean = c_bool
|
| 91 |
+
CFIndex = c_long
|
| 92 |
+
CFStringEncoding = c_uint32
|
| 93 |
+
CFData = c_void_p
|
| 94 |
+
CFString = c_void_p
|
| 95 |
+
CFArray = c_void_p
|
| 96 |
+
CFMutableArray = c_void_p
|
| 97 |
+
CFDictionary = c_void_p
|
| 98 |
+
CFError = c_void_p
|
| 99 |
+
CFType = c_void_p
|
| 100 |
+
CFTypeID = c_ulong
|
| 101 |
+
|
| 102 |
+
CFTypeRef = POINTER(CFType)
|
| 103 |
+
CFAllocatorRef = c_void_p
|
| 104 |
+
|
| 105 |
+
OSStatus = c_int32
|
| 106 |
+
|
| 107 |
+
CFDataRef = POINTER(CFData)
|
| 108 |
+
CFStringRef = POINTER(CFString)
|
| 109 |
+
CFArrayRef = POINTER(CFArray)
|
| 110 |
+
CFMutableArrayRef = POINTER(CFMutableArray)
|
| 111 |
+
CFDictionaryRef = POINTER(CFDictionary)
|
| 112 |
+
CFArrayCallBacks = c_void_p
|
| 113 |
+
CFDictionaryKeyCallBacks = c_void_p
|
| 114 |
+
CFDictionaryValueCallBacks = c_void_p
|
| 115 |
+
|
| 116 |
+
SecCertificateRef = POINTER(c_void_p)
|
| 117 |
+
SecExternalFormat = c_uint32
|
| 118 |
+
SecExternalItemType = c_uint32
|
| 119 |
+
SecIdentityRef = POINTER(c_void_p)
|
| 120 |
+
SecItemImportExportFlags = c_uint32
|
| 121 |
+
SecItemImportExportKeyParameters = c_void_p
|
| 122 |
+
SecKeychainRef = POINTER(c_void_p)
|
| 123 |
+
SSLProtocol = c_uint32
|
| 124 |
+
SSLCipherSuite = c_uint32
|
| 125 |
+
SSLContextRef = POINTER(c_void_p)
|
| 126 |
+
SecTrustRef = POINTER(c_void_p)
|
| 127 |
+
SSLConnectionRef = c_uint32
|
| 128 |
+
SecTrustResultType = c_uint32
|
| 129 |
+
SecTrustOptionFlags = c_uint32
|
| 130 |
+
SSLProtocolSide = c_uint32
|
| 131 |
+
SSLConnectionType = c_uint32
|
| 132 |
+
SSLSessionOption = c_uint32
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
try:
|
| 136 |
+
Security.SecItemImport.argtypes = [
|
| 137 |
+
CFDataRef,
|
| 138 |
+
CFStringRef,
|
| 139 |
+
POINTER(SecExternalFormat),
|
| 140 |
+
POINTER(SecExternalItemType),
|
| 141 |
+
SecItemImportExportFlags,
|
| 142 |
+
POINTER(SecItemImportExportKeyParameters),
|
| 143 |
+
SecKeychainRef,
|
| 144 |
+
POINTER(CFArrayRef),
|
| 145 |
+
]
|
| 146 |
+
Security.SecItemImport.restype = OSStatus
|
| 147 |
+
|
| 148 |
+
Security.SecCertificateGetTypeID.argtypes = []
|
| 149 |
+
Security.SecCertificateGetTypeID.restype = CFTypeID
|
| 150 |
+
|
| 151 |
+
Security.SecIdentityGetTypeID.argtypes = []
|
| 152 |
+
Security.SecIdentityGetTypeID.restype = CFTypeID
|
| 153 |
+
|
| 154 |
+
Security.SecKeyGetTypeID.argtypes = []
|
| 155 |
+
Security.SecKeyGetTypeID.restype = CFTypeID
|
| 156 |
+
|
| 157 |
+
Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
|
| 158 |
+
Security.SecCertificateCreateWithData.restype = SecCertificateRef
|
| 159 |
+
|
| 160 |
+
Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
|
| 161 |
+
Security.SecCertificateCopyData.restype = CFDataRef
|
| 162 |
+
|
| 163 |
+
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
|
| 164 |
+
Security.SecCopyErrorMessageString.restype = CFStringRef
|
| 165 |
+
|
| 166 |
+
Security.SecIdentityCreateWithCertificate.argtypes = [
|
| 167 |
+
CFTypeRef,
|
| 168 |
+
SecCertificateRef,
|
| 169 |
+
POINTER(SecIdentityRef),
|
| 170 |
+
]
|
| 171 |
+
Security.SecIdentityCreateWithCertificate.restype = OSStatus
|
| 172 |
+
|
| 173 |
+
Security.SecKeychainCreate.argtypes = [
|
| 174 |
+
c_char_p,
|
| 175 |
+
c_uint32,
|
| 176 |
+
c_void_p,
|
| 177 |
+
Boolean,
|
| 178 |
+
c_void_p,
|
| 179 |
+
POINTER(SecKeychainRef),
|
| 180 |
+
]
|
| 181 |
+
Security.SecKeychainCreate.restype = OSStatus
|
| 182 |
+
|
| 183 |
+
Security.SecKeychainDelete.argtypes = [SecKeychainRef]
|
| 184 |
+
Security.SecKeychainDelete.restype = OSStatus
|
| 185 |
+
|
| 186 |
+
Security.SecPKCS12Import.argtypes = [
|
| 187 |
+
CFDataRef,
|
| 188 |
+
CFDictionaryRef,
|
| 189 |
+
POINTER(CFArrayRef),
|
| 190 |
+
]
|
| 191 |
+
Security.SecPKCS12Import.restype = OSStatus
|
| 192 |
+
|
| 193 |
+
SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
|
| 194 |
+
SSLWriteFunc = CFUNCTYPE(
|
| 195 |
+
OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
|
| 196 |
+
)
|
| 197 |
+
|
| 198 |
+
Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
|
| 199 |
+
Security.SSLSetIOFuncs.restype = OSStatus
|
| 200 |
+
|
| 201 |
+
Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
|
| 202 |
+
Security.SSLSetPeerID.restype = OSStatus
|
| 203 |
+
|
| 204 |
+
Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
|
| 205 |
+
Security.SSLSetCertificate.restype = OSStatus
|
| 206 |
+
|
| 207 |
+
Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
|
| 208 |
+
Security.SSLSetCertificateAuthorities.restype = OSStatus
|
| 209 |
+
|
| 210 |
+
Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
|
| 211 |
+
Security.SSLSetConnection.restype = OSStatus
|
| 212 |
+
|
| 213 |
+
Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
|
| 214 |
+
Security.SSLSetPeerDomainName.restype = OSStatus
|
| 215 |
+
|
| 216 |
+
Security.SSLHandshake.argtypes = [SSLContextRef]
|
| 217 |
+
Security.SSLHandshake.restype = OSStatus
|
| 218 |
+
|
| 219 |
+
Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
|
| 220 |
+
Security.SSLRead.restype = OSStatus
|
| 221 |
+
|
| 222 |
+
Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
|
| 223 |
+
Security.SSLWrite.restype = OSStatus
|
| 224 |
+
|
| 225 |
+
Security.SSLClose.argtypes = [SSLContextRef]
|
| 226 |
+
Security.SSLClose.restype = OSStatus
|
| 227 |
+
|
| 228 |
+
Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
|
| 229 |
+
Security.SSLGetNumberSupportedCiphers.restype = OSStatus
|
| 230 |
+
|
| 231 |
+
Security.SSLGetSupportedCiphers.argtypes = [
|
| 232 |
+
SSLContextRef,
|
| 233 |
+
POINTER(SSLCipherSuite),
|
| 234 |
+
POINTER(c_size_t),
|
| 235 |
+
]
|
| 236 |
+
Security.SSLGetSupportedCiphers.restype = OSStatus
|
| 237 |
+
|
| 238 |
+
Security.SSLSetEnabledCiphers.argtypes = [
|
| 239 |
+
SSLContextRef,
|
| 240 |
+
POINTER(SSLCipherSuite),
|
| 241 |
+
c_size_t,
|
| 242 |
+
]
|
| 243 |
+
Security.SSLSetEnabledCiphers.restype = OSStatus
|
| 244 |
+
|
| 245 |
+
Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)]
|
| 246 |
+
Security.SSLGetNumberEnabledCiphers.restype = OSStatus
|
| 247 |
+
|
| 248 |
+
Security.SSLGetEnabledCiphers.argtypes = [
|
| 249 |
+
SSLContextRef,
|
| 250 |
+
POINTER(SSLCipherSuite),
|
| 251 |
+
POINTER(c_size_t),
|
| 252 |
+
]
|
| 253 |
+
Security.SSLGetEnabledCiphers.restype = OSStatus
|
| 254 |
+
|
| 255 |
+
Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
|
| 256 |
+
Security.SSLGetNegotiatedCipher.restype = OSStatus
|
| 257 |
+
|
| 258 |
+
Security.SSLGetNegotiatedProtocolVersion.argtypes = [
|
| 259 |
+
SSLContextRef,
|
| 260 |
+
POINTER(SSLProtocol),
|
| 261 |
+
]
|
| 262 |
+
Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
|
| 263 |
+
|
| 264 |
+
Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
|
| 265 |
+
Security.SSLCopyPeerTrust.restype = OSStatus
|
| 266 |
+
|
| 267 |
+
Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
|
| 268 |
+
Security.SecTrustSetAnchorCertificates.restype = OSStatus
|
| 269 |
+
|
| 270 |
+
Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean]
|
| 271 |
+
Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
|
| 272 |
+
|
| 273 |
+
Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
|
| 274 |
+
Security.SecTrustEvaluate.restype = OSStatus
|
| 275 |
+
|
| 276 |
+
Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
|
| 277 |
+
Security.SecTrustGetCertificateCount.restype = CFIndex
|
| 278 |
+
|
| 279 |
+
Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
|
| 280 |
+
Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
|
| 281 |
+
|
| 282 |
+
Security.SSLCreateContext.argtypes = [
|
| 283 |
+
CFAllocatorRef,
|
| 284 |
+
SSLProtocolSide,
|
| 285 |
+
SSLConnectionType,
|
| 286 |
+
]
|
| 287 |
+
Security.SSLCreateContext.restype = SSLContextRef
|
| 288 |
+
|
| 289 |
+
Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
|
| 290 |
+
Security.SSLSetSessionOption.restype = OSStatus
|
| 291 |
+
|
| 292 |
+
Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
|
| 293 |
+
Security.SSLSetProtocolVersionMin.restype = OSStatus
|
| 294 |
+
|
| 295 |
+
Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
|
| 296 |
+
Security.SSLSetProtocolVersionMax.restype = OSStatus
|
| 297 |
+
|
| 298 |
+
try:
|
| 299 |
+
Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
|
| 300 |
+
Security.SSLSetALPNProtocols.restype = OSStatus
|
| 301 |
+
except AttributeError:
|
| 302 |
+
# Supported only in 10.12+
|
| 303 |
+
pass
|
| 304 |
+
|
| 305 |
+
Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
|
| 306 |
+
Security.SecCopyErrorMessageString.restype = CFStringRef
|
| 307 |
+
|
| 308 |
+
Security.SSLReadFunc = SSLReadFunc
|
| 309 |
+
Security.SSLWriteFunc = SSLWriteFunc
|
| 310 |
+
Security.SSLContextRef = SSLContextRef
|
| 311 |
+
Security.SSLProtocol = SSLProtocol
|
| 312 |
+
Security.SSLCipherSuite = SSLCipherSuite
|
| 313 |
+
Security.SecIdentityRef = SecIdentityRef
|
| 314 |
+
Security.SecKeychainRef = SecKeychainRef
|
| 315 |
+
Security.SecTrustRef = SecTrustRef
|
| 316 |
+
Security.SecTrustResultType = SecTrustResultType
|
| 317 |
+
Security.SecExternalFormat = SecExternalFormat
|
| 318 |
+
Security.OSStatus = OSStatus
|
| 319 |
+
|
| 320 |
+
Security.kSecImportExportPassphrase = CFStringRef.in_dll(
|
| 321 |
+
Security, "kSecImportExportPassphrase"
|
| 322 |
+
)
|
| 323 |
+
Security.kSecImportItemIdentity = CFStringRef.in_dll(
|
| 324 |
+
Security, "kSecImportItemIdentity"
|
| 325 |
+
)
|
| 326 |
+
|
| 327 |
+
# CoreFoundation time!
|
| 328 |
+
CoreFoundation.CFRetain.argtypes = [CFTypeRef]
|
| 329 |
+
CoreFoundation.CFRetain.restype = CFTypeRef
|
| 330 |
+
|
| 331 |
+
CoreFoundation.CFRelease.argtypes = [CFTypeRef]
|
| 332 |
+
CoreFoundation.CFRelease.restype = None
|
| 333 |
+
|
| 334 |
+
CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
|
| 335 |
+
CoreFoundation.CFGetTypeID.restype = CFTypeID
|
| 336 |
+
|
| 337 |
+
CoreFoundation.CFStringCreateWithCString.argtypes = [
|
| 338 |
+
CFAllocatorRef,
|
| 339 |
+
c_char_p,
|
| 340 |
+
CFStringEncoding,
|
| 341 |
+
]
|
| 342 |
+
CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
|
| 343 |
+
|
| 344 |
+
CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
|
| 345 |
+
CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
|
| 346 |
+
|
| 347 |
+
CoreFoundation.CFStringGetCString.argtypes = [
|
| 348 |
+
CFStringRef,
|
| 349 |
+
c_char_p,
|
| 350 |
+
CFIndex,
|
| 351 |
+
CFStringEncoding,
|
| 352 |
+
]
|
| 353 |
+
CoreFoundation.CFStringGetCString.restype = c_bool
|
| 354 |
+
|
| 355 |
+
CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
|
| 356 |
+
CoreFoundation.CFDataCreate.restype = CFDataRef
|
| 357 |
+
|
| 358 |
+
CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
|
| 359 |
+
CoreFoundation.CFDataGetLength.restype = CFIndex
|
| 360 |
+
|
| 361 |
+
CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
|
| 362 |
+
CoreFoundation.CFDataGetBytePtr.restype = c_void_p
|
| 363 |
+
|
| 364 |
+
CoreFoundation.CFDictionaryCreate.argtypes = [
|
| 365 |
+
CFAllocatorRef,
|
| 366 |
+
POINTER(CFTypeRef),
|
| 367 |
+
POINTER(CFTypeRef),
|
| 368 |
+
CFIndex,
|
| 369 |
+
CFDictionaryKeyCallBacks,
|
| 370 |
+
CFDictionaryValueCallBacks,
|
| 371 |
+
]
|
| 372 |
+
CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
|
| 373 |
+
|
| 374 |
+
CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
|
| 375 |
+
CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
|
| 376 |
+
|
| 377 |
+
CoreFoundation.CFArrayCreate.argtypes = [
|
| 378 |
+
CFAllocatorRef,
|
| 379 |
+
POINTER(CFTypeRef),
|
| 380 |
+
CFIndex,
|
| 381 |
+
CFArrayCallBacks,
|
| 382 |
+
]
|
| 383 |
+
CoreFoundation.CFArrayCreate.restype = CFArrayRef
|
| 384 |
+
|
| 385 |
+
CoreFoundation.CFArrayCreateMutable.argtypes = [
|
| 386 |
+
CFAllocatorRef,
|
| 387 |
+
CFIndex,
|
| 388 |
+
CFArrayCallBacks,
|
| 389 |
+
]
|
| 390 |
+
CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
|
| 391 |
+
|
| 392 |
+
CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
|
| 393 |
+
CoreFoundation.CFArrayAppendValue.restype = None
|
| 394 |
+
|
| 395 |
+
CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
|
| 396 |
+
CoreFoundation.CFArrayGetCount.restype = CFIndex
|
| 397 |
+
|
| 398 |
+
CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
|
| 399 |
+
CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
|
| 400 |
+
|
| 401 |
+
CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
|
| 402 |
+
CoreFoundation, "kCFAllocatorDefault"
|
| 403 |
+
)
|
| 404 |
+
CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
|
| 405 |
+
CoreFoundation, "kCFTypeArrayCallBacks"
|
| 406 |
+
)
|
| 407 |
+
CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
|
| 408 |
+
CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
|
| 409 |
+
)
|
| 410 |
+
CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
|
| 411 |
+
CoreFoundation, "kCFTypeDictionaryValueCallBacks"
|
| 412 |
+
)
|
| 413 |
+
|
| 414 |
+
CoreFoundation.CFTypeRef = CFTypeRef
|
| 415 |
+
CoreFoundation.CFArrayRef = CFArrayRef
|
| 416 |
+
CoreFoundation.CFStringRef = CFStringRef
|
| 417 |
+
CoreFoundation.CFDictionaryRef = CFDictionaryRef
|
| 418 |
+
|
| 419 |
+
except (AttributeError):
|
| 420 |
+
raise ImportError("Error initializing ctypes")
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
class CFConst(object):
|
| 424 |
+
"""
|
| 425 |
+
A class object that acts as essentially a namespace for CoreFoundation
|
| 426 |
+
constants.
|
| 427 |
+
"""
|
| 428 |
+
|
| 429 |
+
kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
|
| 430 |
+
|
| 431 |
+
|
| 432 |
+
class SecurityConst(object):
|
| 433 |
+
"""
|
| 434 |
+
A class object that acts as essentially a namespace for Security constants.
|
| 435 |
+
"""
|
| 436 |
+
|
| 437 |
+
kSSLSessionOptionBreakOnServerAuth = 0
|
| 438 |
+
|
| 439 |
+
kSSLProtocol2 = 1
|
| 440 |
+
kSSLProtocol3 = 2
|
| 441 |
+
kTLSProtocol1 = 4
|
| 442 |
+
kTLSProtocol11 = 7
|
| 443 |
+
kTLSProtocol12 = 8
|
| 444 |
+
# SecureTransport does not support TLS 1.3 even if there's a constant for it
|
| 445 |
+
kTLSProtocol13 = 10
|
| 446 |
+
kTLSProtocolMaxSupported = 999
|
| 447 |
+
|
| 448 |
+
kSSLClientSide = 1
|
| 449 |
+
kSSLStreamType = 0
|
| 450 |
+
|
| 451 |
+
kSecFormatPEMSequence = 10
|
| 452 |
+
|
| 453 |
+
kSecTrustResultInvalid = 0
|
| 454 |
+
kSecTrustResultProceed = 1
|
| 455 |
+
# This gap is present on purpose: this was kSecTrustResultConfirm, which
|
| 456 |
+
# is deprecated.
|
| 457 |
+
kSecTrustResultDeny = 3
|
| 458 |
+
kSecTrustResultUnspecified = 4
|
| 459 |
+
kSecTrustResultRecoverableTrustFailure = 5
|
| 460 |
+
kSecTrustResultFatalTrustFailure = 6
|
| 461 |
+
kSecTrustResultOtherError = 7
|
| 462 |
+
|
| 463 |
+
errSSLProtocol = -9800
|
| 464 |
+
errSSLWouldBlock = -9803
|
| 465 |
+
errSSLClosedGraceful = -9805
|
| 466 |
+
errSSLClosedNoNotify = -9816
|
| 467 |
+
errSSLClosedAbort = -9806
|
| 468 |
+
|
| 469 |
+
errSSLXCertChainInvalid = -9807
|
| 470 |
+
errSSLCrypto = -9809
|
| 471 |
+
errSSLInternal = -9810
|
| 472 |
+
errSSLCertExpired = -9814
|
| 473 |
+
errSSLCertNotYetValid = -9815
|
| 474 |
+
errSSLUnknownRootCert = -9812
|
| 475 |
+
errSSLNoRootCert = -9813
|
| 476 |
+
errSSLHostNameMismatch = -9843
|
| 477 |
+
errSSLPeerHandshakeFail = -9824
|
| 478 |
+
errSSLPeerUserCancelled = -9839
|
| 479 |
+
errSSLWeakPeerEphemeralDHKey = -9850
|
| 480 |
+
errSSLServerAuthCompleted = -9841
|
| 481 |
+
errSSLRecordOverflow = -9847
|
| 482 |
+
|
| 483 |
+
errSecVerifyFailed = -67808
|
| 484 |
+
errSecNoTrustSettings = -25263
|
| 485 |
+
errSecItemNotFound = -25300
|
| 486 |
+
errSecInvalidTrustSettings = -25262
|
| 487 |
+
|
| 488 |
+
# Cipher suites. We only pick the ones our default cipher string allows.
|
| 489 |
+
# Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
|
| 490 |
+
TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
|
| 491 |
+
TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
|
| 492 |
+
TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
|
| 493 |
+
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
|
| 494 |
+
TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
|
| 495 |
+
TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
|
| 496 |
+
TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
|
| 497 |
+
TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
|
| 498 |
+
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
|
| 499 |
+
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
|
| 500 |
+
TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
|
| 501 |
+
TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
|
| 502 |
+
TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
|
| 503 |
+
TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
|
| 504 |
+
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
|
| 505 |
+
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
|
| 506 |
+
TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
|
| 507 |
+
TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
|
| 508 |
+
TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
|
| 509 |
+
TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
|
| 510 |
+
TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
|
| 511 |
+
TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
|
| 512 |
+
TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
|
| 513 |
+
TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
|
| 514 |
+
TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
|
| 515 |
+
TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
|
| 516 |
+
TLS_AES_128_GCM_SHA256 = 0x1301
|
| 517 |
+
TLS_AES_256_GCM_SHA384 = 0x1302
|
| 518 |
+
TLS_AES_128_CCM_8_SHA256 = 0x1305
|
| 519 |
+
TLS_AES_128_CCM_SHA256 = 0x1304
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py
ADDED
|
@@ -0,0 +1,397 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Low-level helpers for the SecureTransport bindings.
|
| 3 |
+
|
| 4 |
+
These are Python functions that are not directly related to the high-level APIs
|
| 5 |
+
but are necessary to get them to work. They include a whole bunch of low-level
|
| 6 |
+
CoreFoundation messing about and memory management. The concerns in this module
|
| 7 |
+
are almost entirely about trying to avoid memory leaks and providing
|
| 8 |
+
appropriate and useful assistance to the higher-level code.
|
| 9 |
+
"""
|
| 10 |
+
import base64
|
| 11 |
+
import ctypes
|
| 12 |
+
import itertools
|
| 13 |
+
import os
|
| 14 |
+
import re
|
| 15 |
+
import ssl
|
| 16 |
+
import struct
|
| 17 |
+
import tempfile
|
| 18 |
+
|
| 19 |
+
from .bindings import CFConst, CoreFoundation, Security
|
| 20 |
+
|
| 21 |
+
# This regular expression is used to grab PEM data out of a PEM bundle.
|
| 22 |
+
_PEM_CERTS_RE = re.compile(
|
| 23 |
+
b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _cf_data_from_bytes(bytestring):
    """
    Wrap a Python bytestring in a newly allocated CFData object.

    The returned CFData must be released with CFRelease by the caller.
    """
    allocator = CoreFoundation.kCFAllocatorDefault
    return CoreFoundation.CFDataCreate(allocator, bytestring, len(bytestring))
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def _cf_dictionary_from_tuples(tuples):
    """
    Build a CFDictionary from a list of Python (key, value) tuples.
    """
    size = len(tuples)

    # Materialize parallel C arrays of keys and values, preserving the
    # pairing order of the input tuples.
    cf_keys = (CoreFoundation.CFTypeRef * size)(*(pair[0] for pair in tuples))
    cf_values = (CoreFoundation.CFTypeRef * size)(*(pair[1] for pair in tuples))

    return CoreFoundation.CFDictionaryCreate(
        CoreFoundation.kCFAllocatorDefault,
        cf_keys,
        cf_values,
        size,
        CoreFoundation.kCFTypeDictionaryKeyCallBacks,
        CoreFoundation.kCFTypeDictionaryValueCallBacks,
    )
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def _cfstr(py_bstr):
    """
    Convert Python binary data into a CFString.

    The returned CFString must be CFReleased by the caller.
    """
    return CoreFoundation.CFStringCreateWithCString(
        CoreFoundation.kCFAllocatorDefault,
        ctypes.c_char_p(py_bstr),
        CFConst.kCFStringEncodingUTF8,
    )
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def _create_cfstring_array(lst):
    """
    Given a list of Python binary data, create an associated CFMutableArray.
    The array must be CFReleased by the caller.

    Raises an ssl.SSLError on failure.
    """
    cf_arr = None
    try:
        cf_arr = CoreFoundation.CFArrayCreateMutable(
            CoreFoundation.kCFAllocatorDefault,
            0,
            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
        )
        if not cf_arr:
            raise MemoryError("Unable to allocate memory!")
        for item in lst:
            cf_str = _cfstr(item)
            if not cf_str:
                raise MemoryError("Unable to allocate memory!")
            try:
                # CFArrayAppendValue retains the value, so we can drop our
                # own reference immediately afterwards in the finally block.
                CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
            finally:
                CoreFoundation.CFRelease(cf_str)
    except BaseException as e:
        # On any failure, release the partially built array ourselves
        # (the caller never sees it) and surface the problem as SSLError.
        if cf_arr:
            CoreFoundation.CFRelease(cf_arr)
        raise ssl.SSLError("Unable to allocate array: %s" % (e,))
    return cf_arr
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
def _cf_string_to_unicode(value):
    """
    Creates a Unicode string from a CFString object. Used entirely for error
    reporting.

    Yes, it annoys me quite a lot that this function is this complex.
    """
    value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))

    # Fast path: CFStringGetCStringPtr can hand back an internal pointer
    # without copying, but it may legitimately return NULL (None).
    string = CoreFoundation.CFStringGetCStringPtr(
        value_as_void_p, CFConst.kCFStringEncodingUTF8
    )
    if string is None:
        # Slow path: copy the characters into our own fixed-size buffer.
        buffer = ctypes.create_string_buffer(1024)
        result = CoreFoundation.CFStringGetCString(
            value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
        )
        if not result:
            raise OSError("Error copying C string from CFStringRef")
        string = buffer.value
    if string is not None:
        string = string.decode("utf-8")
    return string
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
def _assert_no_error(error, exception_class=None):
|
| 130 |
+
"""
|
| 131 |
+
Checks the return code and throws an exception if there is an error to
|
| 132 |
+
report
|
| 133 |
+
"""
|
| 134 |
+
if error == 0:
|
| 135 |
+
return
|
| 136 |
+
|
| 137 |
+
cf_error_string = Security.SecCopyErrorMessageString(error, None)
|
| 138 |
+
output = _cf_string_to_unicode(cf_error_string)
|
| 139 |
+
CoreFoundation.CFRelease(cf_error_string)
|
| 140 |
+
|
| 141 |
+
if output is None or output == u"":
|
| 142 |
+
output = u"OSStatus %s" % error
|
| 143 |
+
|
| 144 |
+
if exception_class is None:
|
| 145 |
+
exception_class = ssl.SSLError
|
| 146 |
+
|
| 147 |
+
raise exception_class(output)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
def _cert_array_from_pem(pem_bundle):
    """
    Given a bundle of certs in PEM format, turns them into a CFArray of certs
    that can be used to validate a cert chain.
    """
    # Normalize the PEM bundle's line endings.
    pem_bundle = pem_bundle.replace(b"\r\n", b"\n")

    # Extract the base64 payload of every BEGIN/END CERTIFICATE section and
    # decode each one to DER bytes.
    der_certs = [
        base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
    ]
    if not der_certs:
        raise ssl.SSLError("No root certificates specified")

    cert_array = CoreFoundation.CFArrayCreateMutable(
        CoreFoundation.kCFAllocatorDefault,
        0,
        ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
    )
    if not cert_array:
        raise ssl.SSLError("Unable to allocate memory!")

    try:
        for der_bytes in der_certs:
            certdata = _cf_data_from_bytes(der_bytes)
            if not certdata:
                raise ssl.SSLError("Unable to allocate memory!")
            cert = Security.SecCertificateCreateWithData(
                CoreFoundation.kCFAllocatorDefault, certdata
            )
            # The SecCertificate holds its own copy; our CFData is no longer
            # needed either way.
            CoreFoundation.CFRelease(certdata)
            if not cert:
                raise ssl.SSLError("Unable to build cert object!")

            # CFArrayAppendValue retains the cert, so release our reference.
            CoreFoundation.CFArrayAppendValue(cert_array, cert)
            CoreFoundation.CFRelease(cert)
    except Exception:
        # We need to free the array before the exception bubbles further.
        # We only want to do that if an error occurs: otherwise, the caller
        # should free.
        CoreFoundation.CFRelease(cert_array)
        raise

    return cert_array
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def _is_cert(item):
    """
    Return True if the given CFTypeRef is a certificate.
    """
    return CoreFoundation.CFGetTypeID(item) == Security.SecCertificateGetTypeID()
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
def _is_identity(item):
    """
    Return True if the given CFTypeRef is an identity.
    """
    return CoreFoundation.CFGetTypeID(item) == Security.SecIdentityGetTypeID()
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def _temporary_keychain():
    """
    This function creates a temporary Mac keychain that we can use to work with
    credentials. This keychain uses a one-time password and a temporary file to
    store the data. We expect to have one keychain per socket. The returned
    SecKeychainRef must be freed by the caller, including calling
    SecKeychainDelete.

    Returns a tuple of the SecKeychainRef and the path to the temporary
    directory that contains it.
    """
    # Unfortunately, SecKeychainCreate requires a path to a keychain. This
    # means we cannot use mkstemp to use a generic temporary file. Instead,
    # we're going to create a temporary directory and a filename to use there.
    # This filename will be 8 random bytes expanded into base64. We also need
    # some random bytes to password-protect the keychain we're creating, so we
    # ask for 40 random bytes.
    random_bytes = os.urandom(40)
    filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
    password = base64.b16encode(random_bytes[8:])  # Must be valid UTF-8
    tempdirectory = tempfile.mkdtemp()

    keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")

    # We now want to create the keychain itself.
    keychain = Security.SecKeychainRef()
    status = Security.SecKeychainCreate(
        keychain_path, len(password), password, False, None, ctypes.byref(keychain)
    )
    # Raises ssl.SSLError if the keychain could not be created.
    _assert_no_error(status)

    # Having created the keychain, we want to pass it off to the caller.
    return keychain, tempdirectory
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
def _load_items_from_file(keychain, path):
    """
    Given a single file, loads all the trust objects from it into arrays and
    the keychain.
    Returns a tuple of lists: the first list is a list of identities, the
    second a list of certs.
    """
    certificates = []
    identities = []
    result_array = None

    with open(path, "rb") as f:
        raw_filedata = f.read()

    try:
        filedata = CoreFoundation.CFDataCreate(
            CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
        )
        result_array = CoreFoundation.CFArrayRef()
        result = Security.SecItemImport(
            filedata,  # cert data
            None,  # Filename, leaving it out for now
            None,  # What the type of the file is, we don't care
            None,  # what's in the file, we don't care
            0,  # import flags
            None,  # key params, can include passphrase in the future
            keychain,  # The keychain to insert into
            ctypes.byref(result_array),  # Results
        )
        _assert_no_error(result)

        # A CFArray is not very useful to us as an intermediary
        # representation, so we are going to extract the objects we want
        # and then free the array. We don't need to keep hold of keys: the
        # keychain already has them!
        result_count = CoreFoundation.CFArrayGetCount(result_array)
        for index in range(result_count):
            item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
            item = ctypes.cast(item, CoreFoundation.CFTypeRef)

            if _is_cert(item):
                # CFRetain before keeping the item: releasing result_array
                # below would otherwise drop our only reference to it.
                CoreFoundation.CFRetain(item)
                certificates.append(item)
            elif _is_identity(item):
                CoreFoundation.CFRetain(item)
                identities.append(item)
    finally:
        if result_array:
            CoreFoundation.CFRelease(result_array)

        CoreFoundation.CFRelease(filedata)

    return (identities, certificates)
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
def _load_client_cert_chain(keychain, *paths):
    """
    Load certificates and maybe keys from a number of files. Has the end goal
    of returning a CFArray containing one SecIdentityRef, and then zero or more
    SecCertificateRef objects, suitable for use as a client certificate trust
    chain.
    """
    # Ok, the strategy.
    #
    # This relies on knowing that macOS will not give you a SecIdentityRef
    # unless you have imported a key into a keychain. This is a somewhat
    # artificial limitation of macOS (for example, it doesn't necessarily
    # affect iOS), but there is nothing inside Security.framework that lets you
    # get a SecIdentityRef without having a key in a keychain.
    #
    # So the policy here is we take all the files and iterate them in order.
    # Each one will use SecItemImport to have one or more objects loaded from
    # it. We will also point at a keychain that macOS can use to work with the
    # private key.
    #
    # Once we have all the objects, we'll check what we actually have. If we
    # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
    # we'll take the first certificate (which we assume to be our leaf) and
    # ask the keychain to give us a SecIdentityRef with that cert's associated
    # key.
    #
    # We'll then return a CFArray containing the trust chain: one
    # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
    # responsibility for freeing this CFArray will be with the caller. This
    # CFArray must remain alive for the entire connection, so in practice it
    # will be stored with a single SSLSocket, along with the reference to the
    # keychain.
    certificates = []
    identities = []

    # Filter out bad paths.
    paths = (path for path in paths if path)

    try:
        for file_path in paths:
            new_identities, new_certs = _load_items_from_file(keychain, file_path)
            identities.extend(new_identities)
            certificates.extend(new_certs)

        # Ok, we have everything. The question is: do we have an identity? If
        # not, we want to grab one from the first cert we have.
        if not identities:
            # NOTE(review): if no file contained any certificate either,
            # certificates[0] below raises IndexError — presumably callers
            # always pass at least one cert file; confirm at call sites.
            new_identity = Security.SecIdentityRef()
            status = Security.SecIdentityCreateWithCertificate(
                keychain, certificates[0], ctypes.byref(new_identity)
            )
            _assert_no_error(status)
            identities.append(new_identity)

            # We now want to release the original certificate, as we no longer
            # need it.
            CoreFoundation.CFRelease(certificates.pop(0))

        # We now need to build a new CFArray that holds the trust chain.
        trust_chain = CoreFoundation.CFArrayCreateMutable(
            CoreFoundation.kCFAllocatorDefault,
            0,
            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
        )
        for item in itertools.chain(identities, certificates):
            # ArrayAppendValue does a CFRetain on the item. That's fine,
            # because the finally block will release our other refs to them.
            CoreFoundation.CFArrayAppendValue(trust_chain, item)

        return trust_chain
    finally:
        for obj in itertools.chain(identities, certificates):
            CoreFoundation.CFRelease(obj)
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
TLS_PROTOCOL_VERSIONS = {
|
| 378 |
+
"SSLv2": (0, 2),
|
| 379 |
+
"SSLv3": (3, 0),
|
| 380 |
+
"TLSv1": (3, 1),
|
| 381 |
+
"TLSv1.1": (3, 2),
|
| 382 |
+
"TLSv1.2": (3, 3),
|
| 383 |
+
}
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
def _build_tls_unknown_ca_alert(version):
|
| 387 |
+
"""
|
| 388 |
+
Builds a TLS alert record for an unknown CA.
|
| 389 |
+
"""
|
| 390 |
+
ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
|
| 391 |
+
severity_fatal = 0x02
|
| 392 |
+
description_unknown_ca = 0x30
|
| 393 |
+
msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
|
| 394 |
+
msg_len = len(msg)
|
| 395 |
+
record_type_alert = 0x15
|
| 396 |
+
record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
|
| 397 |
+
return record
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/appengine.py
ADDED
|
@@ -0,0 +1,314 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
This module provides a pool manager that uses Google App Engine's
|
| 3 |
+
`URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
|
| 4 |
+
|
| 5 |
+
Example usage::
|
| 6 |
+
|
| 7 |
+
from pip._vendor.urllib3 import PoolManager
|
| 8 |
+
from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
|
| 9 |
+
|
| 10 |
+
if is_appengine_sandbox():
|
| 11 |
+
# AppEngineManager uses AppEngine's URLFetch API behind the scenes
|
| 12 |
+
http = AppEngineManager()
|
| 13 |
+
else:
|
| 14 |
+
# PoolManager uses a socket-level API behind the scenes
|
| 15 |
+
http = PoolManager()
|
| 16 |
+
|
| 17 |
+
r = http.request('GET', 'https://google.com/')
|
| 18 |
+
|
| 19 |
+
There are `limitations <https://cloud.google.com/appengine/docs/python/\
|
| 20 |
+
urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
|
| 21 |
+
the best choice for your application. There are three options for using
|
| 22 |
+
urllib3 on Google App Engine:
|
| 23 |
+
|
| 24 |
+
1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
|
| 25 |
+
cost-effective in many circumstances as long as your usage is within the
|
| 26 |
+
limitations.
|
| 27 |
+
2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
|
| 28 |
+
Sockets also have `limitations and restrictions
|
| 29 |
+
<https://cloud.google.com/appengine/docs/python/sockets/\
|
| 30 |
+
#limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
|
| 31 |
+
To use sockets, be sure to specify the following in your ``app.yaml``::
|
| 32 |
+
|
| 33 |
+
env_variables:
|
| 34 |
+
GAE_USE_SOCKETS_HTTPLIB : 'true'
|
| 35 |
+
|
| 36 |
+
3. If you are using `App Engine Flexible
|
| 37 |
+
<https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
|
| 38 |
+
:class:`PoolManager` without any configuration or special environment variables.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
from __future__ import absolute_import
|
| 42 |
+
|
| 43 |
+
import io
|
| 44 |
+
import logging
|
| 45 |
+
import warnings
|
| 46 |
+
|
| 47 |
+
from ..exceptions import (
|
| 48 |
+
HTTPError,
|
| 49 |
+
HTTPWarning,
|
| 50 |
+
MaxRetryError,
|
| 51 |
+
ProtocolError,
|
| 52 |
+
SSLError,
|
| 53 |
+
TimeoutError,
|
| 54 |
+
)
|
| 55 |
+
from ..packages.six.moves.urllib.parse import urljoin
|
| 56 |
+
from ..request import RequestMethods
|
| 57 |
+
from ..response import HTTPResponse
|
| 58 |
+
from ..util.retry import Retry
|
| 59 |
+
from ..util.timeout import Timeout
|
| 60 |
+
from . import _appengine_environ
|
| 61 |
+
|
| 62 |
+
try:
|
| 63 |
+
from google.appengine.api import urlfetch
|
| 64 |
+
except ImportError:
|
| 65 |
+
urlfetch = None
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
log = logging.getLogger(__name__)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class AppEnginePlatformWarning(HTTPWarning):
    """Warning issued when urllib3 falls back to URLFetch-specific behaviour
    (e.g. total-only retries, coarse timeouts) on Google App Engine."""

    pass
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class AppEnginePlatformError(HTTPError):
    """Raised when a request cannot be serviced within URLFetch's platform
    limits (size caps, unsupported methods, or URLFetch unavailable)."""

    pass
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class AppEngineManager(RequestMethods):
    """
    Connection manager for Google App Engine sandbox applications.

    This manager uses the URLFetch service directly instead of using the
    emulated httplib, and is subject to URLFetch limitations as described in
    the App Engine documentation `here
    <https://cloud.google.com/appengine/docs/python/urlfetch>`_.

    Notably it will raise an :class:`AppEnginePlatformError` if:
    * URLFetch is not available.
    * If you attempt to use this on App Engine Flexible, as full socket
      support is available.
    * If a request size is more than 10 megabytes.
    * If a response size is more than 32 megabytes.
    * If you use an unsupported request method such as OPTIONS.

    Beyond those cases, it will raise normal urllib3 errors.
    """

    def __init__(
        self,
        headers=None,
        retries=None,
        validate_certificate=True,
        urlfetch_retries=True,
    ):
        # The module-level `urlfetch` is None when google.appengine.api
        # could not be imported, so bail out early in that case.
        if not urlfetch:
            raise AppEnginePlatformError(
                "URLFetch is not available in this environment."
            )

        warnings.warn(
            "urllib3 is using URLFetch on Google App Engine sandbox instead "
            "of sockets. To use sockets directly instead of URLFetch see "
            "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
            AppEnginePlatformWarning,
        )

        RequestMethods.__init__(self, headers)
        self.validate_certificate = validate_certificate
        self.urlfetch_retries = urlfetch_retries

        self.retries = retries or Retry.DEFAULT

    def __enter__(self):
        """Support ``with AppEngineManager() as http:`` usage."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Return False to re-raise any potential exceptions
        return False

    def urlopen(
        self,
        method,
        url,
        body=None,
        headers=None,
        retries=None,
        redirect=True,
        timeout=Timeout.DEFAULT_TIMEOUT,
        **response_kw
    ):
        """
        Perform a request via ``urlfetch.fetch``, translating URLFetch
        exceptions into urllib3 ones and handling redirects/retries by
        recursing into ``urlopen`` with a decremented ``retries``.
        """
        retries = self._get_retries(retries, redirect)

        try:
            follow_redirects = redirect and retries.redirect != 0 and retries.total
            response = urlfetch.fetch(
                url,
                payload=body,
                method=method,
                headers=headers or {},
                allow_truncated=False,
                follow_redirects=self.urlfetch_retries and follow_redirects,
                deadline=self._get_absolute_timeout(timeout),
                validate_certificate=self.validate_certificate,
            )
        except urlfetch.DeadlineExceededError as e:
            raise TimeoutError(self, e)

        except urlfetch.InvalidURLError as e:
            # URLFetch reports oversized requests via InvalidURLError; the
            # message text is the only way to tell the cases apart.
            if "too large" in str(e):
                raise AppEnginePlatformError(
                    "URLFetch request too large, URLFetch only "
                    "supports requests up to 10mb in size.",
                    e,
                )
            raise ProtocolError(e)

        except urlfetch.DownloadError as e:
            if "Too many redirects" in str(e):
                raise MaxRetryError(self, url, reason=e)
            raise ProtocolError(e)

        except urlfetch.ResponseTooLargeError as e:
            raise AppEnginePlatformError(
                "URLFetch response too large, URLFetch only supports"
                "responses up to 32mb in size.",
                e,
            )

        except urlfetch.SSLCertificateError as e:
            raise SSLError(e)

        except urlfetch.InvalidMethodError as e:
            raise AppEnginePlatformError(
                "URLFetch does not support method: %s" % method, e
            )

        http_response = self._urlfetch_response_to_http_response(
            response, retries=retries, **response_kw
        )

        # Handle redirect?
        redirect_location = redirect and http_response.get_redirect_location()
        if redirect_location:
            # Check for redirect response
            if self.urlfetch_retries and retries.raise_on_redirect:
                raise MaxRetryError(self, url, "too many redirects")
            else:
                # Per RFC 7231, a 303 redirect is always followed with GET.
                if http_response.status == 303:
                    method = "GET"

                try:
                    retries = retries.increment(
                        method, url, response=http_response, _pool=self
                    )
                except MaxRetryError:
                    if retries.raise_on_redirect:
                        raise MaxRetryError(self, url, "too many redirects")
                    return http_response

                retries.sleep_for_retry(http_response)
                log.debug("Redirecting %s -> %s", url, redirect_location)
                redirect_url = urljoin(url, redirect_location)
                return self.urlopen(
                    method,
                    redirect_url,
                    body,
                    headers,
                    retries=retries,
                    redirect=redirect,
                    timeout=timeout,
                    **response_kw
                )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(http_response.headers.get("Retry-After"))
        if retries.is_retry(method, http_response.status, has_retry_after):
            retries = retries.increment(method, url, response=http_response, _pool=self)
            log.debug("Retry: %s", url)
            retries.sleep(http_response)
            return self.urlopen(
                method,
                url,
                body=body,
                headers=headers,
                retries=retries,
                redirect=redirect,
                timeout=timeout,
                **response_kw
            )

        return http_response

    def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
        """
        Convert a URLFetch response object into a urllib3 ``HTTPResponse``,
        normalizing headers that URLFetch has already acted on.
        """
        if is_prod_appengine():
            # Production GAE handles deflate encoding automatically, but does
            # not remove the encoding header.
            content_encoding = urlfetch_resp.headers.get("content-encoding")

            if content_encoding == "deflate":
                del urlfetch_resp.headers["content-encoding"]

        transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
        # We have a full response's content,
        # so let's make sure we don't report ourselves as chunked data.
        if transfer_encoding == "chunked":
            encodings = transfer_encoding.split(",")
            encodings.remove("chunked")
            urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)

        original_response = HTTPResponse(
            # In order for decoding to work, we must present the content as
            # a file-like object.
            body=io.BytesIO(urlfetch_resp.content),
            msg=urlfetch_resp.header_msg,
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            **response_kw
        )

        return HTTPResponse(
            body=io.BytesIO(urlfetch_resp.content),
            headers=urlfetch_resp.headers,
            status=urlfetch_resp.status_code,
            original_response=original_response,
            **response_kw
        )

    def _get_absolute_timeout(self, timeout):
        """
        Collapse a urllib3 ``Timeout`` into the single deadline value
        URLFetch supports, warning when granularity is lost.
        """
        if timeout is Timeout.DEFAULT_TIMEOUT:
            return None  # Defer to URLFetch's default.
        if isinstance(timeout, Timeout):
            if timeout._read is not None or timeout._connect is not None:
                warnings.warn(
                    "URLFetch does not support granular timeout settings, "
                    "reverting to total or default URLFetch timeout.",
                    AppEnginePlatformWarning,
                )
            return timeout.total
        return timeout

    def _get_retries(self, retries, redirect):
        """
        Normalize ``retries`` into a ``Retry`` object, warning about retry
        parameters URLFetch cannot honor.
        """
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if retries.connect or retries.read or retries.redirect:
            warnings.warn(
                "URLFetch only supports total retries and does not "
                "recognize connect, read, or redirect retry parameters.",
                AppEnginePlatformWarning,
            )

        return retries
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
# Alias methods from _appengine_environ to maintain public API interface.
|
| 309 |
+
|
| 310 |
+
is_appengine = _appengine_environ.is_appengine
|
| 311 |
+
is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
|
| 312 |
+
is_local_appengine = _appengine_environ.is_local_appengine
|
| 313 |
+
is_prod_appengine = _appengine_environ.is_prod_appengine
|
| 314 |
+
is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
NTLM authenticating pool, contributed by erikcederstran
|
| 3 |
+
|
| 4 |
+
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
|
| 5 |
+
"""
|
| 6 |
+
from __future__ import absolute_import
|
| 7 |
+
|
| 8 |
+
import warnings
|
| 9 |
+
from logging import getLogger
|
| 10 |
+
|
| 11 |
+
from ntlm import ntlm
|
| 12 |
+
|
| 13 |
+
from .. import HTTPSConnectionPool
|
| 14 |
+
from ..packages.six.moves.http_client import HTTPSConnection
|
| 15 |
+
|
| 16 |
+
warnings.warn(
|
| 17 |
+
"The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
|
| 18 |
+
"in urllib3 v2.0 release, urllib3 is not able to support it properly due "
|
| 19 |
+
"to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
|
| 20 |
+
"If you are a user of this module please comment in the mentioned issue.",
|
| 21 |
+
DeprecationWarning,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
log = getLogger(__name__)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class NTLMConnectionPool(HTTPSConnectionPool):
|
| 28 |
+
"""
|
| 29 |
+
Implements an NTLM authentication version of an urllib3 connection pool
|
| 30 |
+
"""
|
| 31 |
+
|
| 32 |
+
scheme = "https"
|
| 33 |
+
|
| 34 |
+
def __init__(self, user, pw, authurl, *args, **kwargs):
|
| 35 |
+
"""
|
| 36 |
+
authurl is a random URL on the server that is protected by NTLM.
|
| 37 |
+
user is the Windows user, probably in the DOMAIN\\username format.
|
| 38 |
+
pw is the password for the user.
|
| 39 |
+
"""
|
| 40 |
+
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
|
| 41 |
+
self.authurl = authurl
|
| 42 |
+
self.rawuser = user
|
| 43 |
+
user_parts = user.split("\\", 1)
|
| 44 |
+
self.domain = user_parts[0].upper()
|
| 45 |
+
self.user = user_parts[1]
|
| 46 |
+
self.pw = pw
|
| 47 |
+
|
| 48 |
+
def _new_conn(self):
|
| 49 |
+
# Performs the NTLM handshake that secures the connection. The socket
|
| 50 |
+
# must be kept open while requests are performed.
|
| 51 |
+
self.num_connections += 1
|
| 52 |
+
log.debug(
|
| 53 |
+
"Starting NTLM HTTPS connection no. %d: https://%s%s",
|
| 54 |
+
self.num_connections,
|
| 55 |
+
self.host,
|
| 56 |
+
self.authurl,
|
| 57 |
+
)
|
| 58 |
+
|
| 59 |
+
headers = {"Connection": "Keep-Alive"}
|
| 60 |
+
req_header = "Authorization"
|
| 61 |
+
resp_header = "www-authenticate"
|
| 62 |
+
|
| 63 |
+
conn = HTTPSConnection(host=self.host, port=self.port)
|
| 64 |
+
|
| 65 |
+
# Send negotiation message
|
| 66 |
+
headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
|
| 67 |
+
self.rawuser
|
| 68 |
+
)
|
| 69 |
+
log.debug("Request headers: %s", headers)
|
| 70 |
+
conn.request("GET", self.authurl, None, headers)
|
| 71 |
+
res = conn.getresponse()
|
| 72 |
+
reshdr = dict(res.headers)
|
| 73 |
+
log.debug("Response status: %s %s", res.status, res.reason)
|
| 74 |
+
log.debug("Response headers: %s", reshdr)
|
| 75 |
+
log.debug("Response data: %s [...]", res.read(100))
|
| 76 |
+
|
| 77 |
+
# Remove the reference to the socket, so that it can not be closed by
|
| 78 |
+
# the response object (we want to keep the socket open)
|
| 79 |
+
res.fp = None
|
| 80 |
+
|
| 81 |
+
# Server should respond with a challenge message
|
| 82 |
+
auth_header_values = reshdr[resp_header].split(", ")
|
| 83 |
+
auth_header_value = None
|
| 84 |
+
for s in auth_header_values:
|
| 85 |
+
if s[:5] == "NTLM ":
|
| 86 |
+
auth_header_value = s[5:]
|
| 87 |
+
if auth_header_value is None:
|
| 88 |
+
raise Exception(
|
| 89 |
+
"Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
|
| 90 |
+
)
|
| 91 |
+
|
| 92 |
+
# Send authentication message
|
| 93 |
+
ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
|
| 94 |
+
auth_header_value
|
| 95 |
+
)
|
| 96 |
+
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
|
| 97 |
+
ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
|
| 98 |
+
)
|
| 99 |
+
headers[req_header] = "NTLM %s" % auth_msg
|
| 100 |
+
log.debug("Request headers: %s", headers)
|
| 101 |
+
conn.request("GET", self.authurl, None, headers)
|
| 102 |
+
res = conn.getresponse()
|
| 103 |
+
log.debug("Response status: %s %s", res.status, res.reason)
|
| 104 |
+
log.debug("Response headers: %s", dict(res.headers))
|
| 105 |
+
log.debug("Response data: %s [...]", res.read()[:100])
|
| 106 |
+
if res.status != 200:
|
| 107 |
+
if res.status == 401:
|
| 108 |
+
raise Exception("Server rejected request: wrong username or password")
|
| 109 |
+
raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
|
| 110 |
+
|
| 111 |
+
res.fp = None
|
| 112 |
+
log.debug("Connection established")
|
| 113 |
+
return conn
|
| 114 |
+
|
| 115 |
+
def urlopen(
|
| 116 |
+
self,
|
| 117 |
+
method,
|
| 118 |
+
url,
|
| 119 |
+
body=None,
|
| 120 |
+
headers=None,
|
| 121 |
+
retries=3,
|
| 122 |
+
redirect=True,
|
| 123 |
+
assert_same_host=True,
|
| 124 |
+
):
|
| 125 |
+
if headers is None:
|
| 126 |
+
headers = {}
|
| 127 |
+
headers["Connection"] = "Keep-Alive"
|
| 128 |
+
return super(NTLMConnectionPool, self).urlopen(
|
| 129 |
+
method, url, body, headers, retries, redirect, assert_same_host
|
| 130 |
+
)
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py
ADDED
|
@@ -0,0 +1,518 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
TLS with SNI_-support for Python 2. Follow these instructions if you would
|
| 3 |
+
like to verify TLS certificates in Python 2. Note, the default libraries do
|
| 4 |
+
*not* do certificate checking; you need to do additional work to validate
|
| 5 |
+
certificates yourself.
|
| 6 |
+
|
| 7 |
+
This needs the following packages installed:
|
| 8 |
+
|
| 9 |
+
* `pyOpenSSL`_ (tested with 16.0.0)
|
| 10 |
+
* `cryptography`_ (minimum 1.3.4, from pyopenssl)
|
| 11 |
+
* `idna`_ (minimum 2.0, from cryptography)
|
| 12 |
+
|
| 13 |
+
However, pyopenssl depends on cryptography, which depends on idna, so while we
|
| 14 |
+
use all three directly here we end up having relatively few packages required.
|
| 15 |
+
|
| 16 |
+
You can install them with the following command:
|
| 17 |
+
|
| 18 |
+
.. code-block:: bash
|
| 19 |
+
|
| 20 |
+
$ python -m pip install pyopenssl cryptography idna
|
| 21 |
+
|
| 22 |
+
To activate certificate checking, call
|
| 23 |
+
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
|
| 24 |
+
before you begin making HTTP requests. This can be done in a ``sitecustomize``
|
| 25 |
+
module, or at any other time before your application begins using ``urllib3``,
|
| 26 |
+
like this:
|
| 27 |
+
|
| 28 |
+
.. code-block:: python
|
| 29 |
+
|
| 30 |
+
try:
|
| 31 |
+
import pip._vendor.urllib3.contrib.pyopenssl as pyopenssl
|
| 32 |
+
pyopenssl.inject_into_urllib3()
|
| 33 |
+
except ImportError:
|
| 34 |
+
pass
|
| 35 |
+
|
| 36 |
+
Now you can use :mod:`urllib3` as you normally would, and it will support SNI
|
| 37 |
+
when the required modules are installed.
|
| 38 |
+
|
| 39 |
+
Activating this module also has the positive side effect of disabling SSL/TLS
|
| 40 |
+
compression in Python 2 (see `CRIME attack`_).
|
| 41 |
+
|
| 42 |
+
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
|
| 43 |
+
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
|
| 44 |
+
.. _pyopenssl: https://www.pyopenssl.org
|
| 45 |
+
.. _cryptography: https://cryptography.io
|
| 46 |
+
.. _idna: https://github.com/kjd/idna
|
| 47 |
+
"""
|
| 48 |
+
from __future__ import absolute_import
|
| 49 |
+
|
| 50 |
+
import OpenSSL.crypto
|
| 51 |
+
import OpenSSL.SSL
|
| 52 |
+
from cryptography import x509
|
| 53 |
+
from cryptography.hazmat.backends.openssl import backend as openssl_backend
|
| 54 |
+
|
| 55 |
+
try:
|
| 56 |
+
from cryptography.x509 import UnsupportedExtension
|
| 57 |
+
except ImportError:
|
| 58 |
+
# UnsupportedExtension is gone in cryptography >= 2.1.0
|
| 59 |
+
class UnsupportedExtension(Exception):
|
| 60 |
+
pass
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
from io import BytesIO
|
| 64 |
+
from socket import error as SocketError
|
| 65 |
+
from socket import timeout
|
| 66 |
+
|
| 67 |
+
try: # Platform-specific: Python 2
|
| 68 |
+
from socket import _fileobject
|
| 69 |
+
except ImportError: # Platform-specific: Python 3
|
| 70 |
+
_fileobject = None
|
| 71 |
+
from ..packages.backports.makefile import backport_makefile
|
| 72 |
+
|
| 73 |
+
import logging
|
| 74 |
+
import ssl
|
| 75 |
+
import sys
|
| 76 |
+
import warnings
|
| 77 |
+
|
| 78 |
+
from .. import util
|
| 79 |
+
from ..packages import six
|
| 80 |
+
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
|
| 81 |
+
|
| 82 |
+
warnings.warn(
|
| 83 |
+
"'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
|
| 84 |
+
"in a future release of urllib3 2.x. Read more in this issue: "
|
| 85 |
+
"https://github.com/urllib3/urllib3/issues/2680",
|
| 86 |
+
category=DeprecationWarning,
|
| 87 |
+
stacklevel=2,
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
|
| 91 |
+
|
| 92 |
+
# SNI always works.
|
| 93 |
+
HAS_SNI = True
|
| 94 |
+
|
| 95 |
+
# Map from urllib3 to PyOpenSSL compatible parameter-values.
|
| 96 |
+
_openssl_versions = {
|
| 97 |
+
util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
|
| 98 |
+
PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
|
| 99 |
+
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
|
| 103 |
+
_openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
|
| 104 |
+
|
| 105 |
+
if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
|
| 106 |
+
_openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
|
| 107 |
+
|
| 108 |
+
if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
|
| 109 |
+
_openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
_stdlib_to_openssl_verify = {
|
| 113 |
+
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
|
| 114 |
+
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
|
| 115 |
+
ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
|
| 116 |
+
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
|
| 117 |
+
}
|
| 118 |
+
_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
|
| 119 |
+
|
| 120 |
+
# OpenSSL will only write 16K at a time
|
| 121 |
+
SSL_WRITE_BLOCKSIZE = 16384
|
| 122 |
+
|
| 123 |
+
orig_util_HAS_SNI = util.HAS_SNI
|
| 124 |
+
orig_util_SSLContext = util.ssl_.SSLContext
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
log = logging.getLogger(__name__)
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def inject_into_urllib3():
|
| 131 |
+
"Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
|
| 132 |
+
|
| 133 |
+
_validate_dependencies_met()
|
| 134 |
+
|
| 135 |
+
util.SSLContext = PyOpenSSLContext
|
| 136 |
+
util.ssl_.SSLContext = PyOpenSSLContext
|
| 137 |
+
util.HAS_SNI = HAS_SNI
|
| 138 |
+
util.ssl_.HAS_SNI = HAS_SNI
|
| 139 |
+
util.IS_PYOPENSSL = True
|
| 140 |
+
util.ssl_.IS_PYOPENSSL = True
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def extract_from_urllib3():
|
| 144 |
+
"Undo monkey-patching by :func:`inject_into_urllib3`."
|
| 145 |
+
|
| 146 |
+
util.SSLContext = orig_util_SSLContext
|
| 147 |
+
util.ssl_.SSLContext = orig_util_SSLContext
|
| 148 |
+
util.HAS_SNI = orig_util_HAS_SNI
|
| 149 |
+
util.ssl_.HAS_SNI = orig_util_HAS_SNI
|
| 150 |
+
util.IS_PYOPENSSL = False
|
| 151 |
+
util.ssl_.IS_PYOPENSSL = False
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def _validate_dependencies_met():
|
| 155 |
+
"""
|
| 156 |
+
Verifies that PyOpenSSL's package-level dependencies have been met.
|
| 157 |
+
Throws `ImportError` if they are not met.
|
| 158 |
+
"""
|
| 159 |
+
# Method added in `cryptography==1.1`; not available in older versions
|
| 160 |
+
from cryptography.x509.extensions import Extensions
|
| 161 |
+
|
| 162 |
+
if getattr(Extensions, "get_extension_for_class", None) is None:
|
| 163 |
+
raise ImportError(
|
| 164 |
+
"'cryptography' module missing required functionality. "
|
| 165 |
+
"Try upgrading to v1.3.4 or newer."
|
| 166 |
+
)
|
| 167 |
+
|
| 168 |
+
# pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
|
| 169 |
+
# attribute is only present on those versions.
|
| 170 |
+
from OpenSSL.crypto import X509
|
| 171 |
+
|
| 172 |
+
x509 = X509()
|
| 173 |
+
if getattr(x509, "_x509", None) is None:
|
| 174 |
+
raise ImportError(
|
| 175 |
+
"'pyOpenSSL' module missing required functionality. "
|
| 176 |
+
"Try upgrading to v0.14 or newer."
|
| 177 |
+
)
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def _dnsname_to_stdlib(name):
|
| 181 |
+
"""
|
| 182 |
+
Converts a dNSName SubjectAlternativeName field to the form used by the
|
| 183 |
+
standard library on the given Python version.
|
| 184 |
+
|
| 185 |
+
Cryptography produces a dNSName as a unicode string that was idna-decoded
|
| 186 |
+
from ASCII bytes. We need to idna-encode that string to get it back, and
|
| 187 |
+
then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
|
| 188 |
+
uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
|
| 189 |
+
|
| 190 |
+
If the name cannot be idna-encoded then we return None signalling that
|
| 191 |
+
the name given should be skipped.
|
| 192 |
+
"""
|
| 193 |
+
|
| 194 |
+
def idna_encode(name):
|
| 195 |
+
"""
|
| 196 |
+
Borrowed wholesale from the Python Cryptography Project. It turns out
|
| 197 |
+
that we can't just safely call `idna.encode`: it can explode for
|
| 198 |
+
wildcard names. This avoids that problem.
|
| 199 |
+
"""
|
| 200 |
+
from pip._vendor import idna
|
| 201 |
+
|
| 202 |
+
try:
|
| 203 |
+
for prefix in [u"*.", u"."]:
|
| 204 |
+
if name.startswith(prefix):
|
| 205 |
+
name = name[len(prefix) :]
|
| 206 |
+
return prefix.encode("ascii") + idna.encode(name)
|
| 207 |
+
return idna.encode(name)
|
| 208 |
+
except idna.core.IDNAError:
|
| 209 |
+
return None
|
| 210 |
+
|
| 211 |
+
# Don't send IPv6 addresses through the IDNA encoder.
|
| 212 |
+
if ":" in name:
|
| 213 |
+
return name
|
| 214 |
+
|
| 215 |
+
name = idna_encode(name)
|
| 216 |
+
if name is None:
|
| 217 |
+
return None
|
| 218 |
+
elif sys.version_info >= (3, 0):
|
| 219 |
+
name = name.decode("utf-8")
|
| 220 |
+
return name
|
| 221 |
+
|
| 222 |
+
|
| 223 |
+
def get_subj_alt_name(peer_cert):
|
| 224 |
+
"""
|
| 225 |
+
Given an PyOpenSSL certificate, provides all the subject alternative names.
|
| 226 |
+
"""
|
| 227 |
+
# Pass the cert to cryptography, which has much better APIs for this.
|
| 228 |
+
if hasattr(peer_cert, "to_cryptography"):
|
| 229 |
+
cert = peer_cert.to_cryptography()
|
| 230 |
+
else:
|
| 231 |
+
der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
|
| 232 |
+
cert = x509.load_der_x509_certificate(der, openssl_backend)
|
| 233 |
+
|
| 234 |
+
# We want to find the SAN extension. Ask Cryptography to locate it (it's
|
| 235 |
+
# faster than looping in Python)
|
| 236 |
+
try:
|
| 237 |
+
ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
|
| 238 |
+
except x509.ExtensionNotFound:
|
| 239 |
+
# No such extension, return the empty list.
|
| 240 |
+
return []
|
| 241 |
+
except (
|
| 242 |
+
x509.DuplicateExtension,
|
| 243 |
+
UnsupportedExtension,
|
| 244 |
+
x509.UnsupportedGeneralNameType,
|
| 245 |
+
UnicodeError,
|
| 246 |
+
) as e:
|
| 247 |
+
# A problem has been found with the quality of the certificate. Assume
|
| 248 |
+
# no SAN field is present.
|
| 249 |
+
log.warning(
|
| 250 |
+
"A problem was encountered with the certificate that prevented "
|
| 251 |
+
"urllib3 from finding the SubjectAlternativeName field. This can "
|
| 252 |
+
"affect certificate validation. The error was %s",
|
| 253 |
+
e,
|
| 254 |
+
)
|
| 255 |
+
return []
|
| 256 |
+
|
| 257 |
+
# We want to return dNSName and iPAddress fields. We need to cast the IPs
|
| 258 |
+
# back to strings because the match_hostname function wants them as
|
| 259 |
+
# strings.
|
| 260 |
+
# Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
|
| 261 |
+
# decoded. This is pretty frustrating, but that's what the standard library
|
| 262 |
+
# does with certificates, and so we need to attempt to do the same.
|
| 263 |
+
# We also want to skip over names which cannot be idna encoded.
|
| 264 |
+
names = [
|
| 265 |
+
("DNS", name)
|
| 266 |
+
for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
|
| 267 |
+
if name is not None
|
| 268 |
+
]
|
| 269 |
+
names.extend(
|
| 270 |
+
("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
|
| 271 |
+
)
|
| 272 |
+
|
| 273 |
+
return names
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
class WrappedSocket(object):
|
| 277 |
+
"""API-compatibility wrapper for Python OpenSSL's Connection-class.
|
| 278 |
+
|
| 279 |
+
Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
|
| 280 |
+
collector of pypy.
|
| 281 |
+
"""
|
| 282 |
+
|
| 283 |
+
def __init__(self, connection, socket, suppress_ragged_eofs=True):
|
| 284 |
+
self.connection = connection
|
| 285 |
+
self.socket = socket
|
| 286 |
+
self.suppress_ragged_eofs = suppress_ragged_eofs
|
| 287 |
+
self._makefile_refs = 0
|
| 288 |
+
self._closed = False
|
| 289 |
+
|
| 290 |
+
def fileno(self):
|
| 291 |
+
return self.socket.fileno()
|
| 292 |
+
|
| 293 |
+
# Copy-pasted from Python 3.5 source code
|
| 294 |
+
def _decref_socketios(self):
|
| 295 |
+
if self._makefile_refs > 0:
|
| 296 |
+
self._makefile_refs -= 1
|
| 297 |
+
if self._closed:
|
| 298 |
+
self.close()
|
| 299 |
+
|
| 300 |
+
def recv(self, *args, **kwargs):
|
| 301 |
+
try:
|
| 302 |
+
data = self.connection.recv(*args, **kwargs)
|
| 303 |
+
except OpenSSL.SSL.SysCallError as e:
|
| 304 |
+
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
|
| 305 |
+
return b""
|
| 306 |
+
else:
|
| 307 |
+
raise SocketError(str(e))
|
| 308 |
+
except OpenSSL.SSL.ZeroReturnError:
|
| 309 |
+
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
|
| 310 |
+
return b""
|
| 311 |
+
else:
|
| 312 |
+
raise
|
| 313 |
+
except OpenSSL.SSL.WantReadError:
|
| 314 |
+
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
|
| 315 |
+
raise timeout("The read operation timed out")
|
| 316 |
+
else:
|
| 317 |
+
return self.recv(*args, **kwargs)
|
| 318 |
+
|
| 319 |
+
# TLS 1.3 post-handshake authentication
|
| 320 |
+
except OpenSSL.SSL.Error as e:
|
| 321 |
+
raise ssl.SSLError("read error: %r" % e)
|
| 322 |
+
else:
|
| 323 |
+
return data
|
| 324 |
+
|
| 325 |
+
def recv_into(self, *args, **kwargs):
|
| 326 |
+
try:
|
| 327 |
+
return self.connection.recv_into(*args, **kwargs)
|
| 328 |
+
except OpenSSL.SSL.SysCallError as e:
|
| 329 |
+
if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
|
| 330 |
+
return 0
|
| 331 |
+
else:
|
| 332 |
+
raise SocketError(str(e))
|
| 333 |
+
except OpenSSL.SSL.ZeroReturnError:
|
| 334 |
+
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
|
| 335 |
+
return 0
|
| 336 |
+
else:
|
| 337 |
+
raise
|
| 338 |
+
except OpenSSL.SSL.WantReadError:
|
| 339 |
+
if not util.wait_for_read(self.socket, self.socket.gettimeout()):
|
| 340 |
+
raise timeout("The read operation timed out")
|
| 341 |
+
else:
|
| 342 |
+
return self.recv_into(*args, **kwargs)
|
| 343 |
+
|
| 344 |
+
# TLS 1.3 post-handshake authentication
|
| 345 |
+
except OpenSSL.SSL.Error as e:
|
| 346 |
+
raise ssl.SSLError("read error: %r" % e)
|
| 347 |
+
|
| 348 |
+
def settimeout(self, timeout):
|
| 349 |
+
return self.socket.settimeout(timeout)
|
| 350 |
+
|
| 351 |
+
def _send_until_done(self, data):
|
| 352 |
+
while True:
|
| 353 |
+
try:
|
| 354 |
+
return self.connection.send(data)
|
| 355 |
+
except OpenSSL.SSL.WantWriteError:
|
| 356 |
+
if not util.wait_for_write(self.socket, self.socket.gettimeout()):
|
| 357 |
+
raise timeout()
|
| 358 |
+
continue
|
| 359 |
+
except OpenSSL.SSL.SysCallError as e:
|
| 360 |
+
raise SocketError(str(e))
|
| 361 |
+
|
| 362 |
+
def sendall(self, data):
|
| 363 |
+
total_sent = 0
|
| 364 |
+
while total_sent < len(data):
|
| 365 |
+
sent = self._send_until_done(
|
| 366 |
+
data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
|
| 367 |
+
)
|
| 368 |
+
total_sent += sent
|
| 369 |
+
|
| 370 |
+
def shutdown(self):
|
| 371 |
+
# FIXME rethrow compatible exceptions should we ever use this
|
| 372 |
+
self.connection.shutdown()
|
| 373 |
+
|
| 374 |
+
def close(self):
|
| 375 |
+
if self._makefile_refs < 1:
|
| 376 |
+
try:
|
| 377 |
+
self._closed = True
|
| 378 |
+
return self.connection.close()
|
| 379 |
+
except OpenSSL.SSL.Error:
|
| 380 |
+
return
|
| 381 |
+
else:
|
| 382 |
+
self._makefile_refs -= 1
|
| 383 |
+
|
| 384 |
+
def getpeercert(self, binary_form=False):
|
| 385 |
+
x509 = self.connection.get_peer_certificate()
|
| 386 |
+
|
| 387 |
+
if not x509:
|
| 388 |
+
return x509
|
| 389 |
+
|
| 390 |
+
if binary_form:
|
| 391 |
+
return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
|
| 392 |
+
|
| 393 |
+
return {
|
| 394 |
+
"subject": ((("commonName", x509.get_subject().CN),),),
|
| 395 |
+
"subjectAltName": get_subj_alt_name(x509),
|
| 396 |
+
}
|
| 397 |
+
|
| 398 |
+
def version(self):
|
| 399 |
+
return self.connection.get_protocol_version_name()
|
| 400 |
+
|
| 401 |
+
def _reuse(self):
|
| 402 |
+
self._makefile_refs += 1
|
| 403 |
+
|
| 404 |
+
def _drop(self):
|
| 405 |
+
if self._makefile_refs < 1:
|
| 406 |
+
self.close()
|
| 407 |
+
else:
|
| 408 |
+
self._makefile_refs -= 1
|
| 409 |
+
|
| 410 |
+
|
| 411 |
+
if _fileobject: # Platform-specific: Python 2
|
| 412 |
+
|
| 413 |
+
def makefile(self, mode, bufsize=-1):
|
| 414 |
+
self._makefile_refs += 1
|
| 415 |
+
return _fileobject(self, mode, bufsize, close=True)
|
| 416 |
+
|
| 417 |
+
else: # Platform-specific: Python 3
|
| 418 |
+
makefile = backport_makefile
|
| 419 |
+
|
| 420 |
+
WrappedSocket.makefile = makefile
|
| 421 |
+
|
| 422 |
+
|
| 423 |
+
class PyOpenSSLContext(object):
|
| 424 |
+
"""
|
| 425 |
+
I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
|
| 426 |
+
for translating the interface of the standard library ``SSLContext`` object
|
| 427 |
+
to calls into PyOpenSSL.
|
| 428 |
+
"""
|
| 429 |
+
|
| 430 |
+
def __init__(self, protocol):
|
| 431 |
+
self.protocol = _openssl_versions[protocol]
|
| 432 |
+
self._ctx = OpenSSL.SSL.Context(self.protocol)
|
| 433 |
+
self._options = 0
|
| 434 |
+
self.check_hostname = False
|
| 435 |
+
|
| 436 |
+
@property
|
| 437 |
+
def options(self):
|
| 438 |
+
return self._options
|
| 439 |
+
|
| 440 |
+
@options.setter
|
| 441 |
+
def options(self, value):
|
| 442 |
+
self._options = value
|
| 443 |
+
self._ctx.set_options(value)
|
| 444 |
+
|
| 445 |
+
@property
|
| 446 |
+
def verify_mode(self):
|
| 447 |
+
return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
|
| 448 |
+
|
| 449 |
+
@verify_mode.setter
|
| 450 |
+
def verify_mode(self, value):
|
| 451 |
+
self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
|
| 452 |
+
|
| 453 |
+
def set_default_verify_paths(self):
|
| 454 |
+
self._ctx.set_default_verify_paths()
|
| 455 |
+
|
| 456 |
+
def set_ciphers(self, ciphers):
|
| 457 |
+
if isinstance(ciphers, six.text_type):
|
| 458 |
+
ciphers = ciphers.encode("utf-8")
|
| 459 |
+
self._ctx.set_cipher_list(ciphers)
|
| 460 |
+
|
| 461 |
+
def load_verify_locations(self, cafile=None, capath=None, cadata=None):
|
| 462 |
+
if cafile is not None:
|
| 463 |
+
cafile = cafile.encode("utf-8")
|
| 464 |
+
if capath is not None:
|
| 465 |
+
capath = capath.encode("utf-8")
|
| 466 |
+
try:
|
| 467 |
+
self._ctx.load_verify_locations(cafile, capath)
|
| 468 |
+
if cadata is not None:
|
| 469 |
+
self._ctx.load_verify_locations(BytesIO(cadata))
|
| 470 |
+
except OpenSSL.SSL.Error as e:
|
| 471 |
+
raise ssl.SSLError("unable to load trusted certificates: %r" % e)
|
| 472 |
+
|
| 473 |
+
def load_cert_chain(self, certfile, keyfile=None, password=None):
|
| 474 |
+
self._ctx.use_certificate_chain_file(certfile)
|
| 475 |
+
if password is not None:
|
| 476 |
+
if not isinstance(password, six.binary_type):
|
| 477 |
+
password = password.encode("utf-8")
|
| 478 |
+
self._ctx.set_passwd_cb(lambda *_: password)
|
| 479 |
+
self._ctx.use_privatekey_file(keyfile or certfile)
|
| 480 |
+
|
| 481 |
+
def set_alpn_protocols(self, protocols):
|
| 482 |
+
protocols = [six.ensure_binary(p) for p in protocols]
|
| 483 |
+
return self._ctx.set_alpn_protos(protocols)
|
| 484 |
+
|
| 485 |
+
def wrap_socket(
|
| 486 |
+
self,
|
| 487 |
+
sock,
|
| 488 |
+
server_side=False,
|
| 489 |
+
do_handshake_on_connect=True,
|
| 490 |
+
suppress_ragged_eofs=True,
|
| 491 |
+
server_hostname=None,
|
| 492 |
+
):
|
| 493 |
+
cnx = OpenSSL.SSL.Connection(self._ctx, sock)
|
| 494 |
+
|
| 495 |
+
if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
|
| 496 |
+
server_hostname = server_hostname.encode("utf-8")
|
| 497 |
+
|
| 498 |
+
if server_hostname is not None:
|
| 499 |
+
cnx.set_tlsext_host_name(server_hostname)
|
| 500 |
+
|
| 501 |
+
cnx.set_connect_state()
|
| 502 |
+
|
| 503 |
+
while True:
|
| 504 |
+
try:
|
| 505 |
+
cnx.do_handshake()
|
| 506 |
+
except OpenSSL.SSL.WantReadError:
|
| 507 |
+
if not util.wait_for_read(sock, sock.gettimeout()):
|
| 508 |
+
raise timeout("select timed out")
|
| 509 |
+
continue
|
| 510 |
+
except OpenSSL.SSL.Error as e:
|
| 511 |
+
raise ssl.SSLError("bad handshake: %r" % e)
|
| 512 |
+
break
|
| 513 |
+
|
| 514 |
+
return WrappedSocket(cnx, sock)
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
def _verify_callback(cnx, x509, err_no, err_depth, return_code):
|
| 518 |
+
return err_no == 0
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/securetransport.py
ADDED
|
@@ -0,0 +1,920 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
SecureTranport support for urllib3 via ctypes.
|
| 3 |
+
|
| 4 |
+
This makes platform-native TLS available to urllib3 users on macOS without the
|
| 5 |
+
use of a compiler. This is an important feature because the Python Package
|
| 6 |
+
Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
|
| 7 |
+
that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
|
| 8 |
+
this is to give macOS users an alternative solution to the problem, and that
|
| 9 |
+
solution is to use SecureTransport.
|
| 10 |
+
|
| 11 |
+
We use ctypes here because this solution must not require a compiler. That's
|
| 12 |
+
because pip is not allowed to require a compiler either.
|
| 13 |
+
|
| 14 |
+
This is not intended to be a seriously long-term solution to this problem.
|
| 15 |
+
The hope is that PEP 543 will eventually solve this issue for us, at which
|
| 16 |
+
point we can retire this contrib module. But in the short term, we need to
|
| 17 |
+
solve the impending tire fire that is Python on Mac without this kind of
|
| 18 |
+
contrib module. So...here we are.
|
| 19 |
+
|
| 20 |
+
To use this module, simply import and inject it::
|
| 21 |
+
|
| 22 |
+
import pip._vendor.urllib3.contrib.securetransport as securetransport
|
| 23 |
+
securetransport.inject_into_urllib3()
|
| 24 |
+
|
| 25 |
+
Happy TLSing!
|
| 26 |
+
|
| 27 |
+
This code is a bastardised version of the code found in Will Bond's oscrypto
|
| 28 |
+
library. An enormous debt is owed to him for blazing this trail for us. For
|
| 29 |
+
that reason, this code should be considered to be covered both by urllib3's
|
| 30 |
+
license and by oscrypto's:
|
| 31 |
+
|
| 32 |
+
.. code-block::
|
| 33 |
+
|
| 34 |
+
Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
| 35 |
+
|
| 36 |
+
Permission is hereby granted, free of charge, to any person obtaining a
|
| 37 |
+
copy of this software and associated documentation files (the "Software"),
|
| 38 |
+
to deal in the Software without restriction, including without limitation
|
| 39 |
+
the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
| 40 |
+
and/or sell copies of the Software, and to permit persons to whom the
|
| 41 |
+
Software is furnished to do so, subject to the following conditions:
|
| 42 |
+
|
| 43 |
+
The above copyright notice and this permission notice shall be included in
|
| 44 |
+
all copies or substantial portions of the Software.
|
| 45 |
+
|
| 46 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 47 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 48 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 49 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 50 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
| 51 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
| 52 |
+
DEALINGS IN THE SOFTWARE.
|
| 53 |
+
"""
|
| 54 |
+
from __future__ import absolute_import
|
| 55 |
+
|
| 56 |
+
import contextlib
|
| 57 |
+
import ctypes
|
| 58 |
+
import errno
|
| 59 |
+
import os.path
|
| 60 |
+
import shutil
|
| 61 |
+
import socket
|
| 62 |
+
import ssl
|
| 63 |
+
import struct
|
| 64 |
+
import threading
|
| 65 |
+
import weakref
|
| 66 |
+
|
| 67 |
+
from .. import util
|
| 68 |
+
from ..packages import six
|
| 69 |
+
from ..util.ssl_ import PROTOCOL_TLS_CLIENT
|
| 70 |
+
from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
|
| 71 |
+
from ._securetransport.low_level import (
|
| 72 |
+
_assert_no_error,
|
| 73 |
+
_build_tls_unknown_ca_alert,
|
| 74 |
+
_cert_array_from_pem,
|
| 75 |
+
_create_cfstring_array,
|
| 76 |
+
_load_client_cert_chain,
|
| 77 |
+
_temporary_keychain,
|
| 78 |
+
)
|
| 79 |
+
|
| 80 |
+
try: # Platform-specific: Python 2
|
| 81 |
+
from socket import _fileobject
|
| 82 |
+
except ImportError: # Platform-specific: Python 3
|
| 83 |
+
_fileobject = None
|
| 84 |
+
from ..packages.backports.makefile import backport_makefile
|
| 85 |
+
|
| 86 |
+
__all__ = ["inject_into_urllib3", "extract_from_urllib3"]
|
| 87 |
+
|
| 88 |
+
# SNI always works
|
| 89 |
+
HAS_SNI = True
|
| 90 |
+
|
| 91 |
+
orig_util_HAS_SNI = util.HAS_SNI
|
| 92 |
+
orig_util_SSLContext = util.ssl_.SSLContext
|
| 93 |
+
|
| 94 |
+
# This dictionary is used by the read callback to obtain a handle to the
|
| 95 |
+
# calling wrapped socket. This is a pretty silly approach, but for now it'll
|
| 96 |
+
# do. I feel like I should be able to smuggle a handle to the wrapped socket
|
| 97 |
+
# directly in the SSLConnectionRef, but for now this approach will work I
|
| 98 |
+
# guess.
|
| 99 |
+
#
|
| 100 |
+
# We need to lock around this structure for inserts, but we don't do it for
|
| 101 |
+
# reads/writes in the callbacks. The reasoning here goes as follows:
|
| 102 |
+
#
|
| 103 |
+
# 1. It is not possible to call into the callbacks before the dictionary is
|
| 104 |
+
# populated, so once in the callback the id must be in the dictionary.
|
| 105 |
+
# 2. The callbacks don't mutate the dictionary, they only read from it, and
|
| 106 |
+
# so cannot conflict with any of the insertions.
|
| 107 |
+
#
|
| 108 |
+
# This is good: if we had to lock in the callbacks we'd drastically slow down
|
| 109 |
+
# the performance of this code.
|
| 110 |
+
_connection_refs = weakref.WeakValueDictionary()
|
| 111 |
+
_connection_ref_lock = threading.Lock()
|
| 112 |
+
|
| 113 |
+
# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
|
| 114 |
+
# for no better reason than we need *a* limit, and this one is right there.
|
| 115 |
+
SSL_WRITE_BLOCKSIZE = 16384
|
| 116 |
+
|
| 117 |
+
# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
|
| 118 |
+
# individual cipher suites. We need to do this because this is how
|
| 119 |
+
# SecureTransport wants them.
|
| 120 |
+
CIPHER_SUITES = [
|
| 121 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
|
| 122 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
|
| 123 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
|
| 124 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
|
| 125 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
|
| 126 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
|
| 127 |
+
SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
|
| 128 |
+
SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
|
| 129 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
|
| 130 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
|
| 131 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
|
| 132 |
+
SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
|
| 133 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
|
| 134 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
|
| 135 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
|
| 136 |
+
SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
|
| 137 |
+
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
|
| 138 |
+
SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
|
| 139 |
+
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
|
| 140 |
+
SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
|
| 141 |
+
SecurityConst.TLS_AES_256_GCM_SHA384,
|
| 142 |
+
SecurityConst.TLS_AES_128_GCM_SHA256,
|
| 143 |
+
SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
|
| 144 |
+
SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
|
| 145 |
+
SecurityConst.TLS_AES_128_CCM_8_SHA256,
|
| 146 |
+
SecurityConst.TLS_AES_128_CCM_SHA256,
|
| 147 |
+
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
|
| 148 |
+
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
|
| 149 |
+
SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
|
| 150 |
+
SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
|
| 151 |
+
]
|
| 152 |
+
|
| 153 |
+
# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
|
| 154 |
+
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
|
| 155 |
+
# TLSv1 to 1.2 are supported on macOS 10.8+
|
| 156 |
+
_protocol_to_min_max = {
|
| 157 |
+
util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
|
| 158 |
+
PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
|
| 159 |
+
}
|
| 160 |
+
|
| 161 |
+
if hasattr(ssl, "PROTOCOL_SSLv2"):
|
| 162 |
+
_protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
|
| 163 |
+
SecurityConst.kSSLProtocol2,
|
| 164 |
+
SecurityConst.kSSLProtocol2,
|
| 165 |
+
)
|
| 166 |
+
if hasattr(ssl, "PROTOCOL_SSLv3"):
|
| 167 |
+
_protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
|
| 168 |
+
SecurityConst.kSSLProtocol3,
|
| 169 |
+
SecurityConst.kSSLProtocol3,
|
| 170 |
+
)
|
| 171 |
+
if hasattr(ssl, "PROTOCOL_TLSv1"):
|
| 172 |
+
_protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
|
| 173 |
+
SecurityConst.kTLSProtocol1,
|
| 174 |
+
SecurityConst.kTLSProtocol1,
|
| 175 |
+
)
|
| 176 |
+
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
|
| 177 |
+
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
|
| 178 |
+
SecurityConst.kTLSProtocol11,
|
| 179 |
+
SecurityConst.kTLSProtocol11,
|
| 180 |
+
)
|
| 181 |
+
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
|
| 182 |
+
_protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
|
| 183 |
+
SecurityConst.kTLSProtocol12,
|
| 184 |
+
SecurityConst.kTLSProtocol12,
|
| 185 |
+
)
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def inject_into_urllib3():
|
| 189 |
+
"""
|
| 190 |
+
Monkey-patch urllib3 with SecureTransport-backed SSL-support.
|
| 191 |
+
"""
|
| 192 |
+
util.SSLContext = SecureTransportContext
|
| 193 |
+
util.ssl_.SSLContext = SecureTransportContext
|
| 194 |
+
util.HAS_SNI = HAS_SNI
|
| 195 |
+
util.ssl_.HAS_SNI = HAS_SNI
|
| 196 |
+
util.IS_SECURETRANSPORT = True
|
| 197 |
+
util.ssl_.IS_SECURETRANSPORT = True
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def extract_from_urllib3():
|
| 201 |
+
"""
|
| 202 |
+
Undo monkey-patching by :func:`inject_into_urllib3`.
|
| 203 |
+
"""
|
| 204 |
+
util.SSLContext = orig_util_SSLContext
|
| 205 |
+
util.ssl_.SSLContext = orig_util_SSLContext
|
| 206 |
+
util.HAS_SNI = orig_util_HAS_SNI
|
| 207 |
+
util.ssl_.HAS_SNI = orig_util_HAS_SNI
|
| 208 |
+
util.IS_SECURETRANSPORT = False
|
| 209 |
+
util.ssl_.IS_SECURETRANSPORT = False
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def _read_callback(connection_id, data_buffer, data_length_pointer):
|
| 213 |
+
"""
|
| 214 |
+
SecureTransport read callback. This is called by ST to request that data
|
| 215 |
+
be returned from the socket.
|
| 216 |
+
"""
|
| 217 |
+
wrapped_socket = None
|
| 218 |
+
try:
|
| 219 |
+
wrapped_socket = _connection_refs.get(connection_id)
|
| 220 |
+
if wrapped_socket is None:
|
| 221 |
+
return SecurityConst.errSSLInternal
|
| 222 |
+
base_socket = wrapped_socket.socket
|
| 223 |
+
|
| 224 |
+
requested_length = data_length_pointer[0]
|
| 225 |
+
|
| 226 |
+
timeout = wrapped_socket.gettimeout()
|
| 227 |
+
error = None
|
| 228 |
+
read_count = 0
|
| 229 |
+
|
| 230 |
+
try:
|
| 231 |
+
while read_count < requested_length:
|
| 232 |
+
if timeout is None or timeout >= 0:
|
| 233 |
+
if not util.wait_for_read(base_socket, timeout):
|
| 234 |
+
raise socket.error(errno.EAGAIN, "timed out")
|
| 235 |
+
|
| 236 |
+
remaining = requested_length - read_count
|
| 237 |
+
buffer = (ctypes.c_char * remaining).from_address(
|
| 238 |
+
data_buffer + read_count
|
| 239 |
+
)
|
| 240 |
+
chunk_size = base_socket.recv_into(buffer, remaining)
|
| 241 |
+
read_count += chunk_size
|
| 242 |
+
if not chunk_size:
|
| 243 |
+
if not read_count:
|
| 244 |
+
return SecurityConst.errSSLClosedGraceful
|
| 245 |
+
break
|
| 246 |
+
except (socket.error) as e:
|
| 247 |
+
error = e.errno
|
| 248 |
+
|
| 249 |
+
if error is not None and error != errno.EAGAIN:
|
| 250 |
+
data_length_pointer[0] = read_count
|
| 251 |
+
if error == errno.ECONNRESET or error == errno.EPIPE:
|
| 252 |
+
return SecurityConst.errSSLClosedAbort
|
| 253 |
+
raise
|
| 254 |
+
|
| 255 |
+
data_length_pointer[0] = read_count
|
| 256 |
+
|
| 257 |
+
if read_count != requested_length:
|
| 258 |
+
return SecurityConst.errSSLWouldBlock
|
| 259 |
+
|
| 260 |
+
return 0
|
| 261 |
+
except Exception as e:
|
| 262 |
+
if wrapped_socket is not None:
|
| 263 |
+
wrapped_socket._exception = e
|
| 264 |
+
return SecurityConst.errSSLInternal
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
def _write_callback(connection_id, data_buffer, data_length_pointer):
|
| 268 |
+
"""
|
| 269 |
+
SecureTransport write callback. This is called by ST to request that data
|
| 270 |
+
actually be sent on the network.
|
| 271 |
+
"""
|
| 272 |
+
wrapped_socket = None
|
| 273 |
+
try:
|
| 274 |
+
wrapped_socket = _connection_refs.get(connection_id)
|
| 275 |
+
if wrapped_socket is None:
|
| 276 |
+
return SecurityConst.errSSLInternal
|
| 277 |
+
base_socket = wrapped_socket.socket
|
| 278 |
+
|
| 279 |
+
bytes_to_write = data_length_pointer[0]
|
| 280 |
+
data = ctypes.string_at(data_buffer, bytes_to_write)
|
| 281 |
+
|
| 282 |
+
timeout = wrapped_socket.gettimeout()
|
| 283 |
+
error = None
|
| 284 |
+
sent = 0
|
| 285 |
+
|
| 286 |
+
try:
|
| 287 |
+
while sent < bytes_to_write:
|
| 288 |
+
if timeout is None or timeout >= 0:
|
| 289 |
+
if not util.wait_for_write(base_socket, timeout):
|
| 290 |
+
raise socket.error(errno.EAGAIN, "timed out")
|
| 291 |
+
chunk_sent = base_socket.send(data)
|
| 292 |
+
sent += chunk_sent
|
| 293 |
+
|
| 294 |
+
# This has some needless copying here, but I'm not sure there's
|
| 295 |
+
# much value in optimising this data path.
|
| 296 |
+
data = data[chunk_sent:]
|
| 297 |
+
except (socket.error) as e:
|
| 298 |
+
error = e.errno
|
| 299 |
+
|
| 300 |
+
if error is not None and error != errno.EAGAIN:
|
| 301 |
+
data_length_pointer[0] = sent
|
| 302 |
+
if error == errno.ECONNRESET or error == errno.EPIPE:
|
| 303 |
+
return SecurityConst.errSSLClosedAbort
|
| 304 |
+
raise
|
| 305 |
+
|
| 306 |
+
data_length_pointer[0] = sent
|
| 307 |
+
|
| 308 |
+
if sent != bytes_to_write:
|
| 309 |
+
return SecurityConst.errSSLWouldBlock
|
| 310 |
+
|
| 311 |
+
return 0
|
| 312 |
+
except Exception as e:
|
| 313 |
+
if wrapped_socket is not None:
|
| 314 |
+
wrapped_socket._exception = e
|
| 315 |
+
return SecurityConst.errSSLInternal
|
| 316 |
+
|
| 317 |
+
|
| 318 |
+
# We need to keep these two objects references alive: if they get GC'd while
|
| 319 |
+
# in use then SecureTransport could attempt to call a function that is in freed
|
| 320 |
+
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
|
| 321 |
+
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
|
| 322 |
+
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
|
| 323 |
+
|
| 324 |
+
|
| 325 |
+
class WrappedSocket(object):
|
| 326 |
+
"""
|
| 327 |
+
API-compatibility wrapper for Python's OpenSSL wrapped socket object.
|
| 328 |
+
|
| 329 |
+
Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
|
| 330 |
+
collector of PyPy.
|
| 331 |
+
"""
|
| 332 |
+
|
| 333 |
+
def __init__(self, socket):
|
| 334 |
+
self.socket = socket
|
| 335 |
+
self.context = None
|
| 336 |
+
self._makefile_refs = 0
|
| 337 |
+
self._closed = False
|
| 338 |
+
self._exception = None
|
| 339 |
+
self._keychain = None
|
| 340 |
+
self._keychain_dir = None
|
| 341 |
+
self._client_cert_chain = None
|
| 342 |
+
|
| 343 |
+
# We save off the previously-configured timeout and then set it to
|
| 344 |
+
# zero. This is done because we use select and friends to handle the
|
| 345 |
+
# timeouts, but if we leave the timeout set on the lower socket then
|
| 346 |
+
# Python will "kindly" call select on that socket again for us. Avoid
|
| 347 |
+
# that by forcing the timeout to zero.
|
| 348 |
+
self._timeout = self.socket.gettimeout()
|
| 349 |
+
self.socket.settimeout(0)
|
| 350 |
+
|
| 351 |
+
@contextlib.contextmanager
|
| 352 |
+
def _raise_on_error(self):
|
| 353 |
+
"""
|
| 354 |
+
A context manager that can be used to wrap calls that do I/O from
|
| 355 |
+
SecureTransport. If any of the I/O callbacks hit an exception, this
|
| 356 |
+
context manager will correctly propagate the exception after the fact.
|
| 357 |
+
This avoids silently swallowing those exceptions.
|
| 358 |
+
|
| 359 |
+
It also correctly forces the socket closed.
|
| 360 |
+
"""
|
| 361 |
+
self._exception = None
|
| 362 |
+
|
| 363 |
+
# We explicitly don't catch around this yield because in the unlikely
|
| 364 |
+
# event that an exception was hit in the block we don't want to swallow
|
| 365 |
+
# it.
|
| 366 |
+
yield
|
| 367 |
+
if self._exception is not None:
|
| 368 |
+
exception, self._exception = self._exception, None
|
| 369 |
+
self.close()
|
| 370 |
+
raise exception
|
| 371 |
+
|
| 372 |
+
def _set_ciphers(self):
|
| 373 |
+
"""
|
| 374 |
+
Sets up the allowed ciphers. By default this matches the set in
|
| 375 |
+
util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
|
| 376 |
+
custom and doesn't allow changing at this time, mostly because parsing
|
| 377 |
+
OpenSSL cipher strings is going to be a freaking nightmare.
|
| 378 |
+
"""
|
| 379 |
+
ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
|
| 380 |
+
result = Security.SSLSetEnabledCiphers(
|
| 381 |
+
self.context, ciphers, len(CIPHER_SUITES)
|
| 382 |
+
)
|
| 383 |
+
_assert_no_error(result)
|
| 384 |
+
|
| 385 |
+
def _set_alpn_protocols(self, protocols):
|
| 386 |
+
"""
|
| 387 |
+
Sets up the ALPN protocols on the context.
|
| 388 |
+
"""
|
| 389 |
+
if not protocols:
|
| 390 |
+
return
|
| 391 |
+
protocols_arr = _create_cfstring_array(protocols)
|
| 392 |
+
try:
|
| 393 |
+
result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
|
| 394 |
+
_assert_no_error(result)
|
| 395 |
+
finally:
|
| 396 |
+
CoreFoundation.CFRelease(protocols_arr)
|
| 397 |
+
|
| 398 |
+
def _custom_validate(self, verify, trust_bundle):
|
| 399 |
+
"""
|
| 400 |
+
Called when we have set custom validation. We do this in two cases:
|
| 401 |
+
first, when cert validation is entirely disabled; and second, when
|
| 402 |
+
using a custom trust DB.
|
| 403 |
+
Raises an SSLError if the connection is not trusted.
|
| 404 |
+
"""
|
| 405 |
+
# If we disabled cert validation, just say: cool.
|
| 406 |
+
if not verify:
|
| 407 |
+
return
|
| 408 |
+
|
| 409 |
+
successes = (
|
| 410 |
+
SecurityConst.kSecTrustResultUnspecified,
|
| 411 |
+
SecurityConst.kSecTrustResultProceed,
|
| 412 |
+
)
|
| 413 |
+
try:
|
| 414 |
+
trust_result = self._evaluate_trust(trust_bundle)
|
| 415 |
+
if trust_result in successes:
|
| 416 |
+
return
|
| 417 |
+
reason = "error code: %d" % (trust_result,)
|
| 418 |
+
except Exception as e:
|
| 419 |
+
# Do not trust on error
|
| 420 |
+
reason = "exception: %r" % (e,)
|
| 421 |
+
|
| 422 |
+
# SecureTransport does not send an alert nor shuts down the connection.
|
| 423 |
+
rec = _build_tls_unknown_ca_alert(self.version())
|
| 424 |
+
self.socket.sendall(rec)
|
| 425 |
+
# close the connection immediately
|
| 426 |
+
# l_onoff = 1, activate linger
|
| 427 |
+
# l_linger = 0, linger for 0 seoncds
|
| 428 |
+
opts = struct.pack("ii", 1, 0)
|
| 429 |
+
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
|
| 430 |
+
self.close()
|
| 431 |
+
raise ssl.SSLError("certificate verify failed, %s" % reason)
|
| 432 |
+
|
| 433 |
+
def _evaluate_trust(self, trust_bundle):
|
| 434 |
+
# We want data in memory, so load it up.
|
| 435 |
+
if os.path.isfile(trust_bundle):
|
| 436 |
+
with open(trust_bundle, "rb") as f:
|
| 437 |
+
trust_bundle = f.read()
|
| 438 |
+
|
| 439 |
+
cert_array = None
|
| 440 |
+
trust = Security.SecTrustRef()
|
| 441 |
+
|
| 442 |
+
try:
|
| 443 |
+
# Get a CFArray that contains the certs we want.
|
| 444 |
+
cert_array = _cert_array_from_pem(trust_bundle)
|
| 445 |
+
|
| 446 |
+
# Ok, now the hard part. We want to get the SecTrustRef that ST has
|
| 447 |
+
# created for this connection, shove our CAs into it, tell ST to
|
| 448 |
+
# ignore everything else it knows, and then ask if it can build a
|
| 449 |
+
# chain. This is a buuuunch of code.
|
| 450 |
+
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
|
| 451 |
+
_assert_no_error(result)
|
| 452 |
+
if not trust:
|
| 453 |
+
raise ssl.SSLError("Failed to copy trust reference")
|
| 454 |
+
|
| 455 |
+
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
|
| 456 |
+
_assert_no_error(result)
|
| 457 |
+
|
| 458 |
+
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
|
| 459 |
+
_assert_no_error(result)
|
| 460 |
+
|
| 461 |
+
trust_result = Security.SecTrustResultType()
|
| 462 |
+
result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
|
| 463 |
+
_assert_no_error(result)
|
| 464 |
+
finally:
|
| 465 |
+
if trust:
|
| 466 |
+
CoreFoundation.CFRelease(trust)
|
| 467 |
+
|
| 468 |
+
if cert_array is not None:
|
| 469 |
+
CoreFoundation.CFRelease(cert_array)
|
| 470 |
+
|
| 471 |
+
return trust_result.value
|
| 472 |
+
|
| 473 |
+
def handshake(
|
| 474 |
+
self,
|
| 475 |
+
server_hostname,
|
| 476 |
+
verify,
|
| 477 |
+
trust_bundle,
|
| 478 |
+
min_version,
|
| 479 |
+
max_version,
|
| 480 |
+
client_cert,
|
| 481 |
+
client_key,
|
| 482 |
+
client_key_passphrase,
|
| 483 |
+
alpn_protocols,
|
| 484 |
+
):
|
| 485 |
+
"""
|
| 486 |
+
Actually performs the TLS handshake. This is run automatically by
|
| 487 |
+
wrapped socket, and shouldn't be needed in user code.
|
| 488 |
+
"""
|
| 489 |
+
# First, we do the initial bits of connection setup. We need to create
|
| 490 |
+
# a context, set its I/O funcs, and set the connection reference.
|
| 491 |
+
self.context = Security.SSLCreateContext(
|
| 492 |
+
None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
|
| 493 |
+
)
|
| 494 |
+
result = Security.SSLSetIOFuncs(
|
| 495 |
+
self.context, _read_callback_pointer, _write_callback_pointer
|
| 496 |
+
)
|
| 497 |
+
_assert_no_error(result)
|
| 498 |
+
|
| 499 |
+
# Here we need to compute the handle to use. We do this by taking the
|
| 500 |
+
# id of self modulo 2**31 - 1. If this is already in the dictionary, we
|
| 501 |
+
# just keep incrementing by one until we find a free space.
|
| 502 |
+
with _connection_ref_lock:
|
| 503 |
+
handle = id(self) % 2147483647
|
| 504 |
+
while handle in _connection_refs:
|
| 505 |
+
handle = (handle + 1) % 2147483647
|
| 506 |
+
_connection_refs[handle] = self
|
| 507 |
+
|
| 508 |
+
result = Security.SSLSetConnection(self.context, handle)
|
| 509 |
+
_assert_no_error(result)
|
| 510 |
+
|
| 511 |
+
# If we have a server hostname, we should set that too.
|
| 512 |
+
if server_hostname:
|
| 513 |
+
if not isinstance(server_hostname, bytes):
|
| 514 |
+
server_hostname = server_hostname.encode("utf-8")
|
| 515 |
+
|
| 516 |
+
result = Security.SSLSetPeerDomainName(
|
| 517 |
+
self.context, server_hostname, len(server_hostname)
|
| 518 |
+
)
|
| 519 |
+
_assert_no_error(result)
|
| 520 |
+
|
| 521 |
+
# Setup the ciphers.
|
| 522 |
+
self._set_ciphers()
|
| 523 |
+
|
| 524 |
+
# Setup the ALPN protocols.
|
| 525 |
+
self._set_alpn_protocols(alpn_protocols)
|
| 526 |
+
|
| 527 |
+
# Set the minimum and maximum TLS versions.
|
| 528 |
+
result = Security.SSLSetProtocolVersionMin(self.context, min_version)
|
| 529 |
+
_assert_no_error(result)
|
| 530 |
+
|
| 531 |
+
result = Security.SSLSetProtocolVersionMax(self.context, max_version)
|
| 532 |
+
_assert_no_error(result)
|
| 533 |
+
|
| 534 |
+
# If there's a trust DB, we need to use it. We do that by telling
|
| 535 |
+
# SecureTransport to break on server auth. We also do that if we don't
|
| 536 |
+
# want to validate the certs at all: we just won't actually do any
|
| 537 |
+
# authing in that case.
|
| 538 |
+
if not verify or trust_bundle is not None:
|
| 539 |
+
result = Security.SSLSetSessionOption(
|
| 540 |
+
self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
|
| 541 |
+
)
|
| 542 |
+
_assert_no_error(result)
|
| 543 |
+
|
| 544 |
+
# If there's a client cert, we need to use it.
|
| 545 |
+
if client_cert:
|
| 546 |
+
self._keychain, self._keychain_dir = _temporary_keychain()
|
| 547 |
+
self._client_cert_chain = _load_client_cert_chain(
|
| 548 |
+
self._keychain, client_cert, client_key
|
| 549 |
+
)
|
| 550 |
+
result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
|
| 551 |
+
_assert_no_error(result)
|
| 552 |
+
|
| 553 |
+
while True:
|
| 554 |
+
with self._raise_on_error():
|
| 555 |
+
result = Security.SSLHandshake(self.context)
|
| 556 |
+
|
| 557 |
+
if result == SecurityConst.errSSLWouldBlock:
|
| 558 |
+
raise socket.timeout("handshake timed out")
|
| 559 |
+
elif result == SecurityConst.errSSLServerAuthCompleted:
|
| 560 |
+
self._custom_validate(verify, trust_bundle)
|
| 561 |
+
continue
|
| 562 |
+
else:
|
| 563 |
+
_assert_no_error(result)
|
| 564 |
+
break
|
| 565 |
+
|
| 566 |
+
def fileno(self):
|
| 567 |
+
return self.socket.fileno()
|
| 568 |
+
|
| 569 |
+
# Copy-pasted from Python 3.5 source code
|
| 570 |
+
def _decref_socketios(self):
|
| 571 |
+
if self._makefile_refs > 0:
|
| 572 |
+
self._makefile_refs -= 1
|
| 573 |
+
if self._closed:
|
| 574 |
+
self.close()
|
| 575 |
+
|
| 576 |
+
def recv(self, bufsiz):
|
| 577 |
+
buffer = ctypes.create_string_buffer(bufsiz)
|
| 578 |
+
bytes_read = self.recv_into(buffer, bufsiz)
|
| 579 |
+
data = buffer[:bytes_read]
|
| 580 |
+
return data
|
| 581 |
+
|
| 582 |
+
def recv_into(self, buffer, nbytes=None):
|
| 583 |
+
# Read short on EOF.
|
| 584 |
+
if self._closed:
|
| 585 |
+
return 0
|
| 586 |
+
|
| 587 |
+
if nbytes is None:
|
| 588 |
+
nbytes = len(buffer)
|
| 589 |
+
|
| 590 |
+
buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
|
| 591 |
+
processed_bytes = ctypes.c_size_t(0)
|
| 592 |
+
|
| 593 |
+
with self._raise_on_error():
|
| 594 |
+
result = Security.SSLRead(
|
| 595 |
+
self.context, buffer, nbytes, ctypes.byref(processed_bytes)
|
| 596 |
+
)
|
| 597 |
+
|
| 598 |
+
# There are some result codes that we want to treat as "not always
|
| 599 |
+
# errors". Specifically, those are errSSLWouldBlock,
|
| 600 |
+
# errSSLClosedGraceful, and errSSLClosedNoNotify.
|
| 601 |
+
if result == SecurityConst.errSSLWouldBlock:
|
| 602 |
+
# If we didn't process any bytes, then this was just a time out.
|
| 603 |
+
# However, we can get errSSLWouldBlock in situations when we *did*
|
| 604 |
+
# read some data, and in those cases we should just read "short"
|
| 605 |
+
# and return.
|
| 606 |
+
if processed_bytes.value == 0:
|
| 607 |
+
# Timed out, no data read.
|
| 608 |
+
raise socket.timeout("recv timed out")
|
| 609 |
+
elif result in (
|
| 610 |
+
SecurityConst.errSSLClosedGraceful,
|
| 611 |
+
SecurityConst.errSSLClosedNoNotify,
|
| 612 |
+
):
|
| 613 |
+
# The remote peer has closed this connection. We should do so as
|
| 614 |
+
# well. Note that we don't actually return here because in
|
| 615 |
+
# principle this could actually be fired along with return data.
|
| 616 |
+
# It's unlikely though.
|
| 617 |
+
self.close()
|
| 618 |
+
else:
|
| 619 |
+
_assert_no_error(result)
|
| 620 |
+
|
| 621 |
+
# Ok, we read and probably succeeded. We should return whatever data
|
| 622 |
+
# was actually read.
|
| 623 |
+
return processed_bytes.value
|
| 624 |
+
|
| 625 |
+
def settimeout(self, timeout):
|
| 626 |
+
self._timeout = timeout
|
| 627 |
+
|
| 628 |
+
def gettimeout(self):
|
| 629 |
+
return self._timeout
|
| 630 |
+
|
| 631 |
+
    def send(self, data):
        """Write ``data`` through SecureTransport.

        :returns: the number of bytes actually written (may be short).
        :raises socket.timeout: if the write would block and nothing was sent.
        """
        processed_bytes = ctypes.c_size_t(0)

        with self._raise_on_error():
            result = Security.SSLWrite(
                self.context, data, len(data), ctypes.byref(processed_bytes)
            )

        if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
            # Timed out
            raise socket.timeout("send timed out")
        else:
            _assert_no_error(result)

        # We sent, and probably succeeded. Tell them how much we sent.
        return processed_bytes.value
|
| 647 |
+
|
| 648 |
+
def sendall(self, data):
|
| 649 |
+
total_sent = 0
|
| 650 |
+
while total_sent < len(data):
|
| 651 |
+
sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
|
| 652 |
+
total_sent += sent
|
| 653 |
+
|
| 654 |
+
    def shutdown(self):
        """Send the TLS close_notify alert; does not close the raw socket."""
        with self._raise_on_error():
            Security.SSLClose(self.context)
|
| 657 |
+
|
| 658 |
+
    def close(self):
        """Release SecureTransport resources and close the underlying socket.

        Honors the Python 2 ``makefile`` reference count: while file objects
        still reference this socket, closing only decrements the count.
        """
        # TODO: should I do clean shutdown here? Do I have to?
        if self._makefile_refs < 1:
            self._closed = True
            if self.context:
                # Balance the retain taken when the SSL context was created.
                CoreFoundation.CFRelease(self.context)
                self.context = None
            if self._client_cert_chain:
                CoreFoundation.CFRelease(self._client_cert_chain)
                self._client_cert_chain = None
            if self._keychain:
                # Delete the temporary keychain and its on-disk directory.
                Security.SecKeychainDelete(self._keychain)
                CoreFoundation.CFRelease(self._keychain)
                shutil.rmtree(self._keychain_dir)
                self._keychain = self._keychain_dir = None
            return self.socket.close()
        else:
            self._makefile_refs -= 1
|
| 676 |
+
|
| 677 |
+
    def getpeercert(self, binary_form=False):
        """Return the peer's leaf certificate as DER-encoded bytes.

        Only ``binary_form=True`` is supported. Returns ``None`` when no
        certificate is available (e.g. the handshake has not completed).

        :raises ValueError: if ``binary_form`` is falsy.
        """
        # Urgh, annoying.
        #
        # Here's how we do this:
        #
        # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
        #    connection.
        # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
        # 3. To get the CN, call SecCertificateCopyCommonName and process that
        #    string so that it's of the appropriate type.
        # 4. To get the SAN, we need to do something a bit more complex:
        #    a. Call SecCertificateCopyValues to get the data, requesting
        #       kSecOIDSubjectAltName.
        #    b. Mess about with this dictionary to try to get the SANs out.
        #
        # This is gross. Really gross. It's going to be a few hundred LoC extra
        # just to repeat something that SecureTransport can *already do*. So my
        # operating assumption at this time is that what we want to do is
        # instead to just flag to urllib3 that it shouldn't do its own hostname
        # validation when using SecureTransport.
        if not binary_form:
            raise ValueError("SecureTransport only supports dumping binary certs")
        trust = Security.SecTrustRef()
        certdata = None
        der_bytes = None

        try:
            # Grab the trust store.
            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
            _assert_no_error(result)
            if not trust:
                # Probably we haven't done the handshake yet. No biggie.
                return None

            cert_count = Security.SecTrustGetCertificateCount(trust)
            if not cert_count:
                # Also a case that might happen if we haven't handshaked.
                # Handshook? Handshaken?
                return None

            leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
            assert leaf

            # Ok, now we want the DER bytes.
            certdata = Security.SecCertificateCopyData(leaf)
            assert certdata

            data_length = CoreFoundation.CFDataGetLength(certdata)
            data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
            der_bytes = ctypes.string_at(data_buffer, data_length)
        finally:
            # Balance the CoreFoundation retains made above, even on error.
            if certdata:
                CoreFoundation.CFRelease(certdata)
            if trust:
                CoreFoundation.CFRelease(trust)

        return der_bytes
|
| 734 |
+
|
| 735 |
+
    def version(self):
        """Return the negotiated TLS protocol version as a string.

        :raises ssl.SSLError: for TLS 1.3 (not supported by this shim) or an
            unrecognised protocol constant.
        """
        protocol = Security.SSLProtocol()
        result = Security.SSLGetNegotiatedProtocolVersion(
            self.context, ctypes.byref(protocol)
        )
        _assert_no_error(result)
        if protocol.value == SecurityConst.kTLSProtocol13:
            raise ssl.SSLError("SecureTransport does not support TLS 1.3")
        elif protocol.value == SecurityConst.kTLSProtocol12:
            return "TLSv1.2"
        elif protocol.value == SecurityConst.kTLSProtocol11:
            return "TLSv1.1"
        elif protocol.value == SecurityConst.kTLSProtocol1:
            return "TLSv1"
        elif protocol.value == SecurityConst.kSSLProtocol3:
            return "SSLv3"
        elif protocol.value == SecurityConst.kSSLProtocol2:
            return "SSLv2"
        else:
            raise ssl.SSLError("Unknown TLS version: %r" % protocol)
|
| 755 |
+
|
| 756 |
+
def _reuse(self):
|
| 757 |
+
self._makefile_refs += 1
|
| 758 |
+
|
| 759 |
+
def _drop(self):
|
| 760 |
+
if self._makefile_refs < 1:
|
| 761 |
+
self.close()
|
| 762 |
+
else:
|
| 763 |
+
self._makefile_refs -= 1
|
| 764 |
+
|
| 765 |
+
|
| 766 |
+
if _fileobject:  # Platform-specific: Python 2

    def makefile(self, mode, bufsize=-1):
        # Python 2: delegate to the stdlib socket _fileobject, bumping the
        # refcount so the wrapped socket stays open while files reference it.
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)

else:  # Platform-specific: Python 3

    def makefile(self, mode="r", buffering=None, *args, **kwargs):
        # We disable buffering with SecureTransport because it conflicts with
        # the buffering that ST does internally (see issue #1153 for more).
        buffering = 0
        return backport_makefile(self, mode, buffering, *args, **kwargs)


# Attach whichever implementation matches the running interpreter.
WrappedSocket.makefile = makefile
|
| 782 |
+
|
| 783 |
+
|
| 784 |
+
class SecureTransportContext(object):
    """
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    """

    def __init__(self, protocol):
        # Map the requested ssl protocol constant onto SecureTransport's
        # (min, max) protocol version pair.
        self._min_version, self._max_version = _protocol_to_min_max[protocol]
        self._options = 0
        self._verify = False
        self._trust_bundle = None
        self._client_cert = None
        self._client_key = None
        self._client_key_passphrase = None
        self._alpn_protocols = None

    @property
    def check_hostname(self):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        return True

    @check_hostname.setter
    def check_hostname(self, value):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        pass

    @property
    def options(self):
        # TODO: Well, crap.
        #
        # So this is the bit of the code that is the most likely to cause us
        # trouble. Essentially we need to enumerate all of the SSL options that
        # users might want to use and try to see if we can sensibly translate
        # them, or whether we should just ignore them.
        return self._options

    @options.setter
    def options(self, value):
        # TODO: Update in line with above.
        self._options = value

    @property
    def verify_mode(self):
        """Return ssl.CERT_REQUIRED when verification is on, else CERT_NONE."""
        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE

    @verify_mode.setter
    def verify_mode(self, value):
        self._verify = True if value == ssl.CERT_REQUIRED else False

    def set_default_verify_paths(self):
        # So, this has to do something a bit weird. Specifically, what it does
        # is nothing.
        #
        # This means that, if we had previously had load_verify_locations
        # called, this does not undo that. We need to do that because it turns
        # out that the rest of the urllib3 code will attempt to load the
        # default verify paths if it hasn't been told about any paths, even if
        # the context itself was sometime earlier. We resolve that by just
        # ignoring it.
        pass

    def load_default_certs(self):
        return self.set_default_verify_paths()

    def set_ciphers(self, ciphers):
        # For now, we just require the default cipher string.
        if ciphers != util.ssl_.DEFAULT_CIPHERS:
            raise ValueError("SecureTransport doesn't support custom cipher strings")

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        # OK, we only really support cadata and cafile.
        if capath is not None:
            raise ValueError("SecureTransport does not support cert directories")

        # Raise if cafile does not exist.
        if cafile is not None:
            with open(cafile):
                pass

        self._trust_bundle = cafile or cadata

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        self._client_cert = certfile
        self._client_key = keyfile
        # Bug fix: this previously assigned only ``self._client_cert_passphrase``,
        # an attribute nothing else reads -- ``wrap_socket`` forwards
        # ``self._client_key_passphrase`` to the handshake, so a supplied
        # password was silently dropped. Store it under the name that is
        # actually consumed; keep the old attribute for backwards compatibility.
        self._client_key_passphrase = password
        self._client_cert_passphrase = password

    def set_alpn_protocols(self, protocols):
        """
        Sets the ALPN protocols that will later be set on the context.

        Raises a NotImplementedError if ALPN is not supported.
        """
        if not hasattr(Security, "SSLSetALPNProtocols"):
            raise NotImplementedError(
                "SecureTransport supports ALPN only in macOS 10.12+"
            )
        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]

    def wrap_socket(
        self,
        sock,
        server_side=False,
        do_handshake_on_connect=True,
        suppress_ragged_eofs=True,
        server_hostname=None,
    ):
        # So, what do we do here? Firstly, we assert some properties. This is a
        # stripped down shim, so there is some functionality we don't support.
        # See PEP 543 for the real deal.
        assert not server_side
        assert do_handshake_on_connect
        assert suppress_ragged_eofs

        # Ok, we're good to go. Now we want to create the wrapped socket object
        # and store it in the appropriate place.
        wrapped_socket = WrappedSocket(sock)

        # Now we can handshake
        wrapped_socket.handshake(
            server_hostname,
            self._verify,
            self._trust_bundle,
            self._min_version,
            self._max_version,
            self._client_cert,
            self._client_key,
            self._client_key_passphrase,
            self._alpn_protocols,
        )
        return wrapped_socket
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/socks.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
"""
|
| 3 |
+
This module contains provisional support for SOCKS proxies from within
|
| 4 |
+
urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
|
| 5 |
+
SOCKS5. To enable its functionality, either install PySocks or install this
|
| 6 |
+
module with the ``socks`` extra.
|
| 7 |
+
|
| 8 |
+
The SOCKS implementation supports the full range of urllib3 features. It also
|
| 9 |
+
supports the following SOCKS features:
|
| 10 |
+
|
| 11 |
+
- SOCKS4A (``proxy_url='socks4a://...``)
|
| 12 |
+
- SOCKS4 (``proxy_url='socks4://...``)
|
| 13 |
+
- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
|
| 14 |
+
- SOCKS5 with local DNS (``proxy_url='socks5://...``)
|
| 15 |
+
- Usernames and passwords for the SOCKS proxy
|
| 16 |
+
|
| 17 |
+
.. note::
|
| 18 |
+
It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
|
| 19 |
+
your ``proxy_url`` to ensure that DNS resolution is done from the remote
|
| 20 |
+
server instead of client-side when connecting to a domain name.
|
| 21 |
+
|
| 22 |
+
SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
|
| 23 |
+
supports IPv4, IPv6, and domain names.
|
| 24 |
+
|
| 25 |
+
When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
|
| 26 |
+
will be sent as the ``userid`` section of the SOCKS request:
|
| 27 |
+
|
| 28 |
+
.. code-block:: python
|
| 29 |
+
|
| 30 |
+
proxy_url="socks4a://<userid>@proxy-host"
|
| 31 |
+
|
| 32 |
+
When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
|
| 33 |
+
of the ``proxy_url`` will be sent as the username/password to authenticate
|
| 34 |
+
with the proxy:
|
| 35 |
+
|
| 36 |
+
.. code-block:: python
|
| 37 |
+
|
| 38 |
+
proxy_url="socks5h://<username>:<password>@proxy-host"
|
| 39 |
+
|
| 40 |
+
"""
|
| 41 |
+
from __future__ import absolute_import
|
| 42 |
+
|
| 43 |
+
try:
|
| 44 |
+
import socks
|
| 45 |
+
except ImportError:
|
| 46 |
+
import warnings
|
| 47 |
+
|
| 48 |
+
from ..exceptions import DependencyWarning
|
| 49 |
+
|
| 50 |
+
warnings.warn(
|
| 51 |
+
(
|
| 52 |
+
"SOCKS support in urllib3 requires the installation of optional "
|
| 53 |
+
"dependencies: specifically, PySocks. For more information, see "
|
| 54 |
+
"https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
|
| 55 |
+
),
|
| 56 |
+
DependencyWarning,
|
| 57 |
+
)
|
| 58 |
+
raise
|
| 59 |
+
|
| 60 |
+
from socket import error as SocketError
|
| 61 |
+
from socket import timeout as SocketTimeout
|
| 62 |
+
|
| 63 |
+
from ..connection import HTTPConnection, HTTPSConnection
|
| 64 |
+
from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
| 65 |
+
from ..exceptions import ConnectTimeoutError, NewConnectionError
|
| 66 |
+
from ..poolmanager import PoolManager
|
| 67 |
+
from ..util.url import parse_url
|
| 68 |
+
|
| 69 |
+
try:
|
| 70 |
+
import ssl
|
| 71 |
+
except ImportError:
|
| 72 |
+
ssl = None
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class SOCKSConnection(HTTPConnection):
    """
    A plain-text HTTP connection that connects via a SOCKS proxy.
    """

    def __init__(self, *args, **kwargs):
        # Pull the SOCKS configuration out of kwargs before the base class
        # sees them.
        self._socks_options = kwargs.pop("_socks_options")
        super(SOCKSConnection, self).__init__(*args, **kwargs)

    def _new_conn(self):
        """
        Establish a new connection via the SOCKS proxy.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw["source_address"] = self.source_address
        if self.socket_options:
            extra_kw["socket_options"] = self.socket_options

        opts = self._socks_options
        try:
            return socks.create_connection(
                (self.host, self.port),
                proxy_type=opts["socks_version"],
                proxy_addr=opts["proxy_host"],
                proxy_port=opts["proxy_port"],
                proxy_username=opts["username"],
                proxy_password=opts["password"],
                proxy_rdns=opts["rdns"],
                timeout=self.timeout,
                **extra_kw
            )

        except SocketTimeout:
            raise ConnectTimeoutError(
                self,
                "Connection to %s timed out. (connect timeout=%s)"
                % (self.host, self.timeout),
            )

        except socks.ProxyError as e:
            # This is fragile as hell, but it seems to be the only way to raise
            # useful errors here.
            if e.socket_err:
                error = e.socket_err
                if isinstance(error, SocketTimeout):
                    raise ConnectTimeoutError(
                        self,
                        "Connection to %s timed out. (connect timeout=%s)"
                        % (self.host, self.timeout),
                    )
                else:
                    raise NewConnectionError(
                        self, "Failed to establish a new connection: %s" % error
                    )
            else:
                raise NewConnectionError(
                    self, "Failed to establish a new connection: %s" % e
                )

        except SocketError as e:  # Defensive: PySocks should catch all these.
            raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e
            )
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
# We don't need to duplicate the Verified/Unverified distinction from
|
| 144 |
+
# urllib3/connection.py here because the HTTPSConnection will already have been
|
| 145 |
+
# correctly set to either the Verified or Unverified form by that module. This
|
| 146 |
+
# means the SOCKSHTTPSConnection will automatically be the correct type.
|
| 147 |
+
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
    """HTTPS connection tunnelled through a SOCKS proxy.

    Inherits SOCKS tunnelling from SOCKSConnection and TLS behavior from
    HTTPSConnection; no extra logic is needed here.
    """

    pass
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class SOCKSHTTPConnectionPool(HTTPConnectionPool):
    """Connection pool whose connections are plain HTTP via a SOCKS proxy."""

    ConnectionCls = SOCKSConnection
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
    """Connection pool whose connections are HTTPS via a SOCKS proxy."""

    ConnectionCls = SOCKSHTTPSConnection
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
class SOCKSProxyManager(PoolManager):
    """
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.
    """

    pool_classes_by_scheme = {
        "http": SOCKSHTTPConnectionPool,
        "https": SOCKSHTTPSConnectionPool,
    }

    def __init__(
        self,
        proxy_url,
        username=None,
        password=None,
        num_pools=10,
        headers=None,
        **connection_pool_kw
    ):
        parsed = parse_url(proxy_url)

        # Fall back to credentials embedded in the URL when none are given.
        if username is None and password is None and parsed.auth is not None:
            pieces = parsed.auth.split(":")
            if len(pieces) == 2:
                username, password = pieces

        # Map the URL scheme onto (PySocks proxy type, remote-DNS flag).
        scheme_map = {
            "socks5": (socks.PROXY_TYPE_SOCKS5, False),
            "socks5h": (socks.PROXY_TYPE_SOCKS5, True),
            "socks4": (socks.PROXY_TYPE_SOCKS4, False),
            "socks4a": (socks.PROXY_TYPE_SOCKS4, True),
        }
        try:
            socks_version, rdns = scheme_map[parsed.scheme]
        except KeyError:
            raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)

        self.proxy_url = proxy_url

        connection_pool_kw["_socks_options"] = {
            "socks_version": socks_version,
            "proxy_host": parsed.host,
            "proxy_port": parsed.port,
            "username": username,
            "password": password,
            "rdns": rdns,
        }

        super(SOCKSProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw
        )

        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/filepost.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import binascii
|
| 4 |
+
import codecs
|
| 5 |
+
import os
|
| 6 |
+
from io import BytesIO
|
| 7 |
+
|
| 8 |
+
from .fields import RequestField
|
| 9 |
+
from .packages import six
|
| 10 |
+
from .packages.six import b
|
| 11 |
+
|
| 12 |
+
writer = codecs.lookup("utf-8")[3]
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def choose_boundary():
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.
    """
    token = binascii.hexlify(os.urandom(16))
    # hexlify() yields bytes; callers expect a native string on Python 3.
    return token if six.PY2 else token.decode("ascii")
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    """
    source = six.iteritems(fields) if isinstance(fields, dict) else iter(fields)

    for item in source:
        if isinstance(item, RequestField):
            yield item
        else:
            # Plain (key, value[, ...]) tuples get promoted to RequestField.
            yield RequestField.from_tuples(*item)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    if isinstance(fields, dict):
        fields = six.iteritems(fields)

    return ((k, v) for k, v in fields)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`urllib3.filepost.choose_boundary`.

    :returns:
        A ``(body_bytes, content_type_string)`` tuple.
    """
    body = BytesIO()
    if boundary is None:
        boundary = choose_boundary()

    for field in iter_field_objects(fields):
        # Each part begins with the boundary line followed by its headers.
        body.write(b("--%s\r\n" % (boundary)))

        writer(body).write(field.render_headers())
        data = field.data

        if isinstance(data, int):
            data = str(data)  # Backwards compatibility

        if isinstance(data, six.text_type):
            # Text payloads go through the module-level UTF-8 writer.
            writer(body).write(data)
        else:
            body.write(data)

        body.write(b"\r\n")

    # The closing boundary terminates the multipart body.
    body.write(b("--%s--\r\n" % (boundary)))

    content_type = str("multipart/form-data; boundary=%s" % boundary)

    return body.getvalue(), content_type
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/request.py
ADDED
|
@@ -0,0 +1,191 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
|
| 5 |
+
from .filepost import encode_multipart_formdata
|
| 6 |
+
from .packages import six
|
| 7 |
+
from .packages.six.moves.urllib.parse import urlencode
|
| 8 |
+
|
| 9 |
+
__all__ = ["RequestMethods"]
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class RequestMethods(object):
|
| 13 |
+
"""
|
| 14 |
+
Convenience mixin for classes who implement a :meth:`urlopen` method, such
|
| 15 |
+
as :class:`urllib3.HTTPConnectionPool` and
|
| 16 |
+
:class:`urllib3.PoolManager`.
|
| 17 |
+
|
| 18 |
+
Provides behavior for making common types of HTTP request methods and
|
| 19 |
+
decides which type of request field encoding to use.
|
| 20 |
+
|
| 21 |
+
Specifically,
|
| 22 |
+
|
| 23 |
+
:meth:`.request_encode_url` is for sending requests whose fields are
|
| 24 |
+
encoded in the URL (such as GET, HEAD, DELETE).
|
| 25 |
+
|
| 26 |
+
:meth:`.request_encode_body` is for sending requests whose fields are
|
| 27 |
+
encoded in the *body* of the request using multipart or www-form-urlencoded
|
| 28 |
+
(such as for POST, PUT, PATCH).
|
| 29 |
+
|
| 30 |
+
:meth:`.request` is for making any kind of request, it will look up the
|
| 31 |
+
appropriate encoding format and use one of the above two methods to make
|
| 32 |
+
the request.
|
| 33 |
+
|
| 34 |
+
Initializer parameters:
|
| 35 |
+
|
| 36 |
+
:param headers:
|
| 37 |
+
Headers to include with all requests, unless other headers are given
|
| 38 |
+
explicitly.
|
| 39 |
+
"""
|
| 40 |
+
|
| 41 |
+
_encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
|
| 42 |
+
|
| 43 |
+
def __init__(self, headers=None):
|
| 44 |
+
self.headers = headers or {}
|
| 45 |
+
|
| 46 |
+
def urlopen(
|
| 47 |
+
self,
|
| 48 |
+
method,
|
| 49 |
+
url,
|
| 50 |
+
body=None,
|
| 51 |
+
headers=None,
|
| 52 |
+
encode_multipart=True,
|
| 53 |
+
multipart_boundary=None,
|
| 54 |
+
**kw
|
| 55 |
+
): # Abstract
|
| 56 |
+
raise NotImplementedError(
|
| 57 |
+
"Classes extending RequestMethods must implement "
|
| 58 |
+
"their own ``urlopen`` method."
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
|
| 62 |
+
"""
|
| 63 |
+
Make a request using :meth:`urlopen` with the appropriate encoding of
|
| 64 |
+
``fields`` based on the ``method`` used.
|
| 65 |
+
|
| 66 |
+
This is a convenience method that requires the least amount of manual
|
| 67 |
+
effort. It can be used in most situations, while still having the
|
| 68 |
+
option to drop down to more specific methods when necessary, such as
|
| 69 |
+
:meth:`request_encode_url`, :meth:`request_encode_body`,
|
| 70 |
+
or even the lowest level :meth:`urlopen`.
|
| 71 |
+
"""
|
| 72 |
+
method = method.upper()
|
| 73 |
+
|
| 74 |
+
urlopen_kw["request_url"] = url
|
| 75 |
+
|
| 76 |
+
if method in self._encode_url_methods:
|
| 77 |
+
return self.request_encode_url(
|
| 78 |
+
method, url, fields=fields, headers=headers, **urlopen_kw
|
| 79 |
+
)
|
| 80 |
+
else:
|
| 81 |
+
return self.request_encode_body(
|
| 82 |
+
method, url, fields=fields, headers=headers, **urlopen_kw
|
| 83 |
+
)
|
| 84 |
+
|
| 85 |
+
def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
|
| 86 |
+
"""
|
| 87 |
+
Make a request using :meth:`urlopen` with the ``fields`` encoded in
|
| 88 |
+
the url. This is useful for request methods like GET, HEAD, DELETE, etc.
|
| 89 |
+
"""
|
| 90 |
+
if headers is None:
|
| 91 |
+
headers = self.headers
|
| 92 |
+
|
| 93 |
+
extra_kw = {"headers": headers}
|
| 94 |
+
extra_kw.update(urlopen_kw)
|
| 95 |
+
|
| 96 |
+
if fields:
|
| 97 |
+
url += "?" + urlencode(fields)
|
| 98 |
+
|
| 99 |
+
return self.urlopen(method, url, **extra_kw)
|
| 100 |
+
|
| 101 |
+
def request_encode_body(
|
| 102 |
+
self,
|
| 103 |
+
method,
|
| 104 |
+
url,
|
| 105 |
+
fields=None,
|
| 106 |
+
headers=None,
|
| 107 |
+
encode_multipart=True,
|
| 108 |
+
multipart_boundary=None,
|
| 109 |
+
**urlopen_kw
|
| 110 |
+
):
|
| 111 |
+
"""
|
| 112 |
+
Make a request using :meth:`urlopen` with the ``fields`` encoded in
|
| 113 |
+
the body. This is useful for request methods like POST, PUT, PATCH, etc.
|
| 114 |
+
|
| 115 |
+
When ``encode_multipart=True`` (default), then
|
| 116 |
+
:func:`urllib3.encode_multipart_formdata` is used to encode
|
| 117 |
+
the payload with the appropriate content type. Otherwise
|
| 118 |
+
:func:`urllib.parse.urlencode` is used with the
|
| 119 |
+
'application/x-www-form-urlencoded' content type.
|
| 120 |
+
|
| 121 |
+
Multipart encoding must be used when posting files, and it's reasonably
|
| 122 |
+
safe to use it in other times too. However, it may break request
|
| 123 |
+
signing, such as with OAuth.
|
| 124 |
+
|
| 125 |
+
Supports an optional ``fields`` parameter of key/value strings AND
|
| 126 |
+
key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
|
| 127 |
+
the MIME type is optional. For example::
|
| 128 |
+
|
| 129 |
+
fields = {
|
| 130 |
+
'foo': 'bar',
|
| 131 |
+
'fakefile': ('foofile.txt', 'contents of foofile'),
|
| 132 |
+
'realfile': ('barfile.txt', open('realfile').read()),
|
| 133 |
+
'typedfile': ('bazfile.bin', open('bazfile').read(),
|
| 134 |
+
'image/jpeg'),
|
| 135 |
+
'nonamefile': 'contents of nonamefile field',
|
| 136 |
+
}
|
| 137 |
+
|
| 138 |
+
When uploading a file, providing a filename (the first parameter of the
|
| 139 |
+
tuple) is optional but recommended to best mimic behavior of browsers.
|
| 140 |
+
|
| 141 |
+
Note that if ``headers`` are supplied, the 'Content-Type' header will
|
| 142 |
+
be overwritten because it depends on the dynamic random boundary string
|
| 143 |
+
which is used to compose the body of the request. The random boundary
|
| 144 |
+
string can be explicitly set with the ``multipart_boundary`` parameter.
|
| 145 |
+
"""
|
| 146 |
+
if headers is None:
|
| 147 |
+
headers = self.headers
|
| 148 |
+
|
| 149 |
+
extra_kw = {"headers": {}}
|
| 150 |
+
|
| 151 |
+
if fields:
|
| 152 |
+
if "body" in urlopen_kw:
|
| 153 |
+
raise TypeError(
|
| 154 |
+
"request got values for both 'fields' and 'body', can only specify one."
|
| 155 |
+
)
|
| 156 |
+
|
| 157 |
+
if encode_multipart:
|
| 158 |
+
body, content_type = encode_multipart_formdata(
|
| 159 |
+
fields, boundary=multipart_boundary
|
| 160 |
+
)
|
| 161 |
+
else:
|
| 162 |
+
body, content_type = (
|
| 163 |
+
urlencode(fields),
|
| 164 |
+
"application/x-www-form-urlencoded",
|
| 165 |
+
)
|
| 166 |
+
|
| 167 |
+
extra_kw["body"] = body
|
| 168 |
+
extra_kw["headers"] = {"Content-Type": content_type}
|
| 169 |
+
|
| 170 |
+
extra_kw["headers"].update(headers)
|
| 171 |
+
extra_kw.update(urlopen_kw)
|
| 172 |
+
|
| 173 |
+
return self.urlopen(method, url, **extra_kw)
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
if not six.PY2:
|
| 177 |
+
|
| 178 |
+
class RequestModule(sys.modules[__name__].__class__):
|
| 179 |
+
def __call__(self, *args, **kwargs):
|
| 180 |
+
"""
|
| 181 |
+
If user tries to call this module directly urllib3 v2.x style raise an error to the user
|
| 182 |
+
suggesting they may need urllib3 v2
|
| 183 |
+
"""
|
| 184 |
+
raise TypeError(
|
| 185 |
+
"'module' object is not callable\n"
|
| 186 |
+
"urllib3.request() method is not supported in this release, "
|
| 187 |
+
"upgrade to urllib3 v2 to use it\n"
|
| 188 |
+
"see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
|
| 189 |
+
)
|
| 190 |
+
|
| 191 |
+
sys.modules[__name__].__class__ = RequestModule
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/response.py
ADDED
|
@@ -0,0 +1,879 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import io
|
| 4 |
+
import logging
|
| 5 |
+
import sys
|
| 6 |
+
import warnings
|
| 7 |
+
import zlib
|
| 8 |
+
from contextlib import contextmanager
|
| 9 |
+
from socket import error as SocketError
|
| 10 |
+
from socket import timeout as SocketTimeout
|
| 11 |
+
|
| 12 |
+
brotli = None
|
| 13 |
+
|
| 14 |
+
from . import util
|
| 15 |
+
from ._collections import HTTPHeaderDict
|
| 16 |
+
from .connection import BaseSSLError, HTTPException
|
| 17 |
+
from .exceptions import (
|
| 18 |
+
BodyNotHttplibCompatible,
|
| 19 |
+
DecodeError,
|
| 20 |
+
HTTPError,
|
| 21 |
+
IncompleteRead,
|
| 22 |
+
InvalidChunkLength,
|
| 23 |
+
InvalidHeader,
|
| 24 |
+
ProtocolError,
|
| 25 |
+
ReadTimeoutError,
|
| 26 |
+
ResponseNotChunked,
|
| 27 |
+
SSLError,
|
| 28 |
+
)
|
| 29 |
+
from .packages import six
|
| 30 |
+
from .util.response import is_fp_closed, is_response_to_head
|
| 31 |
+
|
| 32 |
+
log = logging.getLogger(__name__)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class DeflateDecoder(object):
|
| 36 |
+
def __init__(self):
|
| 37 |
+
self._first_try = True
|
| 38 |
+
self._data = b""
|
| 39 |
+
self._obj = zlib.decompressobj()
|
| 40 |
+
|
| 41 |
+
def __getattr__(self, name):
|
| 42 |
+
return getattr(self._obj, name)
|
| 43 |
+
|
| 44 |
+
def decompress(self, data):
|
| 45 |
+
if not data:
|
| 46 |
+
return data
|
| 47 |
+
|
| 48 |
+
if not self._first_try:
|
| 49 |
+
return self._obj.decompress(data)
|
| 50 |
+
|
| 51 |
+
self._data += data
|
| 52 |
+
try:
|
| 53 |
+
decompressed = self._obj.decompress(data)
|
| 54 |
+
if decompressed:
|
| 55 |
+
self._first_try = False
|
| 56 |
+
self._data = None
|
| 57 |
+
return decompressed
|
| 58 |
+
except zlib.error:
|
| 59 |
+
self._first_try = False
|
| 60 |
+
self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
|
| 61 |
+
try:
|
| 62 |
+
return self.decompress(self._data)
|
| 63 |
+
finally:
|
| 64 |
+
self._data = None
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class GzipDecoderState(object):
|
| 68 |
+
|
| 69 |
+
FIRST_MEMBER = 0
|
| 70 |
+
OTHER_MEMBERS = 1
|
| 71 |
+
SWALLOW_DATA = 2
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class GzipDecoder(object):
|
| 75 |
+
def __init__(self):
|
| 76 |
+
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
|
| 77 |
+
self._state = GzipDecoderState.FIRST_MEMBER
|
| 78 |
+
|
| 79 |
+
def __getattr__(self, name):
|
| 80 |
+
return getattr(self._obj, name)
|
| 81 |
+
|
| 82 |
+
def decompress(self, data):
|
| 83 |
+
ret = bytearray()
|
| 84 |
+
if self._state == GzipDecoderState.SWALLOW_DATA or not data:
|
| 85 |
+
return bytes(ret)
|
| 86 |
+
while True:
|
| 87 |
+
try:
|
| 88 |
+
ret += self._obj.decompress(data)
|
| 89 |
+
except zlib.error:
|
| 90 |
+
previous_state = self._state
|
| 91 |
+
# Ignore data after the first error
|
| 92 |
+
self._state = GzipDecoderState.SWALLOW_DATA
|
| 93 |
+
if previous_state == GzipDecoderState.OTHER_MEMBERS:
|
| 94 |
+
# Allow trailing garbage acceptable in other gzip clients
|
| 95 |
+
return bytes(ret)
|
| 96 |
+
raise
|
| 97 |
+
data = self._obj.unused_data
|
| 98 |
+
if not data:
|
| 99 |
+
return bytes(ret)
|
| 100 |
+
self._state = GzipDecoderState.OTHER_MEMBERS
|
| 101 |
+
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
if brotli is not None:
|
| 105 |
+
|
| 106 |
+
class BrotliDecoder(object):
|
| 107 |
+
# Supports both 'brotlipy' and 'Brotli' packages
|
| 108 |
+
# since they share an import name. The top branches
|
| 109 |
+
# are for 'brotlipy' and bottom branches for 'Brotli'
|
| 110 |
+
def __init__(self):
|
| 111 |
+
self._obj = brotli.Decompressor()
|
| 112 |
+
if hasattr(self._obj, "decompress"):
|
| 113 |
+
self.decompress = self._obj.decompress
|
| 114 |
+
else:
|
| 115 |
+
self.decompress = self._obj.process
|
| 116 |
+
|
| 117 |
+
def flush(self):
|
| 118 |
+
if hasattr(self._obj, "flush"):
|
| 119 |
+
return self._obj.flush()
|
| 120 |
+
return b""
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class MultiDecoder(object):
|
| 124 |
+
"""
|
| 125 |
+
From RFC7231:
|
| 126 |
+
If one or more encodings have been applied to a representation, the
|
| 127 |
+
sender that applied the encodings MUST generate a Content-Encoding
|
| 128 |
+
header field that lists the content codings in the order in which
|
| 129 |
+
they were applied.
|
| 130 |
+
"""
|
| 131 |
+
|
| 132 |
+
def __init__(self, modes):
|
| 133 |
+
self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
|
| 134 |
+
|
| 135 |
+
def flush(self):
|
| 136 |
+
return self._decoders[0].flush()
|
| 137 |
+
|
| 138 |
+
def decompress(self, data):
|
| 139 |
+
for d in reversed(self._decoders):
|
| 140 |
+
data = d.decompress(data)
|
| 141 |
+
return data
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def _get_decoder(mode):
|
| 145 |
+
if "," in mode:
|
| 146 |
+
return MultiDecoder(mode)
|
| 147 |
+
|
| 148 |
+
if mode == "gzip":
|
| 149 |
+
return GzipDecoder()
|
| 150 |
+
|
| 151 |
+
if brotli is not None and mode == "br":
|
| 152 |
+
return BrotliDecoder()
|
| 153 |
+
|
| 154 |
+
return DeflateDecoder()
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class HTTPResponse(io.IOBase):
|
| 158 |
+
"""
|
| 159 |
+
HTTP Response container.
|
| 160 |
+
|
| 161 |
+
Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
|
| 162 |
+
loaded and decoded on-demand when the ``data`` property is accessed. This
|
| 163 |
+
class is also compatible with the Python standard library's :mod:`io`
|
| 164 |
+
module, and can hence be treated as a readable object in the context of that
|
| 165 |
+
framework.
|
| 166 |
+
|
| 167 |
+
Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
|
| 168 |
+
|
| 169 |
+
:param preload_content:
|
| 170 |
+
If True, the response's body will be preloaded during construction.
|
| 171 |
+
|
| 172 |
+
:param decode_content:
|
| 173 |
+
If True, will attempt to decode the body based on the
|
| 174 |
+
'content-encoding' header.
|
| 175 |
+
|
| 176 |
+
:param original_response:
|
| 177 |
+
When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
|
| 178 |
+
object, it's convenient to include the original for debug purposes. It's
|
| 179 |
+
otherwise unused.
|
| 180 |
+
|
| 181 |
+
:param retries:
|
| 182 |
+
The retries contains the last :class:`~urllib3.util.retry.Retry` that
|
| 183 |
+
was used during the request.
|
| 184 |
+
|
| 185 |
+
:param enforce_content_length:
|
| 186 |
+
Enforce content length checking. Body returned by server must match
|
| 187 |
+
value of Content-Length header, if present. Otherwise, raise error.
|
| 188 |
+
"""
|
| 189 |
+
|
| 190 |
+
CONTENT_DECODERS = ["gzip", "deflate"]
|
| 191 |
+
if brotli is not None:
|
| 192 |
+
CONTENT_DECODERS += ["br"]
|
| 193 |
+
REDIRECT_STATUSES = [301, 302, 303, 307, 308]
|
| 194 |
+
|
| 195 |
+
def __init__(
|
| 196 |
+
self,
|
| 197 |
+
body="",
|
| 198 |
+
headers=None,
|
| 199 |
+
status=0,
|
| 200 |
+
version=0,
|
| 201 |
+
reason=None,
|
| 202 |
+
strict=0,
|
| 203 |
+
preload_content=True,
|
| 204 |
+
decode_content=True,
|
| 205 |
+
original_response=None,
|
| 206 |
+
pool=None,
|
| 207 |
+
connection=None,
|
| 208 |
+
msg=None,
|
| 209 |
+
retries=None,
|
| 210 |
+
enforce_content_length=False,
|
| 211 |
+
request_method=None,
|
| 212 |
+
request_url=None,
|
| 213 |
+
auto_close=True,
|
| 214 |
+
):
|
| 215 |
+
|
| 216 |
+
if isinstance(headers, HTTPHeaderDict):
|
| 217 |
+
self.headers = headers
|
| 218 |
+
else:
|
| 219 |
+
self.headers = HTTPHeaderDict(headers)
|
| 220 |
+
self.status = status
|
| 221 |
+
self.version = version
|
| 222 |
+
self.reason = reason
|
| 223 |
+
self.strict = strict
|
| 224 |
+
self.decode_content = decode_content
|
| 225 |
+
self.retries = retries
|
| 226 |
+
self.enforce_content_length = enforce_content_length
|
| 227 |
+
self.auto_close = auto_close
|
| 228 |
+
|
| 229 |
+
self._decoder = None
|
| 230 |
+
self._body = None
|
| 231 |
+
self._fp = None
|
| 232 |
+
self._original_response = original_response
|
| 233 |
+
self._fp_bytes_read = 0
|
| 234 |
+
self.msg = msg
|
| 235 |
+
self._request_url = request_url
|
| 236 |
+
|
| 237 |
+
if body and isinstance(body, (six.string_types, bytes)):
|
| 238 |
+
self._body = body
|
| 239 |
+
|
| 240 |
+
self._pool = pool
|
| 241 |
+
self._connection = connection
|
| 242 |
+
|
| 243 |
+
if hasattr(body, "read"):
|
| 244 |
+
self._fp = body
|
| 245 |
+
|
| 246 |
+
# Are we using the chunked-style of transfer encoding?
|
| 247 |
+
self.chunked = False
|
| 248 |
+
self.chunk_left = None
|
| 249 |
+
tr_enc = self.headers.get("transfer-encoding", "").lower()
|
| 250 |
+
# Don't incur the penalty of creating a list and then discarding it
|
| 251 |
+
encodings = (enc.strip() for enc in tr_enc.split(","))
|
| 252 |
+
if "chunked" in encodings:
|
| 253 |
+
self.chunked = True
|
| 254 |
+
|
| 255 |
+
# Determine length of response
|
| 256 |
+
self.length_remaining = self._init_length(request_method)
|
| 257 |
+
|
| 258 |
+
# If requested, preload the body.
|
| 259 |
+
if preload_content and not self._body:
|
| 260 |
+
self._body = self.read(decode_content=decode_content)
|
| 261 |
+
|
| 262 |
+
def get_redirect_location(self):
|
| 263 |
+
"""
|
| 264 |
+
Should we redirect and where to?
|
| 265 |
+
|
| 266 |
+
:returns: Truthy redirect location string if we got a redirect status
|
| 267 |
+
code and valid location. ``None`` if redirect status and no
|
| 268 |
+
location. ``False`` if not a redirect status code.
|
| 269 |
+
"""
|
| 270 |
+
if self.status in self.REDIRECT_STATUSES:
|
| 271 |
+
return self.headers.get("location")
|
| 272 |
+
|
| 273 |
+
return False
|
| 274 |
+
|
| 275 |
+
def release_conn(self):
|
| 276 |
+
if not self._pool or not self._connection:
|
| 277 |
+
return
|
| 278 |
+
|
| 279 |
+
self._pool._put_conn(self._connection)
|
| 280 |
+
self._connection = None
|
| 281 |
+
|
| 282 |
+
def drain_conn(self):
|
| 283 |
+
"""
|
| 284 |
+
Read and discard any remaining HTTP response data in the response connection.
|
| 285 |
+
|
| 286 |
+
Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
|
| 287 |
+
"""
|
| 288 |
+
try:
|
| 289 |
+
self.read()
|
| 290 |
+
except (HTTPError, SocketError, BaseSSLError, HTTPException):
|
| 291 |
+
pass
|
| 292 |
+
|
| 293 |
+
@property
|
| 294 |
+
def data(self):
|
| 295 |
+
# For backwards-compat with earlier urllib3 0.4 and earlier.
|
| 296 |
+
if self._body:
|
| 297 |
+
return self._body
|
| 298 |
+
|
| 299 |
+
if self._fp:
|
| 300 |
+
return self.read(cache_content=True)
|
| 301 |
+
|
| 302 |
+
@property
|
| 303 |
+
def connection(self):
|
| 304 |
+
return self._connection
|
| 305 |
+
|
| 306 |
+
def isclosed(self):
|
| 307 |
+
return is_fp_closed(self._fp)
|
| 308 |
+
|
| 309 |
+
def tell(self):
|
| 310 |
+
"""
|
| 311 |
+
Obtain the number of bytes pulled over the wire so far. May differ from
|
| 312 |
+
the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
|
| 313 |
+
if bytes are encoded on the wire (e.g, compressed).
|
| 314 |
+
"""
|
| 315 |
+
return self._fp_bytes_read
|
| 316 |
+
|
| 317 |
+
def _init_length(self, request_method):
|
| 318 |
+
"""
|
| 319 |
+
Set initial length value for Response content if available.
|
| 320 |
+
"""
|
| 321 |
+
length = self.headers.get("content-length")
|
| 322 |
+
|
| 323 |
+
if length is not None:
|
| 324 |
+
if self.chunked:
|
| 325 |
+
# This Response will fail with an IncompleteRead if it can't be
|
| 326 |
+
# received as chunked. This method falls back to attempt reading
|
| 327 |
+
# the response before raising an exception.
|
| 328 |
+
log.warning(
|
| 329 |
+
"Received response with both Content-Length and "
|
| 330 |
+
"Transfer-Encoding set. This is expressly forbidden "
|
| 331 |
+
"by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
|
| 332 |
+
"attempting to process response as Transfer-Encoding: "
|
| 333 |
+
"chunked."
|
| 334 |
+
)
|
| 335 |
+
return None
|
| 336 |
+
|
| 337 |
+
try:
|
| 338 |
+
# RFC 7230 section 3.3.2 specifies multiple content lengths can
|
| 339 |
+
# be sent in a single Content-Length header
|
| 340 |
+
# (e.g. Content-Length: 42, 42). This line ensures the values
|
| 341 |
+
# are all valid ints and that as long as the `set` length is 1,
|
| 342 |
+
# all values are the same. Otherwise, the header is invalid.
|
| 343 |
+
lengths = set([int(val) for val in length.split(",")])
|
| 344 |
+
if len(lengths) > 1:
|
| 345 |
+
raise InvalidHeader(
|
| 346 |
+
"Content-Length contained multiple "
|
| 347 |
+
"unmatching values (%s)" % length
|
| 348 |
+
)
|
| 349 |
+
length = lengths.pop()
|
| 350 |
+
except ValueError:
|
| 351 |
+
length = None
|
| 352 |
+
else:
|
| 353 |
+
if length < 0:
|
| 354 |
+
length = None
|
| 355 |
+
|
| 356 |
+
# Convert status to int for comparison
|
| 357 |
+
# In some cases, httplib returns a status of "_UNKNOWN"
|
| 358 |
+
try:
|
| 359 |
+
status = int(self.status)
|
| 360 |
+
except ValueError:
|
| 361 |
+
status = 0
|
| 362 |
+
|
| 363 |
+
# Check for responses that shouldn't include a body
|
| 364 |
+
if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
|
| 365 |
+
length = 0
|
| 366 |
+
|
| 367 |
+
return length
|
| 368 |
+
|
| 369 |
+
def _init_decoder(self):
|
| 370 |
+
"""
|
| 371 |
+
Set-up the _decoder attribute if necessary.
|
| 372 |
+
"""
|
| 373 |
+
# Note: content-encoding value should be case-insensitive, per RFC 7230
|
| 374 |
+
# Section 3.2
|
| 375 |
+
content_encoding = self.headers.get("content-encoding", "").lower()
|
| 376 |
+
if self._decoder is None:
|
| 377 |
+
if content_encoding in self.CONTENT_DECODERS:
|
| 378 |
+
self._decoder = _get_decoder(content_encoding)
|
| 379 |
+
elif "," in content_encoding:
|
| 380 |
+
encodings = [
|
| 381 |
+
e.strip()
|
| 382 |
+
for e in content_encoding.split(",")
|
| 383 |
+
if e.strip() in self.CONTENT_DECODERS
|
| 384 |
+
]
|
| 385 |
+
if len(encodings):
|
| 386 |
+
self._decoder = _get_decoder(content_encoding)
|
| 387 |
+
|
| 388 |
+
DECODER_ERROR_CLASSES = (IOError, zlib.error)
|
| 389 |
+
if brotli is not None:
|
| 390 |
+
DECODER_ERROR_CLASSES += (brotli.error,)
|
| 391 |
+
|
| 392 |
+
def _decode(self, data, decode_content, flush_decoder):
|
| 393 |
+
"""
|
| 394 |
+
Decode the data passed in and potentially flush the decoder.
|
| 395 |
+
"""
|
| 396 |
+
if not decode_content:
|
| 397 |
+
return data
|
| 398 |
+
|
| 399 |
+
try:
|
| 400 |
+
if self._decoder:
|
| 401 |
+
data = self._decoder.decompress(data)
|
| 402 |
+
except self.DECODER_ERROR_CLASSES as e:
|
| 403 |
+
content_encoding = self.headers.get("content-encoding", "").lower()
|
| 404 |
+
raise DecodeError(
|
| 405 |
+
"Received response with content-encoding: %s, but "
|
| 406 |
+
"failed to decode it." % content_encoding,
|
| 407 |
+
e,
|
| 408 |
+
)
|
| 409 |
+
if flush_decoder:
|
| 410 |
+
data += self._flush_decoder()
|
| 411 |
+
|
| 412 |
+
return data
|
| 413 |
+
|
| 414 |
+
def _flush_decoder(self):
|
| 415 |
+
"""
|
| 416 |
+
Flushes the decoder. Should only be called if the decoder is actually
|
| 417 |
+
being used.
|
| 418 |
+
"""
|
| 419 |
+
if self._decoder:
|
| 420 |
+
buf = self._decoder.decompress(b"")
|
| 421 |
+
return buf + self._decoder.flush()
|
| 422 |
+
|
| 423 |
+
return b""
|
| 424 |
+
|
| 425 |
+
@contextmanager
|
| 426 |
+
def _error_catcher(self):
|
| 427 |
+
"""
|
| 428 |
+
Catch low-level python exceptions, instead re-raising urllib3
|
| 429 |
+
variants, so that low-level exceptions are not leaked in the
|
| 430 |
+
high-level api.
|
| 431 |
+
|
| 432 |
+
On exit, release the connection back to the pool.
|
| 433 |
+
"""
|
| 434 |
+
clean_exit = False
|
| 435 |
+
|
| 436 |
+
try:
|
| 437 |
+
try:
|
| 438 |
+
yield
|
| 439 |
+
|
| 440 |
+
except SocketTimeout:
|
| 441 |
+
# FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
|
| 442 |
+
# there is yet no clean way to get at it from this context.
|
| 443 |
+
raise ReadTimeoutError(self._pool, None, "Read timed out.")
|
| 444 |
+
|
| 445 |
+
except BaseSSLError as e:
|
| 446 |
+
# FIXME: Is there a better way to differentiate between SSLErrors?
|
| 447 |
+
if "read operation timed out" not in str(e):
|
| 448 |
+
# SSL errors related to framing/MAC get wrapped and reraised here
|
| 449 |
+
raise SSLError(e)
|
| 450 |
+
|
| 451 |
+
raise ReadTimeoutError(self._pool, None, "Read timed out.")
|
| 452 |
+
|
| 453 |
+
except (HTTPException, SocketError) as e:
|
| 454 |
+
# This includes IncompleteRead.
|
| 455 |
+
raise ProtocolError("Connection broken: %r" % e, e)
|
| 456 |
+
|
| 457 |
+
# If no exception is thrown, we should avoid cleaning up
|
| 458 |
+
# unnecessarily.
|
| 459 |
+
clean_exit = True
|
| 460 |
+
finally:
|
| 461 |
+
# If we didn't terminate cleanly, we need to throw away our
|
| 462 |
+
# connection.
|
| 463 |
+
if not clean_exit:
|
| 464 |
+
# The response may not be closed but we're not going to use it
|
| 465 |
+
# anymore so close it now to ensure that the connection is
|
| 466 |
+
# released back to the pool.
|
| 467 |
+
if self._original_response:
|
| 468 |
+
self._original_response.close()
|
| 469 |
+
|
| 470 |
+
# Closing the response may not actually be sufficient to close
|
| 471 |
+
# everything, so if we have a hold of the connection close that
|
| 472 |
+
# too.
|
| 473 |
+
if self._connection:
|
| 474 |
+
self._connection.close()
|
| 475 |
+
|
| 476 |
+
# If we hold the original response but it's closed now, we should
|
| 477 |
+
# return the connection back to the pool.
|
| 478 |
+
if self._original_response and self._original_response.isclosed():
|
| 479 |
+
self.release_conn()
|
| 480 |
+
|
| 481 |
+
def _fp_read(self, amt):
|
| 482 |
+
"""
|
| 483 |
+
Read a response with the thought that reading the number of bytes
|
| 484 |
+
larger than can fit in a 32-bit int at a time via SSL in some
|
| 485 |
+
known cases leads to an overflow error that has to be prevented
|
| 486 |
+
if `amt` or `self.length_remaining` indicate that a problem may
|
| 487 |
+
happen.
|
| 488 |
+
|
| 489 |
+
The known cases:
|
| 490 |
+
* 3.8 <= CPython < 3.9.7 because of a bug
|
| 491 |
+
https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
|
| 492 |
+
* urllib3 injected with pyOpenSSL-backed SSL-support.
|
| 493 |
+
* CPython < 3.10 only when `amt` does not fit 32-bit int.
|
| 494 |
+
"""
|
| 495 |
+
assert self._fp
|
| 496 |
+
c_int_max = 2 ** 31 - 1
|
| 497 |
+
if (
|
| 498 |
+
(
|
| 499 |
+
(amt and amt > c_int_max)
|
| 500 |
+
or (self.length_remaining and self.length_remaining > c_int_max)
|
| 501 |
+
)
|
| 502 |
+
and not util.IS_SECURETRANSPORT
|
| 503 |
+
and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
|
| 504 |
+
):
|
| 505 |
+
buffer = io.BytesIO()
|
| 506 |
+
# Besides `max_chunk_amt` being a maximum chunk size, it
|
| 507 |
+
# affects memory overhead of reading a response by this
|
| 508 |
+
# method in CPython.
|
| 509 |
+
# `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
|
| 510 |
+
# chunk size that does not lead to an overflow error, but
|
| 511 |
+
# 256 MiB is a compromise.
|
| 512 |
+
max_chunk_amt = 2 ** 28
|
| 513 |
+
while amt is None or amt != 0:
|
| 514 |
+
if amt is not None:
|
| 515 |
+
chunk_amt = min(amt, max_chunk_amt)
|
| 516 |
+
amt -= chunk_amt
|
| 517 |
+
else:
|
| 518 |
+
chunk_amt = max_chunk_amt
|
| 519 |
+
data = self._fp.read(chunk_amt)
|
| 520 |
+
if not data:
|
| 521 |
+
break
|
| 522 |
+
buffer.write(data)
|
| 523 |
+
del data # to reduce peak memory usage by `max_chunk_amt`.
|
| 524 |
+
return buffer.getvalue()
|
| 525 |
+
else:
|
| 526 |
+
# StringIO doesn't like amt=None
|
| 527 |
+
return self._fp.read(amt) if amt is not None else self._fp.read()
|
| 528 |
+
|
| 529 |
+
def read(self, amt=None, decode_content=None, cache_content=False):
|
| 530 |
+
"""
|
| 531 |
+
Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
|
| 532 |
+
parameters: ``decode_content`` and ``cache_content``.
|
| 533 |
+
|
| 534 |
+
:param amt:
|
| 535 |
+
How much of the content to read. If specified, caching is skipped
|
| 536 |
+
because it doesn't make sense to cache partial content as the full
|
| 537 |
+
response.
|
| 538 |
+
|
| 539 |
+
:param decode_content:
|
| 540 |
+
If True, will attempt to decode the body based on the
|
| 541 |
+
'content-encoding' header.
|
| 542 |
+
|
| 543 |
+
:param cache_content:
|
| 544 |
+
If True, will save the returned data such that the same result is
|
| 545 |
+
returned despite of the state of the underlying file object. This
|
| 546 |
+
is useful if you want the ``.data`` property to continue working
|
| 547 |
+
after having ``.read()`` the file object. (Overridden if ``amt`` is
|
| 548 |
+
set.)
|
| 549 |
+
"""
|
| 550 |
+
self._init_decoder()
|
| 551 |
+
if decode_content is None:
|
| 552 |
+
decode_content = self.decode_content
|
| 553 |
+
|
| 554 |
+
if self._fp is None:
|
| 555 |
+
return
|
| 556 |
+
|
| 557 |
+
flush_decoder = False
|
| 558 |
+
fp_closed = getattr(self._fp, "closed", False)
|
| 559 |
+
|
| 560 |
+
with self._error_catcher():
|
| 561 |
+
data = self._fp_read(amt) if not fp_closed else b""
|
| 562 |
+
if amt is None:
|
| 563 |
+
flush_decoder = True
|
| 564 |
+
else:
|
| 565 |
+
cache_content = False
|
| 566 |
+
if (
|
| 567 |
+
amt != 0 and not data
|
| 568 |
+
): # Platform-specific: Buggy versions of Python.
|
| 569 |
+
# Close the connection when no data is returned
|
| 570 |
+
#
|
| 571 |
+
# This is redundant to what httplib/http.client _should_
|
| 572 |
+
# already do. However, versions of python released before
|
| 573 |
+
# December 15, 2012 (http://bugs.python.org/issue16298) do
|
| 574 |
+
# not properly close the connection in all cases. There is
|
| 575 |
+
# no harm in redundantly calling close.
|
| 576 |
+
self._fp.close()
|
| 577 |
+
flush_decoder = True
|
| 578 |
+
if self.enforce_content_length and self.length_remaining not in (
|
| 579 |
+
0,
|
| 580 |
+
None,
|
| 581 |
+
):
|
| 582 |
+
# This is an edge case that httplib failed to cover due
|
| 583 |
+
# to concerns of backward compatibility. We're
|
| 584 |
+
# addressing it here to make sure IncompleteRead is
|
| 585 |
+
# raised during streaming, so all calls with incorrect
|
| 586 |
+
# Content-Length are caught.
|
| 587 |
+
raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
|
| 588 |
+
|
| 589 |
+
if data:
|
| 590 |
+
self._fp_bytes_read += len(data)
|
| 591 |
+
if self.length_remaining is not None:
|
| 592 |
+
self.length_remaining -= len(data)
|
| 593 |
+
|
| 594 |
+
data = self._decode(data, decode_content, flush_decoder)
|
| 595 |
+
|
| 596 |
+
if cache_content:
|
| 597 |
+
self._body = data
|
| 598 |
+
|
| 599 |
+
return data
|
| 600 |
+
|
| 601 |
+
def stream(self, amt=2 ** 16, decode_content=None):
    """
    A generator wrapper for the read() method. A call will block until
    ``amt`` bytes have been read from the connection or until the
    connection is closed.

    :param amt:
        How much of the content to read. The generator will return up to
        ``amt`` bytes of data per iteration, but may return less. This is
        particularly likely when using compressed data. However, the empty
        string will never be returned.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.
    """
    if self.chunked and self.supports_chunked_reads():
        # Chunked transfer-encoding: delegate to the chunk-aware reader,
        # which parses the framing itself.
        for line in self.read_chunked(amt, decode_content=decode_content):
            yield line
    else:
        # Plain body: read fixed-size pieces until the underlying file
        # object is exhausted. Empty reads are skipped so callers never
        # receive b"".
        while not is_fp_closed(self._fp):
            data = self.read(amt=amt, decode_content=decode_content)

            if data:
                yield data
|
| 626 |
+
|
| 627 |
+
@classmethod
def from_httplib(ResponseCls, r, **response_kw):
    """
    Given an :class:`http.client.HTTPResponse` instance ``r``, return a
    corresponding :class:`urllib3.response.HTTPResponse` object.

    Remaining parameters are passed to the HTTPResponse constructor, along
    with ``original_response=r``.
    """
    headers = r.msg

    # Normalize the header container to urllib3's case-insensitive,
    # multi-value HTTPHeaderDict.
    if not isinstance(headers, HTTPHeaderDict):
        if six.PY2:
            # Python 2.7
            headers = HTTPHeaderDict.from_httplib(headers)
        else:
            headers = HTTPHeaderDict(headers.items())

    # HTTPResponse objects in Python 3 don't have a .strict attribute
    strict = getattr(r, "strict", 0)
    resp = ResponseCls(
        body=r,
        headers=headers,
        status=r.status,
        version=r.version,
        reason=r.reason,
        strict=strict,
        original_response=r,
        **response_kw
    )
    return resp
|
| 658 |
+
|
| 659 |
+
# Backwards-compatibility methods for http.client.HTTPResponse
|
| 660 |
+
def getheaders(self):
    """Deprecated ``http.client``-style accessor for :attr:`headers`."""
    deprecation_msg = (
        "HTTPResponse.getheaders() is deprecated and will be removed "
        "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly."
    )
    # stacklevel=2 attributes the warning to the caller, not this shim.
    warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2)
    return self.headers
|
| 668 |
+
|
| 669 |
+
def getheader(self, name, default=None):
    """Deprecated ``http.client``-style lookup; use ``headers.get`` instead."""
    deprecation_msg = (
        "HTTPResponse.getheader() is deprecated and will be removed "
        "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default)."
    )
    # stacklevel=2 attributes the warning to the caller, not this shim.
    warnings.warn(deprecation_msg, category=DeprecationWarning, stacklevel=2)
    return self.headers.get(name, default)
|
| 677 |
+
|
| 678 |
+
# Backwards compatibility for http.cookiejar
|
| 679 |
+
def info(self):
    """Return the response headers (``http.cookiejar`` compatibility shim)."""
    return self.headers
|
| 681 |
+
|
| 682 |
+
# Overrides from io.IOBase
|
| 683 |
+
def close(self):
    # Release the wrapped file object first, then the connection it came
    # from (if we own one).
    if not self.closed:
        self._fp.close()

    if self._connection:
        self._connection.close()

    if not self.auto_close:
        # With auto_close disabled this object tracks closedness via
        # io.IOBase itself, so mark it closed there too.
        io.IOBase.close(self)
|
| 692 |
+
|
| 693 |
+
@property
def closed(self):
    # Mirror io.IOBase's own closed flag when auto_close is disabled;
    # otherwise infer closedness from the wrapped file-like object,
    # trying the http.client-style isclosed() before the io-style
    # ``closed`` attribute. With no file object at all, report closed.
    if not self.auto_close:
        return io.IOBase.closed.__get__(self)
    elif self._fp is None:
        return True
    elif hasattr(self._fp, "isclosed"):
        return self._fp.isclosed()
    elif hasattr(self._fp, "closed"):
        return self._fp.closed
    else:
        return True
|
| 705 |
+
|
| 706 |
+
def fileno(self):
    """Return the file descriptor of the wrapped file object.

    :raises IOError: if there is no wrapped object, or it exposes no
        file descriptor (e.g. an in-memory body).
    """
    if self._fp is None:
        raise IOError("HTTPResponse has no file to get a fileno from")

    fileno_method = getattr(self._fp, "fileno", None)
    if fileno_method is None:
        raise IOError(
            "The file-like object this HTTPResponse is wrapped "
            "around has no file descriptor"
        )
    return fileno_method()
|
| 716 |
+
|
| 717 |
+
def flush(self):
    """Flush the wrapped file object if it is present, flushable and open."""
    fp = self._fp
    if fp is None or not hasattr(fp, "flush"):
        return None
    # Flushing an already-closed object would raise; skip it instead.
    if getattr(fp, "closed", False):
        return None
    return fp.flush()
|
| 724 |
+
|
| 725 |
+
def readable(self):
    """Always True — this response is a readable stream (``io`` protocol)."""
    return True
|
| 728 |
+
|
| 729 |
+
def readinto(self, b):
    """Read up to ``len(b)`` bytes into ``b``; return the count (``io`` protocol)."""
    chunk = self.read(len(b))
    count = len(chunk)
    if count:
        b[:count] = chunk
    return count
|
| 737 |
+
|
| 738 |
+
def supports_chunked_reads(self):
    """
    Checks if the underlying file-like object looks like a
    :class:`http.client.HTTPResponse` object. We do this by testing for
    the fp attribute. If it is present we assume it returns raw chunks as
    processed by read_chunked().
    """
    # ``fp`` is the raw buffered socket file http.client keeps; its
    # presence is what read_chunked() relies on to read undecoded frames.
    return hasattr(self._fp, "fp")
|
| 746 |
+
|
| 747 |
+
def _update_chunk_length(self):
    # First, we'll figure out length of a chunk and then
    # we'll try to read it from socket.
    if self.chunk_left is not None:
        # Still inside the current chunk; nothing to parse yet.
        return
    line = self._fp.fp.readline()
    # Drop any chunk extensions (everything after ';') before parsing the
    # hexadecimal size field.
    line = line.split(b";", 1)[0]
    try:
        self.chunk_left = int(line, 16)
    except ValueError:
        # Invalid chunked protocol response, abort.
        self.close()
        raise InvalidChunkLength(self, line)
|
| 760 |
+
|
| 761 |
+
def _handle_chunk(self, amt):
    # Return up to ``amt`` bytes of the current chunk (the whole chunk
    # when ``amt`` is None). ``self.chunk_left`` tracks how many payload
    # bytes remain; it is reset to None only once the chunk's trailing
    # CRLF has been consumed.
    returned_chunk = None
    if amt is None:
        chunk = self._fp._safe_read(self.chunk_left)
        returned_chunk = chunk
        self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
        self.chunk_left = None
    elif amt < self.chunk_left:
        # Partial read: leave the remainder (and the CRLF) for next time.
        value = self._fp._safe_read(amt)
        self.chunk_left = self.chunk_left - amt
        returned_chunk = value
    elif amt == self.chunk_left:
        value = self._fp._safe_read(amt)
        self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
        self.chunk_left = None
        returned_chunk = value
    else:  # amt > self.chunk_left
        returned_chunk = self._fp._safe_read(self.chunk_left)
        self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
        self.chunk_left = None
    return returned_chunk
|
| 782 |
+
|
| 783 |
+
def read_chunked(self, amt=None, decode_content=None):
    """
    Similar to :meth:`HTTPResponse.read`, but with an additional
    parameter: ``decode_content``.

    :param amt:
        How much of the content to read. If specified, caching is skipped
        because it doesn't make sense to cache partial content as the full
        response.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.
    """
    self._init_decoder()
    # FIXME: Rewrite this method and make it a class with a better structured logic.
    if not self.chunked:
        raise ResponseNotChunked(
            "Response is not chunked. "
            "Header 'transfer-encoding: chunked' is missing."
        )
    if not self.supports_chunked_reads():
        raise BodyNotHttplibCompatible(
            "Body should be http.client.HTTPResponse like. "
            "It should have have an fp attribute which returns raw chunks."
        )

    with self._error_catcher():
        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return

        # If a response is already read and closed
        # then return immediately.
        if self._fp.fp is None:
            return

        # Main loop: parse one chunk header, read its payload, decode and
        # yield. A chunk size of 0 marks the end of the body.
        while True:
            self._update_chunk_length()
            if self.chunk_left == 0:
                break
            chunk = self._handle_chunk(amt)
            decoded = self._decode(
                chunk, decode_content=decode_content, flush_decoder=False
            )
            if decoded:
                yield decoded

        if decode_content:
            # On CPython and PyPy, we should never need to flush the
            # decoder. However, on Jython we *might* need to, so
            # lets defensively do it anyway.
            decoded = self._flush_decoder()
            if decoded:  # Platform-specific: Jython.
                yield decoded

        # Chunk content ends with \r\n: discard it. This also skips any
        # trailer headers sent after the last chunk.
        while True:
            line = self._fp.fp.readline()
            if not line:
                # Some sites may not end with '\r\n'.
                break
            if line == b"\r\n":
                break

        # We read everything; close the "file".
        if self._original_response:
            self._original_response.close()
|
| 852 |
+
|
| 853 |
+
def geturl(self):
    """
    Returns the URL that was the source of this response.
    If the request that generated this response redirected, this method
    will return the final redirect location.
    """
    retries = self.retries
    if retries is not None and len(retries.history):
        # The last retry-history entry records the final redirect target.
        return retries.history[-1].redirect_location
    return self._request_url
|
| 863 |
+
|
| 864 |
+
def __iter__(self):
    # Yield the decoded body one line at a time (each yielded value ends
    # with b"\n" except possibly the last), buffering partial lines that
    # span stream chunks.
    buffer = []
    for chunk in self.stream(decode_content=True):
        if b"\n" in chunk:
            chunk = chunk.split(b"\n")
            # Complete the line started in earlier chunks.
            yield b"".join(buffer) + chunk[0] + b"\n"
            # Whole lines contained entirely within this chunk.
            for x in chunk[1:-1]:
                yield x + b"\n"
            if chunk[-1]:
                # Start of the next (not yet complete) line.
                buffer = [chunk[-1]]
            else:
                buffer = []
        else:
            buffer.append(chunk)
    if buffer:
        # Trailing data with no final newline.
        yield b"".join(buffer)
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__init__.py
ADDED
|
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
# For backwards compatibility, provide imports that used to be here.
|
| 4 |
+
from .connection import is_connection_dropped
|
| 5 |
+
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
|
| 6 |
+
from .response import is_fp_closed
|
| 7 |
+
from .retry import Retry
|
| 8 |
+
from .ssl_ import (
|
| 9 |
+
ALPN_PROTOCOLS,
|
| 10 |
+
HAS_SNI,
|
| 11 |
+
IS_PYOPENSSL,
|
| 12 |
+
IS_SECURETRANSPORT,
|
| 13 |
+
PROTOCOL_TLS,
|
| 14 |
+
SSLContext,
|
| 15 |
+
assert_fingerprint,
|
| 16 |
+
resolve_cert_reqs,
|
| 17 |
+
resolve_ssl_version,
|
| 18 |
+
ssl_wrap_socket,
|
| 19 |
+
)
|
| 20 |
+
from .timeout import Timeout, current_time
|
| 21 |
+
from .url import Url, get_host, parse_url, split_first
|
| 22 |
+
from .wait import wait_for_read, wait_for_write
|
| 23 |
+
|
| 24 |
+
__all__ = (
|
| 25 |
+
"HAS_SNI",
|
| 26 |
+
"IS_PYOPENSSL",
|
| 27 |
+
"IS_SECURETRANSPORT",
|
| 28 |
+
"SSLContext",
|
| 29 |
+
"PROTOCOL_TLS",
|
| 30 |
+
"ALPN_PROTOCOLS",
|
| 31 |
+
"Retry",
|
| 32 |
+
"Timeout",
|
| 33 |
+
"Url",
|
| 34 |
+
"assert_fingerprint",
|
| 35 |
+
"current_time",
|
| 36 |
+
"is_connection_dropped",
|
| 37 |
+
"is_fp_closed",
|
| 38 |
+
"get_host",
|
| 39 |
+
"parse_url",
|
| 40 |
+
"make_headers",
|
| 41 |
+
"resolve_cert_reqs",
|
| 42 |
+
"resolve_ssl_version",
|
| 43 |
+
"split_first",
|
| 44 |
+
"ssl_wrap_socket",
|
| 45 |
+
"wait_for_read",
|
| 46 |
+
"wait_for_write",
|
| 47 |
+
"SKIP_HEADER",
|
| 48 |
+
"SKIPPABLE_HEADERS",
|
| 49 |
+
)
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/ssl_.py
ADDED
|
@@ -0,0 +1,504 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import absolute_import
|
| 2 |
+
|
| 3 |
+
import hashlib
|
| 4 |
+
import hmac
|
| 5 |
+
import os
|
| 6 |
+
import sys
|
| 7 |
+
import warnings
|
| 8 |
+
from binascii import hexlify, unhexlify
|
| 9 |
+
|
| 10 |
+
from ..exceptions import (
|
| 11 |
+
InsecurePlatformWarning,
|
| 12 |
+
ProxySchemeUnsupported,
|
| 13 |
+
SNIMissingWarning,
|
| 14 |
+
SSLError,
|
| 15 |
+
)
|
| 16 |
+
from ..packages import six
|
| 17 |
+
from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
|
| 18 |
+
|
| 19 |
+
SSLContext = None
|
| 20 |
+
SSLTransport = None
|
| 21 |
+
HAS_SNI = False
|
| 22 |
+
IS_PYOPENSSL = False
|
| 23 |
+
IS_SECURETRANSPORT = False
|
| 24 |
+
ALPN_PROTOCOLS = ["http/1.1"]
|
| 25 |
+
|
| 26 |
+
# Maps the length of a digest to a possible hash function producing this digest
|
| 27 |
+
HASHFUNC_MAP = {
|
| 28 |
+
length: getattr(hashlib, algorithm, None)
|
| 29 |
+
for length, algorithm in ((32, "md5"), (40, "sha1"), (64, "sha256"))
|
| 30 |
+
}
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def _const_compare_digest_backport(a, b):
    """
    Compare two digests of equal length in constant time.

    The digests must be of type str/bytes.
    Returns True if the digests match, and False otherwise.
    """
    # Seed the accumulator with the length difference so unequal lengths
    # can never compare equal, then OR in the XOR of each byte pair.
    # No early exit: running time does not depend on where they differ.
    result = abs(len(a) - len(b))
    for left, right in zip(bytearray(a), bytearray(b)):
        result |= left ^ right
    return result == 0
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
|
| 47 |
+
|
| 48 |
+
try: # Test for SSL features
|
| 49 |
+
import ssl
|
| 50 |
+
from ssl import CERT_REQUIRED, wrap_socket
|
| 51 |
+
except ImportError:
|
| 52 |
+
pass
|
| 53 |
+
|
| 54 |
+
try:
|
| 55 |
+
from ssl import HAS_SNI # Has SNI?
|
| 56 |
+
except ImportError:
|
| 57 |
+
pass
|
| 58 |
+
|
| 59 |
+
try:
|
| 60 |
+
from .ssltransport import SSLTransport
|
| 61 |
+
except ImportError:
|
| 62 |
+
pass
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
try: # Platform-specific: Python 3.6
|
| 66 |
+
from ssl import PROTOCOL_TLS
|
| 67 |
+
|
| 68 |
+
PROTOCOL_SSLv23 = PROTOCOL_TLS
|
| 69 |
+
except ImportError:
|
| 70 |
+
try:
|
| 71 |
+
from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
|
| 72 |
+
|
| 73 |
+
PROTOCOL_SSLv23 = PROTOCOL_TLS
|
| 74 |
+
except ImportError:
|
| 75 |
+
PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
|
| 76 |
+
|
| 77 |
+
try:
|
| 78 |
+
from ssl import PROTOCOL_TLS_CLIENT
|
| 79 |
+
except ImportError:
|
| 80 |
+
PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
try:
|
| 84 |
+
from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
|
| 85 |
+
except ImportError:
|
| 86 |
+
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
|
| 87 |
+
OP_NO_COMPRESSION = 0x20000
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
try: # OP_NO_TICKET was added in Python 3.6
|
| 91 |
+
from ssl import OP_NO_TICKET
|
| 92 |
+
except ImportError:
|
| 93 |
+
OP_NO_TICKET = 0x4000
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# A secure default.
|
| 97 |
+
# Sources for more information on TLS ciphers:
|
| 98 |
+
#
|
| 99 |
+
# - https://wiki.mozilla.org/Security/Server_Side_TLS
|
| 100 |
+
# - https://www.ssllabs.com/projects/best-practices/index.html
|
| 101 |
+
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
|
| 102 |
+
#
|
| 103 |
+
# The general intent is:
|
| 104 |
+
# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
|
| 105 |
+
# - prefer ECDHE over DHE for better performance,
|
| 106 |
+
# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and
|
| 107 |
+
# security,
|
| 108 |
+
# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common,
|
| 109 |
+
# - disable NULL authentication, MD5 MACs, DSS, and other
|
| 110 |
+
# insecure ciphers for security reasons.
|
| 111 |
+
# - NOTE: TLS 1.3 cipher suites are managed through a different interface
|
| 112 |
+
# not exposed by CPython (yet!) and are enabled by default if they're available.
|
| 113 |
+
DEFAULT_CIPHERS = ":".join(
|
| 114 |
+
[
|
| 115 |
+
"ECDHE+AESGCM",
|
| 116 |
+
"ECDHE+CHACHA20",
|
| 117 |
+
"DHE+AESGCM",
|
| 118 |
+
"DHE+CHACHA20",
|
| 119 |
+
"ECDH+AESGCM",
|
| 120 |
+
"DH+AESGCM",
|
| 121 |
+
"ECDH+AES",
|
| 122 |
+
"DH+AES",
|
| 123 |
+
"RSA+AESGCM",
|
| 124 |
+
"RSA+AES",
|
| 125 |
+
"!aNULL",
|
| 126 |
+
"!eNULL",
|
| 127 |
+
"!MD5",
|
| 128 |
+
"!DSS",
|
| 129 |
+
]
|
| 130 |
+
)
|
| 131 |
+
|
| 132 |
+
try:
|
| 133 |
+
from ssl import SSLContext # Modern SSL?
|
| 134 |
+
except ImportError:
|
| 135 |
+
|
| 136 |
+
class SSLContext(object):  # Platform-specific: Python 2
    """Minimal stand-in for :class:`ssl.SSLContext` on Pythons without it."""

    def __init__(self, protocol_version):
        self.protocol = protocol_version
        # Use default values from a real SSLContext
        self.check_hostname = False
        self.verify_mode = ssl.CERT_NONE
        self.ca_certs = None
        self.options = 0
        self.certfile = None
        self.keyfile = None
        self.ciphers = None

    def load_cert_chain(self, certfile, keyfile):
        # Stored and forwarded to ssl.wrap_socket() later.
        self.certfile = certfile
        self.keyfile = keyfile

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        self.ca_certs = cafile

        # Old-style wrap_socket() only accepts a single CA bundle file.
        if capath is not None:
            raise SSLError("CA directories not supported in older Pythons")

        if cadata is not None:
            raise SSLError("CA data not supported in older Pythons")

    def set_ciphers(self, cipher_suite):
        self.ciphers = cipher_suite

    def wrap_socket(self, socket, server_hostname=None, server_side=False):
        # Note: server_hostname is accepted but cannot be honored — the
        # legacy wrap_socket() has no SNI support, hence the warning.
        warnings.warn(
            "A true SSLContext object is not available. This prevents "
            "urllib3 from configuring SSL appropriately and may cause "
            "certain SSL connections to fail. You can upgrade to a newer "
            "version of Python to solve this. For more information, see "
            "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
            "#ssl-warnings",
            InsecurePlatformWarning,
        )
        kwargs = {
            "keyfile": self.keyfile,
            "certfile": self.certfile,
            "ca_certs": self.ca_certs,
            "cert_reqs": self.verify_mode,
            "ssl_version": self.protocol,
            "server_side": server_side,
        }
        return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    :raises SSLError:
        If the fingerprint length is unsupported, the hash is unavailable,
        or the digests do not match.
    """

    fingerprint = fingerprint.replace(":", "").lower()
    digest_length = len(fingerprint)
    if digest_length not in HASHFUNC_MAP:
        raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint))
    # The map may contain None when hashlib lacks the algorithm
    # (e.g. md5 on FIPS-restricted builds).
    hashfunc = HASHFUNC_MAP.get(digest_length)
    if hashfunc is None:
        raise SSLError(
            "Hash function implementation unavailable for fingerprint length: {0}".format(
                digest_length
            )
        )

    # We need encode() here for py32; works on py2 and p33.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    cert_digest = hashfunc(cert).digest()

    # Constant-time comparison to avoid leaking match position via timing.
    if not _const_compare_digest(cert_digest, fingerprint_bytes):
        raise SSLError(
            'Fingerprints did not match. Expected "{0}", got "{1}".'.format(
                fingerprint, hexlify(cert_digest)
            )
        )
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
def resolve_cert_reqs(candidate):
    """
    Resolve *candidate* to a numeric certificate-requirement constant
    that can be passed to the wrap_socket function/method of the ssl
    module.

    ``None`` resolves to :data:`ssl.CERT_REQUIRED`. A string is looked up
    as a constant name in the :mod:`ssl` module, either verbatim or with a
    ``CERT_`` prefix (so ``"REQUIRED"`` works as well as
    ``"CERT_REQUIRED"``). Anything else is assumed to already be a
    numeric constant and is returned unchanged.
    """
    if candidate is None:
        return CERT_REQUIRED

    if not isinstance(candidate, str):
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is not None:
        return resolved
    # Allow the abbreviated form, e.g. "REQUIRED" for "CERT_REQUIRED".
    return getattr(ssl, "CERT_" + candidate)
|
| 241 |
+
|
| 242 |
+
|
| 243 |
+
def resolve_ssl_version(candidate):
    """
    Resolve *candidate* to a numeric SSL/TLS protocol constant, like
    :func:`resolve_cert_reqs` does for certificate requirements.

    ``None`` resolves to :data:`PROTOCOL_TLS`; a string is looked up in
    the :mod:`ssl` module, verbatim or with a ``PROTOCOL_`` prefix;
    anything else is returned unchanged.
    """
    if candidate is None:
        return PROTOCOL_TLS

    if not isinstance(candidate, str):
        return candidate

    resolved = getattr(ssl, candidate, None)
    if resolved is not None:
        return resolved
    # Allow the abbreviated form, e.g. "TLSv1_2" for "PROTOCOL_TLSv1_2".
    return getattr(ssl, "PROTOCOL_" + candidate)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def create_urllib3_context(
    ssl_version=None, cert_reqs=None, options=None, ciphers=None
):
    """All arguments have the same meaning as ``ssl_wrap_socket``.

    By default, this function does a lot of the same work that
    ``ssl.create_default_context`` does on Python 3.4+. It:

    - Disables SSLv2, SSLv3, and compression
    - Sets a restricted set of server ciphers

    If you wish to enable SSLv3, you can do::

        from pip._vendor.urllib3.util import ssl_
        context = ssl_.create_urllib3_context()
        context.options &= ~ssl_.OP_NO_SSLv3

    You can do the same to enable compression (substituting ``COMPRESSION``
    for ``SSLv3`` in the last line above).

    :param ssl_version:
        The desired protocol version to use. This will default to
        PROTOCOL_SSLv23 which will negotiate the highest protocol that both
        the server and your installation of OpenSSL support.
    :param cert_reqs:
        Whether to require the certificate verification. This defaults to
        ``ssl.CERT_REQUIRED``.
    :param options:
        Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
    :param ciphers:
        Which cipher suites to allow the server to select.
    :returns:
        Constructed SSLContext object with specified options
    :rtype: SSLContext
    """
    # PROTOCOL_TLS is deprecated in Python 3.10
    if not ssl_version or ssl_version == PROTOCOL_TLS:
        ssl_version = PROTOCOL_TLS_CLIENT

    context = SSLContext(ssl_version)

    context.set_ciphers(ciphers or DEFAULT_CIPHERS)

    # Setting the default here, as we may have no ssl module on import
    cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs

    if options is None:
        options = 0
        # SSLv2 is easily broken and is considered harmful and dangerous
        options |= OP_NO_SSLv2
        # SSLv3 has several problems and is now dangerous
        options |= OP_NO_SSLv3
        # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
        # (issue #309)
        options |= OP_NO_COMPRESSION
        # TLSv1.2 only. Unless set explicitly, do not request tickets.
        # This may save some bandwidth on wire, and although the ticket is encrypted,
        # there is a risk associated with it being on wire,
        # if the server is not rotating its ticketing keys properly.
        options |= OP_NO_TICKET

    context.options |= options

    # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
    # necessary for conditional client cert authentication with TLS 1.3.
    # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
    # versions of Python.  We only enable on Python 3.7.4+ or if certificate
    # verification is enabled to work around Python issue #37428
    # See: https://bugs.python.org/issue37428
    if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
        context, "post_handshake_auth", None
    ) is not None:
        context.post_handshake_auth = True

    def disable_check_hostname():
        if (
            getattr(context, "check_hostname", None) is not None
        ):  # Platform-specific: Python 3.2
            # We do our own verification, including fingerprints and alternative
            # hostnames. So disable it here
            context.check_hostname = False

    # The order of the below lines setting verify_mode and check_hostname
    # matter due to safe-guards SSLContext has to prevent an SSLContext with
    # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
    # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
    # or not so we don't know the initial state of the freshly created SSLContext.
    if cert_reqs == ssl.CERT_REQUIRED:
        context.verify_mode = cert_reqs
        disable_check_hostname()
    else:
        disable_check_hostname()
        context.verify_mode = cert_reqs

    # Enable logging of TLS session keys via defacto standard environment variable
    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
    if hasattr(context, "keylog_filename"):
        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
        if sslkeylogfile:
            context.keylog_filename = sslkeylogfile

    return context
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
def ssl_wrap_socket(
|
| 365 |
+
sock,
|
| 366 |
+
keyfile=None,
|
| 367 |
+
certfile=None,
|
| 368 |
+
cert_reqs=None,
|
| 369 |
+
ca_certs=None,
|
| 370 |
+
server_hostname=None,
|
| 371 |
+
ssl_version=None,
|
| 372 |
+
ciphers=None,
|
| 373 |
+
ssl_context=None,
|
| 374 |
+
ca_cert_dir=None,
|
| 375 |
+
key_password=None,
|
| 376 |
+
ca_cert_data=None,
|
| 377 |
+
tls_in_tls=False,
|
| 378 |
+
):
|
| 379 |
+
"""
|
| 380 |
+
All arguments except for server_hostname, ssl_context, and ca_cert_dir have
|
| 381 |
+
the same meaning as they do when using :func:`ssl.wrap_socket`.
|
| 382 |
+
|
| 383 |
+
:param server_hostname:
|
| 384 |
+
When SNI is supported, the expected hostname of the certificate
|
| 385 |
+
:param ssl_context:
|
| 386 |
+
A pre-made :class:`SSLContext` object. If none is provided, one will
|
| 387 |
+
be created using :func:`create_urllib3_context`.
|
| 388 |
+
:param ciphers:
|
| 389 |
+
A string of ciphers we wish the client to support.
|
| 390 |
+
:param ca_cert_dir:
|
| 391 |
+
A directory containing CA certificates in multiple separate files, as
|
| 392 |
+
supported by OpenSSL's -CApath flag or the capath argument to
|
| 393 |
+
SSLContext.load_verify_locations().
|
| 394 |
+
:param key_password:
|
| 395 |
+
Optional password if the keyfile is encrypted.
|
| 396 |
+
:param ca_cert_data:
|
| 397 |
+
Optional string containing CA certificates in PEM format suitable for
|
| 398 |
+
passing as the cadata parameter to SSLContext.load_verify_locations()
|
| 399 |
+
:param tls_in_tls:
|
| 400 |
+
Use SSLTransport to wrap the existing socket.
|
| 401 |
+
"""
|
| 402 |
+
context = ssl_context
|
| 403 |
+
if context is None:
|
| 404 |
+
# Note: This branch of code and all the variables in it are no longer
|
| 405 |
+
# used by urllib3 itself. We should consider deprecating and removing
|
| 406 |
+
# this code.
|
| 407 |
+
context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
|
| 408 |
+
|
| 409 |
+
if ca_certs or ca_cert_dir or ca_cert_data:
|
| 410 |
+
try:
|
| 411 |
+
context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
|
| 412 |
+
except (IOError, OSError) as e:
|
| 413 |
+
raise SSLError(e)
|
| 414 |
+
|
| 415 |
+
elif ssl_context is None and hasattr(context, "load_default_certs"):
|
| 416 |
+
# try to load OS default certs; works well on Windows (require Python3.4+)
|
| 417 |
+
context.load_default_certs()
|
| 418 |
+
|
| 419 |
+
# Attempt to detect if we get the goofy behavior of the
|
| 420 |
+
# keyfile being encrypted and OpenSSL asking for the
|
| 421 |
+
# passphrase via the terminal and instead error out.
|
| 422 |
+
if keyfile and key_password is None and _is_key_file_encrypted(keyfile):
|
| 423 |
+
raise SSLError("Client private key is encrypted, password is required")
|
| 424 |
+
|
| 425 |
+
if certfile:
|
| 426 |
+
if key_password is None:
|
| 427 |
+
context.load_cert_chain(certfile, keyfile)
|
| 428 |
+
else:
|
| 429 |
+
context.load_cert_chain(certfile, keyfile, key_password)
|
| 430 |
+
|
| 431 |
+
try:
|
| 432 |
+
if hasattr(context, "set_alpn_protocols"):
|
| 433 |
+
context.set_alpn_protocols(ALPN_PROTOCOLS)
|
| 434 |
+
except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols
|
| 435 |
+
pass
|
| 436 |
+
|
| 437 |
+
# If we detect server_hostname is an IP address then the SNI
|
| 438 |
+
# extension should not be used according to RFC3546 Section 3.1
|
| 439 |
+
use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
|
| 440 |
+
# SecureTransport uses server_hostname in certificate verification.
|
| 441 |
+
send_sni = (use_sni_hostname and HAS_SNI) or (
|
| 442 |
+
IS_SECURETRANSPORT and server_hostname
|
| 443 |
+
)
|
| 444 |
+
# Do not warn the user if server_hostname is an invalid SNI hostname.
|
| 445 |
+
if not HAS_SNI and use_sni_hostname:
|
| 446 |
+
warnings.warn(
|
| 447 |
+
"An HTTPS request has been made, but the SNI (Server Name "
|
| 448 |
+
"Indication) extension to TLS is not available on this platform. "
|
| 449 |
+
"This may cause the server to present an incorrect TLS "
|
| 450 |
+
"certificate, which can cause validation failures. You can upgrade to "
|
| 451 |
+
"a newer version of Python to solve this. For more information, see "
|
| 452 |
+
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
|
| 453 |
+
"#ssl-warnings",
|
| 454 |
+
SNIMissingWarning,
|
| 455 |
+
)
|
| 456 |
+
|
| 457 |
+
if send_sni:
|
| 458 |
+
ssl_sock = _ssl_wrap_socket_impl(
|
| 459 |
+
sock, context, tls_in_tls, server_hostname=server_hostname
|
| 460 |
+
)
|
| 461 |
+
else:
|
| 462 |
+
ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
|
| 463 |
+
return ssl_sock
|
| 464 |
+
|
| 465 |
+
|
| 466 |
+
def is_ipaddress(hostname):
|
| 467 |
+
"""Detects whether the hostname given is an IPv4 or IPv6 address.
|
| 468 |
+
Also detects IPv6 addresses with Zone IDs.
|
| 469 |
+
|
| 470 |
+
:param str hostname: Hostname to examine.
|
| 471 |
+
:return: True if the hostname is an IP address, False otherwise.
|
| 472 |
+
"""
|
| 473 |
+
if not six.PY2 and isinstance(hostname, bytes):
|
| 474 |
+
# IDN A-label bytes are ASCII compatible.
|
| 475 |
+
hostname = hostname.decode("ascii")
|
| 476 |
+
return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
|
| 477 |
+
|
| 478 |
+
|
| 479 |
+
def _is_key_file_encrypted(key_file):
|
| 480 |
+
"""Detects if a key file is encrypted or not."""
|
| 481 |
+
with open(key_file, "r") as f:
|
| 482 |
+
for line in f:
|
| 483 |
+
# Look for Proc-Type: 4,ENCRYPTED
|
| 484 |
+
if "ENCRYPTED" in line:
|
| 485 |
+
return True
|
| 486 |
+
|
| 487 |
+
return False
|
| 488 |
+
|
| 489 |
+
|
| 490 |
+
def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
|
| 491 |
+
if tls_in_tls:
|
| 492 |
+
if not SSLTransport:
|
| 493 |
+
# Import error, ssl is not available.
|
| 494 |
+
raise ProxySchemeUnsupported(
|
| 495 |
+
"TLS in TLS requires support for the 'ssl' module"
|
| 496 |
+
)
|
| 497 |
+
|
| 498 |
+
SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
|
| 499 |
+
return SSLTransport(sock, ssl_context, server_hostname)
|
| 500 |
+
|
| 501 |
+
if server_hostname:
|
| 502 |
+
return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
|
| 503 |
+
else:
|
| 504 |
+
return ssl_context.wrap_socket(sock)
|
llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/ssl_match_hostname.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""The match_hostname() function from Python 3.3.3, essential when using SSL."""
|
| 2 |
+
|
| 3 |
+
# Note: This file is under the PSF license as the code comes from the python
|
| 4 |
+
# stdlib. http://docs.python.org/3/license.html
|
| 5 |
+
|
| 6 |
+
import re
|
| 7 |
+
import sys
|
| 8 |
+
|
| 9 |
+
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
|
| 10 |
+
# system, use it to handle IPAddress ServerAltnames (this was added in
|
| 11 |
+
# python-3.5) otherwise only do DNS matching. This allows
|
| 12 |
+
# util.ssl_match_hostname to continue to be used in Python 2.7.
|
| 13 |
+
try:
|
| 14 |
+
import ipaddress
|
| 15 |
+
except ImportError:
|
| 16 |
+
ipaddress = None
|
| 17 |
+
|
| 18 |
+
__version__ = "3.5.0.1"
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class CertificateError(ValueError):
|
| 22 |
+
pass
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def _dnsname_match(dn, hostname, max_wildcards=1):
|
| 26 |
+
"""Matching according to RFC 6125, section 6.4.3
|
| 27 |
+
|
| 28 |
+
http://tools.ietf.org/html/rfc6125#section-6.4.3
|
| 29 |
+
"""
|
| 30 |
+
pats = []
|
| 31 |
+
if not dn:
|
| 32 |
+
return False
|
| 33 |
+
|
| 34 |
+
# Ported from python3-syntax:
|
| 35 |
+
# leftmost, *remainder = dn.split(r'.')
|
| 36 |
+
parts = dn.split(r".")
|
| 37 |
+
leftmost = parts[0]
|
| 38 |
+
remainder = parts[1:]
|
| 39 |
+
|
| 40 |
+
wildcards = leftmost.count("*")
|
| 41 |
+
if wildcards > max_wildcards:
|
| 42 |
+
# Issue #17980: avoid denials of service by refusing more
|
| 43 |
+
# than one wildcard per fragment. A survey of established
|
| 44 |
+
# policy among SSL implementations showed it to be a
|
| 45 |
+
# reasonable choice.
|
| 46 |
+
raise CertificateError(
|
| 47 |
+
"too many wildcards in certificate DNS name: " + repr(dn)
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
# speed up common case w/o wildcards
|
| 51 |
+
if not wildcards:
|
| 52 |
+
return dn.lower() == hostname.lower()
|
| 53 |
+
|
| 54 |
+
# RFC 6125, section 6.4.3, subitem 1.
|
| 55 |
+
# The client SHOULD NOT attempt to match a presented identifier in which
|
| 56 |
+
# the wildcard character comprises a label other than the left-most label.
|
| 57 |
+
if leftmost == "*":
|
| 58 |
+
# When '*' is a fragment by itself, it matches a non-empty dotless
|
| 59 |
+
# fragment.
|
| 60 |
+
pats.append("[^.]+")
|
| 61 |
+
elif leftmost.startswith("xn--") or hostname.startswith("xn--"):
|
| 62 |
+
# RFC 6125, section 6.4.3, subitem 3.
|
| 63 |
+
# The client SHOULD NOT attempt to match a presented identifier
|
| 64 |
+
# where the wildcard character is embedded within an A-label or
|
| 65 |
+
# U-label of an internationalized domain name.
|
| 66 |
+
pats.append(re.escape(leftmost))
|
| 67 |
+
else:
|
| 68 |
+
# Otherwise, '*' matches any dotless string, e.g. www*
|
| 69 |
+
pats.append(re.escape(leftmost).replace(r"\*", "[^.]*"))
|
| 70 |
+
|
| 71 |
+
# add the remaining fragments, ignore any wildcards
|
| 72 |
+
for frag in remainder:
|
| 73 |
+
pats.append(re.escape(frag))
|
| 74 |
+
|
| 75 |
+
pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
|
| 76 |
+
return pat.match(hostname)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
def _to_unicode(obj):
|
| 80 |
+
if isinstance(obj, str) and sys.version_info < (3,):
|
| 81 |
+
# ignored flake8 # F821 to support python 2.7 function
|
| 82 |
+
obj = unicode(obj, encoding="ascii", errors="strict") # noqa: F821
|
| 83 |
+
return obj
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def _ipaddress_match(ipname, host_ip):
|
| 87 |
+
"""Exact matching of IP addresses.
|
| 88 |
+
|
| 89 |
+
RFC 6125 explicitly doesn't define an algorithm for this
|
| 90 |
+
(section 1.7.2 - "Out of Scope").
|
| 91 |
+
"""
|
| 92 |
+
# OpenSSL may add a trailing newline to a subjectAltName's IP address
|
| 93 |
+
# Divergence from upstream: ipaddress can't handle byte str
|
| 94 |
+
ip = ipaddress.ip_address(_to_unicode(ipname).rstrip())
|
| 95 |
+
return ip == host_ip
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def match_hostname(cert, hostname):
|
| 99 |
+
"""Verify that *cert* (in decoded format as returned by
|
| 100 |
+
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
|
| 101 |
+
rules are followed, but IP addresses are not accepted for *hostname*.
|
| 102 |
+
|
| 103 |
+
CertificateError is raised on failure. On success, the function
|
| 104 |
+
returns nothing.
|
| 105 |
+
"""
|
| 106 |
+
if not cert:
|
| 107 |
+
raise ValueError(
|
| 108 |
+
"empty or no certificate, match_hostname needs a "
|
| 109 |
+
"SSL socket or SSL context with either "
|
| 110 |
+
"CERT_OPTIONAL or CERT_REQUIRED"
|
| 111 |
+
)
|
| 112 |
+
try:
|
| 113 |
+
# Divergence from upstream: ipaddress can't handle byte str
|
| 114 |
+
host_ip = ipaddress.ip_address(_to_unicode(hostname))
|
| 115 |
+
except (UnicodeError, ValueError):
|
| 116 |
+
# ValueError: Not an IP address (common case)
|
| 117 |
+
# UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
|
| 118 |
+
# byte strings. addresses should be all ascii, so we consider it not
|
| 119 |
+
# an ipaddress in this case
|
| 120 |
+
host_ip = None
|
| 121 |
+
except AttributeError:
|
| 122 |
+
# Divergence from upstream: Make ipaddress library optional
|
| 123 |
+
if ipaddress is None:
|
| 124 |
+
host_ip = None
|
| 125 |
+
else: # Defensive
|
| 126 |
+
raise
|
| 127 |
+
dnsnames = []
|
| 128 |
+
san = cert.get("subjectAltName", ())
|
| 129 |
+
for key, value in san:
|
| 130 |
+
if key == "DNS":
|
| 131 |
+
if host_ip is None and _dnsname_match(value, hostname):
|
| 132 |
+
return
|
| 133 |
+
dnsnames.append(value)
|
| 134 |
+
elif key == "IP Address":
|
| 135 |
+
if host_ip is not None and _ipaddress_match(value, host_ip):
|
| 136 |
+
return
|
| 137 |
+
dnsnames.append(value)
|
| 138 |
+
if not dnsnames:
|
| 139 |
+
# The subject is only checked when there is no dNSName entry
|
| 140 |
+
# in subjectAltName
|
| 141 |
+
for sub in cert.get("subject", ()):
|
| 142 |
+
for key, value in sub:
|
| 143 |
+
# XXX according to RFC 2818, the most specific Common Name
|
| 144 |
+
# must be used.
|
| 145 |
+
if key == "commonName":
|
| 146 |
+
if _dnsname_match(value, hostname):
|
| 147 |
+
return
|
| 148 |
+
dnsnames.append(value)
|
| 149 |
+
if len(dnsnames) > 1:
|
| 150 |
+
raise CertificateError(
|
| 151 |
+
"hostname %r "
|
| 152 |
+
"doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames)))
|
| 153 |
+
)
|
| 154 |
+
elif len(dnsnames) == 1:
|
| 155 |
+
raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0]))
|
| 156 |
+
else:
|
| 157 |
+
raise CertificateError(
|
| 158 |
+
"no appropriate commonName or subjectAltName fields were found"
|
| 159 |
+
)
|
minigpt2/lib/python3.10/site-packages/anyio/streams/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (167 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tokenizers/__init__.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from enum import Enum
|
| 2 |
+
from typing import List, Tuple, Union
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
Offsets = Tuple[int, int]
|
| 6 |
+
|
| 7 |
+
TextInputSequence = str
|
| 8 |
+
"""A :obj:`str` that represents an input sequence """
|
| 9 |
+
|
| 10 |
+
PreTokenizedInputSequence = Union[List[str], Tuple[str]]
|
| 11 |
+
"""A pre-tokenized input sequence. Can be one of:
|
| 12 |
+
|
| 13 |
+
- A :obj:`List` of :obj:`str`
|
| 14 |
+
- A :obj:`Tuple` of :obj:`str`
|
| 15 |
+
"""
|
| 16 |
+
|
| 17 |
+
TextEncodeInput = Union[
|
| 18 |
+
TextInputSequence,
|
| 19 |
+
Tuple[TextInputSequence, TextInputSequence],
|
| 20 |
+
List[TextInputSequence],
|
| 21 |
+
]
|
| 22 |
+
"""Represents a textual input for encoding. Can be either:
|
| 23 |
+
|
| 24 |
+
- A single sequence: :data:`~tokenizers.TextInputSequence`
|
| 25 |
+
- A pair of sequences:
|
| 26 |
+
|
| 27 |
+
- A :obj:`Tuple` of :data:`~tokenizers.TextInputSequence`
|
| 28 |
+
- Or a :obj:`List` of :data:`~tokenizers.TextInputSequence` of size 2
|
| 29 |
+
"""
|
| 30 |
+
|
| 31 |
+
PreTokenizedEncodeInput = Union[
|
| 32 |
+
PreTokenizedInputSequence,
|
| 33 |
+
Tuple[PreTokenizedInputSequence, PreTokenizedInputSequence],
|
| 34 |
+
List[PreTokenizedInputSequence],
|
| 35 |
+
]
|
| 36 |
+
"""Represents a pre-tokenized input for encoding. Can be either:
|
| 37 |
+
|
| 38 |
+
- A single sequence: :data:`~tokenizers.PreTokenizedInputSequence`
|
| 39 |
+
- A pair of sequences:
|
| 40 |
+
|
| 41 |
+
- A :obj:`Tuple` of :data:`~tokenizers.PreTokenizedInputSequence`
|
| 42 |
+
- Or a :obj:`List` of :data:`~tokenizers.PreTokenizedInputSequence` of size 2
|
| 43 |
+
"""
|
| 44 |
+
|
| 45 |
+
InputSequence = Union[TextInputSequence, PreTokenizedInputSequence]
|
| 46 |
+
"""Represents all the possible types of input sequences for encoding. Can be:
|
| 47 |
+
|
| 48 |
+
- When ``is_pretokenized=False``: :data:`~TextInputSequence`
|
| 49 |
+
- When ``is_pretokenized=True``: :data:`~PreTokenizedInputSequence`
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
EncodeInput = Union[TextEncodeInput, PreTokenizedEncodeInput]
|
| 53 |
+
"""Represents all the possible types of input for encoding. Can be:
|
| 54 |
+
|
| 55 |
+
- When ``is_pretokenized=False``: :data:`~TextEncodeInput`
|
| 56 |
+
- When ``is_pretokenized=True``: :data:`~PreTokenizedEncodeInput`
|
| 57 |
+
"""
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class OffsetReferential(Enum):
|
| 61 |
+
ORIGINAL = "original"
|
| 62 |
+
NORMALIZED = "normalized"
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class OffsetType(Enum):
|
| 66 |
+
BYTE = "byte"
|
| 67 |
+
CHAR = "char"
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class SplitDelimiterBehavior(Enum):
|
| 71 |
+
REMOVED = "removed"
|
| 72 |
+
ISOLATED = "isolated"
|
| 73 |
+
MERGED_WITH_PREVIOUS = "merged_with_previous"
|
| 74 |
+
MERGED_WITH_NEXT = "merged_with_next"
|
| 75 |
+
CONTIGUOUS = "contiguous"
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
from .tokenizers import (
|
| 79 |
+
AddedToken,
|
| 80 |
+
Encoding,
|
| 81 |
+
NormalizedString,
|
| 82 |
+
PreTokenizedString,
|
| 83 |
+
Regex,
|
| 84 |
+
Token,
|
| 85 |
+
Tokenizer,
|
| 86 |
+
decoders,
|
| 87 |
+
models,
|
| 88 |
+
normalizers,
|
| 89 |
+
pre_tokenizers,
|
| 90 |
+
processors,
|
| 91 |
+
trainers,
|
| 92 |
+
__version__,
|
| 93 |
+
)
|
| 94 |
+
from .implementations import (
|
| 95 |
+
BertWordPieceTokenizer,
|
| 96 |
+
ByteLevelBPETokenizer,
|
| 97 |
+
CharBPETokenizer,
|
| 98 |
+
SentencePieceBPETokenizer,
|
| 99 |
+
SentencePieceUnigramTokenizer,
|
| 100 |
+
)
|
minigpt2/lib/python3.10/site-packages/tokenizers/decoders/__init__.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .. import decoders
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
Decoder = decoders.Decoder
|
| 5 |
+
ByteLevel = decoders.ByteLevel
|
| 6 |
+
Replace = decoders.Replace
|
| 7 |
+
WordPiece = decoders.WordPiece
|
| 8 |
+
ByteFallback = decoders.ByteFallback
|
| 9 |
+
Fuse = decoders.Fuse
|
| 10 |
+
Strip = decoders.Strip
|
| 11 |
+
Metaspace = decoders.Metaspace
|
| 12 |
+
BPEDecoder = decoders.BPEDecoder
|
| 13 |
+
CTC = decoders.CTC
|
| 14 |
+
Sequence = decoders.Sequence
|
minigpt2/lib/python3.10/site-packages/tokenizers/decoders/__init__.pyi
ADDED
|
@@ -0,0 +1,270 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated content DO NOT EDIT
|
| 2 |
+
class Decoder:
|
| 3 |
+
"""
|
| 4 |
+
Base class for all decoders
|
| 5 |
+
|
| 6 |
+
This class is not supposed to be instantiated directly. Instead, any implementation of
|
| 7 |
+
a Decoder will return an instance of this class when instantiated.
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
def decode(self, tokens):
|
| 11 |
+
"""
|
| 12 |
+
Decode the given list of tokens to a final string
|
| 13 |
+
|
| 14 |
+
Args:
|
| 15 |
+
tokens (:obj:`List[str]`):
|
| 16 |
+
The list of tokens to decode
|
| 17 |
+
|
| 18 |
+
Returns:
|
| 19 |
+
:obj:`str`: The decoded string
|
| 20 |
+
"""
|
| 21 |
+
pass
|
| 22 |
+
|
| 23 |
+
class BPEDecoder(Decoder):
|
| 24 |
+
"""
|
| 25 |
+
BPEDecoder Decoder
|
| 26 |
+
|
| 27 |
+
Args:
|
| 28 |
+
suffix (:obj:`str`, `optional`, defaults to :obj:`</w>`):
|
| 29 |
+
The suffix that was used to caracterize an end-of-word. This suffix will
|
| 30 |
+
be replaced by whitespaces during the decoding
|
| 31 |
+
"""
|
| 32 |
+
|
| 33 |
+
def __init__(self, suffix="</w>"):
|
| 34 |
+
pass
|
| 35 |
+
def decode(self, tokens):
|
| 36 |
+
"""
|
| 37 |
+
Decode the given list of tokens to a final string
|
| 38 |
+
|
| 39 |
+
Args:
|
| 40 |
+
tokens (:obj:`List[str]`):
|
| 41 |
+
The list of tokens to decode
|
| 42 |
+
|
| 43 |
+
Returns:
|
| 44 |
+
:obj:`str`: The decoded string
|
| 45 |
+
"""
|
| 46 |
+
pass
|
| 47 |
+
|
| 48 |
+
class ByteFallback(Decoder):
|
| 49 |
+
"""
|
| 50 |
+
ByteFallback Decoder
|
| 51 |
+
ByteFallback is a simple trick which converts tokens looking like `<0x61>`
|
| 52 |
+
to pure bytes, and attempts to make them into a string. If the tokens
|
| 53 |
+
cannot be decoded you will get � instead for each inconvertable byte token
|
| 54 |
+
|
| 55 |
+
"""
|
| 56 |
+
|
| 57 |
+
def __init__(self):
|
| 58 |
+
pass
|
| 59 |
+
def decode(self, tokens):
|
| 60 |
+
"""
|
| 61 |
+
Decode the given list of tokens to a final string
|
| 62 |
+
|
| 63 |
+
Args:
|
| 64 |
+
tokens (:obj:`List[str]`):
|
| 65 |
+
The list of tokens to decode
|
| 66 |
+
|
| 67 |
+
Returns:
|
| 68 |
+
:obj:`str`: The decoded string
|
| 69 |
+
"""
|
| 70 |
+
pass
|
| 71 |
+
|
| 72 |
+
class ByteLevel(Decoder):
|
| 73 |
+
"""
|
| 74 |
+
ByteLevel Decoder
|
| 75 |
+
|
| 76 |
+
This decoder is to be used in tandem with the :class:`~tokenizers.pre_tokenizers.ByteLevel`
|
| 77 |
+
:class:`~tokenizers.pre_tokenizers.PreTokenizer`.
|
| 78 |
+
"""
|
| 79 |
+
|
| 80 |
+
def __init__(self):
|
| 81 |
+
pass
|
| 82 |
+
def decode(self, tokens):
|
| 83 |
+
"""
|
| 84 |
+
Decode the given list of tokens to a final string
|
| 85 |
+
|
| 86 |
+
Args:
|
| 87 |
+
tokens (:obj:`List[str]`):
|
| 88 |
+
The list of tokens to decode
|
| 89 |
+
|
| 90 |
+
Returns:
|
| 91 |
+
:obj:`str`: The decoded string
|
| 92 |
+
"""
|
| 93 |
+
pass
|
| 94 |
+
|
| 95 |
+
class CTC(Decoder):
|
| 96 |
+
"""
|
| 97 |
+
CTC Decoder
|
| 98 |
+
|
| 99 |
+
Args:
|
| 100 |
+
pad_token (:obj:`str`, `optional`, defaults to :obj:`<pad>`):
|
| 101 |
+
The pad token used by CTC to delimit a new token.
|
| 102 |
+
word_delimiter_token (:obj:`str`, `optional`, defaults to :obj:`|`):
|
| 103 |
+
The word delimiter token. It will be replaced by a <space>
|
| 104 |
+
cleanup (:obj:`bool`, `optional`, defaults to :obj:`True`):
|
| 105 |
+
Whether to cleanup some tokenization artifacts.
|
| 106 |
+
Mainly spaces before punctuation, and some abbreviated english forms.
|
| 107 |
+
"""
|
| 108 |
+
|
| 109 |
+
def __init__(self, pad_token="<pad>", word_delimiter_token="|", cleanup=True):
|
| 110 |
+
pass
|
| 111 |
+
def decode(self, tokens):
|
| 112 |
+
"""
|
| 113 |
+
Decode the given list of tokens to a final string
|
| 114 |
+
|
| 115 |
+
Args:
|
| 116 |
+
tokens (:obj:`List[str]`):
|
| 117 |
+
The list of tokens to decode
|
| 118 |
+
|
| 119 |
+
Returns:
|
| 120 |
+
:obj:`str`: The decoded string
|
| 121 |
+
"""
|
| 122 |
+
pass
|
| 123 |
+
|
| 124 |
+
class Fuse(Decoder):
|
| 125 |
+
"""
|
| 126 |
+
Fuse Decoder
|
| 127 |
+
Fuse simply fuses every token into a single string.
|
| 128 |
+
This is the last step of decoding, this decoder exists only if
|
| 129 |
+
there is need to add other decoders *after* the fusion
|
| 130 |
+
"""
|
| 131 |
+
|
| 132 |
+
def __init__(self):
|
| 133 |
+
pass
|
| 134 |
+
def decode(self, tokens):
|
| 135 |
+
"""
|
| 136 |
+
Decode the given list of tokens to a final string
|
| 137 |
+
|
| 138 |
+
Args:
|
| 139 |
+
tokens (:obj:`List[str]`):
|
| 140 |
+
The list of tokens to decode
|
| 141 |
+
|
| 142 |
+
Returns:
|
| 143 |
+
:obj:`str`: The decoded string
|
| 144 |
+
"""
|
| 145 |
+
pass
|
| 146 |
+
|
| 147 |
+
class Metaspace(Decoder):
|
| 148 |
+
"""
|
| 149 |
+
Metaspace Decoder
|
| 150 |
+
|
| 151 |
+
Args:
|
| 152 |
+
replacement (:obj:`str`, `optional`, defaults to :obj:`▁`):
|
| 153 |
+
The replacement character. Must be exactly one character. By default we
|
| 154 |
+
use the `▁` (U+2581) meta symbol (Same as in SentencePiece).
|
| 155 |
+
|
| 156 |
+
add_prefix_space (:obj:`bool`, `optional`, defaults to :obj:`True`):
|
| 157 |
+
Whether to add a space to the first word if there isn't already one. This
|
| 158 |
+
lets us treat `hello` exactly like `say hello`.
|
| 159 |
+
"""
|
| 160 |
+
|
| 161 |
+
def __init__(self, replacement="▁", add_prefix_space=True):
|
| 162 |
+
pass
|
| 163 |
+
def decode(self, tokens):
|
| 164 |
+
"""
|
| 165 |
+
Decode the given list of tokens to a final string
|
| 166 |
+
|
| 167 |
+
Args:
|
| 168 |
+
tokens (:obj:`List[str]`):
|
| 169 |
+
The list of tokens to decode
|
| 170 |
+
|
| 171 |
+
Returns:
|
| 172 |
+
:obj:`str`: The decoded string
|
| 173 |
+
"""
|
| 174 |
+
pass
|
| 175 |
+
|
| 176 |
+
class Replace(Decoder):
|
| 177 |
+
"""
|
| 178 |
+
Replace Decoder
|
| 179 |
+
|
| 180 |
+
This decoder is to be used in tandem with the :class:`~tokenizers.pre_tokenizers.Replace`
|
| 181 |
+
:class:`~tokenizers.pre_tokenizers.PreTokenizer`.
|
| 182 |
+
"""
|
| 183 |
+
|
| 184 |
+
def __init__(self, pattern, content):
|
| 185 |
+
pass
|
| 186 |
+
def decode(self, tokens):
|
| 187 |
+
"""
|
| 188 |
+
Decode the given list of tokens to a final string
|
| 189 |
+
|
| 190 |
+
Args:
|
| 191 |
+
tokens (:obj:`List[str]`):
|
| 192 |
+
The list of tokens to decode
|
| 193 |
+
|
| 194 |
+
Returns:
|
| 195 |
+
:obj:`str`: The decoded string
|
| 196 |
+
"""
|
| 197 |
+
pass
|
| 198 |
+
|
| 199 |
+
class Sequence(Decoder):
|
| 200 |
+
"""
|
| 201 |
+
Sequence Decoder
|
| 202 |
+
|
| 203 |
+
Args:
|
| 204 |
+
decoders (:obj:`List[Decoder]`)
|
| 205 |
+
The decoders that need to be chained
|
| 206 |
+
"""
|
| 207 |
+
|
| 208 |
+
def __init__(self, decoders):
|
| 209 |
+
pass
|
| 210 |
+
def decode(self, tokens):
|
| 211 |
+
"""
|
| 212 |
+
Decode the given list of tokens to a final string
|
| 213 |
+
|
| 214 |
+
Args:
|
| 215 |
+
tokens (:obj:`List[str]`):
|
| 216 |
+
The list of tokens to decode
|
| 217 |
+
|
| 218 |
+
Returns:
|
| 219 |
+
:obj:`str`: The decoded string
|
| 220 |
+
"""
|
| 221 |
+
pass
|
| 222 |
+
|
| 223 |
+
class Strip(Decoder):
|
| 224 |
+
"""
|
| 225 |
+
Strip normalizer
|
| 226 |
+
Strips n left characters of each token, or n right characters of each token
|
| 227 |
+
"""
|
| 228 |
+
|
| 229 |
+
def __init__(self, content, left=0, right=0):
|
| 230 |
+
pass
|
| 231 |
+
def decode(self, tokens):
|
| 232 |
+
"""
|
| 233 |
+
Decode the given list of tokens to a final string
|
| 234 |
+
|
| 235 |
+
Args:
|
| 236 |
+
tokens (:obj:`List[str]`):
|
| 237 |
+
The list of tokens to decode
|
| 238 |
+
|
| 239 |
+
Returns:
|
| 240 |
+
:obj:`str`: The decoded string
|
| 241 |
+
"""
|
| 242 |
+
pass
|
| 243 |
+
|
| 244 |
+
class WordPiece(Decoder):
|
| 245 |
+
"""
|
| 246 |
+
WordPiece Decoder
|
| 247 |
+
|
| 248 |
+
Args:
|
| 249 |
+
prefix (:obj:`str`, `optional`, defaults to :obj:`##`):
|
| 250 |
+
The prefix to use for subwords that are not a beginning-of-word
|
| 251 |
+
|
| 252 |
+
cleanup (:obj:`bool`, `optional`, defaults to :obj:`True`):
|
| 253 |
+
Whether to cleanup some tokenization artifacts. Mainly spaces before punctuation,
|
| 254 |
+
and some abbreviated english forms.
|
| 255 |
+
"""
|
| 256 |
+
|
| 257 |
+
def __init__(self, prefix="##", cleanup=True):
|
| 258 |
+
pass
|
| 259 |
+
def decode(self, tokens):
|
| 260 |
+
"""
|
| 261 |
+
Decode the given list of tokens to a final string
|
| 262 |
+
|
| 263 |
+
Args:
|
| 264 |
+
tokens (:obj:`List[str]`):
|
| 265 |
+
The list of tokens to decode
|
| 266 |
+
|
| 267 |
+
Returns:
|
| 268 |
+
:obj:`str`: The decoded string
|
| 269 |
+
"""
|
| 270 |
+
pass
|
minigpt2/lib/python3.10/site-packages/tokenizers/decoders/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (399 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tokenizers/implementations/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .base_tokenizer import BaseTokenizer
|
| 2 |
+
from .bert_wordpiece import BertWordPieceTokenizer
|
| 3 |
+
from .byte_level_bpe import ByteLevelBPETokenizer
|
| 4 |
+
from .char_level_bpe import CharBPETokenizer
|
| 5 |
+
from .sentencepiece_bpe import SentencePieceBPETokenizer
|
| 6 |
+
from .sentencepiece_unigram import SentencePieceUnigramTokenizer
|
minigpt2/lib/python3.10/site-packages/tokenizers/implementations/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (550 Bytes). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tokenizers/implementations/__pycache__/sentencepiece_unigram.cpython-310.pyc
ADDED
|
Binary file (6.4 kB). View file
|
|
|
minigpt2/lib/python3.10/site-packages/tokenizers/implementations/base_tokenizer.py
ADDED
|
@@ -0,0 +1,418 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Dict, List, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
from tokenizers import AddedToken, EncodeInput, Encoding, InputSequence, Tokenizer
|
| 4 |
+
from tokenizers.decoders import Decoder
|
| 5 |
+
from tokenizers.models import Model
|
| 6 |
+
from tokenizers.normalizers import Normalizer
|
| 7 |
+
from tokenizers.pre_tokenizers import PreTokenizer
|
| 8 |
+
from tokenizers.processors import PostProcessor
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
# (start, end) pair — presumably character offsets of a token within the
# original input string; not used in this module directly. TODO confirm usage.
Offsets = Tuple[int, int]
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class BaseTokenizer:
    """Thin convenience wrapper around a :class:`~tokenizers.Tokenizer`.

    Concrete tokenizer implementations build a fully configured ``Tokenizer``
    and hand it to this base class, which delegates every operation to it.
    The ``parameters`` mapping is kept only for display purposes (``repr``).
    """

    def __init__(self, tokenizer: Tokenizer, parameters=None):
        self._tokenizer = tokenizer
        # Construction parameters, used only by __repr__ below.
        self._parameters = parameters if parameters is not None else {}

    def __repr__(self):
        return "Tokenizer(vocabulary_size={}, {})".format(
            self._tokenizer.get_vocab_size(),
            ", ".join(k + "=" + str(v) for k, v in self._parameters.items()),
        )

    def num_special_tokens_to_add(self, is_pair: bool) -> int:
        """
        Return the number of special tokens that would be added for single/pair sentences.
        :param is_pair: Boolean indicating if the input would be a single sentence or a pair
        :return:
        """
        return self._tokenizer.num_special_tokens_to_add(is_pair)

    def get_vocab(self, with_added_tokens: bool = True) -> Dict[str, int]:
        """Returns the vocabulary

        Args:
            with_added_tokens: boolean:
                Whether to include the added tokens in the vocabulary

        Returns:
            The vocabulary
        """
        return self._tokenizer.get_vocab(with_added_tokens=with_added_tokens)

    def get_added_tokens_decoder(self) -> Dict[int, AddedToken]:
        """Returns the added reverse vocabulary

        Returns:
            The added vocabulary mapping ints to AddedTokens
        """
        return self._tokenizer.get_added_tokens_decoder()

    def get_vocab_size(self, with_added_tokens: bool = True) -> int:
        """Return the size of vocabulary, with or without added tokens.

        Args:
            with_added_tokens: (`optional`) bool:
                Whether to count in added special tokens or not

        Returns:
            Size of vocabulary
        """
        return self._tokenizer.get_vocab_size(with_added_tokens=with_added_tokens)

    def enable_padding(
        self,
        direction: Optional[str] = "right",
        pad_to_multiple_of: Optional[int] = None,
        pad_id: Optional[int] = 0,
        pad_type_id: Optional[int] = 0,
        pad_token: Optional[str] = "[PAD]",
        length: Optional[int] = None,
    ):
        """Change the padding strategy

        Args:
            direction: (`optional`) str:
                Can be one of: `right` or `left`

            pad_to_multiple_of: (`optional`) unsigned int:
                If specified, the padding length should always snap to the next multiple of
                the given value. For example if we were going to pad with a length of 250 but
                `pad_to_multiple_of=8` then we will pad to 256.

            pad_id: (`optional`) unsigned int:
                The index to be used when padding

            pad_type_id: (`optional`) unsigned int:
                The type index to be used when padding

            pad_token: (`optional`) str:
                The pad token to be used when padding

            length: (`optional`) unsigned int:
                If specified, the length at which to pad. If not specified
                we pad using the size of the longest sequence in a batch
        """
        return self._tokenizer.enable_padding(
            direction=direction,
            pad_to_multiple_of=pad_to_multiple_of,
            pad_id=pad_id,
            pad_type_id=pad_type_id,
            pad_token=pad_token,
            length=length,
        )

    def no_padding(self):
        """Disable padding"""
        return self._tokenizer.no_padding()

    @property
    def padding(self) -> Optional[dict]:
        """Get the current padding parameters

        Returns:
            None if padding is disabled, a dict with the currently set parameters
            if the padding is enabled.
        """
        return self._tokenizer.padding

    def enable_truncation(self, max_length: int, stride: Optional[int] = 0, strategy: Optional[str] = "longest_first"):
        """Change the truncation options

        Args:
            max_length: unsigned int:
                The maximum length at which to truncate

            stride: (`optional`) unsigned int:
                The length of the previous first sequence to be included
                in the overflowing sequence

            strategy: (`optional`) str:
                Can be one of `longest_first`, `only_first` or `only_second`
        """
        return self._tokenizer.enable_truncation(max_length, stride=stride, strategy=strategy)

    def no_truncation(self):
        """Disable truncation"""
        return self._tokenizer.no_truncation()

    @property
    def truncation(self) -> Optional[dict]:
        """Get the current truncation parameters

        Returns:
            None if truncation is disabled, a dict with the current truncation parameters if
            truncation is enabled
        """
        return self._tokenizer.truncation

    def add_tokens(self, tokens: List[Union[str, AddedToken]]) -> int:
        """Add the given tokens to the vocabulary

        Args:
            tokens: List[Union[str, AddedToken]]:
                A list of tokens to add to the vocabulary. Each token can either be
                a string, or an instance of AddedToken

        Returns:
            The number of tokens that were added to the vocabulary
        """
        return self._tokenizer.add_tokens(tokens)

    def add_special_tokens(self, special_tokens: List[Union[str, AddedToken]]) -> int:
        """Add the given special tokens to the vocabulary, and treat them as special tokens.

        The special tokens will never be processed by the model, and will be
        removed while decoding.

        Args:
            special_tokens: List[Union[str, AddedToken]]:
                A list of special tokens to add to the vocabulary. Each token can either be
                a string, or an instance of AddedToken

        Returns:
            The number of tokens that were added to the vocabulary
        """
        return self._tokenizer.add_special_tokens(special_tokens)

    def normalize(self, sequence: str) -> str:
        """Normalize the given sequence

        Args:
            sequence: str:
                The sequence to normalize

        Returns:
            The normalized string
        """
        return self._tokenizer.normalize(sequence)

    def encode(
        self,
        sequence: InputSequence,
        pair: Optional[InputSequence] = None,
        is_pretokenized: bool = False,
        add_special_tokens: bool = True,
    ) -> Encoding:
        """Encode the given sequence and pair. This method can process raw text sequences as well
        as already pre-tokenized sequences.

        Args:
            sequence: InputSequence:
                The sequence we want to encode. This sequence can be either raw text or
                pre-tokenized, according to the `is_pretokenized` argument:

                - If `is_pretokenized=False`: `InputSequence` is expected to be `str`
                - If `is_pretokenized=True`: `InputSequence` is expected to be
                    `Union[List[str], Tuple[str]]`

            is_pretokenized: bool:
                Whether the input is already pre-tokenized.

            add_special_tokens: bool:
                Whether to add the special tokens while encoding.

        Returns:
            An Encoding
        """
        if sequence is None:
            raise ValueError("encode: `sequence` can't be `None`")

        return self._tokenizer.encode(sequence, pair, is_pretokenized, add_special_tokens)

    def encode_batch(
        self,
        inputs: List[EncodeInput],
        is_pretokenized: bool = False,
        add_special_tokens: bool = True,
    ) -> List[Encoding]:
        """Encode the given inputs. This method accept both raw text sequences as well as already
        pre-tokenized sequences.

        Args:
            inputs: List[EncodeInput]:
                A list of single sequences or pair sequences to encode. Each `EncodeInput` is
                expected to be of the following form:
                `Union[InputSequence, Tuple[InputSequence, InputSequence]]`

                Each `InputSequence` can either be raw text or pre-tokenized,
                according to the `is_pretokenized` argument:

                - If `is_pretokenized=False`: `InputSequence` is expected to be `str`
                - If `is_pretokenized=True`: `InputSequence` is expected to be
                    `Union[List[str], Tuple[str]]`

            is_pretokenized: bool:
                Whether the input is already pre-tokenized.

            add_special_tokens: bool:
                Whether to add the special tokens while encoding.

        Returns:
            A list of Encoding
        """

        if inputs is None:
            raise ValueError("encode_batch: `inputs` can't be `None`")

        return self._tokenizer.encode_batch(inputs, is_pretokenized, add_special_tokens)

    def decode(self, ids: List[int], skip_special_tokens: Optional[bool] = True) -> str:
        """Decode the given list of ids to a string sequence

        Args:
            ids: List[unsigned int]:
                A list of ids to be decoded

            skip_special_tokens: (`optional`) boolean:
                Whether to remove all the special tokens from the output string

        Returns:
            The decoded string
        """
        if ids is None:
            raise ValueError("None input is not valid. Should be a list of integers.")

        return self._tokenizer.decode(ids, skip_special_tokens=skip_special_tokens)

    # NOTE: the return annotation was previously `str`, but this method returns
    # one decoded string per input sequence, i.e. a list.
    def decode_batch(self, sequences: List[List[int]], skip_special_tokens: Optional[bool] = True) -> List[str]:
        """Decode the list of sequences to a list of string sequences

        Args:
            sequences: List[List[unsigned int]]:
                A list of sequence of ids to be decoded

            skip_special_tokens: (`optional`) boolean:
                Whether to remove all the special tokens from the output strings

        Returns:
            A list of decoded strings
        """
        if sequences is None:
            raise ValueError("None input is not valid. Should be list of list of integers.")

        return self._tokenizer.decode_batch(sequences, skip_special_tokens=skip_special_tokens)

    def token_to_id(self, token: str) -> Optional[int]:
        """Convert the given token to its corresponding id

        Args:
            token: str:
                The token to convert

        Returns:
            The corresponding id if it exists, None otherwise
        """
        return self._tokenizer.token_to_id(token)

    def id_to_token(self, id: int) -> Optional[str]:
        """Convert the given token id to its corresponding string

        Args:
            id: int:
                The token id to convert

        Returns:
            The corresponding string if it exists, None otherwise
        """
        return self._tokenizer.id_to_token(id)

    def save_model(self, directory: str, prefix: Optional[str] = None):
        """Save the current model to the given directory

        Args:
            directory: str:
                A path to the destination directory

            prefix: (Optional) str:
                An optional prefix, used to prefix each file name
        """
        return self._tokenizer.model.save(directory, prefix=prefix)

    def save(self, path: str, pretty: bool = True):
        """Save the current Tokenizer at the given path

        Args:
            path: str:
                A path to the destination Tokenizer file
        """
        return self._tokenizer.save(path, pretty)

    def to_str(self, pretty: bool = False):
        """Get a serialized JSON version of the Tokenizer as a str

        Args:
            pretty: bool:
                Whether the JSON string should be prettified

        Returns:
            str
        """
        return self._tokenizer.to_str(pretty)

    def post_process(
        self, encoding: Encoding, pair: Optional[Encoding] = None, add_special_tokens: bool = True
    ) -> Encoding:
        """Apply all the post-processing steps to the given encodings.

        The various steps are:
            1. Truncate according to global params (provided to `enable_truncation`)
            2. Apply the PostProcessor
            3. Pad according to global params. (provided to `enable_padding`)

        Args:
            encoding: Encoding:
                The main Encoding to post process

            pair: Optional[Encoding]:
                An optional pair Encoding

            add_special_tokens: bool:
                Whether to add special tokens

        Returns:
            The resulting Encoding
        """
        return self._tokenizer.post_process(encoding, pair, add_special_tokens)

    @property
    def model(self) -> Model:
        return self._tokenizer.model

    @model.setter
    def model(self, model: Model):
        self._tokenizer.model = model

    @property
    def normalizer(self) -> Normalizer:
        return self._tokenizer.normalizer

    @normalizer.setter
    def normalizer(self, normalizer: Normalizer):
        self._tokenizer.normalizer = normalizer

    @property
    def pre_tokenizer(self) -> PreTokenizer:
        return self._tokenizer.pre_tokenizer

    @pre_tokenizer.setter
    def pre_tokenizer(self, pre_tokenizer: PreTokenizer):
        self._tokenizer.pre_tokenizer = pre_tokenizer

    @property
    def post_processor(self) -> PostProcessor:
        return self._tokenizer.post_processor

    @post_processor.setter
    def post_processor(self, post_processor: PostProcessor):
        self._tokenizer.post_processor = post_processor

    @property
    def decoder(self) -> Decoder:
        return self._tokenizer.decoder

    @decoder.setter
    def decoder(self, decoder: Decoder):
        self._tokenizer.decoder = decoder
|
minigpt2/lib/python3.10/site-packages/tokenizers/implementations/byte_level_bpe.py
ADDED
|
@@ -0,0 +1,122 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Dict, Iterator, List, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
from tokenizers import AddedToken, Tokenizer, decoders, pre_tokenizers, processors, trainers
|
| 4 |
+
from tokenizers.models import BPE
|
| 5 |
+
from tokenizers.normalizers import Lowercase, Sequence, unicode_normalizer_from_str
|
| 6 |
+
|
| 7 |
+
from .base_tokenizer import BaseTokenizer
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class ByteLevelBPETokenizer(BaseTokenizer):
    """ByteLevelBPETokenizer

    Represents a Byte-level BPE as introduced by OpenAI with their GPT-2 model
    """

    def __init__(
        self,
        vocab: Optional[Union[str, Dict[str, int]]] = None,
        merges: Optional[Union[str, Dict[Tuple[int, int], Tuple[int, int]]]] = None,
        add_prefix_space: bool = False,
        lowercase: bool = False,
        dropout: Optional[float] = None,
        unicode_normalizer: Optional[str] = None,
        continuing_subword_prefix: Optional[str] = None,
        end_of_word_suffix: Optional[str] = None,
        trim_offsets: bool = False,
    ):
        # A usable BPE model needs both a vocab and merges; otherwise start
        # from an empty (trainable) model.
        if vocab is not None and merges is not None:
            tokenizer = Tokenizer(
                BPE(
                    vocab,
                    merges,
                    dropout=dropout,
                    continuing_subword_prefix=continuing_subword_prefix or "",
                    end_of_word_suffix=end_of_word_suffix or "",
                )
            )
        else:
            tokenizer = Tokenizer(BPE())

        # Check for Unicode normalization first (before everything else)
        normalizers = []

        if unicode_normalizer:
            normalizers += [unicode_normalizer_from_str(unicode_normalizer)]

        if lowercase:
            normalizers += [Lowercase()]

        # Create the normalizer structure: a single normalizer is used as-is,
        # several are wrapped in a Sequence.
        if len(normalizers) > 0:
            if len(normalizers) > 1:
                tokenizer.normalizer = Sequence(normalizers)
            else:
                tokenizer.normalizer = normalizers[0]

        tokenizer.pre_tokenizer = pre_tokenizers.ByteLevel(add_prefix_space=add_prefix_space)
        tokenizer.decoder = decoders.ByteLevel()
        tokenizer.post_processor = processors.ByteLevel(trim_offsets=trim_offsets)

        # Kept for display purposes only (see BaseTokenizer.__repr__).
        parameters = {
            "model": "ByteLevelBPE",
            "add_prefix_space": add_prefix_space,
            "lowercase": lowercase,
            "dropout": dropout,
            "unicode_normalizer": unicode_normalizer,
            "continuing_subword_prefix": continuing_subword_prefix,
            "end_of_word_suffix": end_of_word_suffix,
            "trim_offsets": trim_offsets,
        }

        super().__init__(tokenizer, parameters)

    @staticmethod
    def from_file(vocab_filename: str, merges_filename: str, **kwargs):
        """Instantiate the tokenizer from serialized vocab and merges files."""
        vocab, merges = BPE.read_file(vocab_filename, merges_filename)
        return ByteLevelBPETokenizer(vocab, merges, **kwargs)

    def train(
        self,
        files: Union[str, List[str]],
        vocab_size: int = 30000,
        min_frequency: int = 2,
        show_progress: bool = True,
        special_tokens: Optional[List[Union[str, AddedToken]]] = None,
    ):
        """Train the model using the given files"""
        # Use a None sentinel instead of a mutable default argument.
        if special_tokens is None:
            special_tokens = []

        trainer = trainers.BpeTrainer(
            vocab_size=vocab_size,
            min_frequency=min_frequency,
            show_progress=show_progress,
            special_tokens=special_tokens,
            initial_alphabet=pre_tokenizers.ByteLevel.alphabet(),
        )
        if isinstance(files, str):
            files = [files]
        self._tokenizer.train(files, trainer=trainer)

    def train_from_iterator(
        self,
        iterator: Union[Iterator[str], Iterator[Iterator[str]]],
        vocab_size: int = 30000,
        min_frequency: int = 2,
        show_progress: bool = True,
        special_tokens: Optional[List[Union[str, AddedToken]]] = None,
        length: Optional[int] = None,
    ):
        """Train the model using the given iterator"""
        # Use a None sentinel instead of a mutable default argument.
        if special_tokens is None:
            special_tokens = []

        trainer = trainers.BpeTrainer(
            vocab_size=vocab_size,
            min_frequency=min_frequency,
            show_progress=show_progress,
            special_tokens=special_tokens,
            initial_alphabet=pre_tokenizers.ByteLevel.alphabet(),
        )
        self._tokenizer.train_from_iterator(
            iterator,
            trainer=trainer,
            length=length,
        )
|
minigpt2/lib/python3.10/site-packages/tokenizers/implementations/sentencepiece_bpe.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Dict, Iterator, List, Optional, Tuple, Union
|
| 2 |
+
|
| 3 |
+
from tokenizers import AddedToken, Tokenizer, decoders, pre_tokenizers, trainers
|
| 4 |
+
from tokenizers.models import BPE
|
| 5 |
+
from tokenizers.normalizers import NFKC
|
| 6 |
+
|
| 7 |
+
from .base_tokenizer import BaseTokenizer
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class SentencePieceBPETokenizer(BaseTokenizer):
    """SentencePiece BPE Tokenizer

    Represents the BPE algorithm, with the pretokenization used by SentencePiece
    """

    def __init__(
        self,
        vocab: Optional[Union[str, Dict[str, int]]] = None,
        merges: Optional[Union[str, Dict[Tuple[int, int], Tuple[int, int]]]] = None,
        unk_token: Union[str, AddedToken] = "<unk>",
        replacement: str = "▁",
        add_prefix_space: bool = True,
        dropout: Optional[float] = None,
        fuse_unk: Optional[bool] = False,
    ):
        # A usable BPE model needs both a vocab and merges; otherwise start
        # from an empty (trainable) model.
        if vocab is not None and merges is not None:
            tokenizer = Tokenizer(BPE(vocab, merges, dropout=dropout, unk_token=unk_token, fuse_unk=fuse_unk))
        else:
            tokenizer = Tokenizer(BPE(dropout=dropout, unk_token=unk_token, fuse_unk=fuse_unk))

        # Only register the unk token as special when it exists in the vocab.
        if tokenizer.token_to_id(str(unk_token)) is not None:
            tokenizer.add_special_tokens([str(unk_token)])

        tokenizer.normalizer = NFKC()
        tokenizer.pre_tokenizer = pre_tokenizers.Metaspace(replacement=replacement, add_prefix_space=add_prefix_space)
        tokenizer.decoder = decoders.Metaspace(replacement=replacement, add_prefix_space=add_prefix_space)

        # Kept for display purposes only (see BaseTokenizer.__repr__).
        parameters = {
            "model": "SentencePieceBPE",
            "unk_token": unk_token,
            "replacement": replacement,
            "add_prefix_space": add_prefix_space,
            "dropout": dropout,
        }

        super().__init__(tokenizer, parameters)

    @staticmethod
    def from_file(vocab_filename: str, merges_filename: str, **kwargs):
        """Instantiate the tokenizer from serialized vocab and merges files."""
        vocab, merges = BPE.read_file(vocab_filename, merges_filename)
        return SentencePieceBPETokenizer(vocab, merges, **kwargs)

    def train(
        self,
        files: Union[str, List[str]],
        vocab_size: int = 30000,
        min_frequency: int = 2,
        special_tokens: Optional[List[Union[str, AddedToken]]] = None,
        limit_alphabet: int = 1000,
        initial_alphabet: Optional[List[str]] = None,
        show_progress: bool = True,
    ):
        """Train the model using the given files"""
        # Use None sentinels instead of mutable default arguments; the
        # effective defaults are unchanged (["<unk>"] and []).
        if special_tokens is None:
            special_tokens = ["<unk>"]
        if initial_alphabet is None:
            initial_alphabet = []

        trainer = trainers.BpeTrainer(
            vocab_size=vocab_size,
            min_frequency=min_frequency,
            special_tokens=special_tokens,
            limit_alphabet=limit_alphabet,
            initial_alphabet=initial_alphabet,
            show_progress=show_progress,
        )
        if isinstance(files, str):
            files = [files]
        self._tokenizer.train(files, trainer=trainer)

    def train_from_iterator(
        self,
        iterator: Union[Iterator[str], Iterator[Iterator[str]]],
        vocab_size: int = 30000,
        min_frequency: int = 2,
        special_tokens: Optional[List[Union[str, AddedToken]]] = None,
        limit_alphabet: int = 1000,
        initial_alphabet: Optional[List[str]] = None,
        show_progress: bool = True,
        length: Optional[int] = None,
    ):
        """Train the model using the given iterator"""
        # Use None sentinels instead of mutable default arguments; the
        # effective defaults are unchanged (["<unk>"] and []).
        if special_tokens is None:
            special_tokens = ["<unk>"]
        if initial_alphabet is None:
            initial_alphabet = []

        trainer = trainers.BpeTrainer(
            vocab_size=vocab_size,
            min_frequency=min_frequency,
            special_tokens=special_tokens,
            limit_alphabet=limit_alphabet,
            initial_alphabet=initial_alphabet,
            show_progress=show_progress,
        )
        self._tokenizer.train_from_iterator(
            iterator,
            trainer=trainer,
            length=length,
        )
|
minigpt2/lib/python3.10/site-packages/tokenizers/models/__init__.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated content DO NOT EDIT
from .. import models

# Re-export the model classes from the underlying `models` module so they are
# importable as `tokenizers.models.<Name>`.
Model = models.Model
BPE = models.BPE
Unigram = models.Unigram
WordLevel = models.WordLevel
WordPiece = models.WordPiece
|
minigpt2/lib/python3.10/site-packages/tokenizers/models/__init__.pyi
ADDED
|
@@ -0,0 +1,562 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Generated content DO NOT EDIT
|
| 2 |
+
class Model:
    """
    Base class for all models

    The model represents the actual tokenization algorithm. This is the part that
    will contain and manage the learned vocabulary.

    This class cannot be constructed directly. Please use one of the concrete models.
    """

    # NOTE(review): this is generated stub content — the `pass` bodies below are
    # placeholders; the real implementation presumably lives in the compiled
    # `tokenizers` extension re-exported by `models/__init__.py`.
    def get_trainer(self):
        """
        Get the associated :class:`~tokenizers.trainers.Trainer`

        Retrieve the :class:`~tokenizers.trainers.Trainer` associated to this
        :class:`~tokenizers.models.Model`.

        Returns:
            :class:`~tokenizers.trainers.Trainer`: The Trainer used to train this model
        """
        pass
    def id_to_token(self, id):
        """
        Get the token associated to an ID

        Args:
            id (:obj:`int`):
                An ID to convert to a token

        Returns:
            :obj:`str`: The token associated to the ID
        """
        pass
    def save(self, folder, prefix):
        """
        Save the current model

        Save the current model in the given folder, using the given prefix for the various
        files that will get created.
        Any file with the same name that already exists in this folder will be overwritten.

        Args:
            folder (:obj:`str`):
                The path to the target folder in which to save the various files

            prefix (:obj:`str`, `optional`):
                An optional prefix, used to prefix each file name

        Returns:
            :obj:`List[str]`: The list of saved files
        """
        pass
    def token_to_id(self, tokens):
        """
        Get the ID associated to a token

        Args:
            token (:obj:`str`):
                A token to convert to an ID

        Returns:
            :obj:`int`: The ID associated to the token
        """
        pass
    def tokenize(self, sequence):
        """
        Tokenize a sequence

        Args:
            sequence (:obj:`str`):
                A sequence to tokenize

        Returns:
            A :obj:`List` of :class:`~tokenizers.Token`: The generated tokens
        """
        pass
|
| 78 |
+
|
| 79 |
+
class BPE(Model):
|
| 80 |
+
"""
|
| 81 |
+
An implementation of the BPE (Byte-Pair Encoding) algorithm
|
| 82 |
+
|
| 83 |
+
Args:
|
| 84 |
+
vocab (:obj:`Dict[str, int]`, `optional`):
|
| 85 |
+
A dictionnary of string keys and their ids :obj:`{"am": 0,...}`
|
| 86 |
+
|
| 87 |
+
merges (:obj:`List[Tuple[str, str]]`, `optional`):
|
| 88 |
+
A list of pairs of tokens (:obj:`Tuple[str, str]`) :obj:`[("a", "b"),...]`
|
| 89 |
+
|
| 90 |
+
cache_capacity (:obj:`int`, `optional`):
|
| 91 |
+
The number of words that the BPE cache can contain. The cache allows
|
| 92 |
+
to speed-up the process by keeping the result of the merge operations
|
| 93 |
+
for a number of words.
|
| 94 |
+
|
| 95 |
+
dropout (:obj:`float`, `optional`):
|
| 96 |
+
A float between 0 and 1 that represents the BPE dropout to use.
|
| 97 |
+
|
| 98 |
+
unk_token (:obj:`str`, `optional`):
|
| 99 |
+
The unknown token to be used by the model.
|
| 100 |
+
|
| 101 |
+
continuing_subword_prefix (:obj:`str`, `optional`):
|
| 102 |
+
The prefix to attach to subword units that don't represent a beginning of word.
|
| 103 |
+
|
| 104 |
+
end_of_word_suffix (:obj:`str`, `optional`):
|
| 105 |
+
The suffix to attach to subword units that represent an end of word.
|
| 106 |
+
|
| 107 |
+
fuse_unk (:obj:`bool`, `optional`):
|
| 108 |
+
Whether to fuse any subsequent unknown tokens into a single one
|
| 109 |
+
|
| 110 |
+
byte_fallback (:obj:`bool`, `optional`):
|
| 111 |
+
Whether to use spm byte-fallback trick (defaults to False)
|
| 112 |
+
"""
|
| 113 |
+
|
| 114 |
+
def __init__(
|
| 115 |
+
self,
|
| 116 |
+
vocab=None,
|
| 117 |
+
merges=None,
|
| 118 |
+
cache_capacity=None,
|
| 119 |
+
dropout=None,
|
| 120 |
+
unk_token=None,
|
| 121 |
+
continuing_subword_prefix=None,
|
| 122 |
+
end_of_word_suffix=None,
|
| 123 |
+
fuse_unk=None,
|
| 124 |
+
byte_fallback=False,
|
| 125 |
+
):
|
| 126 |
+
pass
|
| 127 |
+
@staticmethod
|
| 128 |
+
def from_file(cls, vocab, merge, **kwargs):
|
| 129 |
+
"""
|
| 130 |
+
Instantiate a BPE model from the given files.
|
| 131 |
+
|
| 132 |
+
This method is roughly equivalent to doing::
|
| 133 |
+
|
| 134 |
+
vocab, merges = BPE.read_file(vocab_filename, merges_filename)
|
| 135 |
+
bpe = BPE(vocab, merges)
|
| 136 |
+
|
| 137 |
+
If you don't need to keep the :obj:`vocab, merges` values lying around,
|
| 138 |
+
this method is more optimized than manually calling
|
| 139 |
+
:meth:`~tokenizers.models.BPE.read_file` to initialize a :class:`~tokenizers.models.BPE`
|
| 140 |
+
|
| 141 |
+
Args:
|
| 142 |
+
vocab (:obj:`str`):
|
| 143 |
+
The path to a :obj:`vocab.json` file
|
| 144 |
+
|
| 145 |
+
merges (:obj:`str`):
|
| 146 |
+
The path to a :obj:`merges.txt` file
|
| 147 |
+
|
| 148 |
+
Returns:
|
| 149 |
+
:class:`~tokenizers.models.BPE`: An instance of BPE loaded from these files
|
| 150 |
+
"""
|
| 151 |
+
pass
|
| 152 |
+
def get_trainer(self):
|
| 153 |
+
"""
|
| 154 |
+
Get the associated :class:`~tokenizers.trainers.Trainer`
|
| 155 |
+
|
| 156 |
+
Retrieve the :class:`~tokenizers.trainers.Trainer` associated to this
|
| 157 |
+
:class:`~tokenizers.models.Model`.
|
| 158 |
+
|
| 159 |
+
Returns:
|
| 160 |
+
:class:`~tokenizers.trainers.Trainer`: The Trainer used to train this model
|
| 161 |
+
"""
|
| 162 |
+
pass
|
| 163 |
+
def id_to_token(self, id):
|
| 164 |
+
"""
|
| 165 |
+
Get the token associated to an ID
|
| 166 |
+
|
| 167 |
+
Args:
|
| 168 |
+
id (:obj:`int`):
|
| 169 |
+
An ID to convert to a token
|
| 170 |
+
|
| 171 |
+
Returns:
|
| 172 |
+
:obj:`str`: The token associated to the ID
|
| 173 |
+
"""
|
| 174 |
+
pass
|
| 175 |
+
@staticmethod
|
| 176 |
+
def read_file(self, vocab, merges):
|
| 177 |
+
"""
|
| 178 |
+
Read a :obj:`vocab.json` and a :obj:`merges.txt` files
|
| 179 |
+
|
| 180 |
+
This method provides a way to read and parse the content of these files,
|
| 181 |
+
returning the relevant data structures. If you want to instantiate some BPE models
|
| 182 |
+
from memory, this method gives you the expected input from the standard files.
|
| 183 |
+
|
| 184 |
+
Args:
|
| 185 |
+
vocab (:obj:`str`):
|
| 186 |
+
The path to a :obj:`vocab.json` file
|
| 187 |
+
|
| 188 |
+
merges (:obj:`str`):
|
| 189 |
+
The path to a :obj:`merges.txt` file
|
| 190 |
+
|
| 191 |
+
Returns:
|
| 192 |
+
A :obj:`Tuple` with the vocab and the merges:
|
| 193 |
+
The vocabulary and merges loaded into memory
|
| 194 |
+
"""
|
| 195 |
+
pass
|
| 196 |
+
def save(self, folder, prefix):
|
| 197 |
+
"""
|
| 198 |
+
Save the current model
|
| 199 |
+
|
| 200 |
+
Save the current model in the given folder, using the given prefix for the various
|
| 201 |
+
files that will get created.
|
| 202 |
+
Any file with the same name that already exists in this folder will be overwritten.
|
| 203 |
+
|
| 204 |
+
Args:
|
| 205 |
+
folder (:obj:`str`):
|
| 206 |
+
The path to the target folder in which to save the various files
|
| 207 |
+
|
| 208 |
+
prefix (:obj:`str`, `optional`):
|
| 209 |
+
An optional prefix, used to prefix each file name
|
| 210 |
+
|
| 211 |
+
Returns:
|
| 212 |
+
:obj:`List[str]`: The list of saved files
|
| 213 |
+
"""
|
| 214 |
+
pass
|
| 215 |
+
def token_to_id(self, tokens):
|
| 216 |
+
"""
|
| 217 |
+
Get the ID associated to a token
|
| 218 |
+
|
| 219 |
+
Args:
|
| 220 |
+
token (:obj:`str`):
|
| 221 |
+
A token to convert to an ID
|
| 222 |
+
|
| 223 |
+
Returns:
|
| 224 |
+
:obj:`int`: The ID associated to the token
|
| 225 |
+
"""
|
| 226 |
+
pass
|
| 227 |
+
def tokenize(self, sequence):
|
| 228 |
+
"""
|
| 229 |
+
Tokenize a sequence
|
| 230 |
+
|
| 231 |
+
Args:
|
| 232 |
+
sequence (:obj:`str`):
|
| 233 |
+
A sequence to tokenize
|
| 234 |
+
|
| 235 |
+
Returns:
|
| 236 |
+
A :obj:`List` of :class:`~tokenizers.Token`: The generated tokens
|
| 237 |
+
"""
|
| 238 |
+
pass
|
| 239 |
+
|
| 240 |
+
class Unigram(Model):
|
| 241 |
+
"""
|
| 242 |
+
An implementation of the Unigram algorithm
|
| 243 |
+
|
| 244 |
+
Args:
|
| 245 |
+
vocab (:obj:`List[Tuple[str, float]]`, `optional`, `optional`):
|
| 246 |
+
A list of vocabulary items and their relative score [("am", -0.2442),...]
|
| 247 |
+
"""
|
| 248 |
+
|
| 249 |
+
def __init__(self, vocab, unk_id, byte_fallback):
|
| 250 |
+
pass
|
| 251 |
+
def get_trainer(self):
|
| 252 |
+
"""
|
| 253 |
+
Get the associated :class:`~tokenizers.trainers.Trainer`
|
| 254 |
+
|
| 255 |
+
Retrieve the :class:`~tokenizers.trainers.Trainer` associated to this
|
| 256 |
+
:class:`~tokenizers.models.Model`.
|
| 257 |
+
|
| 258 |
+
Returns:
|
| 259 |
+
:class:`~tokenizers.trainers.Trainer`: The Trainer used to train this model
|
| 260 |
+
"""
|
| 261 |
+
pass
|
| 262 |
+
def id_to_token(self, id):
|
| 263 |
+
"""
|
| 264 |
+
Get the token associated to an ID
|
| 265 |
+
|
| 266 |
+
Args:
|
| 267 |
+
id (:obj:`int`):
|
| 268 |
+
An ID to convert to a token
|
| 269 |
+
|
| 270 |
+
Returns:
|
| 271 |
+
:obj:`str`: The token associated to the ID
|
| 272 |
+
"""
|
| 273 |
+
pass
|
| 274 |
+
def save(self, folder, prefix):
|
| 275 |
+
"""
|
| 276 |
+
Save the current model
|
| 277 |
+
|
| 278 |
+
Save the current model in the given folder, using the given prefix for the various
|
| 279 |
+
files that will get created.
|
| 280 |
+
Any file with the same name that already exists in this folder will be overwritten.
|
| 281 |
+
|
| 282 |
+
Args:
|
| 283 |
+
folder (:obj:`str`):
|
| 284 |
+
The path to the target folder in which to save the various files
|
| 285 |
+
|
| 286 |
+
prefix (:obj:`str`, `optional`):
|
| 287 |
+
An optional prefix, used to prefix each file name
|
| 288 |
+
|
| 289 |
+
Returns:
|
| 290 |
+
:obj:`List[str]`: The list of saved files
|
| 291 |
+
"""
|
| 292 |
+
pass
|
| 293 |
+
def token_to_id(self, tokens):
|
| 294 |
+
"""
|
| 295 |
+
Get the ID associated to a token
|
| 296 |
+
|
| 297 |
+
Args:
|
| 298 |
+
token (:obj:`str`):
|
| 299 |
+
A token to convert to an ID
|
| 300 |
+
|
| 301 |
+
Returns:
|
| 302 |
+
:obj:`int`: The ID associated to the token
|
| 303 |
+
"""
|
| 304 |
+
pass
|
| 305 |
+
def tokenize(self, sequence):
|
| 306 |
+
"""
|
| 307 |
+
Tokenize a sequence
|
| 308 |
+
|
| 309 |
+
Args:
|
| 310 |
+
sequence (:obj:`str`):
|
| 311 |
+
A sequence to tokenize
|
| 312 |
+
|
| 313 |
+
Returns:
|
| 314 |
+
A :obj:`List` of :class:`~tokenizers.Token`: The generated tokens
|
| 315 |
+
"""
|
| 316 |
+
pass
|
| 317 |
+
|
| 318 |
+
class WordLevel(Model):
|
| 319 |
+
"""
|
| 320 |
+
An implementation of the WordLevel algorithm
|
| 321 |
+
|
| 322 |
+
Most simple tokenizer model based on mapping tokens to their corresponding id.
|
| 323 |
+
|
| 324 |
+
Args:
|
| 325 |
+
vocab (:obj:`str`, `optional`):
|
| 326 |
+
A dictionnary of string keys and their ids :obj:`{"am": 0,...}`
|
| 327 |
+
|
| 328 |
+
unk_token (:obj:`str`, `optional`):
|
| 329 |
+
The unknown token to be used by the model.
|
| 330 |
+
"""
|
| 331 |
+
|
| 332 |
+
def __init__(self, vocab, unk_token):
|
| 333 |
+
pass
|
| 334 |
+
@staticmethod
|
| 335 |
+
def from_file(vocab, unk_token):
|
| 336 |
+
"""
|
| 337 |
+
Instantiate a WordLevel model from the given file
|
| 338 |
+
|
| 339 |
+
This method is roughly equivalent to doing::
|
| 340 |
+
|
| 341 |
+
vocab = WordLevel.read_file(vocab_filename)
|
| 342 |
+
wordlevel = WordLevel(vocab)
|
| 343 |
+
|
| 344 |
+
If you don't need to keep the :obj:`vocab` values lying around, this method is
|
| 345 |
+
more optimized than manually calling :meth:`~tokenizers.models.WordLevel.read_file` to
|
| 346 |
+
initialize a :class:`~tokenizers.models.WordLevel`
|
| 347 |
+
|
| 348 |
+
Args:
|
| 349 |
+
vocab (:obj:`str`):
|
| 350 |
+
The path to a :obj:`vocab.json` file
|
| 351 |
+
|
| 352 |
+
Returns:
|
| 353 |
+
:class:`~tokenizers.models.WordLevel`: An instance of WordLevel loaded from file
|
| 354 |
+
"""
|
| 355 |
+
pass
|
| 356 |
+
def get_trainer(self):
|
| 357 |
+
"""
|
| 358 |
+
Get the associated :class:`~tokenizers.trainers.Trainer`
|
| 359 |
+
|
| 360 |
+
Retrieve the :class:`~tokenizers.trainers.Trainer` associated to this
|
| 361 |
+
:class:`~tokenizers.models.Model`.
|
| 362 |
+
|
| 363 |
+
Returns:
|
| 364 |
+
:class:`~tokenizers.trainers.Trainer`: The Trainer used to train this model
|
| 365 |
+
"""
|
| 366 |
+
pass
|
| 367 |
+
def id_to_token(self, id):
|
| 368 |
+
"""
|
| 369 |
+
Get the token associated to an ID
|
| 370 |
+
|
| 371 |
+
Args:
|
| 372 |
+
id (:obj:`int`):
|
| 373 |
+
An ID to convert to a token
|
| 374 |
+
|
| 375 |
+
Returns:
|
| 376 |
+
:obj:`str`: The token associated to the ID
|
| 377 |
+
"""
|
| 378 |
+
pass
|
| 379 |
+
@staticmethod
|
| 380 |
+
def read_file(vocab):
|
| 381 |
+
"""
|
| 382 |
+
Read a :obj:`vocab.json`
|
| 383 |
+
|
| 384 |
+
This method provides a way to read and parse the content of a vocabulary file,
|
| 385 |
+
returning the relevant data structures. If you want to instantiate some WordLevel models
|
| 386 |
+
from memory, this method gives you the expected input from the standard files.
|
| 387 |
+
|
| 388 |
+
Args:
|
| 389 |
+
vocab (:obj:`str`):
|
| 390 |
+
The path to a :obj:`vocab.json` file
|
| 391 |
+
|
| 392 |
+
Returns:
|
| 393 |
+
:obj:`Dict[str, int]`: The vocabulary as a :obj:`dict`
|
| 394 |
+
"""
|
| 395 |
+
pass
|
| 396 |
+
def save(self, folder, prefix):
|
| 397 |
+
"""
|
| 398 |
+
Save the current model
|
| 399 |
+
|
| 400 |
+
Save the current model in the given folder, using the given prefix for the various
|
| 401 |
+
files that will get created.
|
| 402 |
+
Any file with the same name that already exists in this folder will be overwritten.
|
| 403 |
+
|
| 404 |
+
Args:
|
| 405 |
+
folder (:obj:`str`):
|
| 406 |
+
The path to the target folder in which to save the various files
|
| 407 |
+
|
| 408 |
+
prefix (:obj:`str`, `optional`):
|
| 409 |
+
An optional prefix, used to prefix each file name
|
| 410 |
+
|
| 411 |
+
Returns:
|
| 412 |
+
:obj:`List[str]`: The list of saved files
|
| 413 |
+
"""
|
| 414 |
+
pass
|
| 415 |
+
def token_to_id(self, tokens):
|
| 416 |
+
"""
|
| 417 |
+
Get the ID associated to a token
|
| 418 |
+
|
| 419 |
+
Args:
|
| 420 |
+
token (:obj:`str`):
|
| 421 |
+
A token to convert to an ID
|
| 422 |
+
|
| 423 |
+
Returns:
|
| 424 |
+
:obj:`int`: The ID associated to the token
|
| 425 |
+
"""
|
| 426 |
+
pass
|
| 427 |
+
def tokenize(self, sequence):
|
| 428 |
+
"""
|
| 429 |
+
Tokenize a sequence
|
| 430 |
+
|
| 431 |
+
Args:
|
| 432 |
+
sequence (:obj:`str`):
|
| 433 |
+
A sequence to tokenize
|
| 434 |
+
|
| 435 |
+
Returns:
|
| 436 |
+
A :obj:`List` of :class:`~tokenizers.Token`: The generated tokens
|
| 437 |
+
"""
|
| 438 |
+
pass
|
| 439 |
+
|
| 440 |
+
class WordPiece(Model):
|
| 441 |
+
"""
|
| 442 |
+
An implementation of the WordPiece algorithm
|
| 443 |
+
|
| 444 |
+
Args:
|
| 445 |
+
vocab (:obj:`Dict[str, int]`, `optional`):
|
| 446 |
+
A dictionnary of string keys and their ids :obj:`{"am": 0,...}`
|
| 447 |
+
|
| 448 |
+
unk_token (:obj:`str`, `optional`):
|
| 449 |
+
The unknown token to be used by the model.
|
| 450 |
+
|
| 451 |
+
max_input_chars_per_word (:obj:`int`, `optional`):
|
| 452 |
+
The maximum number of characters to authorize in a single word.
|
| 453 |
+
"""
|
| 454 |
+
|
| 455 |
+
def __init__(self, vocab, unk_token, max_input_chars_per_word):
|
| 456 |
+
pass
|
| 457 |
+
@staticmethod
|
| 458 |
+
def from_file(vocab, **kwargs):
|
| 459 |
+
"""
|
| 460 |
+
Instantiate a WordPiece model from the given file
|
| 461 |
+
|
| 462 |
+
This method is roughly equivalent to doing::
|
| 463 |
+
|
| 464 |
+
vocab = WordPiece.read_file(vocab_filename)
|
| 465 |
+
wordpiece = WordPiece(vocab)
|
| 466 |
+
|
| 467 |
+
If you don't need to keep the :obj:`vocab` values lying around, this method is
|
| 468 |
+
more optimized than manually calling :meth:`~tokenizers.models.WordPiece.read_file` to
|
| 469 |
+
initialize a :class:`~tokenizers.models.WordPiece`
|
| 470 |
+
|
| 471 |
+
Args:
|
| 472 |
+
vocab (:obj:`str`):
|
| 473 |
+
The path to a :obj:`vocab.txt` file
|
| 474 |
+
|
| 475 |
+
Returns:
|
| 476 |
+
:class:`~tokenizers.models.WordPiece`: An instance of WordPiece loaded from file
|
| 477 |
+
"""
|
| 478 |
+
pass
|
| 479 |
+
def get_trainer(self):
|
| 480 |
+
"""
|
| 481 |
+
Get the associated :class:`~tokenizers.trainers.Trainer`
|
| 482 |
+
|
| 483 |
+
Retrieve the :class:`~tokenizers.trainers.Trainer` associated to this
|
| 484 |
+
:class:`~tokenizers.models.Model`.
|
| 485 |
+
|
| 486 |
+
Returns:
|
| 487 |
+
:class:`~tokenizers.trainers.Trainer`: The Trainer used to train this model
|
| 488 |
+
"""
|
| 489 |
+
pass
|
| 490 |
+
def id_to_token(self, id):
|
| 491 |
+
"""
|
| 492 |
+
Get the token associated to an ID
|
| 493 |
+
|
| 494 |
+
Args:
|
| 495 |
+
id (:obj:`int`):
|
| 496 |
+
An ID to convert to a token
|
| 497 |
+
|
| 498 |
+
Returns:
|
| 499 |
+
:obj:`str`: The token associated to the ID
|
| 500 |
+
"""
|
| 501 |
+
pass
|
| 502 |
+
@staticmethod
|
| 503 |
+
def read_file(vocab):
|
| 504 |
+
"""
|
| 505 |
+
Read a :obj:`vocab.txt` file
|
| 506 |
+
|
| 507 |
+
This method provides a way to read and parse the content of a standard `vocab.txt`
|
| 508 |
+
file as used by the WordPiece Model, returning the relevant data structures. If you
|
| 509 |
+
want to instantiate some WordPiece models from memory, this method gives you the
|
| 510 |
+
expected input from the standard files.
|
| 511 |
+
|
| 512 |
+
Args:
|
| 513 |
+
vocab (:obj:`str`):
|
| 514 |
+
The path to a :obj:`vocab.txt` file
|
| 515 |
+
|
| 516 |
+
Returns:
|
| 517 |
+
:obj:`Dict[str, int]`: The vocabulary as a :obj:`dict`
|
| 518 |
+
"""
|
| 519 |
+
pass
|
| 520 |
+
def save(self, folder, prefix):
|
| 521 |
+
"""
|
| 522 |
+
Save the current model
|
| 523 |
+
|
| 524 |
+
Save the current model in the given folder, using the given prefix for the various
|
| 525 |
+
files that will get created.
|
| 526 |
+
Any file with the same name that already exists in this folder will be overwritten.
|
| 527 |
+
|
| 528 |
+
Args:
|
| 529 |
+
folder (:obj:`str`):
|
| 530 |
+
The path to the target folder in which to save the various files
|
| 531 |
+
|
| 532 |
+
prefix (:obj:`str`, `optional`):
|
| 533 |
+
An optional prefix, used to prefix each file name
|
| 534 |
+
|
| 535 |
+
Returns:
|
| 536 |
+
:obj:`List[str]`: The list of saved files
|
| 537 |
+
"""
|
| 538 |
+
pass
|
| 539 |
+
def token_to_id(self, tokens):
|
| 540 |
+
"""
|
| 541 |
+
Get the ID associated to a token
|
| 542 |
+
|
| 543 |
+
Args:
|
| 544 |
+
token (:obj:`str`):
|
| 545 |
+
A token to convert to an ID
|
| 546 |
+
|
| 547 |
+
Returns:
|
| 548 |
+
:obj:`int`: The ID associated to the token
|
| 549 |
+
"""
|
| 550 |
+
pass
|
| 551 |
+
def tokenize(self, sequence):
|
| 552 |
+
"""
|
| 553 |
+
Tokenize a sequence
|
| 554 |
+
|
| 555 |
+
Args:
|
| 556 |
+
sequence (:obj:`str`):
|
| 557 |
+
A sequence to tokenize
|
| 558 |
+
|
| 559 |
+
Returns:
|
| 560 |
+
A :obj:`List` of :class:`~tokenizers.Token`: The generated tokens
|
| 561 |
+
"""
|
| 562 |
+
pass
|
minigpt2/lib/python3.10/site-packages/tokenizers/models/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (285 Bytes). View file
|
|
|