ZTWHHH commited on
Commit
d16d52d
·
verified ·
1 Parent(s): c026771

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +4 -0
  2. evalkit_llava/bin/xzcat +3 -0
  3. evalkit_llava/lib/libtinfo.so.6.4 +3 -0
  4. evalkit_llava/lib/libz.so.1.2.13 +3 -0
  5. evalkit_llava/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc +0 -0
  6. evalkit_llava/lib/python3.10/site-packages/pip/__pycache__/__pip-runner__.cpython-310.pyc +0 -0
  7. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-310.pyc +0 -0
  8. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-310.pyc +0 -0
  9. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/cacert.pem +0 -0
  10. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/core.py +114 -0
  11. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/py.typed +0 -0
  12. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distlib/w64-arm.exe +3 -0
  13. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py +0 -0
  14. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-310.pyc +0 -0
  15. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-310.pyc +0 -0
  16. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/__pycache__/_api.cpython-310.pyc +0 -0
  17. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/__pycache__/_windows.cpython-310.pyc +0 -0
  18. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/_api.py +316 -0
  19. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/_macos.py +571 -0
  20. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/__init__.py +102 -0
  21. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc +0 -0
  22. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_collections.py +355 -0
  23. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_version.py +2 -0
  24. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/connection.py +572 -0
  25. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py +0 -0
  26. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc +0 -0
  27. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc +0 -0
  28. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc +0 -0
  29. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc +0 -0
  30. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc +0 -0
  31. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +36 -0
  32. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/appengine.py +314 -0
  33. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py +130 -0
  34. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py +518 -0
  35. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/securetransport.py +920 -0
  36. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/socks.py +216 -0
  37. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/exceptions.py +323 -0
  38. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/filepost.py +98 -0
  39. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py +0 -0
  40. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/poolmanager.py +540 -0
  41. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/request.py +191 -0
  42. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/response.py +879 -0
  43. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__init__.py +49 -0
  44. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-310.pyc +0 -0
  45. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-310.pyc +0 -0
  46. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-310.pyc +0 -0
  47. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-310.pyc +0 -0
  48. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-310.pyc +0 -0
  49. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-310.pyc +0 -0
  50. evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -79,3 +79,7 @@ evalkit_llava/bin/openssl filter=lfs diff=lfs merge=lfs -text
79
  evalkit_llava/lib/liblzma.so filter=lfs diff=lfs merge=lfs -text
80
  evalkit_llava/lib/libssl.so filter=lfs diff=lfs merge=lfs -text
81
  evalkit_llava/lib/libz.so filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
79
  evalkit_llava/lib/liblzma.so filter=lfs diff=lfs merge=lfs -text
80
  evalkit_llava/lib/libssl.so filter=lfs diff=lfs merge=lfs -text
81
  evalkit_llava/lib/libz.so filter=lfs diff=lfs merge=lfs -text
82
+ evalkit_llava/lib/libtinfo.so.6.4 filter=lfs diff=lfs merge=lfs -text
83
+ evalkit_llava/bin/xzcat filter=lfs diff=lfs merge=lfs -text
84
+ evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distlib/w64-arm.exe filter=lfs diff=lfs merge=lfs -text
85
+ evalkit_llava/lib/libz.so.1.2.13 filter=lfs diff=lfs merge=lfs -text
evalkit_llava/bin/xzcat ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5cc86d36933372b94af4bd9ed22ad711f57b4e16175675627edcd4cb9ea46a61
3
+ size 108336
evalkit_llava/lib/libtinfo.so.6.4 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:51494fcb866849b976d106c411dbad65e42a659567e383a4f3c5edf2ebe63506
3
+ size 287080
evalkit_llava/lib/libz.so.1.2.13 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0b0e682a9dc7fd4895a6783288f851b793dc89633f28714027974fa4d66f3914
3
+ size 124744
evalkit_llava/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (618 Bytes). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/__pycache__/__pip-runner__.cpython-310.pyc ADDED
Binary file (1.62 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (291 Bytes). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/__pycache__/core.cpython-310.pyc ADDED
Binary file (2.13 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/cacert.pem ADDED
The diff for this file is too large to render. See raw diff
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/core.py ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ certifi.py
3
+ ~~~~~~~~~~
4
+
5
+ This module returns the installation location of cacert.pem or its contents.
6
+ """
7
+ import sys
8
+ import atexit
9
+
10
+ def exit_cacert_ctx() -> None:
11
+ _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr]
12
+
13
+
14
+ if sys.version_info >= (3, 11):
15
+
16
+ from importlib.resources import as_file, files
17
+
18
+ _CACERT_CTX = None
19
+ _CACERT_PATH = None
20
+
21
+ def where() -> str:
22
+ # This is slightly terrible, but we want to delay extracting the file
23
+ # in cases where we're inside of a zipimport situation until someone
24
+ # actually calls where(), but we don't want to re-extract the file
25
+ # on every call of where(), so we'll do it once then store it in a
26
+ # global variable.
27
+ global _CACERT_CTX
28
+ global _CACERT_PATH
29
+ if _CACERT_PATH is None:
30
+ # This is slightly janky, the importlib.resources API wants you to
31
+ # manage the cleanup of this file, so it doesn't actually return a
32
+ # path, it returns a context manager that will give you the path
33
+ # when you enter it and will do any cleanup when you leave it. In
34
+ # the common case of not needing a temporary file, it will just
35
+ # return the file system location and the __exit__() is a no-op.
36
+ #
37
+ # We also have to hold onto the actual context manager, because
38
+ # it will do the cleanup whenever it gets garbage collected, so
39
+ # we will also store that at the global level as well.
40
+ _CACERT_CTX = as_file(files("pip._vendor.certifi").joinpath("cacert.pem"))
41
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
42
+ atexit.register(exit_cacert_ctx)
43
+
44
+ return _CACERT_PATH
45
+
46
+ def contents() -> str:
47
+ return files("pip._vendor.certifi").joinpath("cacert.pem").read_text(encoding="ascii")
48
+
49
+ elif sys.version_info >= (3, 7):
50
+
51
+ from importlib.resources import path as get_path, read_text
52
+
53
+ _CACERT_CTX = None
54
+ _CACERT_PATH = None
55
+
56
+ def where() -> str:
57
+ # This is slightly terrible, but we want to delay extracting the
58
+ # file in cases where we're inside of a zipimport situation until
59
+ # someone actually calls where(), but we don't want to re-extract
60
+ # the file on every call of where(), so we'll do it once then store
61
+ # it in a global variable.
62
+ global _CACERT_CTX
63
+ global _CACERT_PATH
64
+ if _CACERT_PATH is None:
65
+ # This is slightly janky, the importlib.resources API wants you
66
+ # to manage the cleanup of this file, so it doesn't actually
67
+ # return a path, it returns a context manager that will give
68
+ # you the path when you enter it and will do any cleanup when
69
+ # you leave it. In the common case of not needing a temporary
70
+ # file, it will just return the file system location and the
71
+ # __exit__() is a no-op.
72
+ #
73
+ # We also have to hold onto the actual context manager, because
74
+ # it will do the cleanup whenever it gets garbage collected, so
75
+ # we will also store that at the global level as well.
76
+ _CACERT_CTX = get_path("pip._vendor.certifi", "cacert.pem")
77
+ _CACERT_PATH = str(_CACERT_CTX.__enter__())
78
+ atexit.register(exit_cacert_ctx)
79
+
80
+ return _CACERT_PATH
81
+
82
+ def contents() -> str:
83
+ return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii")
84
+
85
+ else:
86
+ import os
87
+ import types
88
+ from typing import Union
89
+
90
+ Package = Union[types.ModuleType, str]
91
+ Resource = Union[str, "os.PathLike"]
92
+
93
+ # This fallback will work for Python versions prior to 3.7 that lack the
94
+ # importlib.resources module but relies on the existing `where` function
95
+ # so won't address issues with environments like PyOxidizer that don't set
96
+ # __file__ on modules.
97
+ def read_text(
98
+ package: Package,
99
+ resource: Resource,
100
+ encoding: str = 'utf-8',
101
+ errors: str = 'strict'
102
+ ) -> str:
103
+ with open(where(), encoding=encoding) as data:
104
+ return data.read()
105
+
106
+ # If we don't have importlib.resources, then we will just do the old logic
107
+ # of assuming we're on the filesystem and munge the path directly.
108
+ def where() -> str:
109
+ f = os.path.dirname(__file__)
110
+
111
+ return os.path.join(f, "cacert.pem")
112
+
113
+ def contents() -> str:
114
+ return read_text("pip._vendor.certifi", "cacert.pem", encoding="ascii")
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/certifi/py.typed ADDED
File without changes
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/distlib/w64-arm.exe ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5dc9884a8f458371550e09bd396e5418bf375820a31b9899f6499bf391c7b2e
3
+ size 168448
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py ADDED
The diff for this file is too large to render. See raw diff
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/__pycache__/console.cpython-310.pyc ADDED
Binary file (1.88 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/pygments/styles/__pycache__/_mapping.cpython-310.pyc ADDED
Binary file (3.3 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/__pycache__/_api.cpython-310.pyc ADDED
Binary file (10.4 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/__pycache__/_windows.cpython-310.pyc ADDED
Binary file (10.6 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/_api.py ADDED
@@ -0,0 +1,316 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import platform
3
+ import socket
4
+ import ssl
5
+ import sys
6
+ import typing
7
+
8
+ import _ssl # type: ignore[import-not-found]
9
+
10
+ from ._ssl_constants import (
11
+ _original_SSLContext,
12
+ _original_super_SSLContext,
13
+ _truststore_SSLContext_dunder_class,
14
+ _truststore_SSLContext_super_class,
15
+ )
16
+
17
+ if platform.system() == "Windows":
18
+ from ._windows import _configure_context, _verify_peercerts_impl
19
+ elif platform.system() == "Darwin":
20
+ from ._macos import _configure_context, _verify_peercerts_impl
21
+ else:
22
+ from ._openssl import _configure_context, _verify_peercerts_impl
23
+
24
+ if typing.TYPE_CHECKING:
25
+ from pip._vendor.typing_extensions import Buffer
26
+
27
+ # From typeshed/stdlib/ssl.pyi
28
+ _StrOrBytesPath: typing.TypeAlias = str | bytes | os.PathLike[str] | os.PathLike[bytes]
29
+ _PasswordType: typing.TypeAlias = str | bytes | typing.Callable[[], str | bytes]
30
+
31
+
32
+ def inject_into_ssl() -> None:
33
+ """Injects the :class:`truststore.SSLContext` into the ``ssl``
34
+ module by replacing :class:`ssl.SSLContext`.
35
+ """
36
+ setattr(ssl, "SSLContext", SSLContext)
37
+ # urllib3 holds on to its own reference of ssl.SSLContext
38
+ # so we need to replace that reference too.
39
+ try:
40
+ import pip._vendor.urllib3.util.ssl_ as urllib3_ssl
41
+
42
+ setattr(urllib3_ssl, "SSLContext", SSLContext)
43
+ except ImportError:
44
+ pass
45
+
46
+
47
+ def extract_from_ssl() -> None:
48
+ """Restores the :class:`ssl.SSLContext` class to its original state"""
49
+ setattr(ssl, "SSLContext", _original_SSLContext)
50
+ try:
51
+ import pip._vendor.urllib3.util.ssl_ as urllib3_ssl
52
+
53
+ urllib3_ssl.SSLContext = _original_SSLContext # type: ignore[assignment]
54
+ except ImportError:
55
+ pass
56
+
57
+
58
+ class SSLContext(_truststore_SSLContext_super_class): # type: ignore[misc]
59
+ """SSLContext API that uses system certificates on all platforms"""
60
+
61
+ @property # type: ignore[misc]
62
+ def __class__(self) -> type:
63
+ # Dirty hack to get around isinstance() checks
64
+ # for ssl.SSLContext instances in aiohttp/trustme
65
+ # when using non-CPython implementations.
66
+ return _truststore_SSLContext_dunder_class or SSLContext
67
+
68
+ def __init__(self, protocol: int = None) -> None: # type: ignore[assignment]
69
+ self._ctx = _original_SSLContext(protocol)
70
+
71
+ class TruststoreSSLObject(ssl.SSLObject):
72
+ # This object exists because wrap_bio() doesn't
73
+ # immediately do the handshake so we need to do
74
+ # certificate verifications after SSLObject.do_handshake()
75
+
76
+ def do_handshake(self) -> None:
77
+ ret = super().do_handshake()
78
+ _verify_peercerts(self, server_hostname=self.server_hostname)
79
+ return ret
80
+
81
+ self._ctx.sslobject_class = TruststoreSSLObject
82
+
83
+ def wrap_socket(
84
+ self,
85
+ sock: socket.socket,
86
+ server_side: bool = False,
87
+ do_handshake_on_connect: bool = True,
88
+ suppress_ragged_eofs: bool = True,
89
+ server_hostname: str | None = None,
90
+ session: ssl.SSLSession | None = None,
91
+ ) -> ssl.SSLSocket:
92
+ # Use a context manager here because the
93
+ # inner SSLContext holds on to our state
94
+ # but also does the actual handshake.
95
+ with _configure_context(self._ctx):
96
+ ssl_sock = self._ctx.wrap_socket(
97
+ sock,
98
+ server_side=server_side,
99
+ server_hostname=server_hostname,
100
+ do_handshake_on_connect=do_handshake_on_connect,
101
+ suppress_ragged_eofs=suppress_ragged_eofs,
102
+ session=session,
103
+ )
104
+ try:
105
+ _verify_peercerts(ssl_sock, server_hostname=server_hostname)
106
+ except Exception:
107
+ ssl_sock.close()
108
+ raise
109
+ return ssl_sock
110
+
111
+ def wrap_bio(
112
+ self,
113
+ incoming: ssl.MemoryBIO,
114
+ outgoing: ssl.MemoryBIO,
115
+ server_side: bool = False,
116
+ server_hostname: str | None = None,
117
+ session: ssl.SSLSession | None = None,
118
+ ) -> ssl.SSLObject:
119
+ with _configure_context(self._ctx):
120
+ ssl_obj = self._ctx.wrap_bio(
121
+ incoming,
122
+ outgoing,
123
+ server_hostname=server_hostname,
124
+ server_side=server_side,
125
+ session=session,
126
+ )
127
+ return ssl_obj
128
+
129
+ def load_verify_locations(
130
+ self,
131
+ cafile: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None,
132
+ capath: str | bytes | os.PathLike[str] | os.PathLike[bytes] | None = None,
133
+ cadata: typing.Union[str, "Buffer", None] = None,
134
+ ) -> None:
135
+ return self._ctx.load_verify_locations(
136
+ cafile=cafile, capath=capath, cadata=cadata
137
+ )
138
+
139
+ def load_cert_chain(
140
+ self,
141
+ certfile: _StrOrBytesPath,
142
+ keyfile: _StrOrBytesPath | None = None,
143
+ password: _PasswordType | None = None,
144
+ ) -> None:
145
+ return self._ctx.load_cert_chain(
146
+ certfile=certfile, keyfile=keyfile, password=password
147
+ )
148
+
149
+ def load_default_certs(
150
+ self, purpose: ssl.Purpose = ssl.Purpose.SERVER_AUTH
151
+ ) -> None:
152
+ return self._ctx.load_default_certs(purpose)
153
+
154
+ def set_alpn_protocols(self, alpn_protocols: typing.Iterable[str]) -> None:
155
+ return self._ctx.set_alpn_protocols(alpn_protocols)
156
+
157
+ def set_npn_protocols(self, npn_protocols: typing.Iterable[str]) -> None:
158
+ return self._ctx.set_npn_protocols(npn_protocols)
159
+
160
+ def set_ciphers(self, __cipherlist: str) -> None:
161
+ return self._ctx.set_ciphers(__cipherlist)
162
+
163
+ def get_ciphers(self) -> typing.Any:
164
+ return self._ctx.get_ciphers()
165
+
166
+ def session_stats(self) -> dict[str, int]:
167
+ return self._ctx.session_stats()
168
+
169
+ def cert_store_stats(self) -> dict[str, int]:
170
+ raise NotImplementedError()
171
+
172
+ def set_default_verify_paths(self) -> None:
173
+ self._ctx.set_default_verify_paths()
174
+
175
+ @typing.overload
176
+ def get_ca_certs(
177
+ self, binary_form: typing.Literal[False] = ...
178
+ ) -> list[typing.Any]: ...
179
+
180
+ @typing.overload
181
+ def get_ca_certs(self, binary_form: typing.Literal[True] = ...) -> list[bytes]: ...
182
+
183
+ @typing.overload
184
+ def get_ca_certs(self, binary_form: bool = ...) -> typing.Any: ...
185
+
186
+ def get_ca_certs(self, binary_form: bool = False) -> list[typing.Any] | list[bytes]:
187
+ raise NotImplementedError()
188
+
189
+ @property
190
+ def check_hostname(self) -> bool:
191
+ return self._ctx.check_hostname
192
+
193
+ @check_hostname.setter
194
+ def check_hostname(self, value: bool) -> None:
195
+ self._ctx.check_hostname = value
196
+
197
+ @property
198
+ def hostname_checks_common_name(self) -> bool:
199
+ return self._ctx.hostname_checks_common_name
200
+
201
+ @hostname_checks_common_name.setter
202
+ def hostname_checks_common_name(self, value: bool) -> None:
203
+ self._ctx.hostname_checks_common_name = value
204
+
205
+ @property
206
+ def keylog_filename(self) -> str:
207
+ return self._ctx.keylog_filename
208
+
209
+ @keylog_filename.setter
210
+ def keylog_filename(self, value: str) -> None:
211
+ self._ctx.keylog_filename = value
212
+
213
+ @property
214
+ def maximum_version(self) -> ssl.TLSVersion:
215
+ return self._ctx.maximum_version
216
+
217
+ @maximum_version.setter
218
+ def maximum_version(self, value: ssl.TLSVersion) -> None:
219
+ _original_super_SSLContext.maximum_version.__set__( # type: ignore[attr-defined]
220
+ self._ctx, value
221
+ )
222
+
223
+ @property
224
+ def minimum_version(self) -> ssl.TLSVersion:
225
+ return self._ctx.minimum_version
226
+
227
+ @minimum_version.setter
228
+ def minimum_version(self, value: ssl.TLSVersion) -> None:
229
+ _original_super_SSLContext.minimum_version.__set__( # type: ignore[attr-defined]
230
+ self._ctx, value
231
+ )
232
+
233
+ @property
234
+ def options(self) -> ssl.Options:
235
+ return self._ctx.options
236
+
237
+ @options.setter
238
+ def options(self, value: ssl.Options) -> None:
239
+ _original_super_SSLContext.options.__set__( # type: ignore[attr-defined]
240
+ self._ctx, value
241
+ )
242
+
243
+ @property
244
+ def post_handshake_auth(self) -> bool:
245
+ return self._ctx.post_handshake_auth
246
+
247
+ @post_handshake_auth.setter
248
+ def post_handshake_auth(self, value: bool) -> None:
249
+ self._ctx.post_handshake_auth = value
250
+
251
+ @property
252
+ def protocol(self) -> ssl._SSLMethod:
253
+ return self._ctx.protocol
254
+
255
+ @property
256
+ def security_level(self) -> int:
257
+ return self._ctx.security_level
258
+
259
+ @property
260
+ def verify_flags(self) -> ssl.VerifyFlags:
261
+ return self._ctx.verify_flags
262
+
263
+ @verify_flags.setter
264
+ def verify_flags(self, value: ssl.VerifyFlags) -> None:
265
+ _original_super_SSLContext.verify_flags.__set__( # type: ignore[attr-defined]
266
+ self._ctx, value
267
+ )
268
+
269
+ @property
270
+ def verify_mode(self) -> ssl.VerifyMode:
271
+ return self._ctx.verify_mode
272
+
273
+ @verify_mode.setter
274
+ def verify_mode(self, value: ssl.VerifyMode) -> None:
275
+ _original_super_SSLContext.verify_mode.__set__( # type: ignore[attr-defined]
276
+ self._ctx, value
277
+ )
278
+
279
+
280
+ # Python 3.13+ makes get_unverified_chain() a public API that only returns DER
281
+ # encoded certificates. We detect whether we need to call public_bytes() for 3.10->3.12
282
+ # Pre-3.13 returned None instead of an empty list from get_unverified_chain()
283
+ if sys.version_info >= (3, 13):
284
+
285
+ def _get_unverified_chain_bytes(sslobj: ssl.SSLObject) -> list[bytes]:
286
+ unverified_chain = sslobj.get_unverified_chain() or () # type: ignore[attr-defined]
287
+ return [
288
+ cert if isinstance(cert, bytes) else cert.public_bytes(_ssl.ENCODING_DER)
289
+ for cert in unverified_chain
290
+ ]
291
+
292
+ else:
293
+
294
+ def _get_unverified_chain_bytes(sslobj: ssl.SSLObject) -> list[bytes]:
295
+ unverified_chain = sslobj.get_unverified_chain() or () # type: ignore[attr-defined]
296
+ return [cert.public_bytes(_ssl.ENCODING_DER) for cert in unverified_chain]
297
+
298
+
299
+ def _verify_peercerts(
300
+ sock_or_sslobj: ssl.SSLSocket | ssl.SSLObject, server_hostname: str | None
301
+ ) -> None:
302
+ """
303
+ Verifies the peer certificates from an SSLSocket or SSLObject
304
+ against the certificates in the OS trust store.
305
+ """
306
+ sslobj: ssl.SSLObject = sock_or_sslobj # type: ignore[assignment]
307
+ try:
308
+ while not hasattr(sslobj, "get_unverified_chain"):
309
+ sslobj = sslobj._sslobj # type: ignore[attr-defined]
310
+ except AttributeError:
311
+ pass
312
+
313
+ cert_bytes = _get_unverified_chain_bytes(sslobj)
314
+ _verify_peercerts_impl(
315
+ sock_or_sslobj.context, cert_bytes, server_hostname=server_hostname
316
+ )
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/truststore/_macos.py ADDED
@@ -0,0 +1,571 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import ctypes
3
+ import platform
4
+ import ssl
5
+ import typing
6
+ from ctypes import (
7
+ CDLL,
8
+ POINTER,
9
+ c_bool,
10
+ c_char_p,
11
+ c_int32,
12
+ c_long,
13
+ c_uint32,
14
+ c_ulong,
15
+ c_void_p,
16
+ )
17
+ from ctypes.util import find_library
18
+
19
+ from ._ssl_constants import _set_ssl_context_verify_mode
20
+
21
+ _mac_version = platform.mac_ver()[0]
22
+ _mac_version_info = tuple(map(int, _mac_version.split(".")))
23
+ if _mac_version_info < (10, 8):
24
+ raise ImportError(
25
+ f"Only OS X 10.8 and newer are supported, not {_mac_version_info[0]}.{_mac_version_info[1]}"
26
+ )
27
+
28
+ _is_macos_version_10_14_or_later = _mac_version_info >= (10, 14)
29
+
30
+
31
+ def _load_cdll(name: str, macos10_16_path: str) -> CDLL:
32
+ """Loads a CDLL by name, falling back to known path on 10.16+"""
33
+ try:
34
+ # Big Sur is technically 11 but we use 10.16 due to the Big Sur
35
+ # beta being labeled as 10.16.
36
+ path: str | None
37
+ if _mac_version_info >= (10, 16):
38
+ path = macos10_16_path
39
+ else:
40
+ path = find_library(name)
41
+ if not path:
42
+ raise OSError # Caught and reraised as 'ImportError'
43
+ return CDLL(path, use_errno=True)
44
+ except OSError:
45
+ raise ImportError(f"The library {name} failed to load") from None
46
+
47
+
48
+ Security = _load_cdll(
49
+ "Security", "/System/Library/Frameworks/Security.framework/Security"
50
+ )
51
+ CoreFoundation = _load_cdll(
52
+ "CoreFoundation",
53
+ "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
54
+ )
55
+
56
+ Boolean = c_bool
57
+ CFIndex = c_long
58
+ CFStringEncoding = c_uint32
59
+ CFData = c_void_p
60
+ CFString = c_void_p
61
+ CFArray = c_void_p
62
+ CFMutableArray = c_void_p
63
+ CFError = c_void_p
64
+ CFType = c_void_p
65
+ CFTypeID = c_ulong
66
+ CFTypeRef = POINTER(CFType)
67
+ CFAllocatorRef = c_void_p
68
+
69
+ OSStatus = c_int32
70
+
71
+ CFErrorRef = POINTER(CFError)
72
+ CFDataRef = POINTER(CFData)
73
+ CFStringRef = POINTER(CFString)
74
+ CFArrayRef = POINTER(CFArray)
75
+ CFMutableArrayRef = POINTER(CFMutableArray)
76
+ CFArrayCallBacks = c_void_p
77
+ CFOptionFlags = c_uint32
78
+
79
+ SecCertificateRef = POINTER(c_void_p)
80
+ SecPolicyRef = POINTER(c_void_p)
81
+ SecTrustRef = POINTER(c_void_p)
82
+ SecTrustResultType = c_uint32
83
+ SecTrustOptionFlags = c_uint32
84
+
85
+ try:
86
+ Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
87
+ Security.SecCertificateCreateWithData.restype = SecCertificateRef
88
+
89
+ Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
90
+ Security.SecCertificateCopyData.restype = CFDataRef
91
+
92
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
93
+ Security.SecCopyErrorMessageString.restype = CFStringRef
94
+
95
+ Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
96
+ Security.SecTrustSetAnchorCertificates.restype = OSStatus
97
+
98
+ Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
99
+ Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
100
+
101
+ Security.SecPolicyCreateRevocation.argtypes = [CFOptionFlags]
102
+ Security.SecPolicyCreateRevocation.restype = SecPolicyRef
103
+
104
+ Security.SecPolicyCreateSSL.argtypes = [Boolean, CFStringRef]
105
+ Security.SecPolicyCreateSSL.restype = SecPolicyRef
106
+
107
+ Security.SecTrustCreateWithCertificates.argtypes = [
108
+ CFTypeRef,
109
+ CFTypeRef,
110
+ POINTER(SecTrustRef),
111
+ ]
112
+ Security.SecTrustCreateWithCertificates.restype = OSStatus
113
+
114
+ Security.SecTrustGetTrustResult.argtypes = [
115
+ SecTrustRef,
116
+ POINTER(SecTrustResultType),
117
+ ]
118
+ Security.SecTrustGetTrustResult.restype = OSStatus
119
+
120
+ Security.SecTrustEvaluate.argtypes = [
121
+ SecTrustRef,
122
+ POINTER(SecTrustResultType),
123
+ ]
124
+ Security.SecTrustEvaluate.restype = OSStatus
125
+
126
+ Security.SecTrustRef = SecTrustRef # type: ignore[attr-defined]
127
+ Security.SecTrustResultType = SecTrustResultType # type: ignore[attr-defined]
128
+ Security.OSStatus = OSStatus # type: ignore[attr-defined]
129
+
130
+ kSecRevocationUseAnyAvailableMethod = 3
131
+ kSecRevocationRequirePositiveResponse = 8
132
+
133
+ CoreFoundation.CFRelease.argtypes = [CFTypeRef]
134
+ CoreFoundation.CFRelease.restype = None
135
+
136
+ CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
137
+ CoreFoundation.CFGetTypeID.restype = CFTypeID
138
+
139
+ CoreFoundation.CFStringCreateWithCString.argtypes = [
140
+ CFAllocatorRef,
141
+ c_char_p,
142
+ CFStringEncoding,
143
+ ]
144
+ CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
145
+
146
+ CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
147
+ CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
148
+
149
+ CoreFoundation.CFStringGetCString.argtypes = [
150
+ CFStringRef,
151
+ c_char_p,
152
+ CFIndex,
153
+ CFStringEncoding,
154
+ ]
155
+ CoreFoundation.CFStringGetCString.restype = c_bool
156
+
157
+ CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
158
+ CoreFoundation.CFDataCreate.restype = CFDataRef
159
+
160
+ CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
161
+ CoreFoundation.CFDataGetLength.restype = CFIndex
162
+
163
+ CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
164
+ CoreFoundation.CFDataGetBytePtr.restype = c_void_p
165
+
166
+ CoreFoundation.CFArrayCreate.argtypes = [
167
+ CFAllocatorRef,
168
+ POINTER(CFTypeRef),
169
+ CFIndex,
170
+ CFArrayCallBacks,
171
+ ]
172
+ CoreFoundation.CFArrayCreate.restype = CFArrayRef
173
+
174
+ CoreFoundation.CFArrayCreateMutable.argtypes = [
175
+ CFAllocatorRef,
176
+ CFIndex,
177
+ CFArrayCallBacks,
178
+ ]
179
+ CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
180
+
181
+ CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
182
+ CoreFoundation.CFArrayAppendValue.restype = None
183
+
184
+ CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
185
+ CoreFoundation.CFArrayGetCount.restype = CFIndex
186
+
187
+ CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
188
+ CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
189
+
190
+ CoreFoundation.CFErrorGetCode.argtypes = [CFErrorRef]
191
+ CoreFoundation.CFErrorGetCode.restype = CFIndex
192
+
193
+ CoreFoundation.CFErrorCopyDescription.argtypes = [CFErrorRef]
194
+ CoreFoundation.CFErrorCopyDescription.restype = CFStringRef
195
+
196
+ CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( # type: ignore[attr-defined]
197
+ CoreFoundation, "kCFAllocatorDefault"
198
+ )
199
+ CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( # type: ignore[attr-defined]
200
+ CoreFoundation, "kCFTypeArrayCallBacks"
201
+ )
202
+
203
+ CoreFoundation.CFTypeRef = CFTypeRef # type: ignore[attr-defined]
204
+ CoreFoundation.CFArrayRef = CFArrayRef # type: ignore[attr-defined]
205
+ CoreFoundation.CFStringRef = CFStringRef # type: ignore[attr-defined]
206
+ CoreFoundation.CFErrorRef = CFErrorRef # type: ignore[attr-defined]
207
+
208
+ except AttributeError as e:
209
+ raise ImportError(f"Error initializing ctypes: {e}") from None
210
+
211
+ # SecTrustEvaluateWithError is macOS 10.14+
212
+ if _is_macos_version_10_14_or_later:
213
+ try:
214
+ Security.SecTrustEvaluateWithError.argtypes = [
215
+ SecTrustRef,
216
+ POINTER(CFErrorRef),
217
+ ]
218
+ Security.SecTrustEvaluateWithError.restype = c_bool
219
+ except AttributeError as e:
220
+ raise ImportError(f"Error initializing ctypes: {e}") from None
221
+
222
+
223
+ def _handle_osstatus(result: OSStatus, _: typing.Any, args: typing.Any) -> typing.Any:
224
+ """
225
+ Raises an error if the OSStatus value is non-zero.
226
+ """
227
+ if int(result) == 0:
228
+ return args
229
+
230
+ # Returns a CFString which we need to transform
231
+ # into a UTF-8 Python string.
232
+ error_message_cfstring = None
233
+ try:
234
+ error_message_cfstring = Security.SecCopyErrorMessageString(result, None)
235
+
236
+ # First step is convert the CFString into a C string pointer.
237
+ # We try the fast no-copy way first.
238
+ error_message_cfstring_c_void_p = ctypes.cast(
239
+ error_message_cfstring, ctypes.POINTER(ctypes.c_void_p)
240
+ )
241
+ message = CoreFoundation.CFStringGetCStringPtr(
242
+ error_message_cfstring_c_void_p, CFConst.kCFStringEncodingUTF8
243
+ )
244
+
245
+ # Quoting the Apple dev docs:
246
+ #
247
+ # "A pointer to a C string or NULL if the internal
248
+ # storage of theString does not allow this to be
249
+ # returned efficiently."
250
+ #
251
+ # So we need to get our hands dirty.
252
+ if message is None:
253
+ buffer = ctypes.create_string_buffer(1024)
254
+ result = CoreFoundation.CFStringGetCString(
255
+ error_message_cfstring_c_void_p,
256
+ buffer,
257
+ 1024,
258
+ CFConst.kCFStringEncodingUTF8,
259
+ )
260
+ if not result:
261
+ raise OSError("Error copying C string from CFStringRef")
262
+ message = buffer.value
263
+
264
+ finally:
265
+ if error_message_cfstring is not None:
266
+ CoreFoundation.CFRelease(error_message_cfstring)
267
+
268
+ # If no message can be found for this status we come
269
+ # up with a generic one that forwards the status code.
270
+ if message is None or message == "":
271
+ message = f"SecureTransport operation returned a non-zero OSStatus: {result}"
272
+
273
+ raise ssl.SSLError(message)
274
+
275
+
276
+ Security.SecTrustCreateWithCertificates.errcheck = _handle_osstatus # type: ignore[assignment]
277
+ Security.SecTrustSetAnchorCertificates.errcheck = _handle_osstatus # type: ignore[assignment]
278
+ Security.SecTrustSetAnchorCertificatesOnly.errcheck = _handle_osstatus # type: ignore[assignment]
279
+ Security.SecTrustGetTrustResult.errcheck = _handle_osstatus # type: ignore[assignment]
280
+ Security.SecTrustEvaluate.errcheck = _handle_osstatus # type: ignore[assignment]
281
+
282
+
283
+ class CFConst:
284
+ """CoreFoundation constants"""
285
+
286
+ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
287
+
288
+ errSecIncompleteCertRevocationCheck = -67635
289
+ errSecHostNameMismatch = -67602
290
+ errSecCertificateExpired = -67818
291
+ errSecNotTrusted = -67843
292
+
293
+
294
+ def _bytes_to_cf_data_ref(value: bytes) -> CFDataRef: # type: ignore[valid-type]
295
+ return CoreFoundation.CFDataCreate( # type: ignore[no-any-return]
296
+ CoreFoundation.kCFAllocatorDefault, value, len(value)
297
+ )
298
+
299
+
300
+ def _bytes_to_cf_string(value: bytes) -> CFString:
301
+ """
302
+ Given a Python binary data, create a CFString.
303
+ The string must be CFReleased by the caller.
304
+ """
305
+ c_str = ctypes.c_char_p(value)
306
+ cf_str = CoreFoundation.CFStringCreateWithCString(
307
+ CoreFoundation.kCFAllocatorDefault,
308
+ c_str,
309
+ CFConst.kCFStringEncodingUTF8,
310
+ )
311
+ return cf_str # type: ignore[no-any-return]
312
+
313
+
314
+ def _cf_string_ref_to_str(cf_string_ref: CFStringRef) -> str | None: # type: ignore[valid-type]
315
+ """
316
+ Creates a Unicode string from a CFString object. Used entirely for error
317
+ reporting.
318
+ Yes, it annoys me quite a lot that this function is this complex.
319
+ """
320
+
321
+ string = CoreFoundation.CFStringGetCStringPtr(
322
+ cf_string_ref, CFConst.kCFStringEncodingUTF8
323
+ )
324
+ if string is None:
325
+ buffer = ctypes.create_string_buffer(1024)
326
+ result = CoreFoundation.CFStringGetCString(
327
+ cf_string_ref, buffer, 1024, CFConst.kCFStringEncodingUTF8
328
+ )
329
+ if not result:
330
+ raise OSError("Error copying C string from CFStringRef")
331
+ string = buffer.value
332
+ if string is not None:
333
+ string = string.decode("utf-8")
334
+ return string # type: ignore[no-any-return]
335
+
336
+
337
+ def _der_certs_to_cf_cert_array(certs: list[bytes]) -> CFMutableArrayRef: # type: ignore[valid-type]
338
+ """Builds a CFArray of SecCertificateRefs from a list of DER-encoded certificates.
339
+ Responsibility of the caller to call CoreFoundation.CFRelease on the CFArray.
340
+ """
341
+ cf_array = CoreFoundation.CFArrayCreateMutable(
342
+ CoreFoundation.kCFAllocatorDefault,
343
+ 0,
344
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
345
+ )
346
+ if not cf_array:
347
+ raise MemoryError("Unable to allocate memory!")
348
+
349
+ for cert_data in certs:
350
+ cf_data = None
351
+ sec_cert_ref = None
352
+ try:
353
+ cf_data = _bytes_to_cf_data_ref(cert_data)
354
+ sec_cert_ref = Security.SecCertificateCreateWithData(
355
+ CoreFoundation.kCFAllocatorDefault, cf_data
356
+ )
357
+ CoreFoundation.CFArrayAppendValue(cf_array, sec_cert_ref)
358
+ finally:
359
+ if cf_data:
360
+ CoreFoundation.CFRelease(cf_data)
361
+ if sec_cert_ref:
362
+ CoreFoundation.CFRelease(sec_cert_ref)
363
+
364
+ return cf_array # type: ignore[no-any-return]
365
+
366
+
367
+ @contextlib.contextmanager
368
+ def _configure_context(ctx: ssl.SSLContext) -> typing.Iterator[None]:
369
+ check_hostname = ctx.check_hostname
370
+ verify_mode = ctx.verify_mode
371
+ ctx.check_hostname = False
372
+ _set_ssl_context_verify_mode(ctx, ssl.CERT_NONE)
373
+ try:
374
+ yield
375
+ finally:
376
+ ctx.check_hostname = check_hostname
377
+ _set_ssl_context_verify_mode(ctx, verify_mode)
378
+
379
+
380
+ def _verify_peercerts_impl(
381
+ ssl_context: ssl.SSLContext,
382
+ cert_chain: list[bytes],
383
+ server_hostname: str | None = None,
384
+ ) -> None:
385
+ certs = None
386
+ policies = None
387
+ trust = None
388
+ try:
389
+ # Only set a hostname on the policy if we're verifying the hostname
390
+ # on the leaf certificate.
391
+ if server_hostname is not None and ssl_context.check_hostname:
392
+ cf_str_hostname = None
393
+ try:
394
+ cf_str_hostname = _bytes_to_cf_string(server_hostname.encode("ascii"))
395
+ ssl_policy = Security.SecPolicyCreateSSL(True, cf_str_hostname)
396
+ finally:
397
+ if cf_str_hostname:
398
+ CoreFoundation.CFRelease(cf_str_hostname)
399
+ else:
400
+ ssl_policy = Security.SecPolicyCreateSSL(True, None)
401
+
402
+ policies = ssl_policy
403
+ if ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_CHAIN:
404
+ # Add explicit policy requiring positive revocation checks
405
+ policies = CoreFoundation.CFArrayCreateMutable(
406
+ CoreFoundation.kCFAllocatorDefault,
407
+ 0,
408
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
409
+ )
410
+ CoreFoundation.CFArrayAppendValue(policies, ssl_policy)
411
+ CoreFoundation.CFRelease(ssl_policy)
412
+ revocation_policy = Security.SecPolicyCreateRevocation(
413
+ kSecRevocationUseAnyAvailableMethod
414
+ | kSecRevocationRequirePositiveResponse
415
+ )
416
+ CoreFoundation.CFArrayAppendValue(policies, revocation_policy)
417
+ CoreFoundation.CFRelease(revocation_policy)
418
+ elif ssl_context.verify_flags & ssl.VERIFY_CRL_CHECK_LEAF:
419
+ raise NotImplementedError("VERIFY_CRL_CHECK_LEAF not implemented for macOS")
420
+
421
+ certs = None
422
+ try:
423
+ certs = _der_certs_to_cf_cert_array(cert_chain)
424
+
425
+ # Now that we have certificates loaded and a SecPolicy
426
+ # we can finally create a SecTrust object!
427
+ trust = Security.SecTrustRef()
428
+ Security.SecTrustCreateWithCertificates(
429
+ certs, policies, ctypes.byref(trust)
430
+ )
431
+
432
+ finally:
433
+ # The certs are now being held by SecTrust so we can
434
+ # release our handles for the array.
435
+ if certs:
436
+ CoreFoundation.CFRelease(certs)
437
+
438
+ # If there are additional trust anchors to load we need to transform
439
+ # the list of DER-encoded certificates into a CFArray.
440
+ ctx_ca_certs_der: list[bytes] | None = ssl_context.get_ca_certs(
441
+ binary_form=True
442
+ )
443
+ if ctx_ca_certs_der:
444
+ ctx_ca_certs = None
445
+ try:
446
+ ctx_ca_certs = _der_certs_to_cf_cert_array(ctx_ca_certs_der)
447
+ Security.SecTrustSetAnchorCertificates(trust, ctx_ca_certs)
448
+ finally:
449
+ if ctx_ca_certs:
450
+ CoreFoundation.CFRelease(ctx_ca_certs)
451
+
452
+ # We always want system certificates.
453
+ Security.SecTrustSetAnchorCertificatesOnly(trust, False)
454
+
455
+ # macOS 10.13 and earlier don't support SecTrustEvaluateWithError()
456
+ # so we use SecTrustEvaluate() which means we need to construct error
457
+ # messages ourselves.
458
+ if _is_macos_version_10_14_or_later:
459
+ _verify_peercerts_impl_macos_10_14(ssl_context, trust)
460
+ else:
461
+ _verify_peercerts_impl_macos_10_13(ssl_context, trust)
462
+ finally:
463
+ if policies:
464
+ CoreFoundation.CFRelease(policies)
465
+ if trust:
466
+ CoreFoundation.CFRelease(trust)
467
+
468
+
469
+ def _verify_peercerts_impl_macos_10_13(
470
+ ssl_context: ssl.SSLContext, sec_trust_ref: typing.Any
471
+ ) -> None:
472
+ """Verify using 'SecTrustEvaluate' API for macOS 10.13 and earlier.
473
+ macOS 10.14 added the 'SecTrustEvaluateWithError' API.
474
+ """
475
+ sec_trust_result_type = Security.SecTrustResultType()
476
+ Security.SecTrustEvaluate(sec_trust_ref, ctypes.byref(sec_trust_result_type))
477
+
478
+ try:
479
+ sec_trust_result_type_as_int = int(sec_trust_result_type.value)
480
+ except (ValueError, TypeError):
481
+ sec_trust_result_type_as_int = -1
482
+
483
+ # Apple doesn't document these values in their own API docs.
484
+ # See: https://github.com/xybp888/iOS-SDKs/blob/master/iPhoneOS13.0.sdk/System/Library/Frameworks/Security.framework/Headers/SecTrust.h#L84
485
+ if (
486
+ ssl_context.verify_mode == ssl.CERT_REQUIRED
487
+ and sec_trust_result_type_as_int not in (1, 4)
488
+ ):
489
+ # Note that we're not able to ignore only hostname errors
490
+ # for macOS 10.13 and earlier, so check_hostname=False will
491
+ # still return an error.
492
+ sec_trust_result_type_to_message = {
493
+ 0: "Invalid trust result type",
494
+ # 1: "Trust evaluation succeeded",
495
+ 2: "User confirmation required",
496
+ 3: "User specified that certificate is not trusted",
497
+ # 4: "Trust result is unspecified",
498
+ 5: "Recoverable trust failure occurred",
499
+ 6: "Fatal trust failure occurred",
500
+ 7: "Other error occurred, certificate may be revoked",
501
+ }
502
+ error_message = sec_trust_result_type_to_message.get(
503
+ sec_trust_result_type_as_int,
504
+ f"Unknown trust result: {sec_trust_result_type_as_int}",
505
+ )
506
+
507
+ err = ssl.SSLCertVerificationError(error_message)
508
+ err.verify_message = error_message
509
+ err.verify_code = sec_trust_result_type_as_int
510
+ raise err
511
+
512
+
513
+ def _verify_peercerts_impl_macos_10_14(
514
+ ssl_context: ssl.SSLContext, sec_trust_ref: typing.Any
515
+ ) -> None:
516
+ """Verify using 'SecTrustEvaluateWithError' API for macOS 10.14+."""
517
+ cf_error = CoreFoundation.CFErrorRef()
518
+ sec_trust_eval_result = Security.SecTrustEvaluateWithError(
519
+ sec_trust_ref, ctypes.byref(cf_error)
520
+ )
521
+ # sec_trust_eval_result is a bool (0 or 1)
522
+ # where 1 means that the certs are trusted.
523
+ if sec_trust_eval_result == 1:
524
+ is_trusted = True
525
+ elif sec_trust_eval_result == 0:
526
+ is_trusted = False
527
+ else:
528
+ raise ssl.SSLError(
529
+ f"Unknown result from Security.SecTrustEvaluateWithError: {sec_trust_eval_result!r}"
530
+ )
531
+
532
+ cf_error_code = 0
533
+ if not is_trusted:
534
+ cf_error_code = CoreFoundation.CFErrorGetCode(cf_error)
535
+
536
+ # If the error is a known failure that we're
537
+ # explicitly okay with from SSLContext configuration
538
+ # we can set is_trusted accordingly.
539
+ if ssl_context.verify_mode != ssl.CERT_REQUIRED and (
540
+ cf_error_code == CFConst.errSecNotTrusted
541
+ or cf_error_code == CFConst.errSecCertificateExpired
542
+ ):
543
+ is_trusted = True
544
+
545
+ # If we're still not trusted then we start to
546
+ # construct and raise the SSLCertVerificationError.
547
+ if not is_trusted:
548
+ cf_error_string_ref = None
549
+ try:
550
+ cf_error_string_ref = CoreFoundation.CFErrorCopyDescription(cf_error)
551
+
552
+ # Can this ever return 'None' if there's a CFError?
553
+ cf_error_message = (
554
+ _cf_string_ref_to_str(cf_error_string_ref)
555
+ or "Certificate verification failed"
556
+ )
557
+
558
+ # TODO: Not sure if we need the SecTrustResultType for anything?
559
+ # We only care whether or not it's a success or failure for now.
560
+ sec_trust_result_type = Security.SecTrustResultType()
561
+ Security.SecTrustGetTrustResult(
562
+ sec_trust_ref, ctypes.byref(sec_trust_result_type)
563
+ )
564
+
565
+ err = ssl.SSLCertVerificationError(cf_error_message)
566
+ err.verify_message = cf_error_message
567
+ err.verify_code = cf_error_code
568
+ raise err
569
+ finally:
570
+ if cf_error_string_ref:
571
+ CoreFoundation.CFRelease(cf_error_string_ref)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/__init__.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
3
+ """
4
+ from __future__ import absolute_import
5
+
6
+ # Set default logging handler to avoid "No handler found" warnings.
7
+ import logging
8
+ import warnings
9
+ from logging import NullHandler
10
+
11
+ from . import exceptions
12
+ from ._version import __version__
13
+ from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
14
+ from .filepost import encode_multipart_formdata
15
+ from .poolmanager import PoolManager, ProxyManager, proxy_from_url
16
+ from .response import HTTPResponse
17
+ from .util.request import make_headers
18
+ from .util.retry import Retry
19
+ from .util.timeout import Timeout
20
+ from .util.url import get_host
21
+
22
+ # === NOTE TO REPACKAGERS AND VENDORS ===
23
+ # Please delete this block, this logic is only
24
+ # for urllib3 being distributed via PyPI.
25
+ # See: https://github.com/urllib3/urllib3/issues/2680
26
+ try:
27
+ import urllib3_secure_extra # type: ignore # noqa: F401
28
+ except ImportError:
29
+ pass
30
+ else:
31
+ warnings.warn(
32
+ "'urllib3[secure]' extra is deprecated and will be removed "
33
+ "in a future release of urllib3 2.x. Read more in this issue: "
34
+ "https://github.com/urllib3/urllib3/issues/2680",
35
+ category=DeprecationWarning,
36
+ stacklevel=2,
37
+ )
38
+
39
+ __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
40
+ __license__ = "MIT"
41
+ __version__ = __version__
42
+
43
+ __all__ = (
44
+ "HTTPConnectionPool",
45
+ "HTTPSConnectionPool",
46
+ "PoolManager",
47
+ "ProxyManager",
48
+ "HTTPResponse",
49
+ "Retry",
50
+ "Timeout",
51
+ "add_stderr_logger",
52
+ "connection_from_url",
53
+ "disable_warnings",
54
+ "encode_multipart_formdata",
55
+ "get_host",
56
+ "make_headers",
57
+ "proxy_from_url",
58
+ )
59
+
60
+ logging.getLogger(__name__).addHandler(NullHandler())
61
+
62
+
63
+ def add_stderr_logger(level=logging.DEBUG):
64
+ """
65
+ Helper for quickly adding a StreamHandler to the logger. Useful for
66
+ debugging.
67
+
68
+ Returns the handler after adding it.
69
+ """
70
+ # This method needs to be in this __init__.py to get the __name__ correct
71
+ # even if urllib3 is vendored within another package.
72
+ logger = logging.getLogger(__name__)
73
+ handler = logging.StreamHandler()
74
+ handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
75
+ logger.addHandler(handler)
76
+ logger.setLevel(level)
77
+ logger.debug("Added a stderr logging handler to logger: %s", __name__)
78
+ return handler
79
+
80
+
81
+ # ... Clean up.
82
+ del NullHandler
83
+
84
+
85
+ # All warning filters *must* be appended unless you're really certain that they
86
+ # shouldn't be: otherwise, it's very hard for users to use most Python
87
+ # mechanisms to silence them.
88
+ # SecurityWarning's always go off by default.
89
+ warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
90
+ # SubjectAltNameWarning's should go off once per host
91
+ warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
92
+ # InsecurePlatformWarning's don't vary between requests, so we keep it default.
93
+ warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
94
+ # SNIMissingWarnings should go off only once.
95
+ warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
96
+
97
+
98
+ def disable_warnings(category=exceptions.HTTPWarning):
99
+ """
100
+ Helper for quickly disabling all urllib3 warnings.
101
+ """
102
+ warnings.simplefilter("ignore", category)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (11 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_collections.py ADDED
@@ -0,0 +1,355 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ try:
4
+ from collections.abc import Mapping, MutableMapping
5
+ except ImportError:
6
+ from collections import Mapping, MutableMapping
7
+ try:
8
+ from threading import RLock
9
+ except ImportError: # Platform-specific: No threads available
10
+
11
+ class RLock:
12
+ def __enter__(self):
13
+ pass
14
+
15
+ def __exit__(self, exc_type, exc_value, traceback):
16
+ pass
17
+
18
+
19
+ from collections import OrderedDict
20
+
21
+ from .exceptions import InvalidHeader
22
+ from .packages import six
23
+ from .packages.six import iterkeys, itervalues
24
+
25
+ __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
26
+
27
+
28
+ _Null = object()
29
+
30
+
31
+ class RecentlyUsedContainer(MutableMapping):
32
+ """
33
+ Provides a thread-safe dict-like container which maintains up to
34
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
35
+ ``maxsize``.
36
+
37
+ :param maxsize:
38
+ Maximum number of recent elements to retain.
39
+
40
+ :param dispose_func:
41
+ Every time an item is evicted from the container,
42
+ ``dispose_func(value)`` is called. Callback which will get called
43
+ """
44
+
45
+ ContainerCls = OrderedDict
46
+
47
+ def __init__(self, maxsize=10, dispose_func=None):
48
+ self._maxsize = maxsize
49
+ self.dispose_func = dispose_func
50
+
51
+ self._container = self.ContainerCls()
52
+ self.lock = RLock()
53
+
54
+ def __getitem__(self, key):
55
+ # Re-insert the item, moving it to the end of the eviction line.
56
+ with self.lock:
57
+ item = self._container.pop(key)
58
+ self._container[key] = item
59
+ return item
60
+
61
+ def __setitem__(self, key, value):
62
+ evicted_value = _Null
63
+ with self.lock:
64
+ # Possibly evict the existing value of 'key'
65
+ evicted_value = self._container.get(key, _Null)
66
+ self._container[key] = value
67
+
68
+ # If we didn't evict an existing value, we might have to evict the
69
+ # least recently used item from the beginning of the container.
70
+ if len(self._container) > self._maxsize:
71
+ _key, evicted_value = self._container.popitem(last=False)
72
+
73
+ if self.dispose_func and evicted_value is not _Null:
74
+ self.dispose_func(evicted_value)
75
+
76
+ def __delitem__(self, key):
77
+ with self.lock:
78
+ value = self._container.pop(key)
79
+
80
+ if self.dispose_func:
81
+ self.dispose_func(value)
82
+
83
+ def __len__(self):
84
+ with self.lock:
85
+ return len(self._container)
86
+
87
+ def __iter__(self):
88
+ raise NotImplementedError(
89
+ "Iteration over this class is unlikely to be threadsafe."
90
+ )
91
+
92
+ def clear(self):
93
+ with self.lock:
94
+ # Copy pointers to all values, then wipe the mapping
95
+ values = list(itervalues(self._container))
96
+ self._container.clear()
97
+
98
+ if self.dispose_func:
99
+ for value in values:
100
+ self.dispose_func(value)
101
+
102
+ def keys(self):
103
+ with self.lock:
104
+ return list(iterkeys(self._container))
105
+
106
+
107
+ class HTTPHeaderDict(MutableMapping):
108
+ """
109
+ :param headers:
110
+ An iterable of field-value pairs. Must not contain multiple field names
111
+ when compared case-insensitively.
112
+
113
+ :param kwargs:
114
+ Additional field-value pairs to pass in to ``dict.update``.
115
+
116
+ A ``dict`` like container for storing HTTP Headers.
117
+
118
+ Field names are stored and compared case-insensitively in compliance with
119
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
120
+ case-insensitive pair.
121
+
122
+ Using ``__setitem__`` syntax overwrites fields that compare equal
123
+ case-insensitively in order to maintain ``dict``'s api. For fields that
124
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
125
+ in a loop.
126
+
127
+ If multiple fields that are equal case-insensitively are passed to the
128
+ constructor or ``.update``, the behavior is undefined and some will be
129
+ lost.
130
+
131
+ >>> headers = HTTPHeaderDict()
132
+ >>> headers.add('Set-Cookie', 'foo=bar')
133
+ >>> headers.add('set-cookie', 'baz=quxx')
134
+ >>> headers['content-length'] = '7'
135
+ >>> headers['SET-cookie']
136
+ 'foo=bar, baz=quxx'
137
+ >>> headers['Content-Length']
138
+ '7'
139
+ """
140
+
141
+ def __init__(self, headers=None, **kwargs):
142
+ super(HTTPHeaderDict, self).__init__()
143
+ self._container = OrderedDict()
144
+ if headers is not None:
145
+ if isinstance(headers, HTTPHeaderDict):
146
+ self._copy_from(headers)
147
+ else:
148
+ self.extend(headers)
149
+ if kwargs:
150
+ self.extend(kwargs)
151
+
152
+ def __setitem__(self, key, val):
153
+ self._container[key.lower()] = [key, val]
154
+ return self._container[key.lower()]
155
+
156
+ def __getitem__(self, key):
157
+ val = self._container[key.lower()]
158
+ return ", ".join(val[1:])
159
+
160
+ def __delitem__(self, key):
161
+ del self._container[key.lower()]
162
+
163
+ def __contains__(self, key):
164
+ return key.lower() in self._container
165
+
166
+ def __eq__(self, other):
167
+ if not isinstance(other, Mapping) and not hasattr(other, "keys"):
168
+ return False
169
+ if not isinstance(other, type(self)):
170
+ other = type(self)(other)
171
+ return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
172
+ (k.lower(), v) for k, v in other.itermerged()
173
+ )
174
+
175
+ def __ne__(self, other):
176
+ return not self.__eq__(other)
177
+
178
+ if six.PY2: # Python 2
179
+ iterkeys = MutableMapping.iterkeys
180
+ itervalues = MutableMapping.itervalues
181
+
182
+ __marker = object()
183
+
184
+ def __len__(self):
185
+ return len(self._container)
186
+
187
+ def __iter__(self):
188
+ # Only provide the originally cased names
189
+ for vals in self._container.values():
190
+ yield vals[0]
191
+
192
+ def pop(self, key, default=__marker):
193
+ """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
194
+ If key is not found, d is returned if given, otherwise KeyError is raised.
195
+ """
196
+ # Using the MutableMapping function directly fails due to the private marker.
197
+ # Using ordinary dict.pop would expose the internal structures.
198
+ # So let's reinvent the wheel.
199
+ try:
200
+ value = self[key]
201
+ except KeyError:
202
+ if default is self.__marker:
203
+ raise
204
+ return default
205
+ else:
206
+ del self[key]
207
+ return value
208
+
209
+ def discard(self, key):
210
+ try:
211
+ del self[key]
212
+ except KeyError:
213
+ pass
214
+
215
+ def add(self, key, val):
216
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
217
+ exists.
218
+
219
+ >>> headers = HTTPHeaderDict(foo='bar')
220
+ >>> headers.add('Foo', 'baz')
221
+ >>> headers['foo']
222
+ 'bar, baz'
223
+ """
224
+ key_lower = key.lower()
225
+ new_vals = [key, val]
226
+ # Keep the common case aka no item present as fast as possible
227
+ vals = self._container.setdefault(key_lower, new_vals)
228
+ if new_vals is not vals:
229
+ vals.append(val)
230
+
231
+ def extend(self, *args, **kwargs):
232
+ """Generic import function for any type of header-like object.
233
+ Adapted version of MutableMapping.update in order to insert items
234
+ with self.add instead of self.__setitem__
235
+ """
236
+ if len(args) > 1:
237
+ raise TypeError(
238
+ "extend() takes at most 1 positional "
239
+ "arguments ({0} given)".format(len(args))
240
+ )
241
+ other = args[0] if len(args) >= 1 else ()
242
+
243
+ if isinstance(other, HTTPHeaderDict):
244
+ for key, val in other.iteritems():
245
+ self.add(key, val)
246
+ elif isinstance(other, Mapping):
247
+ for key in other:
248
+ self.add(key, other[key])
249
+ elif hasattr(other, "keys"):
250
+ for key in other.keys():
251
+ self.add(key, other[key])
252
+ else:
253
+ for key, value in other:
254
+ self.add(key, value)
255
+
256
+ for key, value in kwargs.items():
257
+ self.add(key, value)
258
+
259
+ def getlist(self, key, default=__marker):
260
+ """Returns a list of all the values for the named field. Returns an
261
+ empty list if the key doesn't exist."""
262
+ try:
263
+ vals = self._container[key.lower()]
264
+ except KeyError:
265
+ if default is self.__marker:
266
+ return []
267
+ return default
268
+ else:
269
+ return vals[1:]
270
+
271
+ def _prepare_for_method_change(self):
272
+ """
273
+ Remove content-specific header fields before changing the request
274
+ method to GET or HEAD according to RFC 9110, Section 15.4.
275
+ """
276
+ content_specific_headers = [
277
+ "Content-Encoding",
278
+ "Content-Language",
279
+ "Content-Location",
280
+ "Content-Type",
281
+ "Content-Length",
282
+ "Digest",
283
+ "Last-Modified",
284
+ ]
285
+ for header in content_specific_headers:
286
+ self.discard(header)
287
+ return self
288
+
289
+ # Backwards compatibility for httplib
290
+ getheaders = getlist
291
+ getallmatchingheaders = getlist
292
+ iget = getlist
293
+
294
+ # Backwards compatibility for http.cookiejar
295
+ get_all = getlist
296
+
297
+ def __repr__(self):
298
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
299
+
300
+ def _copy_from(self, other):
301
+ for key in other:
302
+ val = other.getlist(key)
303
+ if isinstance(val, list):
304
+ # Don't need to convert tuples
305
+ val = list(val)
306
+ self._container[key.lower()] = [key] + val
307
+
308
+ def copy(self):
309
+ clone = type(self)()
310
+ clone._copy_from(self)
311
+ return clone
312
+
313
+ def iteritems(self):
314
+ """Iterate over all header lines, including duplicate ones."""
315
+ for key in self:
316
+ vals = self._container[key.lower()]
317
+ for val in vals[1:]:
318
+ yield vals[0], val
319
+
320
+ def itermerged(self):
321
+ """Iterate over all headers, merging duplicate ones together."""
322
+ for key in self:
323
+ val = self._container[key.lower()]
324
+ yield val[0], ", ".join(val[1:])
325
+
326
+ def items(self):
327
+ return list(self.iteritems())
328
+
329
+ @classmethod
330
+ def from_httplib(cls, message): # Python 2
331
+ """Read headers from a Python 2 httplib message object."""
332
+ # python2.7 does not expose a proper API for exporting multiheaders
333
+ # efficiently. This function re-reads raw lines from the message
334
+ # object and extracts the multiheaders properly.
335
+ obs_fold_continued_leaders = (" ", "\t")
336
+ headers = []
337
+
338
+ for line in message.headers:
339
+ if line.startswith(obs_fold_continued_leaders):
340
+ if not headers:
341
+ # We received a header line that starts with OWS as described
342
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
343
+ # there exists no previous header to which we can attach it.
344
+ raise InvalidHeader(
345
+ "Header continuation with no previous header: %s" % line
346
+ )
347
+ else:
348
+ key, value = headers[-1]
349
+ headers[-1] = (key, value + " " + line.strip())
350
+ continue
351
+
352
+ key, value = line.split(":", 1)
353
+ headers.append((key, value.strip()))
354
+
355
+ return cls(headers)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/_version.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ # This file is protected via CODEOWNERS
2
+ __version__ = "1.26.20"
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/connection.py ADDED
@@ -0,0 +1,572 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ import datetime
4
+ import logging
5
+ import os
6
+ import re
7
+ import socket
8
+ import warnings
9
+ from socket import error as SocketError
10
+ from socket import timeout as SocketTimeout
11
+
12
+ from .packages import six
13
+ from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
14
+ from .packages.six.moves.http_client import HTTPException # noqa: F401
15
+ from .util.proxy import create_proxy_ssl_context
16
+
17
+ try: # Compiled with SSL?
18
+ import ssl
19
+
20
+ BaseSSLError = ssl.SSLError
21
+ except (ImportError, AttributeError): # Platform-specific: No SSL.
22
+ ssl = None
23
+
24
+ class BaseSSLError(BaseException):
25
+ pass
26
+
27
+
28
+ try:
29
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
30
+ ConnectionError = ConnectionError
31
+ except NameError:
32
+ # Python 2
33
+ class ConnectionError(Exception):
34
+ pass
35
+
36
+
37
+ try: # Python 3:
38
+ # Not a no-op, we're adding this to the namespace so it can be imported.
39
+ BrokenPipeError = BrokenPipeError
40
+ except NameError: # Python 2:
41
+
42
+ class BrokenPipeError(Exception):
43
+ pass
44
+
45
+
46
+ from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
47
+ from ._version import __version__
48
+ from .exceptions import (
49
+ ConnectTimeoutError,
50
+ NewConnectionError,
51
+ SubjectAltNameWarning,
52
+ SystemTimeWarning,
53
+ )
54
+ from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
55
+ from .util.ssl_ import (
56
+ assert_fingerprint,
57
+ create_urllib3_context,
58
+ is_ipaddress,
59
+ resolve_cert_reqs,
60
+ resolve_ssl_version,
61
+ ssl_wrap_socket,
62
+ )
63
+ from .util.ssl_match_hostname import CertificateError, match_hostname
64
+
65
+ log = logging.getLogger(__name__)
66
+
67
+ port_by_scheme = {"http": 80, "https": 443}
68
+
69
+ # When it comes time to update this value as a part of regular maintenance
70
+ # (ie test_recent_date is failing) update it to ~6 months before the current date.
71
+ RECENT_DATE = datetime.date(2024, 1, 1)
72
+
73
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
74
+
75
+
76
+ class HTTPConnection(_HTTPConnection, object):
77
+ """
78
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
79
+ backwards-compatibility layer between older and newer Pythons.
80
+
81
+ Additional keyword parameters are used to configure attributes of the connection.
82
+ Accepted parameters include:
83
+
84
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
85
+ - ``source_address``: Set the source address for the current connection.
86
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
87
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
88
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
89
+
90
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
91
+ you might pass:
92
+
93
+ .. code-block:: python
94
+
95
+ HTTPConnection.default_socket_options + [
96
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
97
+ ]
98
+
99
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
100
+ """
101
+
102
+ default_port = port_by_scheme["http"]
103
+
104
+ #: Disable Nagle's algorithm by default.
105
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
106
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
107
+
108
+ #: Whether this connection verifies the host's certificate.
109
+ is_verified = False
110
+
111
+ #: Whether this proxy connection (if used) verifies the proxy host's
112
+ #: certificate.
113
+ proxy_is_verified = None
114
+
115
+ def __init__(self, *args, **kw):
116
+ if not six.PY2:
117
+ kw.pop("strict", None)
118
+
119
+ # Pre-set source_address.
120
+ self.source_address = kw.get("source_address")
121
+
122
+ #: The socket options provided by the user. If no options are
123
+ #: provided, we use the default options.
124
+ self.socket_options = kw.pop("socket_options", self.default_socket_options)
125
+
126
+ # Proxy options provided by the user.
127
+ self.proxy = kw.pop("proxy", None)
128
+ self.proxy_config = kw.pop("proxy_config", None)
129
+
130
+ _HTTPConnection.__init__(self, *args, **kw)
131
+
132
+ @property
133
+ def host(self):
134
+ """
135
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
136
+
137
+ In general, SSL certificates don't include the trailing dot indicating a
138
+ fully-qualified domain name, and thus, they don't validate properly when
139
+ checked against a domain name that includes the dot. In addition, some
140
+ servers may not expect to receive the trailing dot when provided.
141
+
142
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
143
+ lookup with the trailing dot will properly only resolve the appropriate FQDN,
144
+ whereas a lookup without a trailing dot will search the system's search domain
145
+ list. Thus, it's important to keep the original host around for use only in
146
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
147
+ actual TCP connection across which we're going to send HTTP requests).
148
+ """
149
+ return self._dns_host.rstrip(".")
150
+
151
+ @host.setter
152
+ def host(self, value):
153
+ """
154
+ Setter for the `host` property.
155
+
156
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
157
+ only uses `host`, and it seems reasonable that other libraries follow suit.
158
+ """
159
+ self._dns_host = value
160
+
161
+ def _new_conn(self):
162
+ """Establish a socket connection and set nodelay settings on it.
163
+
164
+ :return: New socket connection.
165
+ """
166
+ extra_kw = {}
167
+ if self.source_address:
168
+ extra_kw["source_address"] = self.source_address
169
+
170
+ if self.socket_options:
171
+ extra_kw["socket_options"] = self.socket_options
172
+
173
+ try:
174
+ conn = connection.create_connection(
175
+ (self._dns_host, self.port), self.timeout, **extra_kw
176
+ )
177
+
178
+ except SocketTimeout:
179
+ raise ConnectTimeoutError(
180
+ self,
181
+ "Connection to %s timed out. (connect timeout=%s)"
182
+ % (self.host, self.timeout),
183
+ )
184
+
185
+ except SocketError as e:
186
+ raise NewConnectionError(
187
+ self, "Failed to establish a new connection: %s" % e
188
+ )
189
+
190
+ return conn
191
+
192
+ def _is_using_tunnel(self):
193
+ # Google App Engine's httplib does not define _tunnel_host
194
+ return getattr(self, "_tunnel_host", None)
195
+
196
+ def _prepare_conn(self, conn):
197
+ self.sock = conn
198
+ if self._is_using_tunnel():
199
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
200
+ self._tunnel()
201
+ # Mark this connection as not reusable
202
+ self.auto_open = 0
203
+
204
+ def connect(self):
205
+ conn = self._new_conn()
206
+ self._prepare_conn(conn)
207
+
208
+ def putrequest(self, method, url, *args, **kwargs):
209
+ """ """
210
+ # Empty docstring because the indentation of CPython's implementation
211
+ # is broken but we don't want this method in our documentation.
212
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
213
+ if match:
214
+ raise ValueError(
215
+ "Method cannot contain non-token characters %r (found at least %r)"
216
+ % (method, match.group())
217
+ )
218
+
219
+ return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
220
+
221
+ def putheader(self, header, *values):
222
+ """ """
223
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
224
+ _HTTPConnection.putheader(self, header, *values)
225
+ elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
226
+ raise ValueError(
227
+ "urllib3.util.SKIP_HEADER only supports '%s'"
228
+ % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
229
+ )
230
+
231
+ def request(self, method, url, body=None, headers=None):
232
+ # Update the inner socket's timeout value to send the request.
233
+ # This only triggers if the connection is re-used.
234
+ if getattr(self, "sock", None) is not None:
235
+ self.sock.settimeout(self.timeout)
236
+
237
+ if headers is None:
238
+ headers = {}
239
+ else:
240
+ # Avoid modifying the headers passed into .request()
241
+ headers = headers.copy()
242
+ if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
243
+ headers["User-Agent"] = _get_default_user_agent()
244
+ super(HTTPConnection, self).request(method, url, body=body, headers=headers)
245
+
246
+ def request_chunked(self, method, url, body=None, headers=None):
247
+ """
248
+ Alternative to the common request method, which sends the
249
+ body with chunked encoding and not as one block
250
+ """
251
+ headers = headers or {}
252
+ header_keys = set([six.ensure_str(k.lower()) for k in headers])
253
+ skip_accept_encoding = "accept-encoding" in header_keys
254
+ skip_host = "host" in header_keys
255
+ self.putrequest(
256
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
257
+ )
258
+ if "user-agent" not in header_keys:
259
+ self.putheader("User-Agent", _get_default_user_agent())
260
+ for header, value in headers.items():
261
+ self.putheader(header, value)
262
+ if "transfer-encoding" not in header_keys:
263
+ self.putheader("Transfer-Encoding", "chunked")
264
+ self.endheaders()
265
+
266
+ if body is not None:
267
+ stringish_types = six.string_types + (bytes,)
268
+ if isinstance(body, stringish_types):
269
+ body = (body,)
270
+ for chunk in body:
271
+ if not chunk:
272
+ continue
273
+ if not isinstance(chunk, bytes):
274
+ chunk = chunk.encode("utf8")
275
+ len_str = hex(len(chunk))[2:]
276
+ to_send = bytearray(len_str.encode())
277
+ to_send += b"\r\n"
278
+ to_send += chunk
279
+ to_send += b"\r\n"
280
+ self.send(to_send)
281
+
282
+ # After the if clause, to always have a closed body
283
+ self.send(b"0\r\n\r\n")
284
+
285
+
286
+ class HTTPSConnection(HTTPConnection):
287
+ """
288
+ Many of the parameters to this constructor are passed to the underlying SSL
289
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
290
+ """
291
+
292
+ default_port = port_by_scheme["https"]
293
+
294
+ cert_reqs = None
295
+ ca_certs = None
296
+ ca_cert_dir = None
297
+ ca_cert_data = None
298
+ ssl_version = None
299
+ assert_fingerprint = None
300
+ tls_in_tls_required = False
301
+
302
+ def __init__(
303
+ self,
304
+ host,
305
+ port=None,
306
+ key_file=None,
307
+ cert_file=None,
308
+ key_password=None,
309
+ strict=None,
310
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
311
+ ssl_context=None,
312
+ server_hostname=None,
313
+ **kw
314
+ ):
315
+
316
+ HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
317
+
318
+ self.key_file = key_file
319
+ self.cert_file = cert_file
320
+ self.key_password = key_password
321
+ self.ssl_context = ssl_context
322
+ self.server_hostname = server_hostname
323
+
324
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
325
+ # HTTPS requests to go out as HTTP. (See Issue #356)
326
+ self._protocol = "https"
327
+
328
+ def set_cert(
329
+ self,
330
+ key_file=None,
331
+ cert_file=None,
332
+ cert_reqs=None,
333
+ key_password=None,
334
+ ca_certs=None,
335
+ assert_hostname=None,
336
+ assert_fingerprint=None,
337
+ ca_cert_dir=None,
338
+ ca_cert_data=None,
339
+ ):
340
+ """
341
+ This method should only be called once, before the connection is used.
342
+ """
343
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
344
+ # have an SSLContext object in which case we'll use its verify_mode.
345
+ if cert_reqs is None:
346
+ if self.ssl_context is not None:
347
+ cert_reqs = self.ssl_context.verify_mode
348
+ else:
349
+ cert_reqs = resolve_cert_reqs(None)
350
+
351
+ self.key_file = key_file
352
+ self.cert_file = cert_file
353
+ self.cert_reqs = cert_reqs
354
+ self.key_password = key_password
355
+ self.assert_hostname = assert_hostname
356
+ self.assert_fingerprint = assert_fingerprint
357
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
358
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
359
+ self.ca_cert_data = ca_cert_data
360
+
361
+ def connect(self):
362
+ # Add certificate verification
363
+ self.sock = conn = self._new_conn()
364
+ hostname = self.host
365
+ tls_in_tls = False
366
+
367
+ if self._is_using_tunnel():
368
+ if self.tls_in_tls_required:
369
+ self.sock = conn = self._connect_tls_proxy(hostname, conn)
370
+ tls_in_tls = True
371
+
372
+ # Calls self._set_hostport(), so self.host is
373
+ # self._tunnel_host below.
374
+ self._tunnel()
375
+ # Mark this connection as not reusable
376
+ self.auto_open = 0
377
+
378
+ # Override the host with the one we're requesting data from.
379
+ hostname = self._tunnel_host
380
+
381
+ server_hostname = hostname
382
+ if self.server_hostname is not None:
383
+ server_hostname = self.server_hostname
384
+
385
+ is_time_off = datetime.date.today() < RECENT_DATE
386
+ if is_time_off:
387
+ warnings.warn(
388
+ (
389
+ "System time is way off (before {0}). This will probably "
390
+ "lead to SSL verification errors"
391
+ ).format(RECENT_DATE),
392
+ SystemTimeWarning,
393
+ )
394
+
395
+ # Wrap socket using verification with the root certs in
396
+ # trusted_root_certs
397
+ default_ssl_context = False
398
+ if self.ssl_context is None:
399
+ default_ssl_context = True
400
+ self.ssl_context = create_urllib3_context(
401
+ ssl_version=resolve_ssl_version(self.ssl_version),
402
+ cert_reqs=resolve_cert_reqs(self.cert_reqs),
403
+ )
404
+
405
+ context = self.ssl_context
406
+ context.verify_mode = resolve_cert_reqs(self.cert_reqs)
407
+
408
+ # Try to load OS default certs if none are given.
409
+ # Works well on Windows (requires Python3.4+)
410
+ if (
411
+ not self.ca_certs
412
+ and not self.ca_cert_dir
413
+ and not self.ca_cert_data
414
+ and default_ssl_context
415
+ and hasattr(context, "load_default_certs")
416
+ ):
417
+ context.load_default_certs()
418
+
419
+ self.sock = ssl_wrap_socket(
420
+ sock=conn,
421
+ keyfile=self.key_file,
422
+ certfile=self.cert_file,
423
+ key_password=self.key_password,
424
+ ca_certs=self.ca_certs,
425
+ ca_cert_dir=self.ca_cert_dir,
426
+ ca_cert_data=self.ca_cert_data,
427
+ server_hostname=server_hostname,
428
+ ssl_context=context,
429
+ tls_in_tls=tls_in_tls,
430
+ )
431
+
432
+ # If we're using all defaults and the connection
433
+ # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
434
+ # for the host.
435
+ if (
436
+ default_ssl_context
437
+ and self.ssl_version is None
438
+ and hasattr(self.sock, "version")
439
+ and self.sock.version() in {"TLSv1", "TLSv1.1"}
440
+ ): # Defensive:
441
+ warnings.warn(
442
+ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
443
+ "and will be disabled in urllib3 v2.0.0. Connecting to "
444
+ "'%s' with '%s' can be enabled by explicitly opting-in "
445
+ "with 'ssl_version'" % (self.host, self.sock.version()),
446
+ DeprecationWarning,
447
+ )
448
+
449
+ if self.assert_fingerprint:
450
+ assert_fingerprint(
451
+ self.sock.getpeercert(binary_form=True), self.assert_fingerprint
452
+ )
453
+ elif (
454
+ context.verify_mode != ssl.CERT_NONE
455
+ and not getattr(context, "check_hostname", False)
456
+ and self.assert_hostname is not False
457
+ ):
458
+ # While urllib3 attempts to always turn off hostname matching from
459
+ # the TLS library, this cannot always be done. So we check whether
460
+ # the TLS Library still thinks it's matching hostnames.
461
+ cert = self.sock.getpeercert()
462
+ if not cert.get("subjectAltName", ()):
463
+ warnings.warn(
464
+ (
465
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
466
+ "`commonName` for now. This feature is being removed by major browsers and "
467
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
468
+ "for details.)".format(hostname)
469
+ ),
470
+ SubjectAltNameWarning,
471
+ )
472
+ _match_hostname(cert, self.assert_hostname or server_hostname)
473
+
474
+ self.is_verified = (
475
+ context.verify_mode == ssl.CERT_REQUIRED
476
+ or self.assert_fingerprint is not None
477
+ )
478
+
479
+ def _connect_tls_proxy(self, hostname, conn):
480
+ """
481
+ Establish a TLS connection to the proxy using the provided SSL context.
482
+ """
483
+ proxy_config = self.proxy_config
484
+ ssl_context = proxy_config.ssl_context
485
+ if ssl_context:
486
+ # If the user provided a proxy context, we assume CA and client
487
+ # certificates have already been set
488
+ return ssl_wrap_socket(
489
+ sock=conn,
490
+ server_hostname=hostname,
491
+ ssl_context=ssl_context,
492
+ )
493
+
494
+ ssl_context = create_proxy_ssl_context(
495
+ self.ssl_version,
496
+ self.cert_reqs,
497
+ self.ca_certs,
498
+ self.ca_cert_dir,
499
+ self.ca_cert_data,
500
+ )
501
+
502
+ # If no cert was provided, use only the default options for server
503
+ # certificate validation
504
+ socket = ssl_wrap_socket(
505
+ sock=conn,
506
+ ca_certs=self.ca_certs,
507
+ ca_cert_dir=self.ca_cert_dir,
508
+ ca_cert_data=self.ca_cert_data,
509
+ server_hostname=hostname,
510
+ ssl_context=ssl_context,
511
+ )
512
+
513
+ if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
514
+ ssl_context, "check_hostname", False
515
+ ):
516
+ # While urllib3 attempts to always turn off hostname matching from
517
+ # the TLS library, this cannot always be done. So we check whether
518
+ # the TLS Library still thinks it's matching hostnames.
519
+ cert = socket.getpeercert()
520
+ if not cert.get("subjectAltName", ()):
521
+ warnings.warn(
522
+ (
523
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
524
+ "`commonName` for now. This feature is being removed by major browsers and "
525
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
526
+ "for details.)".format(hostname)
527
+ ),
528
+ SubjectAltNameWarning,
529
+ )
530
+ _match_hostname(cert, hostname)
531
+
532
+ self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
533
+ return socket
534
+
535
+
536
+ def _match_hostname(cert, asserted_hostname):
537
+ # Our upstream implementation of ssl.match_hostname()
538
+ # only applies this normalization to IP addresses so it doesn't
539
+ # match DNS SANs so we do the same thing!
540
+ stripped_hostname = asserted_hostname.strip("u[]")
541
+ if is_ipaddress(stripped_hostname):
542
+ asserted_hostname = stripped_hostname
543
+
544
+ try:
545
+ match_hostname(cert, asserted_hostname)
546
+ except CertificateError as e:
547
+ log.warning(
548
+ "Certificate did not match expected hostname: %s. Certificate: %s",
549
+ asserted_hostname,
550
+ cert,
551
+ )
552
+ # Add cert to exception and reraise so client code can inspect
553
+ # the cert when catching the exception, if they want to
554
+ e._peer_cert = cert
555
+ raise
556
+
557
+
558
+ def _get_default_user_agent():
559
+ return "python-urllib3/%s" % __version__
560
+
561
+
562
+ class DummyConnection(object):
563
+ """Used to detect a failed ConnectionCls import."""
564
+
565
+ pass
566
+
567
+
568
+ if not ssl:
569
+ HTTPSConnection = DummyConnection # noqa: F811
570
+
571
+
572
+ VerifiedHTTPSConnection = HTTPSConnection
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py ADDED
File without changes
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc ADDED
Binary file (1.37 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc ADDED
Binary file (8.18 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc ADDED
Binary file (3.61 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc ADDED
Binary file (15.8 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc ADDED
Binary file (21.9 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This module provides means to detect the App Engine environment.
3
+ """
4
+
5
+ import os
6
+
7
+
8
+ def is_appengine():
9
+ return is_local_appengine() or is_prod_appengine()
10
+
11
+
12
+ def is_appengine_sandbox():
13
+ """Reports if the app is running in the first generation sandbox.
14
+
15
+ The second generation runtimes are technically still in a sandbox, but it
16
+ is much less restrictive, so generally you shouldn't need to check for it.
17
+ see https://cloud.google.com/appengine/docs/standard/runtimes
18
+ """
19
+ return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
20
+
21
+
22
+ def is_local_appengine():
23
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
24
+ "SERVER_SOFTWARE", ""
25
+ ).startswith("Development/")
26
+
27
+
28
+ def is_prod_appengine():
29
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
30
+ "SERVER_SOFTWARE", ""
31
+ ).startswith("Google App Engine/")
32
+
33
+
34
+ def is_prod_appengine_mvms():
35
+ """Deprecated."""
36
+ return False
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/appengine.py ADDED
@@ -0,0 +1,314 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ This module provides a pool manager that uses Google App Engine's
3
+ `URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
4
+
5
+ Example usage::
6
+
7
+ from pip._vendor.urllib3 import PoolManager
8
+ from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
9
+
10
+ if is_appengine_sandbox():
11
+ # AppEngineManager uses AppEngine's URLFetch API behind the scenes
12
+ http = AppEngineManager()
13
+ else:
14
+ # PoolManager uses a socket-level API behind the scenes
15
+ http = PoolManager()
16
+
17
+ r = http.request('GET', 'https://google.com/')
18
+
19
+ There are `limitations <https://cloud.google.com/appengine/docs/python/\
20
+ urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
21
+ the best choice for your application. There are three options for using
22
+ urllib3 on Google App Engine:
23
+
24
+ 1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
25
+ cost-effective in many circumstances as long as your usage is within the
26
+ limitations.
27
+ 2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
28
+ Sockets also have `limitations and restrictions
29
+ <https://cloud.google.com/appengine/docs/python/sockets/\
30
+ #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
31
+ To use sockets, be sure to specify the following in your ``app.yaml``::
32
+
33
+ env_variables:
34
+ GAE_USE_SOCKETS_HTTPLIB : 'true'
35
+
36
+ 3. If you are using `App Engine Flexible
37
+ <https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
38
+ :class:`PoolManager` without any configuration or special environment variables.
39
+ """
40
+
41
+ from __future__ import absolute_import
42
+
43
+ import io
44
+ import logging
45
+ import warnings
46
+
47
+ from ..exceptions import (
48
+ HTTPError,
49
+ HTTPWarning,
50
+ MaxRetryError,
51
+ ProtocolError,
52
+ SSLError,
53
+ TimeoutError,
54
+ )
55
+ from ..packages.six.moves.urllib.parse import urljoin
56
+ from ..request import RequestMethods
57
+ from ..response import HTTPResponse
58
+ from ..util.retry import Retry
59
+ from ..util.timeout import Timeout
60
+ from . import _appengine_environ
61
+
62
+ try:
63
+ from google.appengine.api import urlfetch
64
+ except ImportError:
65
+ urlfetch = None
66
+
67
+
68
+ log = logging.getLogger(__name__)
69
+
70
+
71
+ class AppEnginePlatformWarning(HTTPWarning):
72
+ pass
73
+
74
+
75
+ class AppEnginePlatformError(HTTPError):
76
+ pass
77
+
78
+
79
+ class AppEngineManager(RequestMethods):
80
+ """
81
+ Connection manager for Google App Engine sandbox applications.
82
+
83
+ This manager uses the URLFetch service directly instead of using the
84
+ emulated httplib, and is subject to URLFetch limitations as described in
85
+ the App Engine documentation `here
86
+ <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
87
+
88
+ Notably it will raise an :class:`AppEnginePlatformError` if:
89
+ * URLFetch is not available.
90
+ * If you attempt to use this on App Engine Flexible, as full socket
91
+ support is available.
92
+ * If a request size is more than 10 megabytes.
93
+ * If a response size is more than 32 megabytes.
94
+ * If you use an unsupported request method such as OPTIONS.
95
+
96
+ Beyond those cases, it will raise normal urllib3 errors.
97
+ """
98
+
99
+ def __init__(
100
+ self,
101
+ headers=None,
102
+ retries=None,
103
+ validate_certificate=True,
104
+ urlfetch_retries=True,
105
+ ):
106
+ if not urlfetch:
107
+ raise AppEnginePlatformError(
108
+ "URLFetch is not available in this environment."
109
+ )
110
+
111
+ warnings.warn(
112
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
113
+ "of sockets. To use sockets directly instead of URLFetch see "
114
+ "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
115
+ AppEnginePlatformWarning,
116
+ )
117
+
118
+ RequestMethods.__init__(self, headers)
119
+ self.validate_certificate = validate_certificate
120
+ self.urlfetch_retries = urlfetch_retries
121
+
122
+ self.retries = retries or Retry.DEFAULT
123
+
124
+ def __enter__(self):
125
+ return self
126
+
127
+ def __exit__(self, exc_type, exc_val, exc_tb):
128
+ # Return False to re-raise any potential exceptions
129
+ return False
130
+
131
+ def urlopen(
132
+ self,
133
+ method,
134
+ url,
135
+ body=None,
136
+ headers=None,
137
+ retries=None,
138
+ redirect=True,
139
+ timeout=Timeout.DEFAULT_TIMEOUT,
140
+ **response_kw
141
+ ):
142
+
143
+ retries = self._get_retries(retries, redirect)
144
+
145
+ try:
146
+ follow_redirects = redirect and retries.redirect != 0 and retries.total
147
+ response = urlfetch.fetch(
148
+ url,
149
+ payload=body,
150
+ method=method,
151
+ headers=headers or {},
152
+ allow_truncated=False,
153
+ follow_redirects=self.urlfetch_retries and follow_redirects,
154
+ deadline=self._get_absolute_timeout(timeout),
155
+ validate_certificate=self.validate_certificate,
156
+ )
157
+ except urlfetch.DeadlineExceededError as e:
158
+ raise TimeoutError(self, e)
159
+
160
+ except urlfetch.InvalidURLError as e:
161
+ if "too large" in str(e):
162
+ raise AppEnginePlatformError(
163
+ "URLFetch request too large, URLFetch only "
164
+ "supports requests up to 10mb in size.",
165
+ e,
166
+ )
167
+ raise ProtocolError(e)
168
+
169
+ except urlfetch.DownloadError as e:
170
+ if "Too many redirects" in str(e):
171
+ raise MaxRetryError(self, url, reason=e)
172
+ raise ProtocolError(e)
173
+
174
+ except urlfetch.ResponseTooLargeError as e:
175
+ raise AppEnginePlatformError(
176
+ "URLFetch response too large, URLFetch only supports"
177
+ "responses up to 32mb in size.",
178
+ e,
179
+ )
180
+
181
+ except urlfetch.SSLCertificateError as e:
182
+ raise SSLError(e)
183
+
184
+ except urlfetch.InvalidMethodError as e:
185
+ raise AppEnginePlatformError(
186
+ "URLFetch does not support method: %s" % method, e
187
+ )
188
+
189
+ http_response = self._urlfetch_response_to_http_response(
190
+ response, retries=retries, **response_kw
191
+ )
192
+
193
+ # Handle redirect?
194
+ redirect_location = redirect and http_response.get_redirect_location()
195
+ if redirect_location:
196
+ # Check for redirect response
197
+ if self.urlfetch_retries and retries.raise_on_redirect:
198
+ raise MaxRetryError(self, url, "too many redirects")
199
+ else:
200
+ if http_response.status == 303:
201
+ method = "GET"
202
+
203
+ try:
204
+ retries = retries.increment(
205
+ method, url, response=http_response, _pool=self
206
+ )
207
+ except MaxRetryError:
208
+ if retries.raise_on_redirect:
209
+ raise MaxRetryError(self, url, "too many redirects")
210
+ return http_response
211
+
212
+ retries.sleep_for_retry(http_response)
213
+ log.debug("Redirecting %s -> %s", url, redirect_location)
214
+ redirect_url = urljoin(url, redirect_location)
215
+ return self.urlopen(
216
+ method,
217
+ redirect_url,
218
+ body,
219
+ headers,
220
+ retries=retries,
221
+ redirect=redirect,
222
+ timeout=timeout,
223
+ **response_kw
224
+ )
225
+
226
+ # Check if we should retry the HTTP response.
227
+ has_retry_after = bool(http_response.headers.get("Retry-After"))
228
+ if retries.is_retry(method, http_response.status, has_retry_after):
229
+ retries = retries.increment(method, url, response=http_response, _pool=self)
230
+ log.debug("Retry: %s", url)
231
+ retries.sleep(http_response)
232
+ return self.urlopen(
233
+ method,
234
+ url,
235
+ body=body,
236
+ headers=headers,
237
+ retries=retries,
238
+ redirect=redirect,
239
+ timeout=timeout,
240
+ **response_kw
241
+ )
242
+
243
+ return http_response
244
+
245
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
246
+
247
+ if is_prod_appengine():
248
+ # Production GAE handles deflate encoding automatically, but does
249
+ # not remove the encoding header.
250
+ content_encoding = urlfetch_resp.headers.get("content-encoding")
251
+
252
+ if content_encoding == "deflate":
253
+ del urlfetch_resp.headers["content-encoding"]
254
+
255
+ transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
256
+ # We have a full response's content,
257
+ # so let's make sure we don't report ourselves as chunked data.
258
+ if transfer_encoding == "chunked":
259
+ encodings = transfer_encoding.split(",")
260
+ encodings.remove("chunked")
261
+ urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
262
+
263
+ original_response = HTTPResponse(
264
+ # In order for decoding to work, we must present the content as
265
+ # a file-like object.
266
+ body=io.BytesIO(urlfetch_resp.content),
267
+ msg=urlfetch_resp.header_msg,
268
+ headers=urlfetch_resp.headers,
269
+ status=urlfetch_resp.status_code,
270
+ **response_kw
271
+ )
272
+
273
+ return HTTPResponse(
274
+ body=io.BytesIO(urlfetch_resp.content),
275
+ headers=urlfetch_resp.headers,
276
+ status=urlfetch_resp.status_code,
277
+ original_response=original_response,
278
+ **response_kw
279
+ )
280
+
281
+ def _get_absolute_timeout(self, timeout):
282
+ if timeout is Timeout.DEFAULT_TIMEOUT:
283
+ return None # Defer to URLFetch's default.
284
+ if isinstance(timeout, Timeout):
285
+ if timeout._read is not None or timeout._connect is not None:
286
+ warnings.warn(
287
+ "URLFetch does not support granular timeout settings, "
288
+ "reverting to total or default URLFetch timeout.",
289
+ AppEnginePlatformWarning,
290
+ )
291
+ return timeout.total
292
+ return timeout
293
+
294
+ def _get_retries(self, retries, redirect):
295
+ if not isinstance(retries, Retry):
296
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
297
+
298
+ if retries.connect or retries.read or retries.redirect:
299
+ warnings.warn(
300
+ "URLFetch only supports total retries and does not "
301
+ "recognize connect, read, or redirect retry parameters.",
302
+ AppEnginePlatformWarning,
303
+ )
304
+
305
+ return retries
306
+
307
+
308
+ # Alias methods from _appengine_environ to maintain public API interface.
309
+
310
+ is_appengine = _appengine_environ.is_appengine
311
+ is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
312
+ is_local_appengine = _appengine_environ.is_local_appengine
313
+ is_prod_appengine = _appengine_environ.is_prod_appengine
314
+ is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ NTLM authenticating pool, contributed by erikcederstran
3
+
4
+ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
5
+ """
6
+ from __future__ import absolute_import
7
+
8
+ import warnings
9
+ from logging import getLogger
10
+
11
+ from ntlm import ntlm
12
+
13
+ from .. import HTTPSConnectionPool
14
+ from ..packages.six.moves.http_client import HTTPSConnection
15
+
16
+ warnings.warn(
17
+ "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
18
+ "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
19
+ "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
20
+ "If you are a user of this module please comment in the mentioned issue.",
21
+ DeprecationWarning,
22
+ )
23
+
24
+ log = getLogger(__name__)
25
+
26
+
27
+ class NTLMConnectionPool(HTTPSConnectionPool):
28
+ """
29
+ Implements an NTLM authentication version of an urllib3 connection pool
30
+ """
31
+
32
+ scheme = "https"
33
+
34
+ def __init__(self, user, pw, authurl, *args, **kwargs):
35
+ """
36
+ authurl is a random URL on the server that is protected by NTLM.
37
+ user is the Windows user, probably in the DOMAIN\\username format.
38
+ pw is the password for the user.
39
+ """
40
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
41
+ self.authurl = authurl
42
+ self.rawuser = user
43
+ user_parts = user.split("\\", 1)
44
+ self.domain = user_parts[0].upper()
45
+ self.user = user_parts[1]
46
+ self.pw = pw
47
+
48
+ def _new_conn(self):
49
+ # Performs the NTLM handshake that secures the connection. The socket
50
+ # must be kept open while requests are performed.
51
+ self.num_connections += 1
52
+ log.debug(
53
+ "Starting NTLM HTTPS connection no. %d: https://%s%s",
54
+ self.num_connections,
55
+ self.host,
56
+ self.authurl,
57
+ )
58
+
59
+ headers = {"Connection": "Keep-Alive"}
60
+ req_header = "Authorization"
61
+ resp_header = "www-authenticate"
62
+
63
+ conn = HTTPSConnection(host=self.host, port=self.port)
64
+
65
+ # Send negotiation message
66
+ headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
67
+ self.rawuser
68
+ )
69
+ log.debug("Request headers: %s", headers)
70
+ conn.request("GET", self.authurl, None, headers)
71
+ res = conn.getresponse()
72
+ reshdr = dict(res.headers)
73
+ log.debug("Response status: %s %s", res.status, res.reason)
74
+ log.debug("Response headers: %s", reshdr)
75
+ log.debug("Response data: %s [...]", res.read(100))
76
+
77
+ # Remove the reference to the socket, so that it can not be closed by
78
+ # the response object (we want to keep the socket open)
79
+ res.fp = None
80
+
81
+ # Server should respond with a challenge message
82
+ auth_header_values = reshdr[resp_header].split(", ")
83
+ auth_header_value = None
84
+ for s in auth_header_values:
85
+ if s[:5] == "NTLM ":
86
+ auth_header_value = s[5:]
87
+ if auth_header_value is None:
88
+ raise Exception(
89
+ "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
90
+ )
91
+
92
+ # Send authentication message
93
+ ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
94
+ auth_header_value
95
+ )
96
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
97
+ ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
98
+ )
99
+ headers[req_header] = "NTLM %s" % auth_msg
100
+ log.debug("Request headers: %s", headers)
101
+ conn.request("GET", self.authurl, None, headers)
102
+ res = conn.getresponse()
103
+ log.debug("Response status: %s %s", res.status, res.reason)
104
+ log.debug("Response headers: %s", dict(res.headers))
105
+ log.debug("Response data: %s [...]", res.read()[:100])
106
+ if res.status != 200:
107
+ if res.status == 401:
108
+ raise Exception("Server rejected request: wrong username or password")
109
+ raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
110
+
111
+ res.fp = None
112
+ log.debug("Connection established")
113
+ return conn
114
+
115
+ def urlopen(
116
+ self,
117
+ method,
118
+ url,
119
+ body=None,
120
+ headers=None,
121
+ retries=3,
122
+ redirect=True,
123
+ assert_same_host=True,
124
+ ):
125
+ if headers is None:
126
+ headers = {}
127
+ headers["Connection"] = "Keep-Alive"
128
+ return super(NTLMConnectionPool, self).urlopen(
129
+ method, url, body, headers, retries, redirect, assert_same_host
130
+ )
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py ADDED
@@ -0,0 +1,518 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ TLS with SNI_-support for Python 2. Follow these instructions if you would
3
+ like to verify TLS certificates in Python 2. Note, the default libraries do
4
+ *not* do certificate checking; you need to do additional work to validate
5
+ certificates yourself.
6
+
7
+ This needs the following packages installed:
8
+
9
+ * `pyOpenSSL`_ (tested with 16.0.0)
10
+ * `cryptography`_ (minimum 1.3.4, from pyopenssl)
11
+ * `idna`_ (minimum 2.0, from cryptography)
12
+
13
+ However, pyopenssl depends on cryptography, which depends on idna, so while we
14
+ use all three directly here we end up having relatively few packages required.
15
+
16
+ You can install them with the following command:
17
+
18
+ .. code-block:: bash
19
+
20
+ $ python -m pip install pyopenssl cryptography idna
21
+
22
+ To activate certificate checking, call
23
+ :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
24
+ before you begin making HTTP requests. This can be done in a ``sitecustomize``
25
+ module, or at any other time before your application begins using ``urllib3``,
26
+ like this:
27
+
28
+ .. code-block:: python
29
+
30
+ try:
31
+ import pip._vendor.urllib3.contrib.pyopenssl as pyopenssl
32
+ pyopenssl.inject_into_urllib3()
33
+ except ImportError:
34
+ pass
35
+
36
+ Now you can use :mod:`urllib3` as you normally would, and it will support SNI
37
+ when the required modules are installed.
38
+
39
+ Activating this module also has the positive side effect of disabling SSL/TLS
40
+ compression in Python 2 (see `CRIME attack`_).
41
+
42
+ .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
43
+ .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
44
+ .. _pyopenssl: https://www.pyopenssl.org
45
+ .. _cryptography: https://cryptography.io
46
+ .. _idna: https://github.com/kjd/idna
47
+ """
48
+ from __future__ import absolute_import
49
+
50
+ import OpenSSL.crypto
51
+ import OpenSSL.SSL
52
+ from cryptography import x509
53
+ from cryptography.hazmat.backends.openssl import backend as openssl_backend
54
+
55
+ try:
56
+ from cryptography.x509 import UnsupportedExtension
57
+ except ImportError:
58
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
59
+ class UnsupportedExtension(Exception):
60
+ pass
61
+
62
+
63
+ from io import BytesIO
64
+ from socket import error as SocketError
65
+ from socket import timeout
66
+
67
+ try: # Platform-specific: Python 2
68
+ from socket import _fileobject
69
+ except ImportError: # Platform-specific: Python 3
70
+ _fileobject = None
71
+ from ..packages.backports.makefile import backport_makefile
72
+
73
+ import logging
74
+ import ssl
75
+ import sys
76
+ import warnings
77
+
78
+ from .. import util
79
+ from ..packages import six
80
+ from ..util.ssl_ import PROTOCOL_TLS_CLIENT
81
+
82
+ warnings.warn(
83
+ "'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
84
+ "in a future release of urllib3 2.x. Read more in this issue: "
85
+ "https://github.com/urllib3/urllib3/issues/2680",
86
+ category=DeprecationWarning,
87
+ stacklevel=2,
88
+ )
89
+
90
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
91
+
92
+ # SNI always works.
93
+ HAS_SNI = True
94
+
95
+ # Map from urllib3 to PyOpenSSL compatible parameter-values.
96
+ _openssl_versions = {
97
+ util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
98
+ PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
99
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
100
+ }
101
+
102
+ if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
103
+ _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
104
+
105
+ if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
106
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
107
+
108
+ if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
109
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
110
+
111
+
112
+ _stdlib_to_openssl_verify = {
113
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
114
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
115
+ ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
116
+ + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
117
+ }
118
+ _openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
119
+
120
+ # OpenSSL will only write 16K at a time
121
+ SSL_WRITE_BLOCKSIZE = 16384
122
+
123
+ orig_util_HAS_SNI = util.HAS_SNI
124
+ orig_util_SSLContext = util.ssl_.SSLContext
125
+
126
+
127
+ log = logging.getLogger(__name__)
128
+
129
+
130
+ def inject_into_urllib3():
131
+ "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
132
+
133
+ _validate_dependencies_met()
134
+
135
+ util.SSLContext = PyOpenSSLContext
136
+ util.ssl_.SSLContext = PyOpenSSLContext
137
+ util.HAS_SNI = HAS_SNI
138
+ util.ssl_.HAS_SNI = HAS_SNI
139
+ util.IS_PYOPENSSL = True
140
+ util.ssl_.IS_PYOPENSSL = True
141
+
142
+
143
+ def extract_from_urllib3():
144
+ "Undo monkey-patching by :func:`inject_into_urllib3`."
145
+
146
+ util.SSLContext = orig_util_SSLContext
147
+ util.ssl_.SSLContext = orig_util_SSLContext
148
+ util.HAS_SNI = orig_util_HAS_SNI
149
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
150
+ util.IS_PYOPENSSL = False
151
+ util.ssl_.IS_PYOPENSSL = False
152
+
153
+
154
+ def _validate_dependencies_met():
155
+ """
156
+ Verifies that PyOpenSSL's package-level dependencies have been met.
157
+ Throws `ImportError` if they are not met.
158
+ """
159
+ # Method added in `cryptography==1.1`; not available in older versions
160
+ from cryptography.x509.extensions import Extensions
161
+
162
+ if getattr(Extensions, "get_extension_for_class", None) is None:
163
+ raise ImportError(
164
+ "'cryptography' module missing required functionality. "
165
+ "Try upgrading to v1.3.4 or newer."
166
+ )
167
+
168
+ # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
169
+ # attribute is only present on those versions.
170
+ from OpenSSL.crypto import X509
171
+
172
+ x509 = X509()
173
+ if getattr(x509, "_x509", None) is None:
174
+ raise ImportError(
175
+ "'pyOpenSSL' module missing required functionality. "
176
+ "Try upgrading to v0.14 or newer."
177
+ )
178
+
179
+
180
+ def _dnsname_to_stdlib(name):
181
+ """
182
+ Converts a dNSName SubjectAlternativeName field to the form used by the
183
+ standard library on the given Python version.
184
+
185
+ Cryptography produces a dNSName as a unicode string that was idna-decoded
186
+ from ASCII bytes. We need to idna-encode that string to get it back, and
187
+ then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
188
+ uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
189
+
190
+ If the name cannot be idna-encoded then we return None signalling that
191
+ the name given should be skipped.
192
+ """
193
+
194
+ def idna_encode(name):
195
+ """
196
+ Borrowed wholesale from the Python Cryptography Project. It turns out
197
+ that we can't just safely call `idna.encode`: it can explode for
198
+ wildcard names. This avoids that problem.
199
+ """
200
+ from pip._vendor import idna
201
+
202
+ try:
203
+ for prefix in [u"*.", u"."]:
204
+ if name.startswith(prefix):
205
+ name = name[len(prefix) :]
206
+ return prefix.encode("ascii") + idna.encode(name)
207
+ return idna.encode(name)
208
+ except idna.core.IDNAError:
209
+ return None
210
+
211
+ # Don't send IPv6 addresses through the IDNA encoder.
212
+ if ":" in name:
213
+ return name
214
+
215
+ name = idna_encode(name)
216
+ if name is None:
217
+ return None
218
+ elif sys.version_info >= (3, 0):
219
+ name = name.decode("utf-8")
220
+ return name
221
+
222
+
223
+ def get_subj_alt_name(peer_cert):
224
+ """
225
+ Given an PyOpenSSL certificate, provides all the subject alternative names.
226
+ """
227
+ # Pass the cert to cryptography, which has much better APIs for this.
228
+ if hasattr(peer_cert, "to_cryptography"):
229
+ cert = peer_cert.to_cryptography()
230
+ else:
231
+ der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
232
+ cert = x509.load_der_x509_certificate(der, openssl_backend)
233
+
234
+ # We want to find the SAN extension. Ask Cryptography to locate it (it's
235
+ # faster than looping in Python)
236
+ try:
237
+ ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
238
+ except x509.ExtensionNotFound:
239
+ # No such extension, return the empty list.
240
+ return []
241
+ except (
242
+ x509.DuplicateExtension,
243
+ UnsupportedExtension,
244
+ x509.UnsupportedGeneralNameType,
245
+ UnicodeError,
246
+ ) as e:
247
+ # A problem has been found with the quality of the certificate. Assume
248
+ # no SAN field is present.
249
+ log.warning(
250
+ "A problem was encountered with the certificate that prevented "
251
+ "urllib3 from finding the SubjectAlternativeName field. This can "
252
+ "affect certificate validation. The error was %s",
253
+ e,
254
+ )
255
+ return []
256
+
257
+ # We want to return dNSName and iPAddress fields. We need to cast the IPs
258
+ # back to strings because the match_hostname function wants them as
259
+ # strings.
260
+ # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
261
+ # decoded. This is pretty frustrating, but that's what the standard library
262
+ # does with certificates, and so we need to attempt to do the same.
263
+ # We also want to skip over names which cannot be idna encoded.
264
+ names = [
265
+ ("DNS", name)
266
+ for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
267
+ if name is not None
268
+ ]
269
+ names.extend(
270
+ ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
271
+ )
272
+
273
+ return names
274
+
275
+
276
+ class WrappedSocket(object):
277
+ """API-compatibility wrapper for Python OpenSSL's Connection-class.
278
+
279
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
280
+ collector of pypy.
281
+ """
282
+
283
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
284
+ self.connection = connection
285
+ self.socket = socket
286
+ self.suppress_ragged_eofs = suppress_ragged_eofs
287
+ self._makefile_refs = 0
288
+ self._closed = False
289
+
290
+ def fileno(self):
291
+ return self.socket.fileno()
292
+
293
+ # Copy-pasted from Python 3.5 source code
294
+ def _decref_socketios(self):
295
+ if self._makefile_refs > 0:
296
+ self._makefile_refs -= 1
297
+ if self._closed:
298
+ self.close()
299
+
300
+ def recv(self, *args, **kwargs):
301
+ try:
302
+ data = self.connection.recv(*args, **kwargs)
303
+ except OpenSSL.SSL.SysCallError as e:
304
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
305
+ return b""
306
+ else:
307
+ raise SocketError(str(e))
308
+ except OpenSSL.SSL.ZeroReturnError:
309
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
310
+ return b""
311
+ else:
312
+ raise
313
+ except OpenSSL.SSL.WantReadError:
314
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
315
+ raise timeout("The read operation timed out")
316
+ else:
317
+ return self.recv(*args, **kwargs)
318
+
319
+ # TLS 1.3 post-handshake authentication
320
+ except OpenSSL.SSL.Error as e:
321
+ raise ssl.SSLError("read error: %r" % e)
322
+ else:
323
+ return data
324
+
325
+ def recv_into(self, *args, **kwargs):
326
+ try:
327
+ return self.connection.recv_into(*args, **kwargs)
328
+ except OpenSSL.SSL.SysCallError as e:
329
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
330
+ return 0
331
+ else:
332
+ raise SocketError(str(e))
333
+ except OpenSSL.SSL.ZeroReturnError:
334
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
335
+ return 0
336
+ else:
337
+ raise
338
+ except OpenSSL.SSL.WantReadError:
339
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
340
+ raise timeout("The read operation timed out")
341
+ else:
342
+ return self.recv_into(*args, **kwargs)
343
+
344
+ # TLS 1.3 post-handshake authentication
345
+ except OpenSSL.SSL.Error as e:
346
+ raise ssl.SSLError("read error: %r" % e)
347
+
348
+ def settimeout(self, timeout):
349
+ return self.socket.settimeout(timeout)
350
+
351
+ def _send_until_done(self, data):
352
+ while True:
353
+ try:
354
+ return self.connection.send(data)
355
+ except OpenSSL.SSL.WantWriteError:
356
+ if not util.wait_for_write(self.socket, self.socket.gettimeout()):
357
+ raise timeout()
358
+ continue
359
+ except OpenSSL.SSL.SysCallError as e:
360
+ raise SocketError(str(e))
361
+
362
+ def sendall(self, data):
363
+ total_sent = 0
364
+ while total_sent < len(data):
365
+ sent = self._send_until_done(
366
+ data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
367
+ )
368
+ total_sent += sent
369
+
370
+ def shutdown(self):
371
+ # FIXME rethrow compatible exceptions should we ever use this
372
+ self.connection.shutdown()
373
+
374
+ def close(self):
375
+ if self._makefile_refs < 1:
376
+ try:
377
+ self._closed = True
378
+ return self.connection.close()
379
+ except OpenSSL.SSL.Error:
380
+ return
381
+ else:
382
+ self._makefile_refs -= 1
383
+
384
+ def getpeercert(self, binary_form=False):
385
+ x509 = self.connection.get_peer_certificate()
386
+
387
+ if not x509:
388
+ return x509
389
+
390
+ if binary_form:
391
+ return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
392
+
393
+ return {
394
+ "subject": ((("commonName", x509.get_subject().CN),),),
395
+ "subjectAltName": get_subj_alt_name(x509),
396
+ }
397
+
398
+ def version(self):
399
+ return self.connection.get_protocol_version_name()
400
+
401
+ def _reuse(self):
402
+ self._makefile_refs += 1
403
+
404
+ def _drop(self):
405
+ if self._makefile_refs < 1:
406
+ self.close()
407
+ else:
408
+ self._makefile_refs -= 1
409
+
410
+
411
+ if _fileobject: # Platform-specific: Python 2
412
+
413
+ def makefile(self, mode, bufsize=-1):
414
+ self._makefile_refs += 1
415
+ return _fileobject(self, mode, bufsize, close=True)
416
+
417
+ else: # Platform-specific: Python 3
418
+ makefile = backport_makefile
419
+
420
+ WrappedSocket.makefile = makefile
421
+
422
+
423
+ class PyOpenSSLContext(object):
424
+ """
425
+ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
426
+ for translating the interface of the standard library ``SSLContext`` object
427
+ to calls into PyOpenSSL.
428
+ """
429
+
430
+ def __init__(self, protocol):
431
+ self.protocol = _openssl_versions[protocol]
432
+ self._ctx = OpenSSL.SSL.Context(self.protocol)
433
+ self._options = 0
434
+ self.check_hostname = False
435
+
436
+ @property
437
+ def options(self):
438
+ return self._options
439
+
440
+ @options.setter
441
+ def options(self, value):
442
+ self._options = value
443
+ self._ctx.set_options(value)
444
+
445
+ @property
446
+ def verify_mode(self):
447
+ return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
448
+
449
+ @verify_mode.setter
450
+ def verify_mode(self, value):
451
+ self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
452
+
453
+ def set_default_verify_paths(self):
454
+ self._ctx.set_default_verify_paths()
455
+
456
+ def set_ciphers(self, ciphers):
457
+ if isinstance(ciphers, six.text_type):
458
+ ciphers = ciphers.encode("utf-8")
459
+ self._ctx.set_cipher_list(ciphers)
460
+
461
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
462
+ if cafile is not None:
463
+ cafile = cafile.encode("utf-8")
464
+ if capath is not None:
465
+ capath = capath.encode("utf-8")
466
+ try:
467
+ self._ctx.load_verify_locations(cafile, capath)
468
+ if cadata is not None:
469
+ self._ctx.load_verify_locations(BytesIO(cadata))
470
+ except OpenSSL.SSL.Error as e:
471
+ raise ssl.SSLError("unable to load trusted certificates: %r" % e)
472
+
473
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
474
+ self._ctx.use_certificate_chain_file(certfile)
475
+ if password is not None:
476
+ if not isinstance(password, six.binary_type):
477
+ password = password.encode("utf-8")
478
+ self._ctx.set_passwd_cb(lambda *_: password)
479
+ self._ctx.use_privatekey_file(keyfile or certfile)
480
+
481
+ def set_alpn_protocols(self, protocols):
482
+ protocols = [six.ensure_binary(p) for p in protocols]
483
+ return self._ctx.set_alpn_protos(protocols)
484
+
485
+ def wrap_socket(
486
+ self,
487
+ sock,
488
+ server_side=False,
489
+ do_handshake_on_connect=True,
490
+ suppress_ragged_eofs=True,
491
+ server_hostname=None,
492
+ ):
493
+ cnx = OpenSSL.SSL.Connection(self._ctx, sock)
494
+
495
+ if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
496
+ server_hostname = server_hostname.encode("utf-8")
497
+
498
+ if server_hostname is not None:
499
+ cnx.set_tlsext_host_name(server_hostname)
500
+
501
+ cnx.set_connect_state()
502
+
503
+ while True:
504
+ try:
505
+ cnx.do_handshake()
506
+ except OpenSSL.SSL.WantReadError:
507
+ if not util.wait_for_read(sock, sock.gettimeout()):
508
+ raise timeout("select timed out")
509
+ continue
510
+ except OpenSSL.SSL.Error as e:
511
+ raise ssl.SSLError("bad handshake: %r" % e)
512
+ break
513
+
514
+ return WrappedSocket(cnx, sock)
515
+
516
+
517
+ def _verify_callback(cnx, x509, err_no, err_depth, return_code):
518
+ return err_no == 0
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/securetransport.py ADDED
@@ -0,0 +1,920 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ SecureTranport support for urllib3 via ctypes.
3
+
4
+ This makes platform-native TLS available to urllib3 users on macOS without the
5
+ use of a compiler. This is an important feature because the Python Package
6
+ Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
7
+ that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
8
+ this is to give macOS users an alternative solution to the problem, and that
9
+ solution is to use SecureTransport.
10
+
11
+ We use ctypes here because this solution must not require a compiler. That's
12
+ because pip is not allowed to require a compiler either.
13
+
14
+ This is not intended to be a seriously long-term solution to this problem.
15
+ The hope is that PEP 543 will eventually solve this issue for us, at which
16
+ point we can retire this contrib module. But in the short term, we need to
17
+ solve the impending tire fire that is Python on Mac without this kind of
18
+ contrib module. So...here we are.
19
+
20
+ To use this module, simply import and inject it::
21
+
22
+ import pip._vendor.urllib3.contrib.securetransport as securetransport
23
+ securetransport.inject_into_urllib3()
24
+
25
+ Happy TLSing!
26
+
27
+ This code is a bastardised version of the code found in Will Bond's oscrypto
28
+ library. An enormous debt is owed to him for blazing this trail for us. For
29
+ that reason, this code should be considered to be covered both by urllib3's
30
+ license and by oscrypto's:
31
+
32
+ .. code-block::
33
+
34
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
35
+
36
+ Permission is hereby granted, free of charge, to any person obtaining a
37
+ copy of this software and associated documentation files (the "Software"),
38
+ to deal in the Software without restriction, including without limitation
39
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
40
+ and/or sell copies of the Software, and to permit persons to whom the
41
+ Software is furnished to do so, subject to the following conditions:
42
+
43
+ The above copyright notice and this permission notice shall be included in
44
+ all copies or substantial portions of the Software.
45
+
46
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
47
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
48
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
49
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
50
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
51
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
52
+ DEALINGS IN THE SOFTWARE.
53
+ """
54
+ from __future__ import absolute_import
55
+
56
+ import contextlib
57
+ import ctypes
58
+ import errno
59
+ import os.path
60
+ import shutil
61
+ import socket
62
+ import ssl
63
+ import struct
64
+ import threading
65
+ import weakref
66
+
67
+ from .. import util
68
+ from ..packages import six
69
+ from ..util.ssl_ import PROTOCOL_TLS_CLIENT
70
+ from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
71
+ from ._securetransport.low_level import (
72
+ _assert_no_error,
73
+ _build_tls_unknown_ca_alert,
74
+ _cert_array_from_pem,
75
+ _create_cfstring_array,
76
+ _load_client_cert_chain,
77
+ _temporary_keychain,
78
+ )
79
+
80
+ try: # Platform-specific: Python 2
81
+ from socket import _fileobject
82
+ except ImportError: # Platform-specific: Python 3
83
+ _fileobject = None
84
+ from ..packages.backports.makefile import backport_makefile
85
+
86
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
87
+
88
+ # SNI always works
89
+ HAS_SNI = True
90
+
91
+ orig_util_HAS_SNI = util.HAS_SNI
92
+ orig_util_SSLContext = util.ssl_.SSLContext
93
+
94
+ # This dictionary is used by the read callback to obtain a handle to the
95
+ # calling wrapped socket. This is a pretty silly approach, but for now it'll
96
+ # do. I feel like I should be able to smuggle a handle to the wrapped socket
97
+ # directly in the SSLConnectionRef, but for now this approach will work I
98
+ # guess.
99
+ #
100
+ # We need to lock around this structure for inserts, but we don't do it for
101
+ # reads/writes in the callbacks. The reasoning here goes as follows:
102
+ #
103
+ # 1. It is not possible to call into the callbacks before the dictionary is
104
+ # populated, so once in the callback the id must be in the dictionary.
105
+ # 2. The callbacks don't mutate the dictionary, they only read from it, and
106
+ # so cannot conflict with any of the insertions.
107
+ #
108
+ # This is good: if we had to lock in the callbacks we'd drastically slow down
109
+ # the performance of this code.
110
+ _connection_refs = weakref.WeakValueDictionary()
111
+ _connection_ref_lock = threading.Lock()
112
+
113
+ # Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
114
# need *a* limit, and this one is right there (16 kB is the TLS maximum
# record size).
SSL_WRITE_BLOCKSIZE = 16384

# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
# individual cipher suites. We need to do this because this is how
# SecureTransport wants them.
CIPHER_SUITES = [
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
    SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
    SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
    SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
    SecurityConst.TLS_AES_256_GCM_SHA384,
    SecurityConst.TLS_AES_128_GCM_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
    SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
    SecurityConst.TLS_AES_128_CCM_8_SHA256,
    SecurityConst.TLS_AES_128_CCM_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
    SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
    SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
]

# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
# TLSv1 to 1.2 are supported on macOS 10.8+
_protocol_to_min_max = {
    util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
    PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
}

# The legacy per-version protocol constants are only present on some Python
# builds (they disappear as OpenSSL drops the protocols), so each mapping is
# guarded with hasattr.
if hasattr(ssl, "PROTOCOL_SSLv2"):
    _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
        SecurityConst.kSSLProtocol2,
        SecurityConst.kSSLProtocol2,
    )
if hasattr(ssl, "PROTOCOL_SSLv3"):
    _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
        SecurityConst.kSSLProtocol3,
        SecurityConst.kSSLProtocol3,
    )
if hasattr(ssl, "PROTOCOL_TLSv1"):
    _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
        SecurityConst.kTLSProtocol1,
        SecurityConst.kTLSProtocol1,
    )
if hasattr(ssl, "PROTOCOL_TLSv1_1"):
    _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
        SecurityConst.kTLSProtocol11,
        SecurityConst.kTLSProtocol11,
    )
if hasattr(ssl, "PROTOCOL_TLSv1_2"):
    _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
        SecurityConst.kTLSProtocol12,
        SecurityConst.kTLSProtocol12,
    )
186
+
187
+
188
def inject_into_urllib3():
    """
    Monkey-patch urllib3 with SecureTransport-backed SSL-support.
    """
    # The same flags live on both the util package and its ssl_ submodule;
    # patch both so every lookup path sees the SecureTransport backend.
    for module in (util, util.ssl_):
        module.SSLContext = SecureTransportContext
        module.HAS_SNI = HAS_SNI
        module.IS_SECURETRANSPORT = True
198
+
199
+
200
def extract_from_urllib3():
    """
    Undo monkey-patching by :func:`inject_into_urllib3`.
    """
    # Restore the original values captured at import time on both the util
    # package and its ssl_ submodule.
    for module in (util, util.ssl_):
        module.SSLContext = orig_util_SSLContext
        module.HAS_SNI = orig_util_HAS_SNI
        module.IS_SECURETRANSPORT = False
210
+
211
+
212
def _read_callback(connection_id, data_buffer, data_length_pointer):
    """
    SecureTransport read callback. This is called by ST to request that data
    be returned from the socket.

    Returns an OSStatus-style code: 0 on success, errSSLWouldBlock on a short
    read, errSSLClosedGraceful on EOF, errSSLClosedAbort / errSSLInternal on
    failure. Exceptions must not escape into the C caller, so any unexpected
    error is stashed on the wrapped socket and reported as errSSLInternal.
    """
    wrapped_socket = None
    try:
        wrapped_socket = _connection_refs.get(connection_id)
        if wrapped_socket is None:
            return SecurityConst.errSSLInternal
        base_socket = wrapped_socket.socket

        requested_length = data_length_pointer[0]

        timeout = wrapped_socket.gettimeout()
        error = None
        read_count = 0

        try:
            while read_count < requested_length:
                # The underlying socket is non-blocking (timeout forced to 0
                # in WrappedSocket.__init__), so we implement timeouts here
                # with wait_for_read.
                if timeout is None or timeout >= 0:
                    if not util.wait_for_read(base_socket, timeout):
                        raise socket.error(errno.EAGAIN, "timed out")

                # Read directly into ST's buffer, offset by what we already
                # received.
                remaining = requested_length - read_count
                buffer = (ctypes.c_char * remaining).from_address(
                    data_buffer + read_count
                )
                chunk_size = base_socket.recv_into(buffer, remaining)
                read_count += chunk_size
                if not chunk_size:
                    # Zero-byte read means EOF. Only report graceful close if
                    # we got nothing at all; otherwise return what we have.
                    if not read_count:
                        return SecurityConst.errSSLClosedGraceful
                    break
        except (socket.error) as e:
            error = e.errno

        if error is not None and error != errno.EAGAIN:
            data_length_pointer[0] = read_count
            if error == errno.ECONNRESET or error == errno.EPIPE:
                return SecurityConst.errSSLClosedAbort
            raise

        data_length_pointer[0] = read_count

        # A short read (including the EAGAIN/timeout path) is reported as
        # "would block" so SecureTransport retries later.
        if read_count != requested_length:
            return SecurityConst.errSSLWouldBlock

        return 0
    except Exception as e:
        # Stash the exception for _raise_on_error() to re-raise on the
        # Python side of the ST call.
        if wrapped_socket is not None:
            wrapped_socket._exception = e
        return SecurityConst.errSSLInternal
265
+
266
+
267
def _write_callback(connection_id, data_buffer, data_length_pointer):
    """
    SecureTransport write callback. This is called by ST to request that data
    actually be sent on the network.

    Mirrors :func:`_read_callback`: returns an OSStatus-style code and never
    lets a Python exception propagate into the C caller — unexpected errors
    are stashed on the wrapped socket and reported as errSSLInternal.
    """
    wrapped_socket = None
    try:
        wrapped_socket = _connection_refs.get(connection_id)
        if wrapped_socket is None:
            return SecurityConst.errSSLInternal
        base_socket = wrapped_socket.socket

        bytes_to_write = data_length_pointer[0]
        data = ctypes.string_at(data_buffer, bytes_to_write)

        timeout = wrapped_socket.gettimeout()
        error = None
        sent = 0

        try:
            while sent < bytes_to_write:
                # Timeouts are handled here because the underlying socket is
                # non-blocking (see WrappedSocket.__init__).
                if timeout is None or timeout >= 0:
                    if not util.wait_for_write(base_socket, timeout):
                        raise socket.error(errno.EAGAIN, "timed out")
                chunk_sent = base_socket.send(data)
                sent += chunk_sent

                # This has some needless copying here, but I'm not sure there's
                # much value in optimising this data path.
                data = data[chunk_sent:]
        except (socket.error) as e:
            error = e.errno

        if error is not None and error != errno.EAGAIN:
            data_length_pointer[0] = sent
            if error == errno.ECONNRESET or error == errno.EPIPE:
                return SecurityConst.errSSLClosedAbort
            raise

        data_length_pointer[0] = sent

        # Short write (including timeout) -> "would block" so ST retries.
        if sent != bytes_to_write:
            return SecurityConst.errSSLWouldBlock

        return 0
    except Exception as e:
        # Stash for _raise_on_error() to re-raise on the Python side.
        if wrapped_socket is not None:
            wrapped_socket._exception = e
        return SecurityConst.errSSLInternal
316
+
317
+
318
# We need to keep these two objects references alive: if they get GC'd while
# in use then SecureTransport could attempt to call a function that is in freed
# memory. That would be...uh...bad. Yeah, that's the word. Bad.
_read_callback_pointer = Security.SSLReadFunc(_read_callback)
_write_callback_pointer = Security.SSLWriteFunc(_write_callback)
323
+
324
+
325
class WrappedSocket(object):
    """
    API-compatibility wrapper for Python's OpenSSL wrapped socket object.

    Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
    collector of PyPy.
    """

    def __init__(self, socket):
        self.socket = socket
        self.context = None  # SSLContextRef; created lazily in handshake()
        self._makefile_refs = 0
        self._closed = False
        self._exception = None  # set by the I/O callbacks on failure
        self._keychain = None
        self._keychain_dir = None
        self._client_cert_chain = None

        # We save off the previously-configured timeout and then set it to
        # zero. This is done because we use select and friends to handle the
        # timeouts, but if we leave the timeout set on the lower socket then
        # Python will "kindly" call select on that socket again for us. Avoid
        # that by forcing the timeout to zero.
        self._timeout = self.socket.gettimeout()
        self.socket.settimeout(0)

    @contextlib.contextmanager
    def _raise_on_error(self):
        """
        A context manager that can be used to wrap calls that do I/O from
        SecureTransport. If any of the I/O callbacks hit an exception, this
        context manager will correctly propagate the exception after the fact.
        This avoids silently swallowing those exceptions.

        It also correctly forces the socket closed.
        """
        self._exception = None

        # We explicitly don't catch around this yield because in the unlikely
        # event that an exception was hit in the block we don't want to swallow
        # it.
        yield
        if self._exception is not None:
            exception, self._exception = self._exception, None
            self.close()
            raise exception

    def _set_ciphers(self):
        """
        Sets up the allowed ciphers. By default this matches the set in
        util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
        custom and doesn't allow changing at this time, mostly because parsing
        OpenSSL cipher strings is going to be a freaking nightmare.
        """
        ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
        result = Security.SSLSetEnabledCiphers(
            self.context, ciphers, len(CIPHER_SUITES)
        )
        _assert_no_error(result)

    def _set_alpn_protocols(self, protocols):
        """
        Sets up the ALPN protocols on the context.
        """
        if not protocols:
            return
        protocols_arr = _create_cfstring_array(protocols)
        try:
            result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
            _assert_no_error(result)
        finally:
            # CFArray ownership is ours; release it regardless of outcome.
            CoreFoundation.CFRelease(protocols_arr)

    def _custom_validate(self, verify, trust_bundle):
        """
        Called when we have set custom validation. We do this in two cases:
        first, when cert validation is entirely disabled; and second, when
        using a custom trust DB.
        Raises an SSLError if the connection is not trusted.
        """
        # If we disabled cert validation, just say: cool.
        if not verify:
            return

        successes = (
            SecurityConst.kSecTrustResultUnspecified,
            SecurityConst.kSecTrustResultProceed,
        )
        try:
            trust_result = self._evaluate_trust(trust_bundle)
            if trust_result in successes:
                return
            reason = "error code: %d" % (trust_result,)
        except Exception as e:
            # Do not trust on error
            reason = "exception: %r" % (e,)

        # SecureTransport does not send an alert nor shuts down the connection.
        rec = _build_tls_unknown_ca_alert(self.version())
        self.socket.sendall(rec)
        # close the connection immediately
        # l_onoff = 1, activate linger
        # l_linger = 0, linger for 0 seconds
        opts = struct.pack("ii", 1, 0)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
        self.close()
        raise ssl.SSLError("certificate verify failed, %s" % reason)

    def _evaluate_trust(self, trust_bundle):
        # We want data in memory, so load it up.
        if os.path.isfile(trust_bundle):
            with open(trust_bundle, "rb") as f:
                trust_bundle = f.read()

        cert_array = None
        trust = Security.SecTrustRef()

        try:
            # Get a CFArray that contains the certs we want.
            cert_array = _cert_array_from_pem(trust_bundle)

            # Ok, now the hard part. We want to get the SecTrustRef that ST has
            # created for this connection, shove our CAs into it, tell ST to
            # ignore everything else it knows, and then ask if it can build a
            # chain. This is a buuuunch of code.
            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
            _assert_no_error(result)
            if not trust:
                raise ssl.SSLError("Failed to copy trust reference")

            result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
            _assert_no_error(result)

            result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
            _assert_no_error(result)

            trust_result = Security.SecTrustResultType()
            result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
            _assert_no_error(result)
        finally:
            if trust:
                CoreFoundation.CFRelease(trust)

            if cert_array is not None:
                CoreFoundation.CFRelease(cert_array)

        return trust_result.value

    def handshake(
        self,
        server_hostname,
        verify,
        trust_bundle,
        min_version,
        max_version,
        client_cert,
        client_key,
        client_key_passphrase,
        alpn_protocols,
    ):
        """
        Actually performs the TLS handshake. This is run automatically by
        wrapped socket, and shouldn't be needed in user code.
        """
        # First, we do the initial bits of connection setup. We need to create
        # a context, set its I/O funcs, and set the connection reference.
        self.context = Security.SSLCreateContext(
            None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
        )
        result = Security.SSLSetIOFuncs(
            self.context, _read_callback_pointer, _write_callback_pointer
        )
        _assert_no_error(result)

        # Here we need to compute the handle to use. We do this by taking the
        # id of self modulo 2**31 - 1. If this is already in the dictionary, we
        # just keep incrementing by one until we find a free space.
        with _connection_ref_lock:
            handle = id(self) % 2147483647
            while handle in _connection_refs:
                handle = (handle + 1) % 2147483647
            _connection_refs[handle] = self

        result = Security.SSLSetConnection(self.context, handle)
        _assert_no_error(result)

        # If we have a server hostname, we should set that too.
        if server_hostname:
            if not isinstance(server_hostname, bytes):
                server_hostname = server_hostname.encode("utf-8")

            result = Security.SSLSetPeerDomainName(
                self.context, server_hostname, len(server_hostname)
            )
            _assert_no_error(result)

        # Setup the ciphers.
        self._set_ciphers()

        # Setup the ALPN protocols.
        self._set_alpn_protocols(alpn_protocols)

        # Set the minimum and maximum TLS versions.
        result = Security.SSLSetProtocolVersionMin(self.context, min_version)
        _assert_no_error(result)

        result = Security.SSLSetProtocolVersionMax(self.context, max_version)
        _assert_no_error(result)

        # If there's a trust DB, we need to use it. We do that by telling
        # SecureTransport to break on server auth. We also do that if we don't
        # want to validate the certs at all: we just won't actually do any
        # authing in that case.
        if not verify or trust_bundle is not None:
            result = Security.SSLSetSessionOption(
                self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
            )
            _assert_no_error(result)

        # If there's a client cert, we need to use it.
        if client_cert:
            self._keychain, self._keychain_dir = _temporary_keychain()
            self._client_cert_chain = _load_client_cert_chain(
                self._keychain, client_cert, client_key
            )
            result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
            _assert_no_error(result)

        while True:
            with self._raise_on_error():
                result = Security.SSLHandshake(self.context)

                if result == SecurityConst.errSSLWouldBlock:
                    raise socket.timeout("handshake timed out")
                elif result == SecurityConst.errSSLServerAuthCompleted:
                    # BreakOnServerAuth fired: run our custom validation, then
                    # resume the handshake.
                    self._custom_validate(verify, trust_bundle)
                    continue
                else:
                    _assert_no_error(result)
                    break

    def fileno(self):
        # Delegate to the underlying OS socket.
        return self.socket.fileno()

    # Copy-pasted from Python 3.5 source code
    def _decref_socketios(self):
        if self._makefile_refs > 0:
            self._makefile_refs -= 1
        if self._closed:
            self.close()

    def recv(self, bufsiz):
        # Implemented in terms of recv_into with a temporary buffer.
        buffer = ctypes.create_string_buffer(bufsiz)
        bytes_read = self.recv_into(buffer, bufsiz)
        data = buffer[:bytes_read]
        return data

    def recv_into(self, buffer, nbytes=None):
        # Read short on EOF.
        if self._closed:
            return 0

        if nbytes is None:
            nbytes = len(buffer)

        buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
        processed_bytes = ctypes.c_size_t(0)

        with self._raise_on_error():
            result = Security.SSLRead(
                self.context, buffer, nbytes, ctypes.byref(processed_bytes)
            )

        # There are some result codes that we want to treat as "not always
        # errors". Specifically, those are errSSLWouldBlock,
        # errSSLClosedGraceful, and errSSLClosedNoNotify.
        if result == SecurityConst.errSSLWouldBlock:
            # If we didn't process any bytes, then this was just a time out.
            # However, we can get errSSLWouldBlock in situations when we *did*
            # read some data, and in those cases we should just read "short"
            # and return.
            if processed_bytes.value == 0:
                # Timed out, no data read.
                raise socket.timeout("recv timed out")
        elif result in (
            SecurityConst.errSSLClosedGraceful,
            SecurityConst.errSSLClosedNoNotify,
        ):
            # The remote peer has closed this connection. We should do so as
            # well. Note that we don't actually return here because in
            # principle this could actually be fired along with return data.
            # It's unlikely though.
            self.close()
        else:
            _assert_no_error(result)

        # Ok, we read and probably succeeded. We should return whatever data
        # was actually read.
        return processed_bytes.value

    def settimeout(self, timeout):
        # Stored locally; the real socket stays non-blocking (see __init__).
        self._timeout = timeout

    def gettimeout(self):
        return self._timeout

    def send(self, data):
        processed_bytes = ctypes.c_size_t(0)

        with self._raise_on_error():
            result = Security.SSLWrite(
                self.context, data, len(data), ctypes.byref(processed_bytes)
            )

        if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
            # Timed out
            raise socket.timeout("send timed out")
        else:
            _assert_no_error(result)

        # We sent, and probably succeeded. Tell them how much we sent.
        return processed_bytes.value

    def sendall(self, data):
        # Chunk writes at SSL_WRITE_BLOCKSIZE to stay within a TLS record.
        total_sent = 0
        while total_sent < len(data):
            sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
            total_sent += sent

    def shutdown(self):
        # Send the TLS close_notify; exceptions from the I/O callbacks are
        # re-raised by _raise_on_error.
        with self._raise_on_error():
            Security.SSLClose(self.context)

    def close(self):
        # TODO: should I do clean shutdown here? Do I have to?
        if self._makefile_refs < 1:
            self._closed = True
            if self.context:
                CoreFoundation.CFRelease(self.context)
                self.context = None
            if self._client_cert_chain:
                CoreFoundation.CFRelease(self._client_cert_chain)
                self._client_cert_chain = None
            if self._keychain:
                # Tear down the temporary keychain created for the client
                # cert and remove its on-disk directory.
                Security.SecKeychainDelete(self._keychain)
                CoreFoundation.CFRelease(self._keychain)
                shutil.rmtree(self._keychain_dir)
                self._keychain = self._keychain_dir = None
            return self.socket.close()
        else:
            # Outstanding makefile() handles still reference us; defer.
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        # Urgh, annoying.
        #
        # Here's how we do this:
        #
        # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
        #    connection.
        # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
        # 3. To get the CN, call SecCertificateCopyCommonName and process that
        #    string so that it's of the appropriate type.
        # 4. To get the SAN, we need to do something a bit more complex:
        #    a. Call SecCertificateCopyValues to get the data, requesting
        #       kSecOIDSubjectAltName.
        #    b. Mess about with this dictionary to try to get the SANs out.
        #
        # This is gross. Really gross. It's going to be a few hundred LoC extra
        # just to repeat something that SecureTransport can *already do*. So my
        # operating assumption at this time is that what we want to do is
        # instead to just flag to urllib3 that it shouldn't do its own hostname
        # validation when using SecureTransport.
        if not binary_form:
            raise ValueError("SecureTransport only supports dumping binary certs")
        trust = Security.SecTrustRef()
        certdata = None
        der_bytes = None

        try:
            # Grab the trust store.
            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
            _assert_no_error(result)
            if not trust:
                # Probably we haven't done the handshake yet. No biggie.
                return None

            cert_count = Security.SecTrustGetCertificateCount(trust)
            if not cert_count:
                # Also a case that might happen if we haven't handshaked.
                # Handshook? Handshaken?
                return None

            leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
            assert leaf

            # Ok, now we want the DER bytes.
            certdata = Security.SecCertificateCopyData(leaf)
            assert certdata

            data_length = CoreFoundation.CFDataGetLength(certdata)
            data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
            der_bytes = ctypes.string_at(data_buffer, data_length)
        finally:
            if certdata:
                CoreFoundation.CFRelease(certdata)
            if trust:
                CoreFoundation.CFRelease(trust)

        return der_bytes

    def version(self):
        # Return the negotiated protocol as an OpenSSL-style version string.
        protocol = Security.SSLProtocol()
        result = Security.SSLGetNegotiatedProtocolVersion(
            self.context, ctypes.byref(protocol)
        )
        _assert_no_error(result)
        if protocol.value == SecurityConst.kTLSProtocol13:
            raise ssl.SSLError("SecureTransport does not support TLS 1.3")
        elif protocol.value == SecurityConst.kTLSProtocol12:
            return "TLSv1.2"
        elif protocol.value == SecurityConst.kTLSProtocol11:
            return "TLSv1.1"
        elif protocol.value == SecurityConst.kTLSProtocol1:
            return "TLSv1"
        elif protocol.value == SecurityConst.kSSLProtocol3:
            return "SSLv3"
        elif protocol.value == SecurityConst.kSSLProtocol2:
            return "SSLv2"
        else:
            raise ssl.SSLError("Unknown TLS version: %r" % protocol)

    def _reuse(self):
        # PyPy GC support: bump the makefile refcount (see class docstring).
        self._makefile_refs += 1

    def _drop(self):
        # PyPy GC support: release one reference, closing when none remain.
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1
764
+
765
+
766
if _fileobject:  # Platform-specific: Python 2
    # Python 2: wrap in the classic socket._fileobject, tracking refs so the
    # wrapped socket stays open while file objects exist.
    def makefile(self, mode, bufsize=-1):
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)

else:  # Platform-specific: Python 3

    def makefile(self, mode="r", buffering=None, *args, **kwargs):
        # We disable buffering with SecureTransport because it conflicts with
        # the buffering that ST does internally (see issue #1153 for more).
        buffering = 0
        return backport_makefile(self, mode, buffering, *args, **kwargs)


# Attach the platform-appropriate makefile implementation to the class.
WrappedSocket.makefile = makefile
782
+
783
+
784
class SecureTransportContext(object):
    """
    I am a wrapper class for the SecureTransport library, to translate the
    interface of the standard library ``SSLContext`` object to calls into
    SecureTransport.
    """

    def __init__(self, protocol):
        # Map the stdlib protocol constant onto SecureTransport's
        # (min, max) protocol-version pair.
        self._min_version, self._max_version = _protocol_to_min_max[protocol]
        self._options = 0
        self._verify = False
        self._trust_bundle = None
        self._client_cert = None
        self._client_key = None
        self._client_key_passphrase = None
        self._alpn_protocols = None

    @property
    def check_hostname(self):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        return True

    @check_hostname.setter
    def check_hostname(self, value):
        """
        SecureTransport cannot have its hostname checking disabled. For more,
        see the comment on getpeercert() in this file.
        """
        pass

    @property
    def options(self):
        # TODO: Well, crap.
        #
        # So this is the bit of the code that is the most likely to cause us
        # trouble. Essentially we need to enumerate all of the SSL options that
        # users might want to use and try to see if we can sensibly translate
        # them, or whether we should just ignore them.
        return self._options

    @options.setter
    def options(self, value):
        # TODO: Update in line with above.
        self._options = value

    @property
    def verify_mode(self):
        # Only CERT_REQUIRED / CERT_NONE are representable.
        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE

    @verify_mode.setter
    def verify_mode(self, value):
        self._verify = True if value == ssl.CERT_REQUIRED else False

    def set_default_verify_paths(self):
        # So, this has to do something a bit weird. Specifically, what it does
        # is nothing.
        #
        # This means that, if we had previously had load_verify_locations
        # called, this does not undo that. We need to do that because it turns
        # out that the rest of the urllib3 code will attempt to load the
        # default verify paths if it hasn't been told about any paths, even if
        # the context itself was sometime earlier. We resolve that by just
        # ignoring it.
        pass

    def load_default_certs(self):
        return self.set_default_verify_paths()

    def set_ciphers(self, ciphers):
        # For now, we just require the default cipher string.
        if ciphers != util.ssl_.DEFAULT_CIPHERS:
            raise ValueError("SecureTransport doesn't support custom cipher strings")

    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
        # OK, we only really support cadata and cafile.
        if capath is not None:
            raise ValueError("SecureTransport does not support cert directories")

        # Raise if cafile does not exist.
        if cafile is not None:
            with open(cafile):
                pass

        self._trust_bundle = cafile or cadata

    def load_cert_chain(self, certfile, keyfile=None, password=None):
        """
        Record the client certificate, key, and key passphrase for use during
        the handshake (mirrors ``ssl.SSLContext.load_cert_chain``).
        """
        self._client_cert = certfile
        self._client_key = keyfile
        # BUGFIX: this previously assigned to ``self._client_cert_passphrase``,
        # an attribute nothing ever reads; __init__ and wrap_socket() use
        # ``_client_key_passphrase``, so the password argument was silently
        # dropped. Store it under the attribute that is actually consumed.
        self._client_key_passphrase = password

    def set_alpn_protocols(self, protocols):
        """
        Sets the ALPN protocols that will later be set on the context.

        Raises a NotImplementedError if ALPN is not supported.
        """
        if not hasattr(Security, "SSLSetALPNProtocols"):
            raise NotImplementedError(
                "SecureTransport supports ALPN only in macOS 10.12+"
            )
        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]

    def wrap_socket(
        self,
        sock,
        server_side=False,
        do_handshake_on_connect=True,
        suppress_ragged_eofs=True,
        server_hostname=None,
    ):
        # So, what do we do here? Firstly, we assert some properties. This is a
        # stripped down shim, so there is some functionality we don't support.
        # See PEP 543 for the real deal.
        assert not server_side
        assert do_handshake_on_connect
        assert suppress_ragged_eofs

        # Ok, we're good to go. Now we want to create the wrapped socket object
        # and store it in the appropriate place.
        wrapped_socket = WrappedSocket(sock)

        # Now we can handshake
        wrapped_socket.handshake(
            server_hostname,
            self._verify,
            self._trust_bundle,
            self._min_version,
            self._max_version,
            self._client_cert,
            self._client_key,
            self._client_key_passphrase,
            self._alpn_protocols,
        )
        return wrapped_socket
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/socks.py ADDED
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ This module contains provisional support for SOCKS proxies from within
4
+ urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
5
+ SOCKS5. To enable its functionality, either install PySocks or install this
6
+ module with the ``socks`` extra.
7
+
8
+ The SOCKS implementation supports the full range of urllib3 features. It also
9
+ supports the following SOCKS features:
10
+
11
+ - SOCKS4A (``proxy_url='socks4a://...``)
12
+ - SOCKS4 (``proxy_url='socks4://...``)
13
+ - SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
14
+ - SOCKS5 with local DNS (``proxy_url='socks5://...``)
15
+ - Usernames and passwords for the SOCKS proxy
16
+
17
+ .. note::
18
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
19
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
20
+ server instead of client-side when connecting to a domain name.
21
+
22
+ SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
23
+ supports IPv4, IPv6, and domain names.
24
+
25
+ When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
26
+ will be sent as the ``userid`` section of the SOCKS request:
27
+
28
+ .. code-block:: python
29
+
30
+ proxy_url="socks4a://<userid>@proxy-host"
31
+
32
+ When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
33
+ of the ``proxy_url`` will be sent as the username/password to authenticate
34
+ with the proxy:
35
+
36
+ .. code-block:: python
37
+
38
+ proxy_url="socks5h://<username>:<password>@proxy-host"
39
+
40
+ """
41
+ from __future__ import absolute_import
42
+
43
+ try:
44
+ import socks
45
+ except ImportError:
46
+ import warnings
47
+
48
+ from ..exceptions import DependencyWarning
49
+
50
+ warnings.warn(
51
+ (
52
+ "SOCKS support in urllib3 requires the installation of optional "
53
+ "dependencies: specifically, PySocks. For more information, see "
54
+ "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
55
+ ),
56
+ DependencyWarning,
57
+ )
58
+ raise
59
+
60
+ from socket import error as SocketError
61
+ from socket import timeout as SocketTimeout
62
+
63
+ from ..connection import HTTPConnection, HTTPSConnection
64
+ from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
65
+ from ..exceptions import ConnectTimeoutError, NewConnectionError
66
+ from ..poolmanager import PoolManager
67
+ from ..util.url import parse_url
68
+
69
+ try:
70
+ import ssl
71
+ except ImportError:
72
+ ssl = None
73
+
74
+
75
+ class SOCKSConnection(HTTPConnection):
76
+ """
77
+ A plain-text HTTP connection that connects via a SOCKS proxy.
78
+ """
79
+
80
+ def __init__(self, *args, **kwargs):
81
+ self._socks_options = kwargs.pop("_socks_options")
82
+ super(SOCKSConnection, self).__init__(*args, **kwargs)
83
+
84
+ def _new_conn(self):
85
+ """
86
+ Establish a new connection via the SOCKS proxy.
87
+ """
88
+ extra_kw = {}
89
+ if self.source_address:
90
+ extra_kw["source_address"] = self.source_address
91
+
92
+ if self.socket_options:
93
+ extra_kw["socket_options"] = self.socket_options
94
+
95
+ try:
96
+ conn = socks.create_connection(
97
+ (self.host, self.port),
98
+ proxy_type=self._socks_options["socks_version"],
99
+ proxy_addr=self._socks_options["proxy_host"],
100
+ proxy_port=self._socks_options["proxy_port"],
101
+ proxy_username=self._socks_options["username"],
102
+ proxy_password=self._socks_options["password"],
103
+ proxy_rdns=self._socks_options["rdns"],
104
+ timeout=self.timeout,
105
+ **extra_kw
106
+ )
107
+
108
+ except SocketTimeout:
109
+ raise ConnectTimeoutError(
110
+ self,
111
+ "Connection to %s timed out. (connect timeout=%s)"
112
+ % (self.host, self.timeout),
113
+ )
114
+
115
+ except socks.ProxyError as e:
116
+ # This is fragile as hell, but it seems to be the only way to raise
117
+ # useful errors here.
118
+ if e.socket_err:
119
+ error = e.socket_err
120
+ if isinstance(error, SocketTimeout):
121
+ raise ConnectTimeoutError(
122
+ self,
123
+ "Connection to %s timed out. (connect timeout=%s)"
124
+ % (self.host, self.timeout),
125
+ )
126
+ else:
127
+ raise NewConnectionError(
128
+ self, "Failed to establish a new connection: %s" % error
129
+ )
130
+ else:
131
+ raise NewConnectionError(
132
+ self, "Failed to establish a new connection: %s" % e
133
+ )
134
+
135
+ except SocketError as e: # Defensive: PySocks should catch all these.
136
+ raise NewConnectionError(
137
+ self, "Failed to establish a new connection: %s" % e
138
+ )
139
+
140
+ return conn
141
+
142
+
143
# We don't need to duplicate the Verified/Unverified distinction from
# urllib3/connection.py here because the HTTPSConnection will already have been
# correctly set to either the Verified or Unverified form by that module. This
# means the SOCKSHTTPSConnection will automatically be the correct type.
class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
    """An HTTPS connection that tunnels through a SOCKS proxy."""

    pass
149
+
150
+
151
class SOCKSHTTPConnectionPool(HTTPConnectionPool):
    """An HTTP connection pool whose connections go via a SOCKS proxy."""

    ConnectionCls = SOCKSConnection
153
+
154
+
155
class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
    """An HTTPS connection pool whose connections go via a SOCKS proxy."""

    ConnectionCls = SOCKSHTTPSConnection
157
+
158
+
159
class SOCKSProxyManager(PoolManager):
    """
    A version of the urllib3 ProxyManager that routes connections via the
    defined SOCKS proxy.

    The SOCKS version and DNS-resolution mode are derived from the URL
    scheme (``socks4``/``socks4a``/``socks5``/``socks5h``); credentials come
    either from the explicit ``username``/``password`` arguments or from the
    userinfo part of ``proxy_url``.
    """

    pool_classes_by_scheme = {
        "http": SOCKSHTTPConnectionPool,
        "https": SOCKSHTTPSConnectionPool,
    }

    def __init__(
        self,
        proxy_url,
        username=None,
        password=None,
        num_pools=10,
        headers=None,
        **connection_pool_kw
    ):
        parsed = parse_url(proxy_url)

        # Fall back to credentials embedded in the URL only when neither
        # argument was given explicitly.
        # NOTE(review): a password containing ':' splits into >2 parts and is
        # silently ignored here — presumably callers must percent-encode or
        # pass credentials explicitly; verify against upstream docs.
        if username is None and password is None and parsed.auth is not None:
            split = parsed.auth.split(":")
            if len(split) == 2:
                username, password = split
        if parsed.scheme == "socks5":
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = False
        elif parsed.scheme == "socks5h":
            # 'h' suffix: resolve hostnames on the proxy (remote DNS).
            socks_version = socks.PROXY_TYPE_SOCKS5
            rdns = True
        elif parsed.scheme == "socks4":
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = False
        elif parsed.scheme == "socks4a":
            # SOCKS4A extension: hostname resolution happens on the proxy.
            socks_version = socks.PROXY_TYPE_SOCKS4
            rdns = True
        else:
            raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)

        self.proxy_url = proxy_url

        # Options consumed by SOCKSConnection._new_conn via kwargs.
        socks_options = {
            "socks_version": socks_version,
            "proxy_host": parsed.host,
            "proxy_port": parsed.port,
            "username": username,
            "password": password,
            "rdns": rdns,
        }
        connection_pool_kw["_socks_options"] = socks_options

        super(SOCKSProxyManager, self).__init__(
            num_pools, headers, **connection_pool_kw
        )

        # Override the instance attribute so scheme lookups use the SOCKS
        # pool classes rather than PoolManager's defaults.
        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/exceptions.py ADDED
@@ -0,0 +1,323 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
4
+
5
+ # Base Exceptions
6
+
7
+
8
class HTTPError(Exception):
    """Base exception used by this module."""
12
+
13
+
14
class HTTPWarning(Warning):
    """Base warning used by this module."""
18
+
19
+
20
class PoolError(HTTPError):
    """Base exception for errors caused within a pool."""

    def __init__(self, pool, message):
        # Keep a reference to the offending pool for callers to inspect.
        self.pool = pool
        super(PoolError, self).__init__("%s: %s" % (pool, message))

    def __reduce__(self):
        # For pickling purposes: drop the (unpicklable) pool reference.
        return self.__class__, (None, None)
30
+
31
+
32
class RequestError(PoolError):
    """Base exception for PoolErrors that have associated URLs."""

    def __init__(self, pool, url, message):
        self.url = url
        super(RequestError, self).__init__(pool, message)

    def __reduce__(self):
        # For pickling purposes: keep the URL, drop the pool.
        return self.__class__, (None, self.url, None)
42
+
43
+
44
class SSLError(HTTPError):
    """Raised when SSL certificate fails in an HTTPS connection."""
48
+
49
+
50
class ProxyError(HTTPError):
    """Raised when the connection to a proxy fails."""

    def __init__(self, message, error, *args):
        super(ProxyError, self).__init__(message, error, *args)
        # Preserve the underlying error so callers can inspect the root cause.
        self.original_error = error
56
+
57
+
58
class DecodeError(HTTPError):
    """Raised when automatic decoding based on Content-Type fails."""
62
+
63
+
64
class ProtocolError(HTTPError):
    """Raised when something unexpected happens mid-request/response."""
68
+
69
+
70
#: Renamed to ProtocolError but aliased for backwards compatibility.
ConnectionError = ProtocolError
72
+
73
+
74
+ # Leaf Exceptions
75
+
76
+
77
class MaxRetryError(RequestError):
    """Raised when the maximum number of retries is exceeded.

    :param pool: The connection pool
    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
    :param string url: The requested Url
    :param exceptions.Exception reason: The underlying error

    """

    def __init__(self, pool, url, reason=None):
        self.reason = reason
        super(MaxRetryError, self).__init__(
            pool,
            url,
            "Max retries exceeded with url: %s (Caused by %r)" % (url, reason),
        )
93
+
94
+
95
class HostChangedError(RequestError):
    """Raised when an existing pool gets a request for a foreign host."""

    def __init__(self, pool, url, retries=3):
        super(HostChangedError, self).__init__(
            pool, url, "Tried to open a foreign host with url: %s" % url
        )
        self.retries = retries
102
+
103
+
104
class TimeoutStateError(HTTPError):
    """Raised when passing an invalid state to a timeout"""
108
+
109
+
110
class TimeoutError(HTTPError):
    """Raised when a socket timeout error occurs.

    Catching this error will catch both :exc:`ReadTimeoutErrors
    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
    """
118
+
119
+
120
class ReadTimeoutError(TimeoutError, RequestError):
    """Raised when a socket timeout occurs while receiving data from a server"""
124
+
125
+
126
+ # This timeout error does not have a URL attached and needs to inherit from the
127
+ # base HTTPError
128
class ConnectTimeoutError(TimeoutError):
    """Raised when a socket timeout occurs while connecting to a server"""
132
+
133
+
134
class NewConnectionError(ConnectTimeoutError, PoolError):
    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
138
+
139
+
140
class EmptyPoolError(PoolError):
    """Raised when a pool runs out of connections and no more are allowed."""
144
+
145
+
146
class ClosedPoolError(PoolError):
    """Raised when a request enters a pool after the pool has been closed."""
150
+
151
+
152
class LocationValueError(ValueError, HTTPError):
    """Raised when there is something wrong with a given URL input."""
156
+
157
+
158
class LocationParseError(LocationValueError):
    """Raised when get_host or similar fails to parse the URL input."""

    def __init__(self, location):
        # Deliberately calls HTTPError.__init__ rather than super() so the
        # ValueError branch of the MRO is bypassed, as in the original.
        HTTPError.__init__(self, "Failed to parse: %s" % location)
        self.location = location
166
+
167
+
168
class URLSchemeUnknown(LocationValueError):
    """Raised when a URL input has an unsupported scheme."""

    def __init__(self, scheme):
        super(URLSchemeUnknown, self).__init__(
            "Not supported URL scheme %s" % scheme
        )
        self.scheme = scheme
176
+
177
+
178
class ResponseError(HTTPError):
    """Used as a container for an error reason supplied in a MaxRetryError."""

    # Message templates consumed by retry handling.
    GENERIC_ERROR = "too many error responses"
    SPECIFIC_ERROR = "too many {status_code} error responses"
183
+
184
+
185
class SecurityWarning(HTTPWarning):
    """Warned when performing security reducing actions"""
189
+
190
+
191
class SubjectAltNameWarning(SecurityWarning):
    """Warned when connecting to a host with a certificate missing a SAN."""
195
+
196
+
197
class InsecureRequestWarning(SecurityWarning):
    """Warned when making an unverified HTTPS request."""
201
+
202
+
203
class SystemTimeWarning(SecurityWarning):
    """Warned when system time is suspected to be wrong"""
207
+
208
+
209
class InsecurePlatformWarning(SecurityWarning):
    """Warned when certain TLS/SSL configuration is not available on a platform."""
213
+
214
+
215
class SNIMissingWarning(HTTPWarning):
    """Warned when making a HTTPS request without SNI available."""
219
+
220
+
221
class DependencyWarning(HTTPWarning):
    """
    Warned when an attempt is made to import a module with missing optional
    dependencies.
    """
228
+
229
+
230
class ResponseNotChunked(ProtocolError, ValueError):
    """Response needs to be chunked in order to read it as chunks."""
234
+
235
+
236
class BodyNotHttplibCompatible(HTTPError):
    """
    Body should be :class:`http.client.HTTPResponse` like
    (have an fp attribute which returns raw chunks) for read_chunked().
    """
243
+
244
+
245
class IncompleteRead(HTTPError, httplib_IncompleteRead):
    """
    Response length doesn't match expected Content-Length

    Subclass of :class:`http.client.IncompleteRead` to allow int value
    for ``partial`` to avoid creating large objects on streamed reads.
    """

    def __init__(self, partial, expected):
        super(IncompleteRead, self).__init__(partial, expected)

    def __repr__(self):
        # ``partial``/``expected`` are set by httplib's IncompleteRead.
        return "IncompleteRead(%i bytes read, %i more expected)" % (
            self.partial,
            self.expected,
        )
261
+
262
+
263
class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
    """Invalid chunk length in a chunked response."""

    def __init__(self, response, length):
        # ``partial`` is bytes read so far, ``expected`` is what remains.
        super(InvalidChunkLength, self).__init__(
            response.tell(), response.length_remaining
        )
        self.response = response
        self.length = length

    def __repr__(self):
        return "InvalidChunkLength(got length %r, %i bytes read)" % (
            self.length,
            self.partial,
        )
278
+
279
+
280
class InvalidHeader(HTTPError):
    """The header provided was somehow invalid."""
284
+
285
+
286
class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
    """ProxyManager does not support the supplied scheme"""

    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.

    def __init__(self, scheme):
        # 'localhost' is here because our URL parser parses
        # localhost:8080 -> scheme=localhost, remove if we fix this.
        if scheme == "localhost":
            scheme = None
        message = (
            "Proxy URL had no scheme, should start with http:// or https://"
            if scheme is None
            else "Proxy URL had unsupported scheme %s, should use http:// or https://"
            % scheme
        )
        super(ProxySchemeUnknown, self).__init__(message)
304
+
305
+
306
class ProxySchemeUnsupported(ValueError):
    """Fetching HTTPS resources through HTTPS proxies is unsupported"""
310
+
311
+
312
class HeaderParsingError(HTTPError):
    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""

    def __init__(self, defects, unparsed_data):
        super(HeaderParsingError, self).__init__(
            "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
        )
318
+
319
+
320
class UnrewindableBodyError(HTTPError):
    """urllib3 encountered an error when trying to rewind a body"""
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/filepost.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ import binascii
4
+ import codecs
5
+ import os
6
+ from io import BytesIO
7
+
8
+ from .fields import RequestField
9
+ from .packages import six
10
+ from .packages.six import b
11
+
12
# StreamWriter factory for UTF-8; wraps a binary file object so text can be
# written to it (equivalent to codecs.lookup("utf-8")[3]).
writer = codecs.lookup("utf-8").streamwriter
13
+
14
+
15
def choose_boundary():
    """
    Our embarrassingly-simple replacement for mimetools.choose_boundary.

    Returns 32 random hex characters (bytes on Python 2, str on Python 3).
    """
    token = binascii.hexlify(os.urandom(16))
    if six.PY2:
        return token
    return token.decode("ascii")
23
+
24
+
25
def iter_field_objects(fields):
    """
    Iterate over fields.

    Supports list of (k, v) tuples and dicts, and lists of
    :class:`~urllib3.fields.RequestField`.

    """
    source = six.iteritems(fields) if isinstance(fields, dict) else iter(fields)

    for item in source:
        if isinstance(item, RequestField):
            yield item
        else:
            # Plain (key, value) tuples are promoted to RequestField objects.
            yield RequestField.from_tuples(*item)
43
+
44
+
45
def iter_fields(fields):
    """
    .. deprecated:: 1.6

    Iterate over fields.

    The addition of :class:`~urllib3.fields.RequestField` makes this function
    obsolete. Instead, use :func:`iter_field_objects`, which returns
    :class:`~urllib3.fields.RequestField` objects.

    Supports list of (k, v) tuples and dicts.
    """
    items = six.iteritems(fields) if isinstance(fields, dict) else fields
    return ((k, v) for k, v in items)
61
+
62
+
63
def encode_multipart_formdata(fields, boundary=None):
    """
    Encode a dictionary of ``fields`` using the multipart/form-data MIME format.

    :param fields:
        Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`).

    :param boundary:
        If not specified, then a random boundary will be generated using
        :func:`urllib3.filepost.choose_boundary`.

    :return: ``(encoded_body_bytes, content_type_string)`` tuple.
    """
    if boundary is None:
        boundary = choose_boundary()

    body = BytesIO()
    for field in iter_field_objects(fields):
        body.write(b("--%s\r\n" % (boundary)))

        # Headers are text; route them through the UTF-8 stream writer.
        writer(body).write(field.render_headers())

        data = field.data
        if isinstance(data, int):
            data = str(data)  # Backwards compatibility
        if isinstance(data, six.text_type):
            writer(body).write(data)
        else:
            body.write(data)

        body.write(b"\r\n")

    # Closing boundary marker.
    body.write(b("--%s--\r\n" % (boundary)))

    content_type = str("multipart/form-data; boundary=%s" % boundary)

    return body.getvalue(), content_type
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py ADDED
File without changes
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/poolmanager.py ADDED
@@ -0,0 +1,540 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ import collections
4
+ import functools
5
+ import logging
6
+
7
+ from ._collections import HTTPHeaderDict, RecentlyUsedContainer
8
+ from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
9
+ from .exceptions import (
10
+ LocationValueError,
11
+ MaxRetryError,
12
+ ProxySchemeUnknown,
13
+ ProxySchemeUnsupported,
14
+ URLSchemeUnknown,
15
+ )
16
+ from .packages import six
17
+ from .packages.six.moves.urllib.parse import urljoin
18
+ from .request import RequestMethods
19
+ from .util.proxy import connection_requires_http_tunnel
20
+ from .util.retry import Retry
21
+ from .util.url import parse_url
22
+
23
+ __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
24
+
25
+
26
log = logging.getLogger(__name__)

# Keyword arguments stripped from the pool kwargs for plain HTTP pools,
# since they only make sense for TLS connections.
SSL_KEYWORDS = (
    "key_file",
    "cert_file",
    "cert_reqs",
    "ca_certs",
    "ssl_version",
    "ca_cert_dir",
    "ssl_context",
    "key_password",
    "server_hostname",
)

# All known keyword arguments that could be provided to the pool manager, its
# pools, or the underlying connections. This is used to construct a pool key.
_key_fields = (
    "key_scheme",  # str
    "key_host",  # str
    "key_port",  # int
    "key_timeout",  # int or float or Timeout
    "key_retries",  # int or Retry
    "key_strict",  # bool
    "key_block",  # bool
    "key_source_address",  # str
    "key_key_file",  # str
    "key_key_password",  # str
    "key_cert_file",  # str
    "key_cert_reqs",  # str
    "key_ca_certs",  # str
    "key_ssl_version",  # str
    "key_ca_cert_dir",  # str
    "key_ssl_context",  # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
    "key_maxsize",  # int
    "key_headers",  # dict
    "key__proxy",  # parsed proxy url
    "key__proxy_headers",  # dict
    "key__proxy_config",  # class
    "key_socket_options",  # list of (level (int), optname (int), value (int or str)) tuples
    "key__socks_options",  # dict
    "key_assert_hostname",  # bool or string
    "key_assert_fingerprint",  # str
    "key_server_hostname",  # str
)

#: The namedtuple class used to construct keys for the connection pool.
#: All custom key schemes should include the fields in this key at a minimum.
PoolKey = collections.namedtuple("PoolKey", _key_fields)

_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
77
+
78
+
79
+ def _default_key_normalizer(key_class, request_context):
80
+ """
81
+ Create a pool key out of a request context dictionary.
82
+
83
+ According to RFC 3986, both the scheme and host are case-insensitive.
84
+ Therefore, this function normalizes both before constructing the pool
85
+ key for an HTTPS request. If you wish to change this behaviour, provide
86
+ alternate callables to ``key_fn_by_scheme``.
87
+
88
+ :param key_class:
89
+ The class to use when constructing the key. This should be a namedtuple
90
+ with the ``scheme`` and ``host`` keys at a minimum.
91
+ :type key_class: namedtuple
92
+ :param request_context:
93
+ A dictionary-like object that contain the context for a request.
94
+ :type request_context: dict
95
+
96
+ :return: A namedtuple that can be used as a connection pool key.
97
+ :rtype: PoolKey
98
+ """
99
+ # Since we mutate the dictionary, make a copy first
100
+ context = request_context.copy()
101
+ context["scheme"] = context["scheme"].lower()
102
+ context["host"] = context["host"].lower()
103
+
104
+ # These are both dictionaries and need to be transformed into frozensets
105
+ for key in ("headers", "_proxy_headers", "_socks_options"):
106
+ if key in context and context[key] is not None:
107
+ context[key] = frozenset(context[key].items())
108
+
109
+ # The socket_options key may be a list and needs to be transformed into a
110
+ # tuple.
111
+ socket_opts = context.get("socket_options")
112
+ if socket_opts is not None:
113
+ context["socket_options"] = tuple(socket_opts)
114
+
115
+ # Map the kwargs to the names in the namedtuple - this is necessary since
116
+ # namedtuples can't have fields starting with '_'.
117
+ for key in list(context.keys()):
118
+ context["key_" + key] = context.pop(key)
119
+
120
+ # Default to ``None`` for keys missing from the context
121
+ for field in key_class._fields:
122
+ if field not in context:
123
+ context[field] = None
124
+
125
+ return key_class(**context)
126
+
127
+
128
#: A dictionary that maps a scheme to a callable that creates a pool key.
#: This can be used to alter the way pool keys are constructed, if desired.
#: Each PoolManager makes a copy of this dictionary so they can be configured
#: globally here, or individually on the instance.
key_fn_by_scheme = {
    scheme: functools.partial(_default_key_normalizer, PoolKey)
    for scheme in ("http", "https")
}

#: Default scheme -> pool class mapping used by PoolManager.
pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool}
138
+
139
+
140
class PoolManager(RequestMethods):
    """
    Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param \\**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.request('GET', 'http://google.com/')
        >>> r = manager.request('GET', 'http://google.com/mail')
        >>> r = manager.request('GET', 'http://yahoo.com/')
        >>> len(manager.pools)
        2

    """

    # Overridden by ProxyManager; ``None`` means direct connections.
    proxy = None
    proxy_config = None

    def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
        RequestMethods.__init__(self, headers)
        self.connection_pool_kw = connection_pool_kw
        # Bounded cache of pools, keyed by PoolKey (least recently used
        # entries are evicted once ``num_pools`` is exceeded).
        self.pools = RecentlyUsedContainer(num_pools)

        # Locally set the pool classes and keys so other PoolManagers can
        # override them.
        self.pool_classes_by_scheme = pool_classes_by_scheme
        self.key_fn_by_scheme = key_fn_by_scheme.copy()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.clear()
        # Return False to re-raise any potential exceptions
        return False

    def _new_pool(self, scheme, host, port, request_context=None):
        """
        Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
        any additional pool keyword arguments.

        If ``request_context`` is provided, it is provided as keyword arguments
        to the pool class used. This method is used to actually create the
        connection pools handed out by :meth:`connection_from_url` and
        companion methods. It is intended to be overridden for customization.
        """
        pool_cls = self.pool_classes_by_scheme[scheme]
        if request_context is None:
            request_context = self.connection_pool_kw.copy()

        # Although the context has everything necessary to create the pool,
        # this function has historically only used the scheme, host, and port
        # in the positional args. When an API change is acceptable these can
        # be removed.
        for key in ("scheme", "host", "port"):
            request_context.pop(key, None)

        if scheme == "http":
            # TLS-only keyword arguments are meaningless for plain HTTP pools.
            for kw in SSL_KEYWORDS:
                request_context.pop(kw, None)

        return pool_cls(host, port, **request_context)

    def clear(self):
        """
        Empty our store of pools and direct them all to close.

        This will not affect in-flight connections, but they will not be
        re-used after completion.
        """
        self.pools.clear()

    def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
        """
        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.

        If ``port`` isn't given, it will be derived from the ``scheme`` using
        ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
        provided, it is merged with the instance's ``connection_pool_kw``
        variable and used to create the new connection pool, if one is
        needed.
        """

        if not host:
            raise LocationValueError("No host specified.")

        request_context = self._merge_pool_kwargs(pool_kwargs)
        request_context["scheme"] = scheme or "http"
        if not port:
            # Fall back to the scheme's default port (80 for unknown schemes).
            port = port_by_scheme.get(request_context["scheme"].lower(), 80)
        request_context["port"] = port
        request_context["host"] = host

        return self.connection_from_context(request_context)

    def connection_from_context(self, request_context):
        """
        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.

        ``request_context`` must at least contain the ``scheme`` key and its
        value must be a key in ``key_fn_by_scheme`` instance variable.
        """
        scheme = request_context["scheme"].lower()
        pool_key_constructor = self.key_fn_by_scheme.get(scheme)
        if not pool_key_constructor:
            raise URLSchemeUnknown(scheme)
        pool_key = pool_key_constructor(request_context)

        return self.connection_from_pool_key(pool_key, request_context=request_context)

    def connection_from_pool_key(self, pool_key, request_context=None):
        """
        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.

        ``pool_key`` should be a namedtuple that only contains immutable
        objects. At a minimum it must have the ``scheme``, ``host``, and
        ``port`` fields.
        """
        # Held under the container lock so concurrent callers don't race to
        # create two pools for the same key.
        with self.pools.lock:
            # If the scheme, host, or port doesn't match existing open
            # connections, open a new ConnectionPool.
            pool = self.pools.get(pool_key)
            if pool:
                return pool

            # Make a fresh ConnectionPool of the desired type
            scheme = request_context["scheme"]
            host = request_context["host"]
            port = request_context["port"]
            pool = self._new_pool(scheme, host, port, request_context=request_context)
            self.pools[pool_key] = pool

        return pool

    def connection_from_url(self, url, pool_kwargs=None):
        """
        Similar to :func:`urllib3.connectionpool.connection_from_url`.

        If ``pool_kwargs`` is not provided and a new pool needs to be
        constructed, ``self.connection_pool_kw`` is used to initialize
        the :class:`urllib3.connectionpool.ConnectionPool`. If ``pool_kwargs``
        is provided, it is used instead. Note that if a new pool does not
        need to be created for the request, the provided ``pool_kwargs`` are
        not used.
        """
        u = parse_url(url)
        return self.connection_from_host(
            u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs
        )

    def _merge_pool_kwargs(self, override):
        """
        Merge a dictionary of override values for self.connection_pool_kw.

        This does not modify self.connection_pool_kw and returns a new dict.
        Any keys in the override dictionary with a value of ``None`` are
        removed from the merged dictionary.
        """
        base_pool_kwargs = self.connection_pool_kw.copy()
        if override:
            for key, value in override.items():
                if value is None:
                    # ``None`` means "drop this key from the defaults".
                    try:
                        del base_pool_kwargs[key]
                    except KeyError:
                        pass
                else:
                    base_pool_kwargs[key] = value
        return base_pool_kwargs

    def _proxy_requires_url_absolute_form(self, parsed_url):
        """
        Indicates if the proxy requires the complete destination URL in the
        request. Normally this is only needed when not using an HTTP CONNECT
        tunnel.
        """
        if self.proxy is None:
            return False

        return not connection_requires_http_tunnel(
            self.proxy, self.proxy_config, parsed_url.scheme
        )

    def _validate_proxy_scheme_url_selection(self, url_scheme):
        """
        Validates that were not attempting to do TLS in TLS connections on
        Python2 or with unsupported SSL implementations.
        """
        if self.proxy is None or url_scheme != "https":
            return

        if self.proxy.scheme != "https":
            return

        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
            raise ProxySchemeUnsupported(
                "Contacting HTTPS destinations through HTTPS proxies "
                "'via CONNECT tunnels' is not supported in Python 2"
            )

    def urlopen(self, method, url, redirect=True, **kw):
        """
        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
        with custom cross-host redirect logic and only sends the request-uri
        portion of the ``url``.

        The given ``url`` parameter must be absolute, such that an appropriate
        :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
        """
        u = parse_url(url)
        self._validate_proxy_scheme_url_selection(u.scheme)

        conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)

        # Redirects are handled here (cross-host aware), not by the pool.
        kw["assert_same_host"] = False
        kw["redirect"] = False

        if "headers" not in kw:
            kw["headers"] = self.headers.copy()

        if self._proxy_requires_url_absolute_form(u):
            response = conn.urlopen(method, url, **kw)
        else:
            response = conn.urlopen(method, u.request_uri, **kw)

        redirect_location = redirect and response.get_redirect_location()
        if not redirect_location:
            return response

        # Support relative URLs for redirecting.
        redirect_location = urljoin(url, redirect_location)

        if response.status == 303:
            # Change the method according to RFC 9110, Section 15.4.4.
            method = "GET"
            # And lose the body not to transfer anything sensitive.
            kw["body"] = None
            kw["headers"] = HTTPHeaderDict(kw["headers"])._prepare_for_method_change()

        retries = kw.get("retries")
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect)

        # Strip headers marked as unsafe to forward to the redirected location.
        # Check remove_headers_on_redirect to avoid a potential network call within
        # conn.is_same_host() which may use socket.gethostbyname() in the future.
        if retries.remove_headers_on_redirect and not conn.is_same_host(
            redirect_location
        ):
            headers = list(six.iterkeys(kw["headers"]))
            for header in headers:
                if header.lower() in retries.remove_headers_on_redirect:
                    kw["headers"].pop(header, None)

        try:
            retries = retries.increment(method, url, response=response, _pool=conn)
        except MaxRetryError:
            if retries.raise_on_redirect:
                response.drain_conn()
                raise
            return response

        kw["retries"] = retries
        kw["redirect"] = redirect

        log.info("Redirecting %s -> %s", url, redirect_location)

        # Drain so the connection can be returned to the pool, then recurse
        # into the redirect target.
        response.drain_conn()
        return self.urlopen(method, redirect_location, **kw)
422
+
423
+
424
class ProxyManager(PoolManager):
    """
    Behaves just like :class:`PoolManager`, but sends all requests through
    the defined proxy, using the CONNECT method for HTTPS URLs.

    :param proxy_url:
        The URL of the proxy to be used.

    :param proxy_headers:
        A dictionary containing headers that will be sent to the proxy. In case
        of HTTP they are being sent with each request, while in the
        HTTPS/CONNECT case they are sent only once. Could be used for proxy
        authentication.

    :param proxy_ssl_context:
        The proxy SSL context is used to establish the TLS connection to the
        proxy when using HTTPS proxies.

    :param use_forwarding_for_https:
        (Defaults to False) If set to True will forward requests to the HTTPS
        proxy to be made on behalf of the client instead of creating a TLS
        tunnel via the CONNECT method. **Enabling this flag means that request
        and response headers and content will be visible from the HTTPS proxy**
        whereas tunneling keeps request and response headers and content
        private. IP address, target hostname, SNI, and port are always visible
        to an HTTPS proxy even when this flag is disabled.

    Example:
        >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
        >>> r1 = proxy.request('GET', 'http://google.com/')
        >>> r2 = proxy.request('GET', 'http://httpbin.org/')
        >>> len(proxy.pools)
        1
        >>> r3 = proxy.request('GET', 'https://httpbin.org/')
        >>> r4 = proxy.request('GET', 'https://twitter.com/')
        >>> len(proxy.pools)
        3

    """

    def __init__(
        self,
        proxy_url,
        num_pools=10,
        headers=None,
        proxy_headers=None,
        proxy_ssl_context=None,
        use_forwarding_for_https=False,
        **connection_pool_kw
    ):

        # Accept an existing pool object in place of a URL string.
        if isinstance(proxy_url, HTTPConnectionPool):
            proxy_url = "%s://%s:%i" % (
                proxy_url.scheme,
                proxy_url.host,
                proxy_url.port,
            )
        proxy = parse_url(proxy_url)

        if proxy.scheme not in ("http", "https"):
            raise ProxySchemeUnknown(proxy.scheme)

        if not proxy.port:
            # Default the proxy port from its scheme (80 for unknown schemes).
            port = port_by_scheme.get(proxy.scheme, 80)
            proxy = proxy._replace(port=port)

        self.proxy = proxy
        self.proxy_headers = proxy_headers or {}
        self.proxy_ssl_context = proxy_ssl_context
        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)

        # Propagated to every pool created by this manager via the pool key.
        connection_pool_kw["_proxy"] = self.proxy
        connection_pool_kw["_proxy_headers"] = self.proxy_headers
        connection_pool_kw["_proxy_config"] = self.proxy_config

        super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)

    def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
        # HTTPS requests keep a pool per destination host (CONNECT tunnel);
        # everything else is pooled against the proxy itself.
        if scheme == "https":
            return super(ProxyManager, self).connection_from_host(
                host, port, scheme, pool_kwargs=pool_kwargs
            )

        return super(ProxyManager, self).connection_from_host(
            self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs
        )

    def _set_proxy_headers(self, url, headers=None):
        """
        Sets headers needed by proxies: specifically, the Accept and Host
        headers. Only sets headers not provided by the user.
        """
        headers_ = {"Accept": "*/*"}

        netloc = parse_url(url).netloc
        if netloc:
            headers_["Host"] = netloc

        if headers:
            # User-provided headers take precedence over the defaults above.
            headers_.update(headers)
        return headers_

    def urlopen(self, method, url, redirect=True, **kw):
        "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
        u = parse_url(url)
        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
            # For connections using HTTP CONNECT, httplib sets the necessary
            # headers on the CONNECT to the proxy. If we're not using CONNECT,
            # we'll definitely need to set 'Host' at the very least.
            headers = kw.get("headers", self.headers)
            kw["headers"] = self._set_proxy_headers(url, headers)

        return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
537
+
538
+
539
def proxy_from_url(url, **kw):
    """Construct a :class:`ProxyManager` for the given proxy ``url``."""
    return ProxyManager(proxy_url=url, **kw)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/request.py ADDED
@@ -0,0 +1,191 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ import sys
4
+
5
+ from .filepost import encode_multipart_formdata
6
+ from .packages import six
7
+ from .packages.six.moves.urllib.parse import urlencode
8
+
9
+ __all__ = ["RequestMethods"]
10
+
11
+
12
+ class RequestMethods(object):
13
+ """
14
+ Convenience mixin for classes who implement a :meth:`urlopen` method, such
15
+ as :class:`urllib3.HTTPConnectionPool` and
16
+ :class:`urllib3.PoolManager`.
17
+
18
+ Provides behavior for making common types of HTTP request methods and
19
+ decides which type of request field encoding to use.
20
+
21
+ Specifically,
22
+
23
+ :meth:`.request_encode_url` is for sending requests whose fields are
24
+ encoded in the URL (such as GET, HEAD, DELETE).
25
+
26
+ :meth:`.request_encode_body` is for sending requests whose fields are
27
+ encoded in the *body* of the request using multipart or www-form-urlencoded
28
+ (such as for POST, PUT, PATCH).
29
+
30
+ :meth:`.request` is for making any kind of request, it will look up the
31
+ appropriate encoding format and use one of the above two methods to make
32
+ the request.
33
+
34
+ Initializer parameters:
35
+
36
+ :param headers:
37
+ Headers to include with all requests, unless other headers are given
38
+ explicitly.
39
+ """
40
+
41
+ _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"}
42
+
43
+ def __init__(self, headers=None):
44
+ self.headers = headers or {}
45
+
46
+ def urlopen(
47
+ self,
48
+ method,
49
+ url,
50
+ body=None,
51
+ headers=None,
52
+ encode_multipart=True,
53
+ multipart_boundary=None,
54
+ **kw
55
+ ): # Abstract
56
+ raise NotImplementedError(
57
+ "Classes extending RequestMethods must implement "
58
+ "their own ``urlopen`` method."
59
+ )
60
+
61
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
62
+ """
63
+ Make a request using :meth:`urlopen` with the appropriate encoding of
64
+ ``fields`` based on the ``method`` used.
65
+
66
+ This is a convenience method that requires the least amount of manual
67
+ effort. It can be used in most situations, while still having the
68
+ option to drop down to more specific methods when necessary, such as
69
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
70
+ or even the lowest level :meth:`urlopen`.
71
+ """
72
+ method = method.upper()
73
+
74
+ urlopen_kw["request_url"] = url
75
+
76
+ if method in self._encode_url_methods:
77
+ return self.request_encode_url(
78
+ method, url, fields=fields, headers=headers, **urlopen_kw
79
+ )
80
+ else:
81
+ return self.request_encode_body(
82
+ method, url, fields=fields, headers=headers, **urlopen_kw
83
+ )
84
+
85
+ def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
86
+ """
87
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
88
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
89
+ """
90
+ if headers is None:
91
+ headers = self.headers
92
+
93
+ extra_kw = {"headers": headers}
94
+ extra_kw.update(urlopen_kw)
95
+
96
+ if fields:
97
+ url += "?" + urlencode(fields)
98
+
99
+ return self.urlopen(method, url, **extra_kw)
100
+
101
+ def request_encode_body(
102
+ self,
103
+ method,
104
+ url,
105
+ fields=None,
106
+ headers=None,
107
+ encode_multipart=True,
108
+ multipart_boundary=None,
109
+ **urlopen_kw
110
+ ):
111
+ """
112
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
113
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
114
+
115
+ When ``encode_multipart=True`` (default), then
116
+ :func:`urllib3.encode_multipart_formdata` is used to encode
117
+ the payload with the appropriate content type. Otherwise
118
+ :func:`urllib.parse.urlencode` is used with the
119
+ 'application/x-www-form-urlencoded' content type.
120
+
121
+ Multipart encoding must be used when posting files, and it's reasonably
122
+ safe to use it in other times too. However, it may break request
123
+ signing, such as with OAuth.
124
+
125
+ Supports an optional ``fields`` parameter of key/value strings AND
126
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
127
+ the MIME type is optional. For example::
128
+
129
+ fields = {
130
+ 'foo': 'bar',
131
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
132
+ 'realfile': ('barfile.txt', open('realfile').read()),
133
+ 'typedfile': ('bazfile.bin', open('bazfile').read(),
134
+ 'image/jpeg'),
135
+ 'nonamefile': 'contents of nonamefile field',
136
+ }
137
+
138
+ When uploading a file, providing a filename (the first parameter of the
139
+ tuple) is optional but recommended to best mimic behavior of browsers.
140
+
141
+ Note that if ``headers`` are supplied, the 'Content-Type' header will
142
+ be overwritten because it depends on the dynamic random boundary string
143
+ which is used to compose the body of the request. The random boundary
144
+ string can be explicitly set with the ``multipart_boundary`` parameter.
145
+ """
146
+ if headers is None:
147
+ headers = self.headers
148
+
149
+ extra_kw = {"headers": {}}
150
+
151
+ if fields:
152
+ if "body" in urlopen_kw:
153
+ raise TypeError(
154
+ "request got values for both 'fields' and 'body', can only specify one."
155
+ )
156
+
157
+ if encode_multipart:
158
+ body, content_type = encode_multipart_formdata(
159
+ fields, boundary=multipart_boundary
160
+ )
161
+ else:
162
+ body, content_type = (
163
+ urlencode(fields),
164
+ "application/x-www-form-urlencoded",
165
+ )
166
+
167
+ extra_kw["body"] = body
168
+ extra_kw["headers"] = {"Content-Type": content_type}
169
+
170
+ extra_kw["headers"].update(headers)
171
+ extra_kw.update(urlopen_kw)
172
+
173
+ return self.urlopen(method, url, **extra_kw)
174
+
175
+
176
+ if not six.PY2:
177
+
178
+ class RequestModule(sys.modules[__name__].__class__):
179
+ def __call__(self, *args, **kwargs):
180
+ """
181
+ If user tries to call this module directly urllib3 v2.x style raise an error to the user
182
+ suggesting they may need urllib3 v2
183
+ """
184
+ raise TypeError(
185
+ "'module' object is not callable\n"
186
+ "urllib3.request() method is not supported in this release, "
187
+ "upgrade to urllib3 v2 to use it\n"
188
+ "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
189
+ )
190
+
191
+ sys.modules[__name__].__class__ = RequestModule
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/response.py ADDED
@@ -0,0 +1,879 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import absolute_import
2
+
3
+ import io
4
+ import logging
5
+ import sys
6
+ import warnings
7
+ import zlib
8
+ from contextlib import contextmanager
9
+ from socket import error as SocketError
10
+ from socket import timeout as SocketTimeout
11
+
12
+ brotli = None
13
+
14
+ from . import util
15
+ from ._collections import HTTPHeaderDict
16
+ from .connection import BaseSSLError, HTTPException
17
+ from .exceptions import (
18
+ BodyNotHttplibCompatible,
19
+ DecodeError,
20
+ HTTPError,
21
+ IncompleteRead,
22
+ InvalidChunkLength,
23
+ InvalidHeader,
24
+ ProtocolError,
25
+ ReadTimeoutError,
26
+ ResponseNotChunked,
27
+ SSLError,
28
+ )
29
+ from .packages import six
30
+ from .util.response import is_fp_closed, is_response_to_head
31
+
32
+ log = logging.getLogger(__name__)
33
+
34
+
35
+ class DeflateDecoder(object):
36
+ def __init__(self):
37
+ self._first_try = True
38
+ self._data = b""
39
+ self._obj = zlib.decompressobj()
40
+
41
+ def __getattr__(self, name):
42
+ return getattr(self._obj, name)
43
+
44
+ def decompress(self, data):
45
+ if not data:
46
+ return data
47
+
48
+ if not self._first_try:
49
+ return self._obj.decompress(data)
50
+
51
+ self._data += data
52
+ try:
53
+ decompressed = self._obj.decompress(data)
54
+ if decompressed:
55
+ self._first_try = False
56
+ self._data = None
57
+ return decompressed
58
+ except zlib.error:
59
+ self._first_try = False
60
+ self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
61
+ try:
62
+ return self.decompress(self._data)
63
+ finally:
64
+ self._data = None
65
+
66
+
67
+ class GzipDecoderState(object):
68
+
69
+ FIRST_MEMBER = 0
70
+ OTHER_MEMBERS = 1
71
+ SWALLOW_DATA = 2
72
+
73
+
74
+ class GzipDecoder(object):
75
+ def __init__(self):
76
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
77
+ self._state = GzipDecoderState.FIRST_MEMBER
78
+
79
+ def __getattr__(self, name):
80
+ return getattr(self._obj, name)
81
+
82
+ def decompress(self, data):
83
+ ret = bytearray()
84
+ if self._state == GzipDecoderState.SWALLOW_DATA or not data:
85
+ return bytes(ret)
86
+ while True:
87
+ try:
88
+ ret += self._obj.decompress(data)
89
+ except zlib.error:
90
+ previous_state = self._state
91
+ # Ignore data after the first error
92
+ self._state = GzipDecoderState.SWALLOW_DATA
93
+ if previous_state == GzipDecoderState.OTHER_MEMBERS:
94
+ # Allow trailing garbage acceptable in other gzip clients
95
+ return bytes(ret)
96
+ raise
97
+ data = self._obj.unused_data
98
+ if not data:
99
+ return bytes(ret)
100
+ self._state = GzipDecoderState.OTHER_MEMBERS
101
+ self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
102
+
103
+
104
+ if brotli is not None:
105
+
106
+ class BrotliDecoder(object):
107
+ # Supports both 'brotlipy' and 'Brotli' packages
108
+ # since they share an import name. The top branches
109
+ # are for 'brotlipy' and bottom branches for 'Brotli'
110
+ def __init__(self):
111
+ self._obj = brotli.Decompressor()
112
+ if hasattr(self._obj, "decompress"):
113
+ self.decompress = self._obj.decompress
114
+ else:
115
+ self.decompress = self._obj.process
116
+
117
+ def flush(self):
118
+ if hasattr(self._obj, "flush"):
119
+ return self._obj.flush()
120
+ return b""
121
+
122
+
123
+ class MultiDecoder(object):
124
+ """
125
+ From RFC7231:
126
+ If one or more encodings have been applied to a representation, the
127
+ sender that applied the encodings MUST generate a Content-Encoding
128
+ header field that lists the content codings in the order in which
129
+ they were applied.
130
+ """
131
+
132
+ def __init__(self, modes):
133
+ self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
134
+
135
+ def flush(self):
136
+ return self._decoders[0].flush()
137
+
138
+ def decompress(self, data):
139
+ for d in reversed(self._decoders):
140
+ data = d.decompress(data)
141
+ return data
142
+
143
+
144
+ def _get_decoder(mode):
145
+ if "," in mode:
146
+ return MultiDecoder(mode)
147
+
148
+ if mode == "gzip":
149
+ return GzipDecoder()
150
+
151
+ if brotli is not None and mode == "br":
152
+ return BrotliDecoder()
153
+
154
+ return DeflateDecoder()
155
+
156
+
157
+ class HTTPResponse(io.IOBase):
158
+ """
159
+ HTTP Response container.
160
+
161
+ Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
162
+ loaded and decoded on-demand when the ``data`` property is accessed. This
163
+ class is also compatible with the Python standard library's :mod:`io`
164
+ module, and can hence be treated as a readable object in the context of that
165
+ framework.
166
+
167
+ Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
168
+
169
+ :param preload_content:
170
+ If True, the response's body will be preloaded during construction.
171
+
172
+ :param decode_content:
173
+ If True, will attempt to decode the body based on the
174
+ 'content-encoding' header.
175
+
176
+ :param original_response:
177
+ When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
178
+ object, it's convenient to include the original for debug purposes. It's
179
+ otherwise unused.
180
+
181
+ :param retries:
182
+ The retries contains the last :class:`~urllib3.util.retry.Retry` that
183
+ was used during the request.
184
+
185
+ :param enforce_content_length:
186
+ Enforce content length checking. Body returned by server must match
187
+ value of Content-Length header, if present. Otherwise, raise error.
188
+ """
189
+
190
+ CONTENT_DECODERS = ["gzip", "deflate"]
191
+ if brotli is not None:
192
+ CONTENT_DECODERS += ["br"]
193
+ REDIRECT_STATUSES = [301, 302, 303, 307, 308]
194
+
195
+ def __init__(
196
+ self,
197
+ body="",
198
+ headers=None,
199
+ status=0,
200
+ version=0,
201
+ reason=None,
202
+ strict=0,
203
+ preload_content=True,
204
+ decode_content=True,
205
+ original_response=None,
206
+ pool=None,
207
+ connection=None,
208
+ msg=None,
209
+ retries=None,
210
+ enforce_content_length=False,
211
+ request_method=None,
212
+ request_url=None,
213
+ auto_close=True,
214
+ ):
215
+
216
+ if isinstance(headers, HTTPHeaderDict):
217
+ self.headers = headers
218
+ else:
219
+ self.headers = HTTPHeaderDict(headers)
220
+ self.status = status
221
+ self.version = version
222
+ self.reason = reason
223
+ self.strict = strict
224
+ self.decode_content = decode_content
225
+ self.retries = retries
226
+ self.enforce_content_length = enforce_content_length
227
+ self.auto_close = auto_close
228
+
229
+ self._decoder = None
230
+ self._body = None
231
+ self._fp = None
232
+ self._original_response = original_response
233
+ self._fp_bytes_read = 0
234
+ self.msg = msg
235
+ self._request_url = request_url
236
+
237
+ if body and isinstance(body, (six.string_types, bytes)):
238
+ self._body = body
239
+
240
+ self._pool = pool
241
+ self._connection = connection
242
+
243
+ if hasattr(body, "read"):
244
+ self._fp = body
245
+
246
+ # Are we using the chunked-style of transfer encoding?
247
+ self.chunked = False
248
+ self.chunk_left = None
249
+ tr_enc = self.headers.get("transfer-encoding", "").lower()
250
+ # Don't incur the penalty of creating a list and then discarding it
251
+ encodings = (enc.strip() for enc in tr_enc.split(","))
252
+ if "chunked" in encodings:
253
+ self.chunked = True
254
+
255
+ # Determine length of response
256
+ self.length_remaining = self._init_length(request_method)
257
+
258
+ # If requested, preload the body.
259
+ if preload_content and not self._body:
260
+ self._body = self.read(decode_content=decode_content)
261
+
262
+ def get_redirect_location(self):
263
+ """
264
+ Should we redirect and where to?
265
+
266
+ :returns: Truthy redirect location string if we got a redirect status
267
+ code and valid location. ``None`` if redirect status and no
268
+ location. ``False`` if not a redirect status code.
269
+ """
270
+ if self.status in self.REDIRECT_STATUSES:
271
+ return self.headers.get("location")
272
+
273
+ return False
274
+
275
+ def release_conn(self):
276
+ if not self._pool or not self._connection:
277
+ return
278
+
279
+ self._pool._put_conn(self._connection)
280
+ self._connection = None
281
+
282
+ def drain_conn(self):
283
+ """
284
+ Read and discard any remaining HTTP response data in the response connection.
285
+
286
+ Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
287
+ """
288
+ try:
289
+ self.read()
290
+ except (HTTPError, SocketError, BaseSSLError, HTTPException):
291
+ pass
292
+
293
+ @property
294
+ def data(self):
295
+ # For backwards-compat with earlier urllib3 0.4 and earlier.
296
+ if self._body:
297
+ return self._body
298
+
299
+ if self._fp:
300
+ return self.read(cache_content=True)
301
+
302
+ @property
303
+ def connection(self):
304
+ return self._connection
305
+
306
+ def isclosed(self):
307
+ return is_fp_closed(self._fp)
308
+
309
+ def tell(self):
310
+ """
311
+ Obtain the number of bytes pulled over the wire so far. May differ from
312
+ the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
313
+ if bytes are encoded on the wire (e.g, compressed).
314
+ """
315
+ return self._fp_bytes_read
316
+
317
+ def _init_length(self, request_method):
318
+ """
319
+ Set initial length value for Response content if available.
320
+ """
321
+ length = self.headers.get("content-length")
322
+
323
+ if length is not None:
324
+ if self.chunked:
325
+ # This Response will fail with an IncompleteRead if it can't be
326
+ # received as chunked. This method falls back to attempt reading
327
+ # the response before raising an exception.
328
+ log.warning(
329
+ "Received response with both Content-Length and "
330
+ "Transfer-Encoding set. This is expressly forbidden "
331
+ "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
332
+ "attempting to process response as Transfer-Encoding: "
333
+ "chunked."
334
+ )
335
+ return None
336
+
337
+ try:
338
+ # RFC 7230 section 3.3.2 specifies multiple content lengths can
339
+ # be sent in a single Content-Length header
340
+ # (e.g. Content-Length: 42, 42). This line ensures the values
341
+ # are all valid ints and that as long as the `set` length is 1,
342
+ # all values are the same. Otherwise, the header is invalid.
343
+ lengths = set([int(val) for val in length.split(",")])
344
+ if len(lengths) > 1:
345
+ raise InvalidHeader(
346
+ "Content-Length contained multiple "
347
+ "unmatching values (%s)" % length
348
+ )
349
+ length = lengths.pop()
350
+ except ValueError:
351
+ length = None
352
+ else:
353
+ if length < 0:
354
+ length = None
355
+
356
+ # Convert status to int for comparison
357
+ # In some cases, httplib returns a status of "_UNKNOWN"
358
+ try:
359
+ status = int(self.status)
360
+ except ValueError:
361
+ status = 0
362
+
363
+ # Check for responses that shouldn't include a body
364
+ if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
365
+ length = 0
366
+
367
+ return length
368
+
369
+ def _init_decoder(self):
370
+ """
371
+ Set-up the _decoder attribute if necessary.
372
+ """
373
+ # Note: content-encoding value should be case-insensitive, per RFC 7230
374
+ # Section 3.2
375
+ content_encoding = self.headers.get("content-encoding", "").lower()
376
+ if self._decoder is None:
377
+ if content_encoding in self.CONTENT_DECODERS:
378
+ self._decoder = _get_decoder(content_encoding)
379
+ elif "," in content_encoding:
380
+ encodings = [
381
+ e.strip()
382
+ for e in content_encoding.split(",")
383
+ if e.strip() in self.CONTENT_DECODERS
384
+ ]
385
+ if len(encodings):
386
+ self._decoder = _get_decoder(content_encoding)
387
+
388
+ DECODER_ERROR_CLASSES = (IOError, zlib.error)
389
+ if brotli is not None:
390
+ DECODER_ERROR_CLASSES += (brotli.error,)
391
+
392
+ def _decode(self, data, decode_content, flush_decoder):
393
+ """
394
+ Decode the data passed in and potentially flush the decoder.
395
+ """
396
+ if not decode_content:
397
+ return data
398
+
399
+ try:
400
+ if self._decoder:
401
+ data = self._decoder.decompress(data)
402
+ except self.DECODER_ERROR_CLASSES as e:
403
+ content_encoding = self.headers.get("content-encoding", "").lower()
404
+ raise DecodeError(
405
+ "Received response with content-encoding: %s, but "
406
+ "failed to decode it." % content_encoding,
407
+ e,
408
+ )
409
+ if flush_decoder:
410
+ data += self._flush_decoder()
411
+
412
+ return data
413
+
414
+ def _flush_decoder(self):
415
+ """
416
+ Flushes the decoder. Should only be called if the decoder is actually
417
+ being used.
418
+ """
419
+ if self._decoder:
420
+ buf = self._decoder.decompress(b"")
421
+ return buf + self._decoder.flush()
422
+
423
+ return b""
424
+
425
+ @contextmanager
426
+ def _error_catcher(self):
427
+ """
428
+ Catch low-level python exceptions, instead re-raising urllib3
429
+ variants, so that low-level exceptions are not leaked in the
430
+ high-level api.
431
+
432
+ On exit, release the connection back to the pool.
433
+ """
434
+ clean_exit = False
435
+
436
+ try:
437
+ try:
438
+ yield
439
+
440
+ except SocketTimeout:
441
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
442
+ # there is yet no clean way to get at it from this context.
443
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
444
+
445
+ except BaseSSLError as e:
446
+ # FIXME: Is there a better way to differentiate between SSLErrors?
447
+ if "read operation timed out" not in str(e):
448
+ # SSL errors related to framing/MAC get wrapped and reraised here
449
+ raise SSLError(e)
450
+
451
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
452
+
453
+ except (HTTPException, SocketError) as e:
454
+ # This includes IncompleteRead.
455
+ raise ProtocolError("Connection broken: %r" % e, e)
456
+
457
+ # If no exception is thrown, we should avoid cleaning up
458
+ # unnecessarily.
459
+ clean_exit = True
460
+ finally:
461
+ # If we didn't terminate cleanly, we need to throw away our
462
+ # connection.
463
+ if not clean_exit:
464
+ # The response may not be closed but we're not going to use it
465
+ # anymore so close it now to ensure that the connection is
466
+ # released back to the pool.
467
+ if self._original_response:
468
+ self._original_response.close()
469
+
470
+ # Closing the response may not actually be sufficient to close
471
+ # everything, so if we have a hold of the connection close that
472
+ # too.
473
+ if self._connection:
474
+ self._connection.close()
475
+
476
+ # If we hold the original response but it's closed now, we should
477
+ # return the connection back to the pool.
478
+ if self._original_response and self._original_response.isclosed():
479
+ self.release_conn()
480
+
481
+ def _fp_read(self, amt):
482
+ """
483
+ Read a response with the thought that reading the number of bytes
484
+ larger than can fit in a 32-bit int at a time via SSL in some
485
+ known cases leads to an overflow error that has to be prevented
486
+ if `amt` or `self.length_remaining` indicate that a problem may
487
+ happen.
488
+
489
+ The known cases:
490
+ * 3.8 <= CPython < 3.9.7 because of a bug
491
+ https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
492
+ * urllib3 injected with pyOpenSSL-backed SSL-support.
493
+ * CPython < 3.10 only when `amt` does not fit 32-bit int.
494
+ """
495
+ assert self._fp
496
+ c_int_max = 2 ** 31 - 1
497
+ if (
498
+ (
499
+ (amt and amt > c_int_max)
500
+ or (self.length_remaining and self.length_remaining > c_int_max)
501
+ )
502
+ and not util.IS_SECURETRANSPORT
503
+ and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
504
+ ):
505
+ buffer = io.BytesIO()
506
+ # Besides `max_chunk_amt` being a maximum chunk size, it
507
+ # affects memory overhead of reading a response by this
508
+ # method in CPython.
509
+ # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
510
+ # chunk size that does not lead to an overflow error, but
511
+ # 256 MiB is a compromise.
512
+ max_chunk_amt = 2 ** 28
513
+ while amt is None or amt != 0:
514
+ if amt is not None:
515
+ chunk_amt = min(amt, max_chunk_amt)
516
+ amt -= chunk_amt
517
+ else:
518
+ chunk_amt = max_chunk_amt
519
+ data = self._fp.read(chunk_amt)
520
+ if not data:
521
+ break
522
+ buffer.write(data)
523
+ del data # to reduce peak memory usage by `max_chunk_amt`.
524
+ return buffer.getvalue()
525
+ else:
526
+ # StringIO doesn't like amt=None
527
+ return self._fp.read(amt) if amt is not None else self._fp.read()
528
+
529
+ def read(self, amt=None, decode_content=None, cache_content=False):
530
+ """
531
+ Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
532
+ parameters: ``decode_content`` and ``cache_content``.
533
+
534
+ :param amt:
535
+ How much of the content to read. If specified, caching is skipped
536
+ because it doesn't make sense to cache partial content as the full
537
+ response.
538
+
539
+ :param decode_content:
540
+ If True, will attempt to decode the body based on the
541
+ 'content-encoding' header.
542
+
543
+ :param cache_content:
544
+ If True, will save the returned data such that the same result is
545
+ returned despite of the state of the underlying file object. This
546
+ is useful if you want the ``.data`` property to continue working
547
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
548
+ set.)
549
+ """
550
+ self._init_decoder()
551
+ if decode_content is None:
552
+ decode_content = self.decode_content
553
+
554
+ if self._fp is None:
555
+ return
556
+
557
+ flush_decoder = False
558
+ fp_closed = getattr(self._fp, "closed", False)
559
+
560
+ with self._error_catcher():
561
+ data = self._fp_read(amt) if not fp_closed else b""
562
+ if amt is None:
563
+ flush_decoder = True
564
+ else:
565
+ cache_content = False
566
+ if (
567
+ amt != 0 and not data
568
+ ): # Platform-specific: Buggy versions of Python.
569
+ # Close the connection when no data is returned
570
+ #
571
+ # This is redundant to what httplib/http.client _should_
572
+ # already do. However, versions of python released before
573
+ # December 15, 2012 (http://bugs.python.org/issue16298) do
574
+ # not properly close the connection in all cases. There is
575
+ # no harm in redundantly calling close.
576
+ self._fp.close()
577
+ flush_decoder = True
578
+ if self.enforce_content_length and self.length_remaining not in (
579
+ 0,
580
+ None,
581
+ ):
582
+ # This is an edge case that httplib failed to cover due
583
+ # to concerns of backward compatibility. We're
584
+ # addressing it here to make sure IncompleteRead is
585
+ # raised during streaming, so all calls with incorrect
586
+ # Content-Length are caught.
587
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
588
+
589
+ if data:
590
+ self._fp_bytes_read += len(data)
591
+ if self.length_remaining is not None:
592
+ self.length_remaining -= len(data)
593
+
594
+ data = self._decode(data, decode_content, flush_decoder)
595
+
596
+ if cache_content:
597
+ self._body = data
598
+
599
+ return data
600
+
601
+ def stream(self, amt=2 ** 16, decode_content=None):
602
+ """
603
+ A generator wrapper for the read() method. A call will block until
604
+ ``amt`` bytes have been read from the connection or until the
605
+ connection is closed.
606
+
607
+ :param amt:
608
+ How much of the content to read. The generator will return up to
609
+ much data per iteration, but may return less. This is particularly
610
+ likely when using compressed data. However, the empty string will
611
+ never be returned.
612
+
613
+ :param decode_content:
614
+ If True, will attempt to decode the body based on the
615
+ 'content-encoding' header.
616
+ """
617
+ if self.chunked and self.supports_chunked_reads():
618
+ for line in self.read_chunked(amt, decode_content=decode_content):
619
+ yield line
620
+ else:
621
+ while not is_fp_closed(self._fp):
622
+ data = self.read(amt=amt, decode_content=decode_content)
623
+
624
+ if data:
625
+ yield data
626
+
627
+ @classmethod
628
+ def from_httplib(ResponseCls, r, **response_kw):
629
+ """
630
+ Given an :class:`http.client.HTTPResponse` instance ``r``, return a
631
+ corresponding :class:`urllib3.response.HTTPResponse` object.
632
+
633
+ Remaining parameters are passed to the HTTPResponse constructor, along
634
+ with ``original_response=r``.
635
+ """
636
+ headers = r.msg
637
+
638
+ if not isinstance(headers, HTTPHeaderDict):
639
+ if six.PY2:
640
+ # Python 2.7
641
+ headers = HTTPHeaderDict.from_httplib(headers)
642
+ else:
643
+ headers = HTTPHeaderDict(headers.items())
644
+
645
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
646
+ strict = getattr(r, "strict", 0)
647
+ resp = ResponseCls(
648
+ body=r,
649
+ headers=headers,
650
+ status=r.status,
651
+ version=r.version,
652
+ reason=r.reason,
653
+ strict=strict,
654
+ original_response=r,
655
+ **response_kw
656
+ )
657
+ return resp
658
+
659
+ # Backwards-compatibility methods for http.client.HTTPResponse
660
+ def getheaders(self):
661
+ warnings.warn(
662
+ "HTTPResponse.getheaders() is deprecated and will be removed "
663
+ "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
664
+ category=DeprecationWarning,
665
+ stacklevel=2,
666
+ )
667
+ return self.headers
668
+
669
+ def getheader(self, name, default=None):
670
+ warnings.warn(
671
+ "HTTPResponse.getheader() is deprecated and will be removed "
672
+ "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
673
+ category=DeprecationWarning,
674
+ stacklevel=2,
675
+ )
676
+ return self.headers.get(name, default)
677
+
678
+ # Backwards compatibility for http.cookiejar
679
+ def info(self):
680
+ return self.headers
681
+
682
+ # Overrides from io.IOBase
683
+ def close(self):
684
+ if not self.closed:
685
+ self._fp.close()
686
+
687
+ if self._connection:
688
+ self._connection.close()
689
+
690
+ if not self.auto_close:
691
+ io.IOBase.close(self)
692
+
693
+ @property
694
+ def closed(self):
695
+ if not self.auto_close:
696
+ return io.IOBase.closed.__get__(self)
697
+ elif self._fp is None:
698
+ return True
699
+ elif hasattr(self._fp, "isclosed"):
700
+ return self._fp.isclosed()
701
+ elif hasattr(self._fp, "closed"):
702
+ return self._fp.closed
703
+ else:
704
+ return True
705
+
706
+ def fileno(self):
707
+ if self._fp is None:
708
+ raise IOError("HTTPResponse has no file to get a fileno from")
709
+ elif hasattr(self._fp, "fileno"):
710
+ return self._fp.fileno()
711
+ else:
712
+ raise IOError(
713
+ "The file-like object this HTTPResponse is wrapped "
714
+ "around has no file descriptor"
715
+ )
716
+
717
+ def flush(self):
718
+ if (
719
+ self._fp is not None
720
+ and hasattr(self._fp, "flush")
721
+ and not getattr(self._fp, "closed", False)
722
+ ):
723
+ return self._fp.flush()
724
+
725
+ def readable(self):
726
+ # This method is required for `io` module compatibility.
727
+ return True
728
+
729
+ def readinto(self, b):
730
+ # This method is required for `io` module compatibility.
731
+ temp = self.read(len(b))
732
+ if len(temp) == 0:
733
+ return 0
734
+ else:
735
+ b[: len(temp)] = temp
736
+ return len(temp)
737
+
738
def supports_chunked_reads(self):
    """Return ``True`` if the wrapped object can serve raw chunk frames.

    A file-like object resembling :class:`http.client.HTTPResponse` carries
    an ``fp`` attribute; its presence is taken as the signal that
    ``read_chunked()`` may consume raw chunks from it.
    """
    underlying = self._fp
    return hasattr(underlying, "fp")
746
+
747
def _update_chunk_length(self):
    """Parse the next chunk-size line into ``self.chunk_left``.

    No-op while a chunk is still in progress (``chunk_left`` already set).
    On a malformed size line the response is closed and
    ``InvalidChunkLength`` is raised.
    """
    if self.chunk_left is not None:
        return
    raw_line = self._fp.fp.readline()
    # Chunk extensions ("size;name=value") are ignored; keep only the size.
    size_token = raw_line.split(b";", 1)[0]
    try:
        self.chunk_left = int(size_token, 16)
    except ValueError:
        # Invalid chunked protocol response, abort.
        self.close()
        raise InvalidChunkLength(self, size_token)
760
+
761
def _handle_chunk(self, amt):
    """Consume up to ``amt`` bytes of the current chunk (all of it if ``None``).

    When the chunk is fully consumed, the terminating CRLF is discarded and
    ``chunk_left`` reset to ``None``; a partial read only decrements
    ``chunk_left``.
    """
    remaining = self.chunk_left
    if amt is not None and amt < remaining:
        # Partial read: the chunk terminator stays on the wire.
        self.chunk_left = remaining - amt
        return self._fp._safe_read(amt)
    # amt is None, equals, or exceeds the remainder: drain the whole chunk.
    data = self._fp._safe_read(remaining)
    self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
    self.chunk_left = None
    return data
782
+
783
def read_chunked(self, amt=None, decode_content=None):
    """
    Similar to :meth:`HTTPResponse.read`, but with an additional
    parameter: ``decode_content``.

    :param amt:
        How much of the content to read. If specified, caching is skipped
        because it doesn't make sense to cache partial content as the full
        response.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.
    """
    self._init_decoder()
    # FIXME: Rewrite this method and make it a class with a better structured logic.
    if not self.chunked:
        raise ResponseNotChunked(
            "Response is not chunked. "
            "Header 'transfer-encoding: chunked' is missing."
        )
    if not self.supports_chunked_reads():
        # NOTE(review): "have have" is a typo inherited from upstream; left
        # untouched because this string is runtime behavior.
        raise BodyNotHttplibCompatible(
            "Body should be http.client.HTTPResponse like. "
            "It should have have an fp attribute which returns raw chunks."
        )

    with self._error_catcher():
        # Don't bother reading the body of a HEAD request.
        if self._original_response and is_response_to_head(self._original_response):
            self._original_response.close()
            return

        # If a response is already read and closed
        # then return immediately.
        if self._fp.fp is None:
            return

        # A zero-length chunk ("0\r\n") marks the end of the body.
        while True:
            self._update_chunk_length()
            if self.chunk_left == 0:
                break
            chunk = self._handle_chunk(amt)
            decoded = self._decode(
                chunk, decode_content=decode_content, flush_decoder=False
            )
            if decoded:
                yield decoded

        if decode_content:
            # On CPython and PyPy, we should never need to flush the
            # decoder. However, on Jython we *might* need to, so
            # lets defensively do it anyway.
            decoded = self._flush_decoder()
            if decoded:  # Platform-specific: Jython.
                yield decoded

        # Chunk content ends with \r\n: discard it (also skips any
        # trailer headers sent after the final chunk).
        while True:
            line = self._fp.fp.readline()
            if not line:
                # Some sites may not end with '\r\n'.
                break
            if line == b"\r\n":
                break

        # We read everything; close the "file".
        if self._original_response:
            self._original_response.close()
852
+
853
def geturl(self):
    """
    Return the URL that was the source of this response.

    If the request that generated this response was redirected, the last
    redirect location recorded in the retry history is returned; otherwise
    the original request URL.
    """
    retries = self.retries
    if retries is not None and retries.history:
        return retries.history[-1].redirect_location
    return self._request_url
863
+
864
def __iter__(self):
    """Iterate over the decoded body, yielding newline-terminated lines."""
    pending = []
    for chunk in self.stream(decode_content=True):
        if b"\n" not in chunk:
            pending.append(chunk)
            continue
        parts = chunk.split(b"\n")
        # The first part completes whatever was buffered so far.
        yield b"".join(pending) + parts[0] + b"\n"
        for middle in parts[1:-1]:
            yield middle + b"\n"
        # Any trailing fragment starts the next buffered line.
        pending = [parts[-1]] if parts[-1] else []
    if pending:
        yield b"".join(pending)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__init__.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
from __future__ import absolute_import

# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
from .response import is_fp_closed
from .retry import Retry
from .ssl_ import (
    ALPN_PROTOCOLS,
    HAS_SNI,
    IS_PYOPENSSL,
    IS_SECURETRANSPORT,
    PROTOCOL_TLS,
    SSLContext,
    assert_fingerprint,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)
from .timeout import Timeout, current_time
from .url import Url, get_host, parse_url, split_first
from .wait import wait_for_read, wait_for_write

# Public re-export surface of ``urllib3.util``; a tuple so it is immutable.
__all__ = (
    "HAS_SNI",
    "IS_PYOPENSSL",
    "IS_SECURETRANSPORT",
    "SSLContext",
    "PROTOCOL_TLS",
    "ALPN_PROTOCOLS",
    "Retry",
    "Timeout",
    "Url",
    "assert_fingerprint",
    "current_time",
    "is_connection_dropped",
    "is_fp_closed",
    "get_host",
    "parse_url",
    "make_headers",
    "resolve_cert_reqs",
    "resolve_ssl_version",
    "split_first",
    "ssl_wrap_socket",
    "wait_for_read",
    "wait_for_write",
    "SKIP_HEADER",
    "SKIPPABLE_HEADERS",
)
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/connection.cpython-310.pyc ADDED
Binary file (3.42 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/proxy.cpython-310.pyc ADDED
Binary file (1.33 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/queue.cpython-310.pyc ADDED
Binary file (1.05 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/request.cpython-310.pyc ADDED
Binary file (3.36 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/response.cpython-310.pyc ADDED
Binary file (2.34 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
evalkit_llava/lib/python3.10/site-packages/pip/_vendor/urllib3/util/__pycache__/ssl_match_hostname.cpython-310.pyc ADDED
Binary file (3.25 kB). View file