nnilayy commited on
Commit
270a274
·
verified ·
1 Parent(s): d0166bc

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. lib/python3.10/site-packages/_virtualenv.pth +3 -0
  2. lib/python3.10/site-packages/distutils-precedence.pth +3 -0
  3. lib/python3.10/site-packages/execnet-2.1.1.dist-info/INSTALLER +1 -0
  4. lib/python3.10/site-packages/execnet-2.1.1.dist-info/METADATA +76 -0
  5. lib/python3.10/site-packages/execnet-2.1.1.dist-info/RECORD +24 -0
  6. lib/python3.10/site-packages/execnet-2.1.1.dist-info/REQUESTED +0 -0
  7. lib/python3.10/site-packages/execnet-2.1.1.dist-info/WHEEL +4 -0
  8. lib/python3.10/site-packages/execnet-2.1.1.dist-info/licenses/LICENSE +18 -0
  9. lib/python3.10/site-packages/httpx/__init__.py +105 -0
  10. lib/python3.10/site-packages/httpx/__version__.py +3 -0
  11. lib/python3.10/site-packages/httpx/_api.py +479 -0
  12. lib/python3.10/site-packages/httpx/_auth.py +348 -0
  13. lib/python3.10/site-packages/httpx/_client.py +2065 -0
  14. lib/python3.10/site-packages/httpx/_compat.py +63 -0
  15. lib/python3.10/site-packages/httpx/_config.py +372 -0
  16. lib/python3.10/site-packages/httpx/_content.py +238 -0
  17. lib/python3.10/site-packages/httpx/_decoders.py +371 -0
  18. lib/python3.10/site-packages/httpx/_exceptions.py +379 -0
  19. lib/python3.10/site-packages/httpx/_main.py +509 -0
  20. lib/python3.10/site-packages/httpx/_models.py +1211 -0
  21. lib/python3.10/site-packages/httpx/_multipart.py +269 -0
  22. lib/python3.10/site-packages/httpx/_status_codes.py +162 -0
  23. lib/python3.10/site-packages/httpx/_transports/__init__.py +15 -0
  24. lib/python3.10/site-packages/httpx/_transports/asgi.py +174 -0
  25. lib/python3.10/site-packages/httpx/_transports/base.py +86 -0
  26. lib/python3.10/site-packages/httpx/_transports/default.py +389 -0
  27. lib/python3.10/site-packages/httpx/_transports/mock.py +43 -0
  28. lib/python3.10/site-packages/httpx/_transports/wsgi.py +149 -0
  29. lib/python3.10/site-packages/httpx/_types.py +136 -0
  30. lib/python3.10/site-packages/httpx/_urlparse.py +505 -0
  31. lib/python3.10/site-packages/httpx/_urls.py +648 -0
  32. lib/python3.10/site-packages/httpx/_utils.py +440 -0
  33. lib/python3.10/site-packages/httpx/py.typed +0 -0
  34. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/INSTALLER +1 -0
  35. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/LICENSE.txt +27 -0
  36. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/METADATA +669 -0
  37. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/RECORD +10 -0
  38. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/REQUESTED +0 -0
  39. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/WHEEL +6 -0
  40. lib/python3.10/site-packages/parameterized-0.9.0.dist-info/top_level.txt +1 -0
  41. lib/python3.10/site-packages/pyasn1_modules/rfc2315.py +294 -0
  42. lib/python3.10/site-packages/pyasn1_modules/rfc2986.py +75 -0
  43. lib/python3.10/site-packages/pyasn1_modules/rfc3709.py +207 -0
  44. lib/python3.10/site-packages/pyasn1_modules/rfc3739.py +203 -0
  45. lib/python3.10/site-packages/pyasn1_modules/rfc5084.py +97 -0
  46. lib/python3.10/site-packages/pyasn1_modules/rfc5126.py +577 -0
  47. lib/python3.10/site-packages/pyasn1_modules/rfc5208.py +56 -0
  48. lib/python3.10/site-packages/pyasn1_modules/rfc5649.py +33 -0
  49. lib/python3.10/site-packages/pyasn1_modules/rfc6010.py +88 -0
  50. lib/python3.10/site-packages/pyasn1_modules/rfc6019.py +45 -0
lib/python3.10/site-packages/_virtualenv.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:69ac3d8f27e679c81b94ab30b3b56e9cd138219b1ba94a1fa3606d5a76a1433d
3
+ size 18
lib/python3.10/site-packages/distutils-precedence.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2638ce9e2500e572a5e0de7faed6661eb569d1b696fcba07b0dd223da5f5d224
3
+ size 151
lib/python3.10/site-packages/execnet-2.1.1.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ uv
lib/python3.10/site-packages/execnet-2.1.1.dist-info/METADATA ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.3
2
+ Name: execnet
3
+ Version: 2.1.1
4
+ Summary: execnet: rapid multi-Python deployment
5
+ Project-URL: Homepage, https://execnet.readthedocs.io/en/latest/
6
+ Author: holger krekel and others
7
+ License-Expression: MIT
8
+ License-File: LICENSE
9
+ Classifier: Development Status :: 5 - Production/Stable
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Operating System :: MacOS :: MacOS X
13
+ Classifier: Operating System :: Microsoft :: Windows
14
+ Classifier: Operating System :: POSIX
15
+ Classifier: Programming Language :: Python :: 3.8
16
+ Classifier: Programming Language :: Python :: 3.9
17
+ Classifier: Programming Language :: Python :: 3.10
18
+ Classifier: Programming Language :: Python :: 3.11
19
+ Classifier: Programming Language :: Python :: 3.12
20
+ Classifier: Programming Language :: Python :: Implementation :: CPython
21
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
22
+ Classifier: Topic :: Software Development :: Libraries
23
+ Classifier: Topic :: System :: Distributed Computing
24
+ Classifier: Topic :: System :: Networking
25
+ Requires-Python: >=3.8
26
+ Provides-Extra: testing
27
+ Requires-Dist: hatch; extra == 'testing'
28
+ Requires-Dist: pre-commit; extra == 'testing'
29
+ Requires-Dist: pytest; extra == 'testing'
30
+ Requires-Dist: tox; extra == 'testing'
31
+ Description-Content-Type: text/x-rst
32
+
33
+ execnet: distributed Python deployment and communication
34
+ ========================================================
35
+
36
+ .. image:: https://img.shields.io/pypi/v/execnet.svg
37
+ :target: https://pypi.org/project/execnet/
38
+
39
+ .. image:: https://anaconda.org/conda-forge/execnet/badges/version.svg
40
+ :target: https://anaconda.org/conda-forge/execnet
41
+
42
+ .. image:: https://img.shields.io/pypi/pyversions/execnet.svg
43
+ :target: https://pypi.org/project/execnet/
44
+
45
+ .. image:: https://github.com/pytest-dev/execnet/workflows/test/badge.svg
46
+ :target: https://github.com/pytest-dev/execnet/actions?query=workflow%3Atest
47
+
48
+ .. image:: https://img.shields.io/badge/code%20style-black-000000.svg
49
+ :target: https://github.com/python/black
50
+
51
+ .. _execnet: https://execnet.readthedocs.io
52
+
53
+ execnet_ provides carefully tested means to ad-hoc interact with Python
54
+ interpreters across version, platform and network barriers. It provides
55
+ a minimal and fast API targeting the following uses:
56
+
57
+ * distribute tasks to local or remote processes
58
+ * write and deploy hybrid multi-process applications
59
+ * write scripts to administer multiple hosts
60
+
61
+ Features
62
+ --------
63
+
64
+ * zero-install bootstrapping: no remote installation required!
65
+
66
+ * flexible communication: send/receive as well as
67
+ callback/queue mechanisms supported
68
+
69
+ * simple serialization of python builtin types (no pickling)
70
+
71
+ * grouped creation and robust termination of processes
72
+
73
+ * interoperable between Windows and Unix-ish systems.
74
+
75
+ * integrates with different threading models, including standard
76
+ os threads, eventlet and gevent based systems.
lib/python3.10/site-packages/execnet-2.1.1.dist-info/RECORD ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ execnet-2.1.1.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
2
+ execnet-2.1.1.dist-info/METADATA,sha256=Ck_CaDzj8hI6UuIOxbYrKYAtisV4-M4oUFE0t9mGB3M,2905
3
+ execnet-2.1.1.dist-info/RECORD,,
4
+ execnet-2.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ execnet-2.1.1.dist-info/WHEEL,sha256=as-1oFTWSeWBgyzh0O_qF439xqBe6AbBgt4MfYe5zwY,87
6
+ execnet-2.1.1.dist-info/licenses/LICENSE,sha256=6J7tEHTTqUMZi6E5uAhE9bRFuGC7p0qK6twGEFZhZOo,1054
7
+ execnet/__init__.py,sha256=bl3NvKtVhQ4UaMUAhsItJejO44BxwWHNuwNQYR_HqDo,1157
8
+ execnet/_version.py,sha256=O82e8yUjRHAyzpOxLpXpGJZbuf3VENU0viCiiMxqvgI,411
9
+ execnet/gateway.py,sha256=Zg904feejeGyDyDI66CHQjlbQzxZtgfxONC0hjY8BUQ,7783
10
+ execnet/gateway_base.py,sha256=G-d6gwetHG6u6rOMbWsruGy1EYy0xEfOUmImsa_OpPE,59089
11
+ execnet/gateway_bootstrap.py,sha256=coudd3HgTnBYrwN_eykLFeLJr82fHWsklKYMLe0dxis,2770
12
+ execnet/gateway_io.py,sha256=ckcF0lIpfIiEgRNfV_qX_RIVi-uTLzYfhlkXwKoBu5E,8052
13
+ execnet/gateway_socket.py,sha256=rQTupVh9X1AwMMeYZsTktqQtOfwMQjUGkOKzackA7p8,2956
14
+ execnet/multi.py,sha256=r3ulJC3Fyx4roKRHbJ1SIVX7nHKuNZ-GGumrZ5weUfc,11714
15
+ execnet/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
+ execnet/rsync.py,sha256=ZkeXonzL6BPOMFZ-WiNCe-VpcD7V0GtfdjjtVJb4i5w,8874
17
+ execnet/rsync_remote.py,sha256=sMXCP3w7FFuJG_WyFnb1s-ThiSRnP6Lz4FDEHg0rBEA,4208
18
+ execnet/script/__init__.py,sha256=MsSFjiLMLJZ7QhUPpVBWKiyDnCzryquRyr329NoCACI,2
19
+ execnet/script/loop_socketserver.py,sha256=TYYvmXrA7wYilcAJ9tobC_k-czXvxflBKvl1MX5caM4,418
20
+ execnet/script/quitserver.py,sha256=5K7c3JzNjNXO_BPLUpMli1w1-MK141Z9gfdc_ZvTYE8,304
21
+ execnet/script/shell.py,sha256=JQzx_jEb4M13o0ftH2T6a8rQWSjnBgh0kBVI1Nm7cho,2601
22
+ execnet/script/socketserver.py,sha256=J-CweeLM8XjVguXF8CSPR4EezCFo6r2pw6qsea-J8lc,3935
23
+ execnet/script/socketserverservice.py,sha256=P2y51ALvBsJzv8yJx8fyUJ-LRmX02isHi56lWeCI0U0,3181
24
+ execnet/xspec.py,sha256=lluNszrYKjl1V9yQhKprRTJ_6sPGi6uPwmz3yetLa04,2256
lib/python3.10/site-packages/execnet-2.1.1.dist-info/REQUESTED ADDED
File without changes
lib/python3.10/site-packages/execnet-2.1.1.dist-info/WHEEL ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: hatchling 1.22.5
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
lib/python3.10/site-packages/execnet-2.1.1.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ Permission is hereby granted, free of charge, to any person obtaining a copy
3
+ of this software and associated documentation files (the "Software"), to deal
4
+ in the Software without restriction, including without limitation the rights
5
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
6
+ copies of the Software, and to permit persons to whom the Software is
7
+ furnished to do so, subject to the following conditions:
8
+
9
+ The above copyright notice and this permission notice shall be included in all
10
+ copies or substantial portions of the Software.
11
+
12
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
13
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
14
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
15
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
16
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
17
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
18
+ SOFTWARE.
lib/python3.10/site-packages/httpx/__init__.py ADDED
@@ -0,0 +1,105 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .__version__ import __description__, __title__, __version__
2
+ from ._api import *
3
+ from ._auth import *
4
+ from ._client import *
5
+ from ._config import *
6
+ from ._content import *
7
+ from ._exceptions import *
8
+ from ._models import *
9
+ from ._status_codes import *
10
+ from ._transports import *
11
+ from ._types import *
12
+ from ._urls import *
13
+
14
+ try:
15
+ from ._main import main
16
+ except ImportError: # pragma: no cover
17
+
18
+ def main() -> None: # type: ignore
19
+ import sys
20
+
21
+ print(
22
+ "The httpx command line client could not run because the required "
23
+ "dependencies were not installed.\nMake sure you've installed "
24
+ "everything with: pip install 'httpx[cli]'"
25
+ )
26
+ sys.exit(1)
27
+
28
+
29
+ __all__ = [
30
+ "__description__",
31
+ "__title__",
32
+ "__version__",
33
+ "ASGITransport",
34
+ "AsyncBaseTransport",
35
+ "AsyncByteStream",
36
+ "AsyncClient",
37
+ "AsyncHTTPTransport",
38
+ "Auth",
39
+ "BaseTransport",
40
+ "BasicAuth",
41
+ "ByteStream",
42
+ "Client",
43
+ "CloseError",
44
+ "codes",
45
+ "ConnectError",
46
+ "ConnectTimeout",
47
+ "CookieConflict",
48
+ "Cookies",
49
+ "create_ssl_context",
50
+ "DecodingError",
51
+ "delete",
52
+ "DigestAuth",
53
+ "get",
54
+ "head",
55
+ "Headers",
56
+ "HTTPError",
57
+ "HTTPStatusError",
58
+ "HTTPTransport",
59
+ "InvalidURL",
60
+ "Limits",
61
+ "LocalProtocolError",
62
+ "main",
63
+ "MockTransport",
64
+ "NetRCAuth",
65
+ "NetworkError",
66
+ "options",
67
+ "patch",
68
+ "PoolTimeout",
69
+ "post",
70
+ "ProtocolError",
71
+ "Proxy",
72
+ "ProxyError",
73
+ "put",
74
+ "QueryParams",
75
+ "ReadError",
76
+ "ReadTimeout",
77
+ "RemoteProtocolError",
78
+ "request",
79
+ "Request",
80
+ "RequestError",
81
+ "RequestNotRead",
82
+ "Response",
83
+ "ResponseNotRead",
84
+ "stream",
85
+ "StreamClosed",
86
+ "StreamConsumed",
87
+ "StreamError",
88
+ "SyncByteStream",
89
+ "Timeout",
90
+ "TimeoutException",
91
+ "TooManyRedirects",
92
+ "TransportError",
93
+ "UnsupportedProtocol",
94
+ "URL",
95
+ "USE_CLIENT_DEFAULT",
96
+ "WriteError",
97
+ "WriteTimeout",
98
+ "WSGITransport",
99
+ ]
100
+
101
+
102
+ __locals = locals()
103
+ for __name in __all__:
104
+ if not __name.startswith("__"):
105
+ setattr(__locals[__name], "__module__", "httpx") # noqa
lib/python3.10/site-packages/httpx/__version__.py ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ __title__ = "httpx"
2
+ __description__ = "A next generation HTTP client, for Python 3."
3
+ __version__ = "0.27.2"
lib/python3.10/site-packages/httpx/_api.py ADDED
@@ -0,0 +1,479 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+ from contextlib import contextmanager
5
+
6
+ from ._client import Client
7
+ from ._config import DEFAULT_TIMEOUT_CONFIG
8
+ from ._models import Response
9
+ from ._types import (
10
+ AuthTypes,
11
+ CertTypes,
12
+ CookieTypes,
13
+ HeaderTypes,
14
+ ProxiesTypes,
15
+ ProxyTypes,
16
+ QueryParamTypes,
17
+ RequestContent,
18
+ RequestData,
19
+ RequestFiles,
20
+ TimeoutTypes,
21
+ VerifyTypes,
22
+ )
23
+ from ._urls import URL
24
+
25
+ __all__ = [
26
+ "delete",
27
+ "get",
28
+ "head",
29
+ "options",
30
+ "patch",
31
+ "post",
32
+ "put",
33
+ "request",
34
+ "stream",
35
+ ]
36
+
37
+
38
+ def request(
39
+ method: str,
40
+ url: URL | str,
41
+ *,
42
+ params: QueryParamTypes | None = None,
43
+ content: RequestContent | None = None,
44
+ data: RequestData | None = None,
45
+ files: RequestFiles | None = None,
46
+ json: typing.Any | None = None,
47
+ headers: HeaderTypes | None = None,
48
+ cookies: CookieTypes | None = None,
49
+ auth: AuthTypes | None = None,
50
+ proxy: ProxyTypes | None = None,
51
+ proxies: ProxiesTypes | None = None,
52
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
53
+ follow_redirects: bool = False,
54
+ verify: VerifyTypes = True,
55
+ cert: CertTypes | None = None,
56
+ trust_env: bool = True,
57
+ ) -> Response:
58
+ """
59
+ Sends an HTTP request.
60
+
61
+ **Parameters:**
62
+
63
+ * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`,
64
+ `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`.
65
+ * **url** - URL for the new `Request` object.
66
+ * **params** - *(optional)* Query parameters to include in the URL, as a
67
+ string, dictionary, or sequence of two-tuples.
68
+ * **content** - *(optional)* Binary content to include in the body of the
69
+ request, as bytes or a byte iterator.
70
+ * **data** - *(optional)* Form data to include in the body of the request,
71
+ as a dictionary.
72
+ * **files** - *(optional)* A dictionary of upload files to include in the
73
+ body of the request.
74
+ * **json** - *(optional)* A JSON serializable object to include in the body
75
+ of the request.
76
+ * **headers** - *(optional)* Dictionary of HTTP headers to include in the
77
+ request.
78
+ * **cookies** - *(optional)* Dictionary of Cookie items to include in the
79
+ request.
80
+ * **auth** - *(optional)* An authentication class to use when sending the
81
+ request.
82
+ * **proxy** - *(optional)* A proxy URL where all the traffic should be routed.
83
+ * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs.
84
+ * **timeout** - *(optional)* The timeout configuration to use when sending
85
+ the request.
86
+ * **follow_redirects** - *(optional)* Enables or disables HTTP redirects.
87
+ * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
88
+ verify the identity of requested hosts. Either `True` (default CA bundle),
89
+ a path to an SSL certificate file, an `ssl.SSLContext`, or `False`
90
+ (which will disable verification).
91
+ * **cert** - *(optional)* An SSL certificate used by the requested host
92
+ to authenticate the client. Either a path to an SSL certificate file, or
93
+ two-tuple of (certificate file, key file), or a three-tuple of (certificate
94
+ file, key file, password).
95
+ * **trust_env** - *(optional)* Enables or disables usage of environment
96
+ variables for configuration.
97
+
98
+ **Returns:** `Response`
99
+
100
+ Usage:
101
+
102
+ ```
103
+ >>> import httpx
104
+ >>> response = httpx.request('GET', 'https://httpbin.org/get')
105
+ >>> response
106
+ <Response [200 OK]>
107
+ ```
108
+ """
109
+ with Client(
110
+ cookies=cookies,
111
+ proxy=proxy,
112
+ proxies=proxies,
113
+ cert=cert,
114
+ verify=verify,
115
+ timeout=timeout,
116
+ trust_env=trust_env,
117
+ ) as client:
118
+ return client.request(
119
+ method=method,
120
+ url=url,
121
+ content=content,
122
+ data=data,
123
+ files=files,
124
+ json=json,
125
+ params=params,
126
+ headers=headers,
127
+ auth=auth,
128
+ follow_redirects=follow_redirects,
129
+ )
130
+
131
+
132
+ @contextmanager
133
+ def stream(
134
+ method: str,
135
+ url: URL | str,
136
+ *,
137
+ params: QueryParamTypes | None = None,
138
+ content: RequestContent | None = None,
139
+ data: RequestData | None = None,
140
+ files: RequestFiles | None = None,
141
+ json: typing.Any | None = None,
142
+ headers: HeaderTypes | None = None,
143
+ cookies: CookieTypes | None = None,
144
+ auth: AuthTypes | None = None,
145
+ proxy: ProxyTypes | None = None,
146
+ proxies: ProxiesTypes | None = None,
147
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
148
+ follow_redirects: bool = False,
149
+ verify: VerifyTypes = True,
150
+ cert: CertTypes | None = None,
151
+ trust_env: bool = True,
152
+ ) -> typing.Iterator[Response]:
153
+ """
154
+ Alternative to `httpx.request()` that streams the response body
155
+ instead of loading it into memory at once.
156
+
157
+ **Parameters**: See `httpx.request`.
158
+
159
+ See also: [Streaming Responses][0]
160
+
161
+ [0]: /quickstart#streaming-responses
162
+ """
163
+ with Client(
164
+ cookies=cookies,
165
+ proxy=proxy,
166
+ proxies=proxies,
167
+ cert=cert,
168
+ verify=verify,
169
+ timeout=timeout,
170
+ trust_env=trust_env,
171
+ ) as client:
172
+ with client.stream(
173
+ method=method,
174
+ url=url,
175
+ content=content,
176
+ data=data,
177
+ files=files,
178
+ json=json,
179
+ params=params,
180
+ headers=headers,
181
+ auth=auth,
182
+ follow_redirects=follow_redirects,
183
+ ) as response:
184
+ yield response
185
+
186
+
187
+ def get(
188
+ url: URL | str,
189
+ *,
190
+ params: QueryParamTypes | None = None,
191
+ headers: HeaderTypes | None = None,
192
+ cookies: CookieTypes | None = None,
193
+ auth: AuthTypes | None = None,
194
+ proxy: ProxyTypes | None = None,
195
+ proxies: ProxiesTypes | None = None,
196
+ follow_redirects: bool = False,
197
+ cert: CertTypes | None = None,
198
+ verify: VerifyTypes = True,
199
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
200
+ trust_env: bool = True,
201
+ ) -> Response:
202
+ """
203
+ Sends a `GET` request.
204
+
205
+ **Parameters**: See `httpx.request`.
206
+
207
+ Note that the `data`, `files`, `json` and `content` parameters are not available
208
+ on this function, as `GET` requests should not include a request body.
209
+ """
210
+ return request(
211
+ "GET",
212
+ url,
213
+ params=params,
214
+ headers=headers,
215
+ cookies=cookies,
216
+ auth=auth,
217
+ proxy=proxy,
218
+ proxies=proxies,
219
+ follow_redirects=follow_redirects,
220
+ cert=cert,
221
+ verify=verify,
222
+ timeout=timeout,
223
+ trust_env=trust_env,
224
+ )
225
+
226
+
227
+ def options(
228
+ url: URL | str,
229
+ *,
230
+ params: QueryParamTypes | None = None,
231
+ headers: HeaderTypes | None = None,
232
+ cookies: CookieTypes | None = None,
233
+ auth: AuthTypes | None = None,
234
+ proxy: ProxyTypes | None = None,
235
+ proxies: ProxiesTypes | None = None,
236
+ follow_redirects: bool = False,
237
+ cert: CertTypes | None = None,
238
+ verify: VerifyTypes = True,
239
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
240
+ trust_env: bool = True,
241
+ ) -> Response:
242
+ """
243
+ Sends an `OPTIONS` request.
244
+
245
+ **Parameters**: See `httpx.request`.
246
+
247
+ Note that the `data`, `files`, `json` and `content` parameters are not available
248
+ on this function, as `OPTIONS` requests should not include a request body.
249
+ """
250
+ return request(
251
+ "OPTIONS",
252
+ url,
253
+ params=params,
254
+ headers=headers,
255
+ cookies=cookies,
256
+ auth=auth,
257
+ proxy=proxy,
258
+ proxies=proxies,
259
+ follow_redirects=follow_redirects,
260
+ cert=cert,
261
+ verify=verify,
262
+ timeout=timeout,
263
+ trust_env=trust_env,
264
+ )
265
+
266
+
267
+ def head(
268
+ url: URL | str,
269
+ *,
270
+ params: QueryParamTypes | None = None,
271
+ headers: HeaderTypes | None = None,
272
+ cookies: CookieTypes | None = None,
273
+ auth: AuthTypes | None = None,
274
+ proxy: ProxyTypes | None = None,
275
+ proxies: ProxiesTypes | None = None,
276
+ follow_redirects: bool = False,
277
+ cert: CertTypes | None = None,
278
+ verify: VerifyTypes = True,
279
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
280
+ trust_env: bool = True,
281
+ ) -> Response:
282
+ """
283
+ Sends a `HEAD` request.
284
+
285
+ **Parameters**: See `httpx.request`.
286
+
287
+ Note that the `data`, `files`, `json` and `content` parameters are not available
288
+ on this function, as `HEAD` requests should not include a request body.
289
+ """
290
+ return request(
291
+ "HEAD",
292
+ url,
293
+ params=params,
294
+ headers=headers,
295
+ cookies=cookies,
296
+ auth=auth,
297
+ proxy=proxy,
298
+ proxies=proxies,
299
+ follow_redirects=follow_redirects,
300
+ cert=cert,
301
+ verify=verify,
302
+ timeout=timeout,
303
+ trust_env=trust_env,
304
+ )
305
+
306
+
307
+ def post(
308
+ url: URL | str,
309
+ *,
310
+ content: RequestContent | None = None,
311
+ data: RequestData | None = None,
312
+ files: RequestFiles | None = None,
313
+ json: typing.Any | None = None,
314
+ params: QueryParamTypes | None = None,
315
+ headers: HeaderTypes | None = None,
316
+ cookies: CookieTypes | None = None,
317
+ auth: AuthTypes | None = None,
318
+ proxy: ProxyTypes | None = None,
319
+ proxies: ProxiesTypes | None = None,
320
+ follow_redirects: bool = False,
321
+ cert: CertTypes | None = None,
322
+ verify: VerifyTypes = True,
323
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
324
+ trust_env: bool = True,
325
+ ) -> Response:
326
+ """
327
+ Sends a `POST` request.
328
+
329
+ **Parameters**: See `httpx.request`.
330
+ """
331
+ return request(
332
+ "POST",
333
+ url,
334
+ content=content,
335
+ data=data,
336
+ files=files,
337
+ json=json,
338
+ params=params,
339
+ headers=headers,
340
+ cookies=cookies,
341
+ auth=auth,
342
+ proxy=proxy,
343
+ proxies=proxies,
344
+ follow_redirects=follow_redirects,
345
+ cert=cert,
346
+ verify=verify,
347
+ timeout=timeout,
348
+ trust_env=trust_env,
349
+ )
350
+
351
+
352
+ def put(
353
+ url: URL | str,
354
+ *,
355
+ content: RequestContent | None = None,
356
+ data: RequestData | None = None,
357
+ files: RequestFiles | None = None,
358
+ json: typing.Any | None = None,
359
+ params: QueryParamTypes | None = None,
360
+ headers: HeaderTypes | None = None,
361
+ cookies: CookieTypes | None = None,
362
+ auth: AuthTypes | None = None,
363
+ proxy: ProxyTypes | None = None,
364
+ proxies: ProxiesTypes | None = None,
365
+ follow_redirects: bool = False,
366
+ cert: CertTypes | None = None,
367
+ verify: VerifyTypes = True,
368
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
369
+ trust_env: bool = True,
370
+ ) -> Response:
371
+ """
372
+ Sends a `PUT` request.
373
+
374
+ **Parameters**: See `httpx.request`.
375
+ """
376
+ return request(
377
+ "PUT",
378
+ url,
379
+ content=content,
380
+ data=data,
381
+ files=files,
382
+ json=json,
383
+ params=params,
384
+ headers=headers,
385
+ cookies=cookies,
386
+ auth=auth,
387
+ proxy=proxy,
388
+ proxies=proxies,
389
+ follow_redirects=follow_redirects,
390
+ cert=cert,
391
+ verify=verify,
392
+ timeout=timeout,
393
+ trust_env=trust_env,
394
+ )
395
+
396
+
397
+ def patch(
398
+ url: URL | str,
399
+ *,
400
+ content: RequestContent | None = None,
401
+ data: RequestData | None = None,
402
+ files: RequestFiles | None = None,
403
+ json: typing.Any | None = None,
404
+ params: QueryParamTypes | None = None,
405
+ headers: HeaderTypes | None = None,
406
+ cookies: CookieTypes | None = None,
407
+ auth: AuthTypes | None = None,
408
+ proxy: ProxyTypes | None = None,
409
+ proxies: ProxiesTypes | None = None,
410
+ follow_redirects: bool = False,
411
+ cert: CertTypes | None = None,
412
+ verify: VerifyTypes = True,
413
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
414
+ trust_env: bool = True,
415
+ ) -> Response:
416
+ """
417
+ Sends a `PATCH` request.
418
+
419
+ **Parameters**: See `httpx.request`.
420
+ """
421
+ return request(
422
+ "PATCH",
423
+ url,
424
+ content=content,
425
+ data=data,
426
+ files=files,
427
+ json=json,
428
+ params=params,
429
+ headers=headers,
430
+ cookies=cookies,
431
+ auth=auth,
432
+ proxy=proxy,
433
+ proxies=proxies,
434
+ follow_redirects=follow_redirects,
435
+ cert=cert,
436
+ verify=verify,
437
+ timeout=timeout,
438
+ trust_env=trust_env,
439
+ )
440
+
441
+
442
+ def delete(
443
+ url: URL | str,
444
+ *,
445
+ params: QueryParamTypes | None = None,
446
+ headers: HeaderTypes | None = None,
447
+ cookies: CookieTypes | None = None,
448
+ auth: AuthTypes | None = None,
449
+ proxy: ProxyTypes | None = None,
450
+ proxies: ProxiesTypes | None = None,
451
+ follow_redirects: bool = False,
452
+ cert: CertTypes | None = None,
453
+ verify: VerifyTypes = True,
454
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
455
+ trust_env: bool = True,
456
+ ) -> Response:
457
+ """
458
+ Sends a `DELETE` request.
459
+
460
+ **Parameters**: See `httpx.request`.
461
+
462
+ Note that the `data`, `files`, `json` and `content` parameters are not available
463
+ on this function, as `DELETE` requests should not include a request body.
464
+ """
465
+ return request(
466
+ "DELETE",
467
+ url,
468
+ params=params,
469
+ headers=headers,
470
+ cookies=cookies,
471
+ auth=auth,
472
+ proxy=proxy,
473
+ proxies=proxies,
474
+ follow_redirects=follow_redirects,
475
+ cert=cert,
476
+ verify=verify,
477
+ timeout=timeout,
478
+ trust_env=trust_env,
479
+ )
lib/python3.10/site-packages/httpx/_auth.py ADDED
@@ -0,0 +1,348 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import hashlib
4
+ import os
5
+ import re
6
+ import time
7
+ import typing
8
+ from base64 import b64encode
9
+ from urllib.request import parse_http_list
10
+
11
+ from ._exceptions import ProtocolError
12
+ from ._models import Cookies, Request, Response
13
+ from ._utils import to_bytes, to_str, unquote
14
+
15
+ if typing.TYPE_CHECKING: # pragma: no cover
16
+ from hashlib import _Hash
17
+
18
+
19
+ __all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"]
20
+
21
+
22
+ class Auth:
23
+ """
24
+ Base class for all authentication schemes.
25
+
26
+ To implement a custom authentication scheme, subclass `Auth` and override
27
+ the `.auth_flow()` method.
28
+
29
+ If the authentication scheme does I/O such as disk access or network calls, or uses
30
+ synchronization primitives such as locks, you should override `.sync_auth_flow()`
31
+ and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized
32
+ implementations that will be used by `Client` and `AsyncClient` respectively.
33
+ """
34
+
35
+ requires_request_body = False
36
+ requires_response_body = False
37
+
38
+ def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
39
+ """
40
+ Execute the authentication flow.
41
+
42
+ To dispatch a request, `yield` it:
43
+
44
+ ```
45
+ yield request
46
+ ```
47
+
48
+ The client will `.send()` the response back into the flow generator. You can
49
+ access it like so:
50
+
51
+ ```
52
+ response = yield request
53
+ ```
54
+
55
+ A `return` (or reaching the end of the generator) will result in the
56
+ client returning the last response obtained from the server.
57
+
58
+ You can dispatch as many requests as is necessary.
59
+ """
60
+ yield request
61
+
62
+ def sync_auth_flow(
63
+ self, request: Request
64
+ ) -> typing.Generator[Request, Response, None]:
65
+ """
66
+ Execute the authentication flow synchronously.
67
+
68
+ By default, this defers to `.auth_flow()`. You should override this method
69
+ when the authentication scheme does I/O and/or uses concurrency primitives.
70
+ """
71
+ if self.requires_request_body:
72
+ request.read()
73
+
74
+ flow = self.auth_flow(request)
75
+ request = next(flow)
76
+
77
+ while True:
78
+ response = yield request
79
+ if self.requires_response_body:
80
+ response.read()
81
+
82
+ try:
83
+ request = flow.send(response)
84
+ except StopIteration:
85
+ break
86
+
87
    async def async_auth_flow(
        self, request: Request
    ) -> typing.AsyncGenerator[Request, Response]:
        """
        Execute the authentication flow asynchronously.

        By default, this defers to `.auth_flow()`. You should override this method
        when the authentication scheme does I/O and/or uses concurrency primitives.
        """
        if self.requires_request_body:
            # Eagerly load the request body so the flow can inspect it.
            await request.aread()

        # `.auth_flow()` is a *sync* generator, so it is driven with
        # `next()`/`.send()` even from this async wrapper.
        flow = self.auth_flow(request)
        request = next(flow)

        while True:
            # Hand the request to the client; it sends the response back in.
            response = yield request
            if self.requires_response_body:
                await response.aread()

            try:
                # Resume the flow with the response; it may yield a follow-up
                # request (e.g. a retry carrying credentials).
                request = flow.send(response)
            except StopIteration:
                # The flow is exhausted: the last response is the final one.
                break
111
+
112
+
113
class FunctionAuth(Auth):
    """
    Adapts a plain callable into an `Auth` implementation.

    The callable receives the outgoing request and must return the
    (possibly modified) request that should actually be sent.
    """

    def __init__(self, func: typing.Callable[[Request], Request]) -> None:
        self._func = func

    def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        # Apply the user-supplied transformation, then dispatch exactly once.
        modified_request = self._func(request)
        yield modified_request
124
+
125
+
126
class BasicAuth(Auth):
    """
    HTTP Basic authentication, configured from a (username, password) pair.

    The `Authorization` header value is computed once at construction time
    and attached to every outgoing request.
    """

    def __init__(self, username: str | bytes, password: str | bytes) -> None:
        self._auth_header = self._build_auth_header(username, password)

    def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        request.headers["Authorization"] = self._auth_header
        yield request

    def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str:
        # 'user:pass', base64-encoded, prefixed with the scheme name.
        credentials = to_bytes(username) + b":" + to_bytes(password)
        encoded = b64encode(credentials).decode()
        return f"Basic {encoded}"
143
+
144
+
145
class NetRCAuth(Auth):
    """
    Basic authentication with credentials looked up from a 'netrc' file,
    keyed on the host of the request URL.
    """

    def __init__(self, file: str | None = None) -> None:
        # Imported lazily: most applications never use netrc-based auth,
        # so there's no need to load the module at import time.
        import netrc

        self._netrc_info = netrc.netrc(file)

    def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        credentials = self._netrc_info.authenticators(request.url.host)
        if credentials is not None and credentials[2]:
            # Found a (login, account, password) entry with a password set:
            # attach a Basic auth header built from it.
            request.headers["Authorization"] = self._build_auth_header(
                username=credentials[0], password=credentials[2]
            )
        # With no usable netrc entry, the request is sent unauthenticated.
        yield request

    def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str:
        # 'user:pass', base64-encoded, prefixed with the scheme name.
        credentials = b":".join((to_bytes(username), to_bytes(password)))
        encoded = b64encode(credentials).decode()
        return f"Basic {encoded}"
173
+
174
+
175
class DigestAuth(Auth):
    """
    HTTP Digest authentication.

    The first request is sent without credentials. If the server replies
    with a 401 carrying a 'WWW-Authenticate: Digest ...' challenge, a
    follow-up request is sent with a computed `Authorization` header.
    The most recent challenge is cached so that subsequent requests can
    authenticate pre-emptively.
    """

    # Maps the challenge's 'algorithm' token to the hash constructor used for
    # the digest. The '-SESS' variants use the same hash function but alter
    # how HA1 is computed (see `_build_auth_header`).
    _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = {
        "MD5": hashlib.md5,
        "MD5-SESS": hashlib.md5,
        "SHA": hashlib.sha1,
        "SHA-SESS": hashlib.sha1,
        "SHA-256": hashlib.sha256,
        "SHA-256-SESS": hashlib.sha256,
        "SHA-512": hashlib.sha512,
        "SHA-512-SESS": hashlib.sha512,
    }

    def __init__(self, username: str | bytes, password: str | bytes) -> None:
        self._username = to_bytes(username)
        self._password = to_bytes(password)
        # Cached challenge from the most recent 401, enabling pre-emptive auth.
        self._last_challenge: _DigestAuthChallenge | None = None
        # 'nc' value: number of requests sent using the current nonce.
        self._nonce_count = 1

    def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]:
        if self._last_challenge:
            # Pre-emptively authenticate using the cached challenge.
            request.headers["Authorization"] = self._build_auth_header(
                request, self._last_challenge
            )

        response = yield request

        if response.status_code != 401 or "www-authenticate" not in response.headers:
            # If the response is not a 401 then we don't
            # need to build an authenticated request.
            return

        for auth_header in response.headers.get_list("www-authenticate"):
            if auth_header.lower().startswith("digest "):
                break
        else:
            # If the response does not include a 'WWW-Authenticate: Digest ...'
            # header, then we don't need to build an authenticated request.
            return

        # A fresh challenge resets the nonce count.
        self._last_challenge = self._parse_challenge(request, response, auth_header)
        self._nonce_count = 1

        request.headers["Authorization"] = self._build_auth_header(
            request, self._last_challenge
        )
        if response.cookies:
            # Carry over any cookies set on the 401 response.
            Cookies(response.cookies).set_cookie_header(request=request)
        yield request

    def _parse_challenge(
        self, request: Request, response: Response, auth_header: str
    ) -> _DigestAuthChallenge:
        """
        Returns a challenge from a Digest WWW-Authenticate header.
        These take the form of:
        `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"`
        """
        scheme, _, fields = auth_header.partition(" ")

        # This method should only ever have been called with a Digest auth header.
        assert scheme.lower() == "digest"

        header_dict: dict[str, str] = {}
        for field in parse_http_list(fields):
            key, value = field.strip().split("=", 1)
            header_dict[key] = unquote(value)

        try:
            realm = header_dict["realm"].encode()
            nonce = header_dict["nonce"].encode()
            # 'algorithm' is optional; MD5 is the default.
            algorithm = header_dict.get("algorithm", "MD5")
            opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None
            qop = header_dict["qop"].encode() if "qop" in header_dict else None
            return _DigestAuthChallenge(
                realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop
            )
        except KeyError as exc:
            # 'realm' or 'nonce' was missing from the challenge.
            message = "Malformed Digest WWW-Authenticate header"
            raise ProtocolError(message, request=request) from exc

    def _build_auth_header(
        self, request: Request, challenge: _DigestAuthChallenge
    ) -> str:
        """
        Compute the value for the `Authorization` header from the cached
        challenge, the stored credentials, and the request method/path.
        """
        hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()]

        def digest(data: bytes) -> bytes:
            # Hex digest of `data`, as bytes.
            return hash_func(data).hexdigest().encode()

        A1 = b":".join((self._username, challenge.realm, self._password))

        path = request.url.raw_path
        A2 = b":".join((request.method.encode(), path))
        # TODO: implement auth-int
        HA2 = digest(A2)

        # 'nc' is a zero-padded 8-digit hex count of requests with this nonce.
        nc_value = b"%08x" % self._nonce_count
        cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce)
        self._nonce_count += 1

        HA1 = digest(A1)
        if challenge.algorithm.lower().endswith("-sess"):
            # Session variants fold the server nonce and client nonce into HA1.
            HA1 = digest(b":".join((HA1, challenge.nonce, cnonce)))

        qop = self._resolve_qop(challenge.qop, request=request)
        if qop is None:
            # Following RFC 2069
            digest_data = [HA1, challenge.nonce, HA2]
        else:
            # Following RFC 2617/7616
            digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2]

        format_args = {
            "username": self._username,
            "realm": challenge.realm,
            "nonce": challenge.nonce,
            "uri": path,
            "response": digest(b":".join(digest_data)),
            "algorithm": challenge.algorithm.encode(),
        }
        if challenge.opaque:
            format_args["opaque"] = challenge.opaque
        if qop:
            # `_resolve_qop` only ever returns b"auth" (auth-int is unsupported).
            format_args["qop"] = b"auth"
            format_args["nc"] = nc_value
            format_args["cnonce"] = cnonce

        return "Digest " + self._get_header_value(format_args)

    def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes:
        # 16 hex characters derived from the nonce count, the server nonce,
        # the current time, and 8 random bytes.
        s = str(nonce_count).encode()
        s += nonce
        s += time.ctime().encode()
        s += os.urandom(8)

        return hashlib.sha1(s).hexdigest()[:16].encode()

    def _get_header_value(self, header_fields: dict[str, bytes]) -> str:
        # These fields are emitted unquoted; all others are double-quoted.
        NON_QUOTED_FIELDS = ("algorithm", "qop", "nc")
        QUOTED_TEMPLATE = '{}="{}"'
        NON_QUOTED_TEMPLATE = "{}={}"

        header_value = ""
        for i, (field, value) in enumerate(header_fields.items()):
            if i > 0:
                header_value += ", "
            template = (
                QUOTED_TEMPLATE
                if field not in NON_QUOTED_FIELDS
                else NON_QUOTED_TEMPLATE
            )
            header_value += template.format(field, to_str(value))

        return header_value

    def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None:
        # Pick a supported 'quality of protection' from the challenge.
        # Only 'auth' is supported; a bare RFC 2069 challenge has no qop.
        if qop is None:
            return None
        qops = re.split(b", ?", qop)
        if b"auth" in qops:
            return b"auth"

        if qops == [b"auth-int"]:
            raise NotImplementedError("Digest auth-int support is not yet implemented")

        message = f'Unexpected qop value "{qop!r}" in digest auth'
        raise ProtocolError(message, request=request)
341
+
342
+
343
class _DigestAuthChallenge(typing.NamedTuple):
    # Parsed fields of a 'WWW-Authenticate: Digest ...' challenge.
    # 'realm' and 'nonce' are required; the rest are optional.
    realm: bytes
    nonce: bytes
    algorithm: str
    opaque: bytes | None
    qop: bytes | None
lib/python3.10/site-packages/httpx/_client.py ADDED
@@ -0,0 +1,2065 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import datetime
4
+ import enum
5
+ import logging
6
+ import typing
7
+ import warnings
8
+ from contextlib import asynccontextmanager, contextmanager
9
+ from types import TracebackType
10
+
11
+ from .__version__ import __version__
12
+ from ._auth import Auth, BasicAuth, FunctionAuth
13
+ from ._config import (
14
+ DEFAULT_LIMITS,
15
+ DEFAULT_MAX_REDIRECTS,
16
+ DEFAULT_TIMEOUT_CONFIG,
17
+ Limits,
18
+ Proxy,
19
+ Timeout,
20
+ )
21
+ from ._decoders import SUPPORTED_DECODERS
22
+ from ._exceptions import (
23
+ InvalidURL,
24
+ RemoteProtocolError,
25
+ TooManyRedirects,
26
+ request_context,
27
+ )
28
+ from ._models import Cookies, Headers, Request, Response
29
+ from ._status_codes import codes
30
+ from ._transports.asgi import ASGITransport
31
+ from ._transports.base import AsyncBaseTransport, BaseTransport
32
+ from ._transports.default import AsyncHTTPTransport, HTTPTransport
33
+ from ._transports.wsgi import WSGITransport
34
+ from ._types import (
35
+ AsyncByteStream,
36
+ AuthTypes,
37
+ CertTypes,
38
+ CookieTypes,
39
+ HeaderTypes,
40
+ ProxiesTypes,
41
+ ProxyTypes,
42
+ QueryParamTypes,
43
+ RequestContent,
44
+ RequestData,
45
+ RequestExtensions,
46
+ RequestFiles,
47
+ SyncByteStream,
48
+ TimeoutTypes,
49
+ VerifyTypes,
50
+ )
51
+ from ._urls import URL, QueryParams
52
+ from ._utils import (
53
+ Timer,
54
+ URLPattern,
55
+ get_environment_proxies,
56
+ is_https_redirect,
57
+ same_origin,
58
+ )
59
+
60
__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"]

# The type annotation for @classmethod and context managers here follows PEP 484
# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods
# NOTE(review): presumably used to annotate `__enter__`/`__aenter__` return
# types on the respective client classes — confirm against their definitions.
T = typing.TypeVar("T", bound="Client")
U = typing.TypeVar("U", bound="AsyncClient")
66
+
67
+
68
class UseClientDefault:
    """
    For some parameters such as `auth=...` and `timeout=...` we need to be able
    to indicate the default "unset" state, in a way that is distinctly different
    to using `None`.

    The default "unset" state indicates that whatever default is set on the
    client should be used. This is different to setting `None`, which
    explicitly disables the parameter, possibly overriding a client default.

    For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature.
    Omitting the `timeout` parameter will send a request using whatever default
    timeout has been configured on the client. Including `timeout=None` will
    ensure no timeout is used.

    Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant,
    but it is used internally when a parameter is not included.
    """


# Singleton sentinel instance, used as the default value for `auth`/`timeout`
# parameters throughout the client API.
USE_CLIENT_DEFAULT = UseClientDefault()
89
+
90
+
91
logger = logging.getLogger("httpx")

USER_AGENT = f"python-httpx/{__version__}"
# Advertise every supported content decoder except the no-op 'identity'.
ACCEPT_ENCODING = ", ".join(
    [key for key in SUPPORTED_DECODERS.keys() if key != "identity"]
)
97
+
98
+
99
class ClientState(enum.Enum):
    """
    Lifecycle state of a client instance.
    """

    # UNOPENED:
    # The client has been instantiated, but has not been used to send a request,
    # or been opened by entering the context of a `with` block.
    UNOPENED = 1
    # OPENED:
    # The client has either sent a request, or is within a `with` block.
    OPENED = 2
    # CLOSED:
    # The client has either exited the `with` block, or `close()` has
    # been called explicitly.
    CLOSED = 3
111
+
112
+
113
class BoundSyncStream(SyncByteStream):
    """
    Wraps a sync byte stream so that closing it also records the elapsed
    request time onto the associated response (`response.elapsed`).
    """

    def __init__(
        self, stream: SyncByteStream, response: Response, timer: Timer
    ) -> None:
        self._stream = stream
        self._response = response
        self._timer = timer

    def __iter__(self) -> typing.Iterator[bytes]:
        # Pure pass-through iteration over the underlying stream.
        yield from self._stream

    def close(self) -> None:
        elapsed_seconds = self._timer.sync_elapsed()
        self._response.elapsed = datetime.timedelta(seconds=elapsed_seconds)
        self._stream.close()
134
+
135
+
136
class BoundAsyncStream(AsyncByteStream):
    """
    Wraps an async byte stream so that closing it also records the elapsed
    request time onto the associated response (`response.elapsed`).
    """

    def __init__(
        self, stream: AsyncByteStream, response: Response, timer: Timer
    ) -> None:
        self._stream = stream
        self._response = response
        self._timer = timer

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        # Pure pass-through iteration over the underlying stream.
        async for part in self._stream:
            yield part

    async def aclose(self) -> None:
        elapsed_seconds = await self._timer.async_elapsed()
        self._response.elapsed = datetime.timedelta(seconds=elapsed_seconds)
        await self._stream.aclose()
157
+
158
+
159
# Signature accepted for 'request'/'response' event hook callables.
EventHook = typing.Callable[..., typing.Any]
160
+
161
+
162
class BaseClient:
    """
    Shared configuration and request/redirect plumbing for the sync and
    async client classes. Holds defaults (auth, params, headers, cookies,
    timeout, redirect policy, event hooks) and the logic for building
    requests and redirect follow-ups.
    """

    def __init__(
        self,
        *,
        auth: AuthTypes | None = None,
        params: QueryParamTypes | None = None,
        headers: HeaderTypes | None = None,
        cookies: CookieTypes | None = None,
        timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
        follow_redirects: bool = False,
        max_redirects: int = DEFAULT_MAX_REDIRECTS,
        event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
        base_url: URL | str = "",
        trust_env: bool = True,
        default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
    ) -> None:
        event_hooks = {} if event_hooks is None else event_hooks

        self._base_url = self._enforce_trailing_slash(URL(base_url))

        self._auth = self._build_auth(auth)
        self._params = QueryParams(params)
        # Assigning via the property merges in the default client headers.
        self.headers = Headers(headers)
        self._cookies = Cookies(cookies)
        self._timeout = Timeout(timeout)
        self.follow_redirects = follow_redirects
        self.max_redirects = max_redirects
        # Copy the hook lists so later mutation of the caller's mapping
        # doesn't affect the client.
        self._event_hooks = {
            "request": list(event_hooks.get("request", [])),
            "response": list(event_hooks.get("response", [])),
        }
        self._trust_env = trust_env
        self._default_encoding = default_encoding
        self._state = ClientState.UNOPENED

    @property
    def is_closed(self) -> bool:
        """
        Check if the client has been closed.
        """
        return self._state == ClientState.CLOSED

    @property
    def trust_env(self) -> bool:
        return self._trust_env

    def _enforce_trailing_slash(self, url: URL) -> URL:
        # Normalize the base URL path to end in '/', so relative URL merging
        # in `_merge_url` behaves predictably.
        if url.raw_path.endswith(b"/"):
            return url
        return url.copy_with(raw_path=url.raw_path + b"/")

    def _get_proxy_map(
        self, proxies: ProxiesTypes | None, allow_env_proxies: bool
    ) -> dict[str, Proxy | None]:
        # Normalize the various accepted proxy forms (None, single URL/Proxy,
        # or a mapping) into a {pattern: Proxy-or-None} dict.
        if proxies is None:
            if allow_env_proxies:
                # Fall back to HTTP_PROXY/HTTPS_PROXY/NO_PROXY etc.
                return {
                    key: None if url is None else Proxy(url=url)
                    for key, url in get_environment_proxies().items()
                }
            return {}
        if isinstance(proxies, dict):
            new_proxies = {}
            for key, value in proxies.items():
                proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value
                new_proxies[str(key)] = proxy
            return new_proxies
        else:
            # A single proxy applies to all requests.
            proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies
            return {"all://": proxy}

    @property
    def timeout(self) -> Timeout:
        return self._timeout

    @timeout.setter
    def timeout(self, timeout: TimeoutTypes) -> None:
        self._timeout = Timeout(timeout)

    @property
    def event_hooks(self) -> dict[str, list[EventHook]]:
        return self._event_hooks

    @event_hooks.setter
    def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None:
        self._event_hooks = {
            "request": list(event_hooks.get("request", [])),
            "response": list(event_hooks.get("response", [])),
        }

    @property
    def auth(self) -> Auth | None:
        """
        Authentication class used when none is passed at the request-level.

        See also [Authentication][0].

        [0]: /quickstart/#authentication
        """
        return self._auth

    @auth.setter
    def auth(self, auth: AuthTypes) -> None:
        self._auth = self._build_auth(auth)

    @property
    def base_url(self) -> URL:
        """
        Base URL to use when sending requests with relative URLs.
        """
        return self._base_url

    @base_url.setter
    def base_url(self, url: URL | str) -> None:
        self._base_url = self._enforce_trailing_slash(URL(url))

    @property
    def headers(self) -> Headers:
        """
        HTTP headers to include when sending requests.
        """
        return self._headers

    @headers.setter
    def headers(self, headers: HeaderTypes) -> None:
        # Start from the default client headers, then layer user headers on top.
        client_headers = Headers(
            {
                b"Accept": b"*/*",
                b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"),
                b"Connection": b"keep-alive",
                b"User-Agent": USER_AGENT.encode("ascii"),
            }
        )
        client_headers.update(headers)
        self._headers = client_headers

    @property
    def cookies(self) -> Cookies:
        """
        Cookie values to include when sending requests.
        """
        return self._cookies

    @cookies.setter
    def cookies(self, cookies: CookieTypes) -> None:
        self._cookies = Cookies(cookies)

    @property
    def params(self) -> QueryParams:
        """
        Query parameters to include in the URL when sending requests.
        """
        return self._params

    @params.setter
    def params(self, params: QueryParamTypes) -> None:
        self._params = QueryParams(params)

    def build_request(
        self,
        method: str,
        url: URL | str,
        *,
        content: RequestContent | None = None,
        data: RequestData | None = None,
        files: RequestFiles | None = None,
        json: typing.Any | None = None,
        params: QueryParamTypes | None = None,
        headers: HeaderTypes | None = None,
        cookies: CookieTypes | None = None,
        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
        extensions: RequestExtensions | None = None,
    ) -> Request:
        """
        Build and return a request instance.

        * The `params`, `headers` and `cookies` arguments
        are merged with any values set on the client.
        * The `url` argument is merged with any `base_url` set on the client.

        See also: [Request instances][0]

        [0]: /advanced/clients/#request-instances
        """
        url = self._merge_url(url)
        headers = self._merge_headers(headers)
        cookies = self._merge_cookies(cookies)
        params = self._merge_queryparams(params)
        extensions = {} if extensions is None else extensions
        if "timeout" not in extensions:
            # An explicit 'timeout' extension takes precedence; otherwise
            # resolve the request-level or client-level timeout into it.
            timeout = (
                self.timeout
                if isinstance(timeout, UseClientDefault)
                else Timeout(timeout)
            )
            extensions = dict(**extensions, timeout=timeout.as_dict())
        return Request(
            method,
            url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            extensions=extensions,
        )

    def _merge_url(self, url: URL | str) -> URL:
        """
        Merge a URL argument together with any 'base_url' on the client,
        to create the URL used for the outgoing request.
        """
        merge_url = URL(url)
        if merge_url.is_relative_url:
            # To merge URLs we always append to the base URL. To get this
            # behaviour correct we always ensure the base URL ends in a '/'
            # separator, and strip any leading '/' from the merge URL.
            #
            # So, eg...
            #
            # >>> client = Client(base_url="https://www.example.com/subpath")
            # >>> client.base_url
            # URL('https://www.example.com/subpath/')
            # >>> client.build_request("GET", "/path").url
            # URL('https://www.example.com/subpath/path')
            merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/")
            return self.base_url.copy_with(raw_path=merge_raw_path)
        return merge_url

    def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None:
        """
        Merge a cookies argument together with any cookies on the client,
        to create the cookies used for the outgoing request.
        """
        if cookies or self.cookies:
            merged_cookies = Cookies(self.cookies)
            merged_cookies.update(cookies)
            return merged_cookies
        return cookies

    def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None:
        """
        Merge a headers argument together with any headers on the client,
        to create the headers used for the outgoing request.
        """
        merged_headers = Headers(self.headers)
        merged_headers.update(headers)
        return merged_headers

    def _merge_queryparams(
        self, params: QueryParamTypes | None = None
    ) -> QueryParamTypes | None:
        """
        Merge a queryparams argument together with any queryparams on the client,
        to create the queryparams used for the outgoing request.
        """
        if params or self.params:
            merged_queryparams = QueryParams(self.params)
            return merged_queryparams.merge(params)
        return params

    def _build_auth(self, auth: AuthTypes | None) -> Auth | None:
        # Normalize the accepted auth forms ((user, pass) tuple, Auth
        # instance, or callable) into an Auth instance, or None.
        if auth is None:
            return None
        elif isinstance(auth, tuple):
            return BasicAuth(username=auth[0], password=auth[1])
        elif isinstance(auth, Auth):
            return auth
        elif callable(auth):
            return FunctionAuth(func=auth)
        else:
            raise TypeError(f'Invalid "auth" argument: {auth!r}')

    def _build_request_auth(
        self,
        request: Request,
        auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
    ) -> Auth:
        # Resolve the auth to use for a single request: request-level auth
        # wins, then client-level auth, then userinfo embedded in the URL,
        # and finally the no-op `Auth()`.
        auth = (
            self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth)
        )

        if auth is not None:
            return auth

        username, password = request.url.username, request.url.password
        if username or password:
            return BasicAuth(username=username, password=password)

        return Auth()

    def _build_redirect_request(self, request: Request, response: Response) -> Request:
        """
        Given a request and a redirect response, return a new request that
        should be used to effect the redirect.
        """
        method = self._redirect_method(request, response)
        url = self._redirect_url(request, response)
        headers = self._redirect_headers(request, url, method)
        stream = self._redirect_stream(request, method)
        cookies = Cookies(self.cookies)
        return Request(
            method=method,
            url=url,
            headers=headers,
            cookies=cookies,
            stream=stream,
            extensions=request.extensions,
        )

    def _redirect_method(self, request: Request, response: Response) -> str:
        """
        When being redirected we may want to change the method of the request
        based on certain specs or browser behavior.
        """
        method = request.method

        # https://tools.ietf.org/html/rfc7231#section-6.4.4
        if response.status_code == codes.SEE_OTHER and method != "HEAD":
            method = "GET"

        # Do what the browsers do, despite standards...
        # Turn 302s into GETs.
        if response.status_code == codes.FOUND and method != "HEAD":
            method = "GET"

        # If a POST is responded to with a 301, turn it into a GET.
        # This bizarre behaviour is explained in 'requests' issue 1704.
        if response.status_code == codes.MOVED_PERMANENTLY and method == "POST":
            method = "GET"

        return method

    def _redirect_url(self, request: Request, response: Response) -> URL:
        """
        Return the URL for the redirect to follow.
        """
        location = response.headers["Location"]

        try:
            url = URL(location)
        except InvalidURL as exc:
            raise RemoteProtocolError(
                f"Invalid URL in location header: {exc}.", request=request
            ) from None

        # Handle malformed 'Location' headers that are "absolute" form, have no host.
        # See: https://github.com/encode/httpx/issues/771
        if url.scheme and not url.host:
            url = url.copy_with(host=request.url.host)

        # Facilitate relative 'Location' headers, as allowed by RFC 7231.
        # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
        if url.is_relative_url:
            url = request.url.join(url)

        # Attach previous fragment if needed (RFC 7231 7.1.2)
        if request.url.fragment and not url.fragment:
            url = url.copy_with(fragment=request.url.fragment)

        return url

    def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers:
        """
        Return the headers that should be used for the redirect request.
        """
        headers = Headers(request.headers)

        if not same_origin(url, request.url):
            if not is_https_redirect(request.url, url):
                # Strip Authorization headers when responses are redirected
                # away from the origin. (Except for direct HTTP to HTTPS redirects.)
                headers.pop("Authorization", None)

            # Update the Host header.
            headers["Host"] = url.netloc.decode("ascii")

        if method != request.method and method == "GET":
            # If we've switched to a 'GET' request, then strip any headers which
            # are only relevant to the request body.
            headers.pop("Content-Length", None)
            headers.pop("Transfer-Encoding", None)

        # We should use the client cookie store to determine any cookie header,
        # rather than whatever was on the original outgoing request.
        headers.pop("Cookie", None)

        return headers

    def _redirect_stream(
        self, request: Request, method: str
    ) -> SyncByteStream | AsyncByteStream | None:
        """
        Return the body that should be used for the redirect request.
        """
        if method != request.method and method == "GET":
            # The redirect switched to GET: the original body is dropped.
            return None

        return request.stream

    def _set_timeout(self, request: Request) -> None:
        # Ensure a 'timeout' extension is present, mirroring the resolution
        # logic in `build_request`. (`self.timeout` is always a `Timeout`
        # here — see the property setter — so the isinstance branch is a
        # defensive mirror of the request-level handling.)
        if "timeout" not in request.extensions:
            timeout = (
                self.timeout
                if isinstance(self.timeout, UseClientDefault)
                else Timeout(self.timeout)
            )
            request.extensions = dict(**request.extensions, timeout=timeout.as_dict())
572
+
573
+
574
class Client(BaseClient):
    """
    An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.

    It can be shared between threads.

    Usage:

    ```python
    >>> client = httpx.Client()
    >>> response = client.get('https://example.org')
    ```

    **Parameters:**

    * **auth** - *(optional)* An authentication class to use when sending
    requests.
    * **params** - *(optional)* Query parameters to include in request URLs, as
    a string, dictionary, or sequence of two-tuples.
    * **headers** - *(optional)* Dictionary of HTTP headers to include when
    sending requests.
    * **cookies** - *(optional)* Dictionary of Cookie items to include when
    sending requests.
    * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
    verify the identity of requested hosts. Either `True` (default CA bundle),
    a path to an SSL certificate file, an `ssl.SSLContext`, or `False`
    (which will disable verification).
    * **cert** - *(optional)* An SSL certificate used by the requested host
    to authenticate the client. Either a path to an SSL certificate file, or
    two-tuple of (certificate file, key file), or a three-tuple of (certificate
    file, key file, password).
    * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be
    enabled. Defaults to `False`.
    * **proxy** - *(optional)* A proxy URL where all the traffic should be routed.
    * **proxies** - *(optional, deprecated)* A dictionary mapping proxy keys to
    proxy URLs.
    * **timeout** - *(optional)* The timeout configuration to use when sending
    requests.
    * **limits** - *(optional)* The limits configuration to use.
    * **max_redirects** - *(optional)* The maximum number of redirect responses
    that should be followed.
    * **base_url** - *(optional)* A URL to use as the base when building
    request URLs.
    * **transport** - *(optional)* A transport class to use for sending requests
    over the network.
    * **app** - *(optional, deprecated)* A WSGI application to send requests to,
    rather than sending actual network requests.
    * **trust_env** - *(optional)* Enables or disables usage of environment
    variables for configuration.
    * **default_encoding** - *(optional)* The default encoding to use for decoding
    response text, if no charset information is included in a response Content-Type
    header. Set to a callable for automatic character set detection. Default: "utf-8".
    """
627
+
628
    def __init__(
        self,
        *,
        auth: AuthTypes | None = None,
        params: QueryParamTypes | None = None,
        headers: HeaderTypes | None = None,
        cookies: CookieTypes | None = None,
        verify: VerifyTypes = True,
        cert: CertTypes | None = None,
        http1: bool = True,
        http2: bool = False,
        proxy: ProxyTypes | None = None,
        proxies: ProxiesTypes | None = None,
        mounts: None | (typing.Mapping[str, BaseTransport | None]) = None,
        timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
        follow_redirects: bool = False,
        limits: Limits = DEFAULT_LIMITS,
        max_redirects: int = DEFAULT_MAX_REDIRECTS,
        event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
        base_url: URL | str = "",
        transport: BaseTransport | None = None,
        app: typing.Callable[..., typing.Any] | None = None,
        trust_env: bool = True,
        default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
    ) -> None:
        """
        Initialise the synchronous client. Shared request defaults are handled
        by the base class; transport construction and proxy/mount routing are
        set up here.
        """
        super().__init__(
            auth=auth,
            params=params,
            headers=headers,
            cookies=cookies,
            timeout=timeout,
            follow_redirects=follow_redirects,
            max_redirects=max_redirects,
            event_hooks=event_hooks,
            base_url=base_url,
            trust_env=trust_env,
            default_encoding=default_encoding,
        )

        if http2:
            # Fail fast if HTTP/2 support was requested without the optional
            # 'h2' dependency being installed.
            try:
                import h2  # noqa
            except ImportError:  # pragma: no cover
                raise ImportError(
                    "Using http2=True, but the 'h2' package is not installed. "
                    "Make sure to install httpx using `pip install httpx[http2]`."
                ) from None

        if proxies:
            message = (
                "The 'proxies' argument is now deprecated."
                " Use 'proxy' or 'mounts' instead."
            )
            warnings.warn(message, DeprecationWarning)
            if proxy:
                raise RuntimeError("Use either `proxy` or 'proxies', not both.")

        if app:
            message = (
                "The 'app' shortcut is now deprecated."
                " Use the explicit style 'transport=WSGITransport(app=...)' instead."
            )
            warnings.warn(message, DeprecationWarning)

        # Environment proxy variables only apply when the default network
        # transport is in use.
        allow_env_proxies = trust_env and app is None and transport is None
        proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies)

        self._transport = self._init_transport(
            verify=verify,
            cert=cert,
            http1=http1,
            http2=http2,
            limits=limits,
            transport=transport,
            app=app,
            trust_env=trust_env,
        )
        # NOTE: 'proxy' is deliberately rebound by the comprehension below.
        # A None value in the mount map means "use the default transport"
        # for URLs matching that pattern.
        self._mounts: dict[URLPattern, BaseTransport | None] = {
            URLPattern(key): None
            if proxy is None
            else self._init_proxy_transport(
                proxy,
                verify=verify,
                cert=cert,
                http1=http1,
                http2=http2,
                limits=limits,
                trust_env=trust_env,
            )
            for key, proxy in proxy_map.items()
        }
        if mounts is not None:
            # Explicit user-supplied mounts take precedence over proxy mounts.
            self._mounts.update(
                {URLPattern(key): transport for key, transport in mounts.items()}
            )

        # Sort mounts so that more specific URL patterns are matched first.
        self._mounts = dict(sorted(self._mounts.items()))
725
+
726
+ def _init_transport(
727
+ self,
728
+ verify: VerifyTypes = True,
729
+ cert: CertTypes | None = None,
730
+ http1: bool = True,
731
+ http2: bool = False,
732
+ limits: Limits = DEFAULT_LIMITS,
733
+ transport: BaseTransport | None = None,
734
+ app: typing.Callable[..., typing.Any] | None = None,
735
+ trust_env: bool = True,
736
+ ) -> BaseTransport:
737
+ if transport is not None:
738
+ return transport
739
+
740
+ if app is not None:
741
+ return WSGITransport(app=app)
742
+
743
+ return HTTPTransport(
744
+ verify=verify,
745
+ cert=cert,
746
+ http1=http1,
747
+ http2=http2,
748
+ limits=limits,
749
+ trust_env=trust_env,
750
+ )
751
+
752
+ def _init_proxy_transport(
753
+ self,
754
+ proxy: Proxy,
755
+ verify: VerifyTypes = True,
756
+ cert: CertTypes | None = None,
757
+ http1: bool = True,
758
+ http2: bool = False,
759
+ limits: Limits = DEFAULT_LIMITS,
760
+ trust_env: bool = True,
761
+ ) -> BaseTransport:
762
+ return HTTPTransport(
763
+ verify=verify,
764
+ cert=cert,
765
+ http1=http1,
766
+ http2=http2,
767
+ limits=limits,
768
+ trust_env=trust_env,
769
+ proxy=proxy,
770
+ )
771
+
772
+ def _transport_for_url(self, url: URL) -> BaseTransport:
773
+ """
774
+ Returns the transport instance that should be used for a given URL.
775
+ This will either be the standard connection pool, or a proxy.
776
+ """
777
+ for pattern, transport in self._mounts.items():
778
+ if pattern.matches(url):
779
+ return self._transport if transport is None else transport
780
+
781
+ return self._transport
782
+
783
    def request(
        self,
        method: str,
        url: URL | str,
        *,
        content: RequestContent | None = None,
        data: RequestData | None = None,
        files: RequestFiles | None = None,
        json: typing.Any | None = None,
        params: QueryParamTypes | None = None,
        headers: HeaderTypes | None = None,
        cookies: CookieTypes | None = None,
        auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
        extensions: RequestExtensions | None = None,
    ) -> Response:
        """
        Build and send a request.

        Equivalent to:

        ```python
        request = client.build_request(...)
        response = client.send(request, ...)
        ```

        See `Client.build_request()`, `Client.send()` and
        [Merging of configuration][0] for how the various parameters
        are merged with client-level configuration.

        [0]: /advanced/clients/#merging-of-configuration
        """
        if cookies is not None:
            # Per-request cookies interact ambiguously with the client-level
            # cookie store, hence the deprecation.
            message = (
                "Setting per-request cookies=<...> is being deprecated, because "
                "the expected behaviour on cookie persistence is ambiguous. Set "
                "cookies directly on the client instance instead."
            )
            warnings.warn(message, DeprecationWarning)

        request = self.build_request(
            method=method,
            url=url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            timeout=timeout,
            extensions=extensions,
        )
        return self.send(request, auth=auth, follow_redirects=follow_redirects)
838
+
839
    @contextmanager
    def stream(
        self,
        method: str,
        url: URL | str,
        *,
        content: RequestContent | None = None,
        data: RequestData | None = None,
        files: RequestFiles | None = None,
        json: typing.Any | None = None,
        params: QueryParamTypes | None = None,
        headers: HeaderTypes | None = None,
        cookies: CookieTypes | None = None,
        auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
        timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
        extensions: RequestExtensions | None = None,
    ) -> typing.Iterator[Response]:
        """
        Alternative to `httpx.request()` that streams the response body
        instead of loading it into memory at once.

        **Parameters**: See `httpx.request`.

        See also: [Streaming Responses][0]

        [0]: /quickstart#streaming-responses
        """
        request = self.build_request(
            method=method,
            url=url,
            content=content,
            data=data,
            files=files,
            json=json,
            params=params,
            headers=headers,
            cookies=cookies,
            timeout=timeout,
            extensions=extensions,
        )
        # stream=True defers reading the body until the caller iterates it.
        response = self.send(
            request=request,
            auth=auth,
            follow_redirects=follow_redirects,
            stream=True,
        )
        try:
            yield response
        finally:
            # Always release the connection, even if the caller's block raises.
            response.close()
890
+
891
    def send(
        self,
        request: Request,
        *,
        stream: bool = False,
        auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
    ) -> Response:
        """
        Send a request.

        The request is sent as-is, unmodified.

        Typically you'll want to build one with `Client.build_request()`
        so that any client-level configuration is merged into the request,
        but passing an explicit `httpx.Request()` is supported as well.

        See also: [Request instances][0]

        [0]: /advanced/clients/#request-instances
        """
        if self._state == ClientState.CLOSED:
            raise RuntimeError("Cannot send a request, as the client has been closed.")

        self._state = ClientState.OPENED
        # Resolve the USE_CLIENT_DEFAULT sentinel to the client-level setting.
        follow_redirects = (
            self.follow_redirects
            if isinstance(follow_redirects, UseClientDefault)
            else follow_redirects
        )

        self._set_timeout(request)

        auth = self._build_request_auth(request, auth)

        response = self._send_handling_auth(
            request,
            auth=auth,
            follow_redirects=follow_redirects,
            history=[],
        )
        try:
            if not stream:
                # Eagerly read the body so the connection can be released.
                response.read()

            return response

        except BaseException as exc:
            # Make sure the underlying connection is not leaked on failure.
            response.close()
            raise exc
941
+
942
    def _send_handling_auth(
        self,
        request: Request,
        auth: Auth,
        follow_redirects: bool,
        history: list[Response],
    ) -> Response:
        """
        Drive the auth-flow generator: it yields requests to send and is fed
        the resulting responses until it stops requesting follow-ups.
        """
        auth_flow = auth.sync_auth_flow(request)
        try:
            request = next(auth_flow)

            while True:
                response = self._send_handling_redirects(
                    request,
                    follow_redirects=follow_redirects,
                    history=history,
                )
                try:
                    try:
                        next_request = auth_flow.send(response)
                    except StopIteration:
                        # The auth flow is satisfied with this response.
                        return response

                    response.history = list(history)
                    # Read the body so the connection is released before the
                    # follow-up (e.g. re-authenticated) request is sent.
                    response.read()
                    request = next_request
                    history.append(response)

                except BaseException as exc:
                    response.close()
                    raise exc
        finally:
            # Always close the generator so its cleanup code runs.
            auth_flow.close()
975
+
976
    def _send_handling_redirects(
        self,
        request: Request,
        follow_redirects: bool,
        history: list[Response],
    ) -> Response:
        """
        Send a request, following any redirect responses up to the
        client's `max_redirects` limit.
        """
        while True:
            # 'history' accumulates across the whole send, so this bounds
            # the total length of the chain.
            if len(history) > self.max_redirects:
                raise TooManyRedirects(
                    "Exceeded maximum allowed redirects.", request=request
                )

            for hook in self._event_hooks["request"]:
                hook(request)

            response = self._send_single_request(request)
            try:
                for hook in self._event_hooks["response"]:
                    hook(response)
                response.history = list(history)

                if not response.has_redirect_location:
                    return response

                request = self._build_redirect_request(request, response)
                history = history + [response]

                if follow_redirects:
                    # Drain the body so the connection is released before the
                    # next request in the redirect chain is sent.
                    response.read()
                else:
                    # Expose the would-be next request so callers can follow
                    # the redirect manually if they choose.
                    response.next_request = request
                    return response

            except BaseException as exc:
                response.close()
                raise exc
1012
+
1013
    def _send_single_request(self, request: Request) -> Response:
        """
        Sends a single request, without handling any redirections.
        """
        transport = self._transport_for_url(request.url)
        timer = Timer()
        timer.sync_start()

        if not isinstance(request.stream, SyncByteStream):
            raise RuntimeError(
                "Attempted to send an async request with a sync Client instance."
            )

        with request_context(request=request):
            response = transport.handle_request(request)

        assert isinstance(response.stream, SyncByteStream)

        response.request = request
        # Wrap the raw stream so that elapsed-time and close bookkeeping is
        # handled as the body is consumed.
        response.stream = BoundSyncStream(
            response.stream, response=response, timer=timer
        )
        self.cookies.extract_cookies(response)
        response.default_encoding = self._default_encoding

        # Lazy %-style args keep formatting cost off the hot path when the
        # log level filters the record out.
        logger.info(
            'HTTP Request: %s %s "%s %d %s"',
            request.method,
            request.url,
            response.http_version,
            response.status_code,
            response.reason_phrase,
        )

        return response
1048
+
1049
+ def get(
1050
+ self,
1051
+ url: URL | str,
1052
+ *,
1053
+ params: QueryParamTypes | None = None,
1054
+ headers: HeaderTypes | None = None,
1055
+ cookies: CookieTypes | None = None,
1056
+ auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1057
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1058
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1059
+ extensions: RequestExtensions | None = None,
1060
+ ) -> Response:
1061
+ """
1062
+ Send a `GET` request.
1063
+
1064
+ **Parameters**: See `httpx.request`.
1065
+ """
1066
+ return self.request(
1067
+ "GET",
1068
+ url,
1069
+ params=params,
1070
+ headers=headers,
1071
+ cookies=cookies,
1072
+ auth=auth,
1073
+ follow_redirects=follow_redirects,
1074
+ timeout=timeout,
1075
+ extensions=extensions,
1076
+ )
1077
+
1078
+ def options(
1079
+ self,
1080
+ url: URL | str,
1081
+ *,
1082
+ params: QueryParamTypes | None = None,
1083
+ headers: HeaderTypes | None = None,
1084
+ cookies: CookieTypes | None = None,
1085
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1086
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1087
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1088
+ extensions: RequestExtensions | None = None,
1089
+ ) -> Response:
1090
+ """
1091
+ Send an `OPTIONS` request.
1092
+
1093
+ **Parameters**: See `httpx.request`.
1094
+ """
1095
+ return self.request(
1096
+ "OPTIONS",
1097
+ url,
1098
+ params=params,
1099
+ headers=headers,
1100
+ cookies=cookies,
1101
+ auth=auth,
1102
+ follow_redirects=follow_redirects,
1103
+ timeout=timeout,
1104
+ extensions=extensions,
1105
+ )
1106
+
1107
+ def head(
1108
+ self,
1109
+ url: URL | str,
1110
+ *,
1111
+ params: QueryParamTypes | None = None,
1112
+ headers: HeaderTypes | None = None,
1113
+ cookies: CookieTypes | None = None,
1114
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1115
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1116
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1117
+ extensions: RequestExtensions | None = None,
1118
+ ) -> Response:
1119
+ """
1120
+ Send a `HEAD` request.
1121
+
1122
+ **Parameters**: See `httpx.request`.
1123
+ """
1124
+ return self.request(
1125
+ "HEAD",
1126
+ url,
1127
+ params=params,
1128
+ headers=headers,
1129
+ cookies=cookies,
1130
+ auth=auth,
1131
+ follow_redirects=follow_redirects,
1132
+ timeout=timeout,
1133
+ extensions=extensions,
1134
+ )
1135
+
1136
+ def post(
1137
+ self,
1138
+ url: URL | str,
1139
+ *,
1140
+ content: RequestContent | None = None,
1141
+ data: RequestData | None = None,
1142
+ files: RequestFiles | None = None,
1143
+ json: typing.Any | None = None,
1144
+ params: QueryParamTypes | None = None,
1145
+ headers: HeaderTypes | None = None,
1146
+ cookies: CookieTypes | None = None,
1147
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1148
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1149
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1150
+ extensions: RequestExtensions | None = None,
1151
+ ) -> Response:
1152
+ """
1153
+ Send a `POST` request.
1154
+
1155
+ **Parameters**: See `httpx.request`.
1156
+ """
1157
+ return self.request(
1158
+ "POST",
1159
+ url,
1160
+ content=content,
1161
+ data=data,
1162
+ files=files,
1163
+ json=json,
1164
+ params=params,
1165
+ headers=headers,
1166
+ cookies=cookies,
1167
+ auth=auth,
1168
+ follow_redirects=follow_redirects,
1169
+ timeout=timeout,
1170
+ extensions=extensions,
1171
+ )
1172
+
1173
+ def put(
1174
+ self,
1175
+ url: URL | str,
1176
+ *,
1177
+ content: RequestContent | None = None,
1178
+ data: RequestData | None = None,
1179
+ files: RequestFiles | None = None,
1180
+ json: typing.Any | None = None,
1181
+ params: QueryParamTypes | None = None,
1182
+ headers: HeaderTypes | None = None,
1183
+ cookies: CookieTypes | None = None,
1184
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1185
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1186
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1187
+ extensions: RequestExtensions | None = None,
1188
+ ) -> Response:
1189
+ """
1190
+ Send a `PUT` request.
1191
+
1192
+ **Parameters**: See `httpx.request`.
1193
+ """
1194
+ return self.request(
1195
+ "PUT",
1196
+ url,
1197
+ content=content,
1198
+ data=data,
1199
+ files=files,
1200
+ json=json,
1201
+ params=params,
1202
+ headers=headers,
1203
+ cookies=cookies,
1204
+ auth=auth,
1205
+ follow_redirects=follow_redirects,
1206
+ timeout=timeout,
1207
+ extensions=extensions,
1208
+ )
1209
+
1210
+ def patch(
1211
+ self,
1212
+ url: URL | str,
1213
+ *,
1214
+ content: RequestContent | None = None,
1215
+ data: RequestData | None = None,
1216
+ files: RequestFiles | None = None,
1217
+ json: typing.Any | None = None,
1218
+ params: QueryParamTypes | None = None,
1219
+ headers: HeaderTypes | None = None,
1220
+ cookies: CookieTypes | None = None,
1221
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1222
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1223
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1224
+ extensions: RequestExtensions | None = None,
1225
+ ) -> Response:
1226
+ """
1227
+ Send a `PATCH` request.
1228
+
1229
+ **Parameters**: See `httpx.request`.
1230
+ """
1231
+ return self.request(
1232
+ "PATCH",
1233
+ url,
1234
+ content=content,
1235
+ data=data,
1236
+ files=files,
1237
+ json=json,
1238
+ params=params,
1239
+ headers=headers,
1240
+ cookies=cookies,
1241
+ auth=auth,
1242
+ follow_redirects=follow_redirects,
1243
+ timeout=timeout,
1244
+ extensions=extensions,
1245
+ )
1246
+
1247
+ def delete(
1248
+ self,
1249
+ url: URL | str,
1250
+ *,
1251
+ params: QueryParamTypes | None = None,
1252
+ headers: HeaderTypes | None = None,
1253
+ cookies: CookieTypes | None = None,
1254
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1255
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1256
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1257
+ extensions: RequestExtensions | None = None,
1258
+ ) -> Response:
1259
+ """
1260
+ Send a `DELETE` request.
1261
+
1262
+ **Parameters**: See `httpx.request`.
1263
+ """
1264
+ return self.request(
1265
+ "DELETE",
1266
+ url,
1267
+ params=params,
1268
+ headers=headers,
1269
+ cookies=cookies,
1270
+ auth=auth,
1271
+ follow_redirects=follow_redirects,
1272
+ timeout=timeout,
1273
+ extensions=extensions,
1274
+ )
1275
+
1276
+ def close(self) -> None:
1277
+ """
1278
+ Close transport and proxies.
1279
+ """
1280
+ if self._state != ClientState.CLOSED:
1281
+ self._state = ClientState.CLOSED
1282
+
1283
+ self._transport.close()
1284
+ for transport in self._mounts.values():
1285
+ if transport is not None:
1286
+ transport.close()
1287
+
1288
    def __enter__(self: T) -> T:
        """
        Enter the client context, opening the main transport and every
        mounted transport. A client may only be entered once.
        """
        if self._state != ClientState.UNOPENED:
            # Re-entering an open client, or reopening a closed one, is an
            # error; pick the message matching the current state.
            msg = {
                ClientState.OPENED: "Cannot open a client instance more than once.",
                ClientState.CLOSED: (
                    "Cannot reopen a client instance, once it has been closed."
                ),
            }[self._state]
            raise RuntimeError(msg)

        self._state = ClientState.OPENED

        self._transport.__enter__()
        for transport in self._mounts.values():
            if transport is not None:
                transport.__enter__()
        return self
1305
+
1306
+ def __exit__(
1307
+ self,
1308
+ exc_type: type[BaseException] | None = None,
1309
+ exc_value: BaseException | None = None,
1310
+ traceback: TracebackType | None = None,
1311
+ ) -> None:
1312
+ self._state = ClientState.CLOSED
1313
+
1314
+ self._transport.__exit__(exc_type, exc_value, traceback)
1315
+ for transport in self._mounts.values():
1316
+ if transport is not None:
1317
+ transport.__exit__(exc_type, exc_value, traceback)
1318
+
1319
+
1320
+ class AsyncClient(BaseClient):
1321
+ """
1322
+ An asynchronous HTTP client, with connection pooling, HTTP/2, redirects,
1323
+ cookie persistence, etc.
1324
+
1325
+ It can be shared between tasks.
1326
+
1327
+ Usage:
1328
+
1329
+ ```python
1330
+ >>> async with httpx.AsyncClient() as client:
1331
+ >>> response = await client.get('https://example.org')
1332
+ ```
1333
+
1334
+ **Parameters:**
1335
+
1336
+ * **auth** - *(optional)* An authentication class to use when sending
1337
+ requests.
1338
+ * **params** - *(optional)* Query parameters to include in request URLs, as
1339
+ a string, dictionary, or sequence of two-tuples.
1340
+ * **headers** - *(optional)* Dictionary of HTTP headers to include when
1341
+ sending requests.
1342
+ * **cookies** - *(optional)* Dictionary of Cookie items to include when
1343
+ sending requests.
1344
+ * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to
1345
+ verify the identity of requested hosts. Either `True` (default CA bundle),
1346
+ a path to an SSL certificate file, an `ssl.SSLContext`, or `False`
1347
+ (which will disable verification).
1348
+ * **cert** - *(optional)* An SSL certificate used by the requested host
1349
+ to authenticate the client. Either a path to an SSL certificate file, or
1350
+ two-tuple of (certificate file, key file), or a three-tuple of (certificate
1351
+ file, key file, password).
1352
+ * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be
1353
+ enabled. Defaults to `False`.
1354
+ * **proxy** - *(optional)* A proxy URL where all the traffic should be routed.
1355
+ * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy
1356
+ URLs.
1357
+ * **timeout** - *(optional)* The timeout configuration to use when sending
1358
+ requests.
1359
+ * **limits** - *(optional)* The limits configuration to use.
1360
+ * **max_redirects** - *(optional)* The maximum number of redirect responses
1361
+ that should be followed.
1362
+ * **base_url** - *(optional)* A URL to use as the base when building
1363
+ request URLs.
1364
+ * **transport** - *(optional)* A transport class to use for sending requests
1365
+ over the network.
1366
+ * **app** - *(optional)* An ASGI application to send requests to,
1367
+ rather than sending actual network requests.
1368
+ * **trust_env** - *(optional)* Enables or disables usage of environment
1369
+ variables for configuration.
1370
+ * **default_encoding** - *(optional)* The default encoding to use for decoding
1371
+ response text, if no charset information is included in a response Content-Type
1372
+ header. Set to a callable for automatic character set detection. Default: "utf-8".
1373
+ """
1374
+
1375
+ def __init__(
1376
+ self,
1377
+ *,
1378
+ auth: AuthTypes | None = None,
1379
+ params: QueryParamTypes | None = None,
1380
+ headers: HeaderTypes | None = None,
1381
+ cookies: CookieTypes | None = None,
1382
+ verify: VerifyTypes = True,
1383
+ cert: CertTypes | None = None,
1384
+ http1: bool = True,
1385
+ http2: bool = False,
1386
+ proxy: ProxyTypes | None = None,
1387
+ proxies: ProxiesTypes | None = None,
1388
+ mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None,
1389
+ timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG,
1390
+ follow_redirects: bool = False,
1391
+ limits: Limits = DEFAULT_LIMITS,
1392
+ max_redirects: int = DEFAULT_MAX_REDIRECTS,
1393
+ event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None,
1394
+ base_url: URL | str = "",
1395
+ transport: AsyncBaseTransport | None = None,
1396
+ app: typing.Callable[..., typing.Any] | None = None,
1397
+ trust_env: bool = True,
1398
+ default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
1399
+ ) -> None:
1400
+ super().__init__(
1401
+ auth=auth,
1402
+ params=params,
1403
+ headers=headers,
1404
+ cookies=cookies,
1405
+ timeout=timeout,
1406
+ follow_redirects=follow_redirects,
1407
+ max_redirects=max_redirects,
1408
+ event_hooks=event_hooks,
1409
+ base_url=base_url,
1410
+ trust_env=trust_env,
1411
+ default_encoding=default_encoding,
1412
+ )
1413
+
1414
+ if http2:
1415
+ try:
1416
+ import h2 # noqa
1417
+ except ImportError: # pragma: no cover
1418
+ raise ImportError(
1419
+ "Using http2=True, but the 'h2' package is not installed. "
1420
+ "Make sure to install httpx using `pip install httpx[http2]`."
1421
+ ) from None
1422
+
1423
+ if proxies:
1424
+ message = (
1425
+ "The 'proxies' argument is now deprecated."
1426
+ " Use 'proxy' or 'mounts' instead."
1427
+ )
1428
+ warnings.warn(message, DeprecationWarning)
1429
+ if proxy:
1430
+ raise RuntimeError("Use either `proxy` or 'proxies', not both.")
1431
+
1432
+ if app:
1433
+ message = (
1434
+ "The 'app' shortcut is now deprecated."
1435
+ " Use the explicit style 'transport=ASGITransport(app=...)' instead."
1436
+ )
1437
+ warnings.warn(message, DeprecationWarning)
1438
+
1439
+ allow_env_proxies = trust_env and app is None and transport is None
1440
+ proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies)
1441
+
1442
+ self._transport = self._init_transport(
1443
+ verify=verify,
1444
+ cert=cert,
1445
+ http1=http1,
1446
+ http2=http2,
1447
+ limits=limits,
1448
+ transport=transport,
1449
+ app=app,
1450
+ trust_env=trust_env,
1451
+ )
1452
+
1453
+ self._mounts: dict[URLPattern, AsyncBaseTransport | None] = {
1454
+ URLPattern(key): None
1455
+ if proxy is None
1456
+ else self._init_proxy_transport(
1457
+ proxy,
1458
+ verify=verify,
1459
+ cert=cert,
1460
+ http1=http1,
1461
+ http2=http2,
1462
+ limits=limits,
1463
+ trust_env=trust_env,
1464
+ )
1465
+ for key, proxy in proxy_map.items()
1466
+ }
1467
+ if mounts is not None:
1468
+ self._mounts.update(
1469
+ {URLPattern(key): transport for key, transport in mounts.items()}
1470
+ )
1471
+ self._mounts = dict(sorted(self._mounts.items()))
1472
+
1473
+ def _init_transport(
1474
+ self,
1475
+ verify: VerifyTypes = True,
1476
+ cert: CertTypes | None = None,
1477
+ http1: bool = True,
1478
+ http2: bool = False,
1479
+ limits: Limits = DEFAULT_LIMITS,
1480
+ transport: AsyncBaseTransport | None = None,
1481
+ app: typing.Callable[..., typing.Any] | None = None,
1482
+ trust_env: bool = True,
1483
+ ) -> AsyncBaseTransport:
1484
+ if transport is not None:
1485
+ return transport
1486
+
1487
+ if app is not None:
1488
+ return ASGITransport(app=app)
1489
+
1490
+ return AsyncHTTPTransport(
1491
+ verify=verify,
1492
+ cert=cert,
1493
+ http1=http1,
1494
+ http2=http2,
1495
+ limits=limits,
1496
+ trust_env=trust_env,
1497
+ )
1498
+
1499
+ def _init_proxy_transport(
1500
+ self,
1501
+ proxy: Proxy,
1502
+ verify: VerifyTypes = True,
1503
+ cert: CertTypes | None = None,
1504
+ http1: bool = True,
1505
+ http2: bool = False,
1506
+ limits: Limits = DEFAULT_LIMITS,
1507
+ trust_env: bool = True,
1508
+ ) -> AsyncBaseTransport:
1509
+ return AsyncHTTPTransport(
1510
+ verify=verify,
1511
+ cert=cert,
1512
+ http1=http1,
1513
+ http2=http2,
1514
+ limits=limits,
1515
+ trust_env=trust_env,
1516
+ proxy=proxy,
1517
+ )
1518
+
1519
+ def _transport_for_url(self, url: URL) -> AsyncBaseTransport:
1520
+ """
1521
+ Returns the transport instance that should be used for a given URL.
1522
+ This will either be the standard connection pool, or a proxy.
1523
+ """
1524
+ for pattern, transport in self._mounts.items():
1525
+ if pattern.matches(url):
1526
+ return self._transport if transport is None else transport
1527
+
1528
+ return self._transport
1529
+
1530
+ async def request(
1531
+ self,
1532
+ method: str,
1533
+ url: URL | str,
1534
+ *,
1535
+ content: RequestContent | None = None,
1536
+ data: RequestData | None = None,
1537
+ files: RequestFiles | None = None,
1538
+ json: typing.Any | None = None,
1539
+ params: QueryParamTypes | None = None,
1540
+ headers: HeaderTypes | None = None,
1541
+ cookies: CookieTypes | None = None,
1542
+ auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1543
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1544
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1545
+ extensions: RequestExtensions | None = None,
1546
+ ) -> Response:
1547
+ """
1548
+ Build and send a request.
1549
+
1550
+ Equivalent to:
1551
+
1552
+ ```python
1553
+ request = client.build_request(...)
1554
+ response = await client.send(request, ...)
1555
+ ```
1556
+
1557
+ See `AsyncClient.build_request()`, `AsyncClient.send()`
1558
+ and [Merging of configuration][0] for how the various parameters
1559
+ are merged with client-level configuration.
1560
+
1561
+ [0]: /advanced/clients/#merging-of-configuration
1562
+ """
1563
+
1564
+ if cookies is not None: # pragma: no cover
1565
+ message = (
1566
+ "Setting per-request cookies=<...> is being deprecated, because "
1567
+ "the expected behaviour on cookie persistence is ambiguous. Set "
1568
+ "cookies directly on the client instance instead."
1569
+ )
1570
+ warnings.warn(message, DeprecationWarning)
1571
+
1572
+ request = self.build_request(
1573
+ method=method,
1574
+ url=url,
1575
+ content=content,
1576
+ data=data,
1577
+ files=files,
1578
+ json=json,
1579
+ params=params,
1580
+ headers=headers,
1581
+ cookies=cookies,
1582
+ timeout=timeout,
1583
+ extensions=extensions,
1584
+ )
1585
+ return await self.send(request, auth=auth, follow_redirects=follow_redirects)
1586
+
1587
+ @asynccontextmanager
1588
+ async def stream(
1589
+ self,
1590
+ method: str,
1591
+ url: URL | str,
1592
+ *,
1593
+ content: RequestContent | None = None,
1594
+ data: RequestData | None = None,
1595
+ files: RequestFiles | None = None,
1596
+ json: typing.Any | None = None,
1597
+ params: QueryParamTypes | None = None,
1598
+ headers: HeaderTypes | None = None,
1599
+ cookies: CookieTypes | None = None,
1600
+ auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1601
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1602
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1603
+ extensions: RequestExtensions | None = None,
1604
+ ) -> typing.AsyncIterator[Response]:
1605
+ """
1606
+ Alternative to `httpx.request()` that streams the response body
1607
+ instead of loading it into memory at once.
1608
+
1609
+ **Parameters**: See `httpx.request`.
1610
+
1611
+ See also: [Streaming Responses][0]
1612
+
1613
+ [0]: /quickstart#streaming-responses
1614
+ """
1615
+ request = self.build_request(
1616
+ method=method,
1617
+ url=url,
1618
+ content=content,
1619
+ data=data,
1620
+ files=files,
1621
+ json=json,
1622
+ params=params,
1623
+ headers=headers,
1624
+ cookies=cookies,
1625
+ timeout=timeout,
1626
+ extensions=extensions,
1627
+ )
1628
+ response = await self.send(
1629
+ request=request,
1630
+ auth=auth,
1631
+ follow_redirects=follow_redirects,
1632
+ stream=True,
1633
+ )
1634
+ try:
1635
+ yield response
1636
+ finally:
1637
+ await response.aclose()
1638
+
1639
    async def send(
        self,
        request: Request,
        *,
        stream: bool = False,
        auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
        follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
    ) -> Response:
        """
        Send a request.

        The request is sent as-is, unmodified.

        Typically you'll want to build one with `AsyncClient.build_request()`
        so that any client-level configuration is merged into the request,
        but passing an explicit `httpx.Request()` is supported as well.

        See also: [Request instances][0]

        [0]: /advanced/clients/#request-instances
        """
        if self._state == ClientState.CLOSED:
            raise RuntimeError("Cannot send a request, as the client has been closed.")

        # Sending implicitly opens the client; an explicit `async with` is optional.
        self._state = ClientState.OPENED
        # Resolve the per-call redirect policy, falling back to the client default.
        follow_redirects = (
            self.follow_redirects
            if isinstance(follow_redirects, UseClientDefault)
            else follow_redirects
        )

        self._set_timeout(request)

        # Resolve `auth` (per-call value, client default, or none) into a
        # concrete `Auth` instance.
        auth = self._build_request_auth(request, auth)

        response = await self._send_handling_auth(
            request,
            auth=auth,
            follow_redirects=follow_redirects,
            history=[],
        )
        try:
            if not stream:
                # Eagerly read the body so the connection can be released.
                await response.aread()

            return response

        except BaseException as exc:
            # Close the response (releasing its connection) on any failure,
            # including cancellation, before propagating.
            await response.aclose()
            raise exc
1689
+
1690
    async def _send_handling_auth(
        self,
        request: Request,
        auth: Auth,
        follow_redirects: bool,
        history: list[Response],
    ) -> Response:
        """
        Drive the auth flow generator: send each request it yields, feed the
        response back in, and return once the flow stops producing requests.
        """
        auth_flow = auth.async_auth_flow(request)
        try:
            # The first yielded value is the (possibly modified) initial request.
            request = await auth_flow.__anext__()

            while True:
                response = await self._send_handling_redirects(
                    request,
                    follow_redirects=follow_redirects,
                    history=history,
                )
                try:
                    try:
                        # Offer the response to the auth flow; it may yield a
                        # follow-up request (e.g. a retry with credentials).
                        next_request = await auth_flow.asend(response)
                    except StopAsyncIteration:
                        # The auth flow is complete; this response is final.
                        return response

                    # Intermediate auth response: record it, fully read it so
                    # its connection is released, then loop with the follow-up.
                    response.history = list(history)
                    await response.aread()
                    request = next_request
                    history.append(response)

                except BaseException as exc:
                    await response.aclose()
                    raise exc
        finally:
            # Always close the generator so its cleanup code runs.
            await auth_flow.aclose()
1723
+
1724
    async def _send_handling_redirects(
        self,
        request: Request,
        follow_redirects: bool,
        history: list[Response],
    ) -> Response:
        """
        Send a request, transparently following redirects when enabled.

        When `follow_redirects` is False, a redirect response is returned
        as-is with the would-be next request attached as `.next_request`.
        """
        while True:
            # `history` also contains responses accumulated by the auth flow,
            # so this bounds the total length of the request chain.
            if len(history) > self.max_redirects:
                raise TooManyRedirects(
                    "Exceeded maximum allowed redirects.", request=request
                )

            for hook in self._event_hooks["request"]:
                await hook(request)

            response = await self._send_single_request(request)
            try:
                for hook in self._event_hooks["response"]:
                    await hook(response)

                response.history = list(history)

                if not response.has_redirect_location:
                    return response

                request = self._build_redirect_request(request, response)
                # New list (not append) so earlier responses keep their own
                # shorter history snapshots.
                history = history + [response]

                if follow_redirects:
                    # Drain the redirect response so its connection is
                    # released before issuing the next request.
                    await response.aread()
                else:
                    # Let the caller decide whether to follow the redirect.
                    response.next_request = request
                    return response

            except BaseException as exc:
                await response.aclose()
                raise exc
1761
+
1762
+ async def _send_single_request(self, request: Request) -> Response:
1763
+ """
1764
+ Sends a single request, without handling any redirections.
1765
+ """
1766
+ transport = self._transport_for_url(request.url)
1767
+ timer = Timer()
1768
+ await timer.async_start()
1769
+
1770
+ if not isinstance(request.stream, AsyncByteStream):
1771
+ raise RuntimeError(
1772
+ "Attempted to send an sync request with an AsyncClient instance."
1773
+ )
1774
+
1775
+ with request_context(request=request):
1776
+ response = await transport.handle_async_request(request)
1777
+
1778
+ assert isinstance(response.stream, AsyncByteStream)
1779
+ response.request = request
1780
+ response.stream = BoundAsyncStream(
1781
+ response.stream, response=response, timer=timer
1782
+ )
1783
+ self.cookies.extract_cookies(response)
1784
+ response.default_encoding = self._default_encoding
1785
+
1786
+ logger.info(
1787
+ 'HTTP Request: %s %s "%s %d %s"',
1788
+ request.method,
1789
+ request.url,
1790
+ response.http_version,
1791
+ response.status_code,
1792
+ response.reason_phrase,
1793
+ )
1794
+
1795
+ return response
1796
+
1797
+ async def get(
1798
+ self,
1799
+ url: URL | str,
1800
+ *,
1801
+ params: QueryParamTypes | None = None,
1802
+ headers: HeaderTypes | None = None,
1803
+ cookies: CookieTypes | None = None,
1804
+ auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT,
1805
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1806
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1807
+ extensions: RequestExtensions | None = None,
1808
+ ) -> Response:
1809
+ """
1810
+ Send a `GET` request.
1811
+
1812
+ **Parameters**: See `httpx.request`.
1813
+ """
1814
+ return await self.request(
1815
+ "GET",
1816
+ url,
1817
+ params=params,
1818
+ headers=headers,
1819
+ cookies=cookies,
1820
+ auth=auth,
1821
+ follow_redirects=follow_redirects,
1822
+ timeout=timeout,
1823
+ extensions=extensions,
1824
+ )
1825
+
1826
+ async def options(
1827
+ self,
1828
+ url: URL | str,
1829
+ *,
1830
+ params: QueryParamTypes | None = None,
1831
+ headers: HeaderTypes | None = None,
1832
+ cookies: CookieTypes | None = None,
1833
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1834
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1835
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1836
+ extensions: RequestExtensions | None = None,
1837
+ ) -> Response:
1838
+ """
1839
+ Send an `OPTIONS` request.
1840
+
1841
+ **Parameters**: See `httpx.request`.
1842
+ """
1843
+ return await self.request(
1844
+ "OPTIONS",
1845
+ url,
1846
+ params=params,
1847
+ headers=headers,
1848
+ cookies=cookies,
1849
+ auth=auth,
1850
+ follow_redirects=follow_redirects,
1851
+ timeout=timeout,
1852
+ extensions=extensions,
1853
+ )
1854
+
1855
+ async def head(
1856
+ self,
1857
+ url: URL | str,
1858
+ *,
1859
+ params: QueryParamTypes | None = None,
1860
+ headers: HeaderTypes | None = None,
1861
+ cookies: CookieTypes | None = None,
1862
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1863
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1864
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1865
+ extensions: RequestExtensions | None = None,
1866
+ ) -> Response:
1867
+ """
1868
+ Send a `HEAD` request.
1869
+
1870
+ **Parameters**: See `httpx.request`.
1871
+ """
1872
+ return await self.request(
1873
+ "HEAD",
1874
+ url,
1875
+ params=params,
1876
+ headers=headers,
1877
+ cookies=cookies,
1878
+ auth=auth,
1879
+ follow_redirects=follow_redirects,
1880
+ timeout=timeout,
1881
+ extensions=extensions,
1882
+ )
1883
+
1884
+ async def post(
1885
+ self,
1886
+ url: URL | str,
1887
+ *,
1888
+ content: RequestContent | None = None,
1889
+ data: RequestData | None = None,
1890
+ files: RequestFiles | None = None,
1891
+ json: typing.Any | None = None,
1892
+ params: QueryParamTypes | None = None,
1893
+ headers: HeaderTypes | None = None,
1894
+ cookies: CookieTypes | None = None,
1895
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1896
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1897
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1898
+ extensions: RequestExtensions | None = None,
1899
+ ) -> Response:
1900
+ """
1901
+ Send a `POST` request.
1902
+
1903
+ **Parameters**: See `httpx.request`.
1904
+ """
1905
+ return await self.request(
1906
+ "POST",
1907
+ url,
1908
+ content=content,
1909
+ data=data,
1910
+ files=files,
1911
+ json=json,
1912
+ params=params,
1913
+ headers=headers,
1914
+ cookies=cookies,
1915
+ auth=auth,
1916
+ follow_redirects=follow_redirects,
1917
+ timeout=timeout,
1918
+ extensions=extensions,
1919
+ )
1920
+
1921
+ async def put(
1922
+ self,
1923
+ url: URL | str,
1924
+ *,
1925
+ content: RequestContent | None = None,
1926
+ data: RequestData | None = None,
1927
+ files: RequestFiles | None = None,
1928
+ json: typing.Any | None = None,
1929
+ params: QueryParamTypes | None = None,
1930
+ headers: HeaderTypes | None = None,
1931
+ cookies: CookieTypes | None = None,
1932
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1933
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1934
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1935
+ extensions: RequestExtensions | None = None,
1936
+ ) -> Response:
1937
+ """
1938
+ Send a `PUT` request.
1939
+
1940
+ **Parameters**: See `httpx.request`.
1941
+ """
1942
+ return await self.request(
1943
+ "PUT",
1944
+ url,
1945
+ content=content,
1946
+ data=data,
1947
+ files=files,
1948
+ json=json,
1949
+ params=params,
1950
+ headers=headers,
1951
+ cookies=cookies,
1952
+ auth=auth,
1953
+ follow_redirects=follow_redirects,
1954
+ timeout=timeout,
1955
+ extensions=extensions,
1956
+ )
1957
+
1958
+ async def patch(
1959
+ self,
1960
+ url: URL | str,
1961
+ *,
1962
+ content: RequestContent | None = None,
1963
+ data: RequestData | None = None,
1964
+ files: RequestFiles | None = None,
1965
+ json: typing.Any | None = None,
1966
+ params: QueryParamTypes | None = None,
1967
+ headers: HeaderTypes | None = None,
1968
+ cookies: CookieTypes | None = None,
1969
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1970
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
1971
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
1972
+ extensions: RequestExtensions | None = None,
1973
+ ) -> Response:
1974
+ """
1975
+ Send a `PATCH` request.
1976
+
1977
+ **Parameters**: See `httpx.request`.
1978
+ """
1979
+ return await self.request(
1980
+ "PATCH",
1981
+ url,
1982
+ content=content,
1983
+ data=data,
1984
+ files=files,
1985
+ json=json,
1986
+ params=params,
1987
+ headers=headers,
1988
+ cookies=cookies,
1989
+ auth=auth,
1990
+ follow_redirects=follow_redirects,
1991
+ timeout=timeout,
1992
+ extensions=extensions,
1993
+ )
1994
+
1995
+ async def delete(
1996
+ self,
1997
+ url: URL | str,
1998
+ *,
1999
+ params: QueryParamTypes | None = None,
2000
+ headers: HeaderTypes | None = None,
2001
+ cookies: CookieTypes | None = None,
2002
+ auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT,
2003
+ follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT,
2004
+ timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT,
2005
+ extensions: RequestExtensions | None = None,
2006
+ ) -> Response:
2007
+ """
2008
+ Send a `DELETE` request.
2009
+
2010
+ **Parameters**: See `httpx.request`.
2011
+ """
2012
+ return await self.request(
2013
+ "DELETE",
2014
+ url,
2015
+ params=params,
2016
+ headers=headers,
2017
+ cookies=cookies,
2018
+ auth=auth,
2019
+ follow_redirects=follow_redirects,
2020
+ timeout=timeout,
2021
+ extensions=extensions,
2022
+ )
2023
+
2024
+ async def aclose(self) -> None:
2025
+ """
2026
+ Close transport and proxies.
2027
+ """
2028
+ if self._state != ClientState.CLOSED:
2029
+ self._state = ClientState.CLOSED
2030
+
2031
+ await self._transport.aclose()
2032
+ for proxy in self._mounts.values():
2033
+ if proxy is not None:
2034
+ await proxy.aclose()
2035
+
2036
+ async def __aenter__(self: U) -> U:
2037
+ if self._state != ClientState.UNOPENED:
2038
+ msg = {
2039
+ ClientState.OPENED: "Cannot open a client instance more than once.",
2040
+ ClientState.CLOSED: (
2041
+ "Cannot reopen a client instance, once it has been closed."
2042
+ ),
2043
+ }[self._state]
2044
+ raise RuntimeError(msg)
2045
+
2046
+ self._state = ClientState.OPENED
2047
+
2048
+ await self._transport.__aenter__()
2049
+ for proxy in self._mounts.values():
2050
+ if proxy is not None:
2051
+ await proxy.__aenter__()
2052
+ return self
2053
+
2054
+ async def __aexit__(
2055
+ self,
2056
+ exc_type: type[BaseException] | None = None,
2057
+ exc_value: BaseException | None = None,
2058
+ traceback: TracebackType | None = None,
2059
+ ) -> None:
2060
+ self._state = ClientState.CLOSED
2061
+
2062
+ await self._transport.__aexit__(exc_type, exc_value, traceback)
2063
+ for proxy in self._mounts.values():
2064
+ if proxy is not None:
2065
+ await proxy.__aexit__(exc_type, exc_value, traceback)
lib/python3.10/site-packages/httpx/_compat.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ The _compat module is used for code which requires branching between different
3
+ Python environments. It is excluded from the code coverage checks.
4
+ """
5
+
6
+ import re
7
+ import ssl
8
+ import sys
9
+ from types import ModuleType
10
+ from typing import Optional
11
+
12
+ # Brotli support is optional
13
+ # The C bindings in `brotli` are recommended for CPython.
14
+ # The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else.
15
+ try:
16
+ import brotlicffi as brotli
17
+ except ImportError: # pragma: no cover
18
+ try:
19
+ import brotli
20
+ except ImportError:
21
+ brotli = None
22
+
23
+ # Zstandard support is optional
24
+ zstd: Optional[ModuleType] = None
25
+ try:
26
+ import zstandard as zstd
27
+ except (AttributeError, ImportError, ValueError): # Defensive:
28
+ zstd = None
29
+ else:
30
+ # The package 'zstandard' added the 'eof' property starting
31
+ # in v0.18.0 which we require to ensure a complete and
32
+ # valid zstd stream was fed into the ZstdDecoder.
33
+ # See: https://github.com/urllib3/urllib3/pull/2624
34
+ _zstd_version = tuple(
35
+ map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
36
+ )
37
+ if _zstd_version < (0, 18): # Defensive:
38
+ zstd = None
39
+
40
+
41
+ if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7):
42
+
43
+ def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None:
44
+ # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of
45
+ # 'SSLContext.minimum_version' from Python 3.7 onwards, however
46
+ # this attribute is not available unless the ssl module is compiled
47
+ # with OpenSSL 1.1.0g or newer.
48
+ # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version
49
+ # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version
50
+ context.minimum_version = ssl.TLSVersion.TLSv1_2
51
+
52
+ else:
53
+
54
+ def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None:
55
+ # If 'minimum_version' isn't available, we configure these options with
56
+ # the older deprecated variants.
57
+ context.options |= ssl.OP_NO_SSLv2
58
+ context.options |= ssl.OP_NO_SSLv3
59
+ context.options |= ssl.OP_NO_TLSv1
60
+ context.options |= ssl.OP_NO_TLSv1_1
61
+
62
+
63
+ __all__ = ["brotli", "set_minimum_tls_version_1_2"]
lib/python3.10/site-packages/httpx/_config.py ADDED
@@ -0,0 +1,372 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import os
5
+ import ssl
6
+ import typing
7
+ from pathlib import Path
8
+
9
+ import certifi
10
+
11
+ from ._compat import set_minimum_tls_version_1_2
12
+ from ._models import Headers
13
+ from ._types import CertTypes, HeaderTypes, TimeoutTypes, VerifyTypes
14
+ from ._urls import URL
15
+ from ._utils import get_ca_bundle_from_env
16
+
17
+ __all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"]
18
+
19
+ DEFAULT_CIPHERS = ":".join(
20
+ [
21
+ "ECDHE+AESGCM",
22
+ "ECDHE+CHACHA20",
23
+ "DHE+AESGCM",
24
+ "DHE+CHACHA20",
25
+ "ECDH+AESGCM",
26
+ "DH+AESGCM",
27
+ "ECDH+AES",
28
+ "DH+AES",
29
+ "RSA+AESGCM",
30
+ "RSA+AES",
31
+ "!aNULL",
32
+ "!eNULL",
33
+ "!MD5",
34
+ "!DSS",
35
+ ]
36
+ )
37
+
38
+
39
+ logger = logging.getLogger("httpx")
40
+
41
+
42
+ class UnsetType:
43
+ pass # pragma: no cover
44
+
45
+
46
+ UNSET = UnsetType()
47
+
48
+
49
def create_ssl_context(
    cert: CertTypes | None = None,
    verify: VerifyTypes = True,
    trust_env: bool = True,
    http2: bool = False,
) -> ssl.SSLContext:
    """Build an `ssl.SSLContext` from the given TLS configuration options."""
    config = SSLConfig(cert=cert, verify=verify, trust_env=trust_env, http2=http2)
    return config.ssl_context
58
+
59
+
60
class SSLConfig:
    """
    SSL Configuration.
    """

    # Fallback CA bundle (from certifi), resolved once at class definition time.
    DEFAULT_CA_BUNDLE_PATH = Path(certifi.where())

    def __init__(
        self,
        *,
        cert: CertTypes | None = None,
        verify: VerifyTypes = True,
        trust_env: bool = True,
        http2: bool = False,
    ) -> None:
        self.cert = cert
        self.verify = verify
        self.trust_env = trust_env
        self.http2 = http2
        # Build the context eagerly so misconfiguration fails at construction
        # time rather than on first request.
        self.ssl_context = self.load_ssl_context()

    def load_ssl_context(self) -> ssl.SSLContext:
        """Return a verified or unverified context depending on `self.verify`."""
        logger.debug(
            "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r",
            self.verify,
            self.cert,
            self.trust_env,
            self.http2,
        )

        if self.verify:
            return self.load_ssl_context_verify()
        return self.load_ssl_context_no_verify()

    def load_ssl_context_no_verify(self) -> ssl.SSLContext:
        """
        Return an SSL context for unverified connections.
        """
        context = self._create_default_ssl_context()
        # check_hostname must be disabled before verify_mode may be CERT_NONE.
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        self._load_client_certs(context)
        return context

    def load_ssl_context_verify(self) -> ssl.SSLContext:
        """
        Return an SSL context for verified connections.
        """
        # Environment-provided CA bundles only apply when `verify` was left
        # at its default of True (not an explicit path or context).
        if self.trust_env and self.verify is True:
            ca_bundle = get_ca_bundle_from_env()
            if ca_bundle is not None:
                self.verify = ca_bundle

        if isinstance(self.verify, ssl.SSLContext):
            # Allow passing in our own SSLContext object that's pre-configured.
            context = self.verify
            self._load_client_certs(context)
            return context
        elif isinstance(self.verify, bool):
            ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH
        elif Path(self.verify).exists():
            ca_bundle_path = Path(self.verify)
        else:
            raise IOError(
                "Could not find a suitable TLS CA certificate bundle, "
                "invalid path: {}".format(self.verify)
            )

        context = self._create_default_ssl_context()
        context.verify_mode = ssl.CERT_REQUIRED
        context.check_hostname = True

        # Signal to server support for PHA in TLS 1.3. Raises an
        # AttributeError if only read-only access is implemented.
        try:
            context.post_handshake_auth = True
        except AttributeError:  # pragma: no cover
            pass

        # Disable using 'commonName' for SSLContext.check_hostname
        # when the 'subjectAltName' extension isn't available.
        try:
            context.hostname_checks_common_name = False
        except AttributeError:  # pragma: no cover
            pass

        # The bundle may be a single file or a directory of certificates.
        if ca_bundle_path.is_file():
            cafile = str(ca_bundle_path)
            logger.debug("load_verify_locations cafile=%r", cafile)
            context.load_verify_locations(cafile=cafile)
        elif ca_bundle_path.is_dir():
            capath = str(ca_bundle_path)
            logger.debug("load_verify_locations capath=%r", capath)
            context.load_verify_locations(capath=capath)

        self._load_client_certs(context)

        return context

    def _create_default_ssl_context(self) -> ssl.SSLContext:
        """
        Creates the default SSLContext object that's used for both verified
        and unverified connections.
        """
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        set_minimum_tls_version_1_2(context)
        context.options |= ssl.OP_NO_COMPRESSION
        context.set_ciphers(DEFAULT_CIPHERS)

        if ssl.HAS_ALPN:
            # Offer h2 via ALPN only when HTTP/2 support was requested.
            alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"]
            context.set_alpn_protocols(alpn_idents)

        # Honour SSLKEYLOGFILE (e.g. for traffic inspection tooling), but
        # only when the environment is trusted.
        keylogfile = os.environ.get("SSLKEYLOGFILE")
        if keylogfile and self.trust_env:
            context.keylog_filename = keylogfile

        return context

    def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None:
        """
        Loads client certificates into our SSLContext object
        """
        # `cert` may be a certfile path, a (certfile, keyfile) pair,
        # or a (certfile, keyfile, password) triple.
        if self.cert is not None:
            if isinstance(self.cert, str):
                ssl_context.load_cert_chain(certfile=self.cert)
            elif isinstance(self.cert, tuple) and len(self.cert) == 2:
                ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1])
            elif isinstance(self.cert, tuple) and len(self.cert) == 3:
                ssl_context.load_cert_chain(
                    certfile=self.cert[0],
                    keyfile=self.cert[1],
                    password=self.cert[2],
                )
194
+
195
+
196
class Timeout:
    """
    Timeout configuration.

    **Usage**:

    Timeout(None)               # No timeouts.
    Timeout(5.0)                # 5s timeout on all operations.
    Timeout(None, connect=5.0)  # 5s timeout on connect, no other timeouts.
    Timeout(5.0, connect=10.0)  # 10s timeout on connect. 5s timeout elsewhere.
    Timeout(5.0, pool=None)     # No timeout on acquiring connection from pool.
                                # 5s timeout elsewhere.
    """

    def __init__(
        self,
        timeout: TimeoutTypes | UnsetType = UNSET,
        *,
        connect: None | float | UnsetType = UNSET,
        read: None | float | UnsetType = UNSET,
        write: None | float | UnsetType = UNSET,
        pool: None | float | UnsetType = UNSET,
    ) -> None:
        if isinstance(timeout, Timeout):
            # Passed as a single explicit Timeout.
            # Copy-construction: per-field overrides are not allowed here.
            assert connect is UNSET
            assert read is UNSET
            assert write is UNSET
            assert pool is UNSET
            self.connect = timeout.connect  # type: typing.Optional[float]
            self.read = timeout.read  # type: typing.Optional[float]
            self.write = timeout.write  # type: typing.Optional[float]
            self.pool = timeout.pool  # type: typing.Optional[float]
        elif isinstance(timeout, tuple):
            # Passed as a tuple: (connect, read[, write[, pool]]).
            self.connect = timeout[0]
            self.read = timeout[1]
            self.write = None if len(timeout) < 3 else timeout[2]
            self.pool = None if len(timeout) < 4 else timeout[3]
        elif not (
            isinstance(connect, UnsetType)
            or isinstance(read, UnsetType)
            or isinstance(write, UnsetType)
            or isinstance(pool, UnsetType)
        ):
            # All four values supplied explicitly; no default required.
            self.connect = connect
            self.read = read
            self.write = write
            self.pool = pool
        else:
            # A default is required to fill in any unset field.
            if isinstance(timeout, UnsetType):
                raise ValueError(
                    "httpx.Timeout must either include a default, or set all "
                    "four parameters explicitly."
                )
            self.connect = timeout if isinstance(connect, UnsetType) else connect
            self.read = timeout if isinstance(read, UnsetType) else read
            self.write = timeout if isinstance(write, UnsetType) else write
            self.pool = timeout if isinstance(pool, UnsetType) else pool

    def as_dict(self) -> dict[str, float | None]:
        """Return the four timeout values keyed by operation name."""
        return {
            "connect": self.connect,
            "read": self.read,
            "write": self.write,
            "pool": self.pool,
        }

    def __eq__(self, other: typing.Any) -> bool:
        return (
            isinstance(other, self.__class__)
            and self.connect == other.connect
            and self.read == other.read
            and self.write == other.write
            and self.pool == other.pool
        )

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        # Collapse to the short form when all four values are identical.
        if len({self.connect, self.read, self.write, self.pool}) == 1:
            return f"{class_name}(timeout={self.connect})"
        return (
            f"{class_name}(connect={self.connect}, "
            f"read={self.read}, write={self.write}, pool={self.pool})"
        )
281
+
282
+
283
+ class Limits:
284
+ """
285
+ Configuration for limits to various client behaviors.
286
+
287
+ **Parameters:**
288
+
289
+ * **max_connections** - The maximum number of concurrent connections that may be
290
+ established.
291
+ * **max_keepalive_connections** - Allow the connection pool to maintain
292
+ keep-alive connections below this point. Should be less than or equal
293
+ to `max_connections`.
294
+ * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds.
295
+ """
296
+
297
+ def __init__(
298
+ self,
299
+ *,
300
+ max_connections: int | None = None,
301
+ max_keepalive_connections: int | None = None,
302
+ keepalive_expiry: float | None = 5.0,
303
+ ) -> None:
304
+ self.max_connections = max_connections
305
+ self.max_keepalive_connections = max_keepalive_connections
306
+ self.keepalive_expiry = keepalive_expiry
307
+
308
+ def __eq__(self, other: typing.Any) -> bool:
309
+ return (
310
+ isinstance(other, self.__class__)
311
+ and self.max_connections == other.max_connections
312
+ and self.max_keepalive_connections == other.max_keepalive_connections
313
+ and self.keepalive_expiry == other.keepalive_expiry
314
+ )
315
+
316
+ def __repr__(self) -> str:
317
+ class_name = self.__class__.__name__
318
+ return (
319
+ f"{class_name}(max_connections={self.max_connections}, "
320
+ f"max_keepalive_connections={self.max_keepalive_connections}, "
321
+ f"keepalive_expiry={self.keepalive_expiry})"
322
+ )
323
+
324
+
325
class Proxy:
    """Proxy configuration: target URL plus optional auth, headers and TLS context."""

    def __init__(
        self,
        url: URL | str,
        *,
        ssl_context: ssl.SSLContext | None = None,
        auth: tuple[str, str] | None = None,
        headers: HeaderTypes | None = None,
    ) -> None:
        parsed_url = URL(url)
        proxy_headers = Headers(headers)

        if parsed_url.scheme not in ("http", "https", "socks5"):
            raise ValueError(f"Unknown scheme for proxy URL {parsed_url!r}")

        if parsed_url.username or parsed_url.password:
            # Remove any auth credentials from the URL.
            auth = (parsed_url.username, parsed_url.password)
            parsed_url = parsed_url.copy_with(username=None, password=None)

        self.url = parsed_url
        self.auth = auth
        self.headers = proxy_headers
        self.ssl_context = ssl_context

    @property
    def raw_auth(self) -> tuple[bytes, bytes] | None:
        # The proxy authentication as raw bytes.
        if self.auth is None:
            return None
        username, password = self.auth
        return username.encode("utf-8"), password.encode("utf-8")

    def __repr__(self) -> str:
        # The authentication is represented with the password component masked.
        parts = [f"{str(self.url)!r}"]
        if self.auth:
            parts.append(f"auth={(self.auth[0], '********')!r}")
        if self.headers:
            parts.append(f"headers={dict(self.headers)!r}")
        return "Proxy({})".format(", ".join(parts))
368
+
369
+
370
# Package-wide defaults used when the corresponding client arguments are omitted.
DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0)
DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20)
DEFAULT_MAX_REDIRECTS = 20
lib/python3.10/site-packages/httpx/_content.py ADDED
@@ -0,0 +1,238 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import inspect
4
+ import warnings
5
+ from json import dumps as json_dumps
6
+ from typing import (
7
+ Any,
8
+ AsyncIterable,
9
+ AsyncIterator,
10
+ Iterable,
11
+ Iterator,
12
+ Mapping,
13
+ )
14
+ from urllib.parse import urlencode
15
+
16
+ from ._exceptions import StreamClosed, StreamConsumed
17
+ from ._multipart import MultipartStream
18
+ from ._types import (
19
+ AsyncByteStream,
20
+ RequestContent,
21
+ RequestData,
22
+ RequestFiles,
23
+ ResponseContent,
24
+ SyncByteStream,
25
+ )
26
+ from ._utils import peek_filelike_length, primitive_value_to_str
27
+
28
+ __all__ = ["ByteStream"]
29
+
30
+
31
class ByteStream(AsyncByteStream, SyncByteStream):
    """A request/response stream backed by a single in-memory bytes payload."""

    def __init__(self, stream: bytes) -> None:
        self._stream = stream

    def __iter__(self) -> Iterator[bytes]:
        # The whole payload is emitted as one chunk.
        return iter((self._stream,))

    async def __aiter__(self) -> AsyncIterator[bytes]:
        yield self._stream
40
+
41
+
42
class IteratorByteStream(SyncByteStream):
    """Adapts an arbitrary bytes-iterable (or file-like object) into a stream."""

    CHUNK_SIZE = 65_536

    def __init__(self, stream: Iterable[bytes]) -> None:
        self._stream = stream
        self._is_stream_consumed = False
        self._is_generator = inspect.isgenerator(stream)

    def __iter__(self) -> Iterator[bytes]:
        # Generators are one-shot: a second iteration is an error.
        if self._is_generator and self._is_stream_consumed:
            raise StreamConsumed()

        self._is_stream_consumed = True
        reader = getattr(self._stream, "read", None)
        if reader is not None:
            # File-like interfaces should use 'read' directly.
            while True:
                chunk = reader(self.CHUNK_SIZE)
                if not chunk:
                    break
                yield chunk
        else:
            # Otherwise iterate.
            yield from self._stream
65
+
66
+
67
class AsyncIteratorByteStream(AsyncByteStream):
    """Adapts an async bytes-iterable (or async file-like object) into a stream."""

    CHUNK_SIZE = 65_536

    def __init__(self, stream: AsyncIterable[bytes]) -> None:
        self._stream = stream
        self._is_stream_consumed = False
        self._is_generator = inspect.isasyncgen(stream)

    async def __aiter__(self) -> AsyncIterator[bytes]:
        # Async generators are one-shot: a second iteration is an error.
        if self._is_generator and self._is_stream_consumed:
            raise StreamConsumed()

        self._is_stream_consumed = True
        reader = getattr(self._stream, "aread", None)
        if reader is not None:
            # File-like interfaces should use 'aread' directly.
            while True:
                chunk = await reader(self.CHUNK_SIZE)
                if not chunk:
                    break
                yield chunk
        else:
            # Otherwise iterate.
            async for part in self._stream:
                yield part
90
+
91
+
92
class UnattachedStream(AsyncByteStream, SyncByteStream):
    """
    If a request or response is serialized using pickle, then it is no longer
    attached to a stream for I/O purposes. Any stream operations should result
    in `httpx.StreamClosed`.
    """

    def __iter__(self) -> Iterator[bytes]:
        raise StreamClosed()

    async def __aiter__(self) -> AsyncIterator[bytes]:
        raise StreamClosed()
        # The unreachable `yield` marks this function as an async generator,
        # so `__aiter__` still returns an async iterator as callers expect.
        yield b""  # pragma: no cover
105
+
106
+
107
def encode_content(
    content: str | bytes | Iterable[bytes] | AsyncIterable[bytes],
) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
    """
    Encode raw body content, returning a two-tuple of (<headers>, <stream>).

    NOTE: the branch order matters — `str`/`bytes` must be handled before the
    `Iterable` check, because `bytes` is itself an iterable of ints.
    """
    if isinstance(content, (bytes, str)):
        body = content.encode("utf-8") if isinstance(content, str) else content
        content_length = len(body)
        # An empty body is sent with no Content-Length header at all.
        headers = {"Content-Length": str(content_length)} if body else {}
        return headers, ByteStream(body)

    elif isinstance(content, Iterable) and not isinstance(content, dict):
        # `not isinstance(content, dict)` is a bit oddly specific, but it
        # catches a case that's easy for users to make in error, and would
        # otherwise pass through here, like any other bytes-iterable,
        # because `dict` happens to be iterable. See issue #2491.
        content_length_or_none = peek_filelike_length(content)

        if content_length_or_none is None:
            # Length unknown up-front: stream with chunked transfer encoding.
            headers = {"Transfer-Encoding": "chunked"}
        else:
            headers = {"Content-Length": str(content_length_or_none)}
        return headers, IteratorByteStream(content)  # type: ignore

    elif isinstance(content, AsyncIterable):
        headers = {"Transfer-Encoding": "chunked"}
        return headers, AsyncIteratorByteStream(content)

    raise TypeError(f"Unexpected type for 'content', {type(content)!r}")
134
+
135
+
136
def encode_urlencoded_data(
    data: RequestData,
) -> tuple[dict[str, str], ByteStream]:
    """
    Encode form data as 'application/x-www-form-urlencoded' content,
    returning a two-tuple of (<headers>, <stream>).
    """
    pairs: list[tuple[str, str]] = []
    for key, value in data.items():
        if isinstance(value, (list, tuple)):
            # Multi-valued fields become repeated (key, value) pairs.
            pairs.extend((key, primitive_value_to_str(item)) for item in value)
        else:
            pairs.append((key, primitive_value_to_str(value)))
    body = urlencode(pairs, doseq=True).encode("utf-8")
    headers = {
        "Content-Length": str(len(body)),
        "Content-Type": "application/x-www-form-urlencoded",
    }
    return headers, ByteStream(body)
150
+
151
+
152
def encode_multipart_data(
    data: RequestData, files: RequestFiles, boundary: bytes | None
) -> tuple[dict[str, str], MultipartStream]:
    """
    Encode multipart form content, returning a two-tuple of
    (<headers>, <stream>). The `MultipartStream` determines its own
    headers via `get_headers()`.
    """
    multipart = MultipartStream(data=data, files=files, boundary=boundary)
    headers = multipart.get_headers()
    return headers, multipart
158
+
159
+
160
def encode_text(text: str) -> tuple[dict[str, str], ByteStream]:
    """Encode plain-text content as UTF-8, with matching headers."""
    body = text.encode("utf-8")
    headers = {
        "Content-Length": str(len(body)),
        "Content-Type": "text/plain; charset=utf-8",
    }
    return headers, ByteStream(body)
166
+
167
+
168
def encode_html(html: str) -> tuple[dict[str, str], ByteStream]:
    """Encode HTML content as UTF-8, with matching headers."""
    body = html.encode("utf-8")
    headers = {
        "Content-Length": str(len(body)),
        "Content-Type": "text/html; charset=utf-8",
    }
    return headers, ByteStream(body)
174
+
175
+
176
def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]:
    """Serialise `json` and encode the result as UTF-8 JSON content."""
    body = json_dumps(json).encode("utf-8")
    headers = {
        "Content-Length": str(len(body)),
        "Content-Type": "application/json",
    }
    return headers, ByteStream(body)
182
+
183
+
184
def encode_request(
    content: RequestContent | None = None,
    data: RequestData | None = None,
    files: RequestFiles | None = None,
    json: Any | None = None,
    boundary: bytes | None = None,
) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
    """
    Handles encoding the given `content`, `data`, `files`, and `json`,
    returning a two-tuple of (<headers>, <stream>).

    Precedence order when several arguments are given:
    `content`, then `files` (optionally combined with `data`),
    then `data`, then `json`.
    """
    if data is not None and not isinstance(data, Mapping):
        # We prefer to separate `content=<bytes|str|byte iterator|bytes aiterator>`
        # for raw request content, and `data=<form data>` for url encoded or
        # multipart form content.
        #
        # However for compat with requests, we *do* still support
        # `data=<bytes...>` usages. We deal with that case here, treating it
        # as if `content=<...>` had been supplied instead.
        message = "Use 'content=<...>' to upload raw bytes/text content."
        warnings.warn(message, DeprecationWarning)
        return encode_content(data)

    if content is not None:
        return encode_content(content)
    elif files:
        # Multipart may carry both form fields and file fields.
        return encode_multipart_data(data or {}, files, boundary)
    elif data:
        return encode_urlencoded_data(data)
    elif json is not None:
        return encode_json(json)

    # No body: empty stream, no body-related headers.
    return {}, ByteStream(b"")
217
+
218
+
219
def encode_response(
    content: ResponseContent | None = None,
    text: str | None = None,
    html: str | None = None,
    json: Any | None = None,
) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]:
    """
    Handles encoding the given `content`, returning a two-tuple of
    (<headers>, <stream>).

    Arguments are checked in precedence order:
    `content`, then `text`, then `html`, then `json`.
    """
    if content is not None:
        return encode_content(content)
    if text is not None:
        return encode_text(text)
    if html is not None:
        return encode_html(html)
    if json is not None:
        return encode_json(json)

    # No body at all.
    return {}, ByteStream(b"")
lib/python3.10/site-packages/httpx/_decoders.py ADDED
@@ -0,0 +1,371 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Handlers for Content-Encoding.
3
+
4
+ See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import codecs
10
+ import io
11
+ import typing
12
+ import zlib
13
+
14
+ from ._compat import brotli, zstd
15
+ from ._exceptions import DecodingError
16
+
17
+
18
class ContentDecoder:
    # Abstract interface for Content-Encoding handlers: feed compressed bytes
    # through `decode()`, then call `flush()` once at end-of-stream.
    def decode(self, data: bytes) -> bytes:
        raise NotImplementedError()  # pragma: no cover

    def flush(self) -> bytes:
        raise NotImplementedError()  # pragma: no cover
24
+
25
+
26
class IdentityDecoder(ContentDecoder):
    """
    Handle unencoded data.
    """

    def decode(self, data: bytes) -> bytes:
        # Pass-through: 'identity' means no encoding was applied.
        return data

    def flush(self) -> bytes:
        return b""
36
+
37
+
38
class DeflateDecoder(ContentDecoder):
    """
    Handle 'deflate' decoding.

    See: https://stackoverflow.com/questions/1838699
    """

    def __init__(self) -> None:
        # Some servers send raw deflate data without the zlib wrapper.
        # We first try zlib-wrapped decoding and fall back exactly once.
        self.first_attempt = True
        self.decompressor = zlib.decompressobj()

    def decode(self, data: bytes) -> bytes:
        was_first_attempt = self.first_attempt
        self.first_attempt = False
        try:
            return self.decompressor.decompress(data)
        except zlib.error as exc:
            if was_first_attempt:
                # Retry with a raw deflate stream: negative wbits disables
                # the zlib header/trailer handling.
                self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)
                return self.decode(data)
            raise DecodingError(str(exc)) from exc

    def flush(self) -> bytes:
        try:
            return self.decompressor.flush()
        except zlib.error as exc:  # pragma: no cover
            raise DecodingError(str(exc)) from exc
65
+
66
+
67
class GZipDecoder(ContentDecoder):
    """
    Handle 'gzip' decoding.

    See: https://stackoverflow.com/questions/1838699
    """

    def __init__(self) -> None:
        # wbits = MAX_WBITS | 16 tells zlib to expect a gzip header/trailer.
        self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)

    def decode(self, data: bytes) -> bytes:
        try:
            return self.decompressor.decompress(data)
        except zlib.error as exc:
            raise DecodingError(str(exc)) from exc

    def flush(self) -> bytes:
        try:
            return self.decompressor.flush()
        except zlib.error as exc:  # pragma: no cover
            raise DecodingError(str(exc)) from exc
88
+
89
+
90
class BrotliDecoder(ContentDecoder):
    """
    Handle 'brotli' decoding.

    Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/
    or `pip install brotli`. See https://github.com/google/brotli
    Supports both 'brotlipy' and 'Brotli' packages since they share an import
    name. The top branches are for 'brotlipy' and bottom branches for 'Brotli'
    """

    def __init__(self) -> None:
        if brotli is None:  # pragma: no cover
            raise ImportError(
                "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' "
                "packages have been installed. "
                "Make sure to install httpx using `pip install httpx[brotli]`."
            ) from None

        self.decompressor = brotli.Decompressor()
        # Tracks whether any non-empty input has been seen, so that flush()
        # can be a no-op for an entirely empty body.
        self.seen_data = False
        self._decompress: typing.Callable[[bytes], bytes]
        if hasattr(self.decompressor, "decompress"):
            # The 'brotlicffi' package.
            self._decompress = self.decompressor.decompress  # pragma: no cover
        else:
            # The 'brotli' package.
            self._decompress = self.decompressor.process  # pragma: no cover

    def decode(self, data: bytes) -> bytes:
        if not data:
            return b""
        self.seen_data = True
        try:
            return self._decompress(data)
        except brotli.error as exc:
            raise DecodingError(str(exc)) from exc

    def flush(self) -> bytes:
        if not self.seen_data:
            return b""
        try:
            if hasattr(self.decompressor, "finish"):
                # Only available in the 'brotlicffi' package.

                # As the decompressor decompresses eagerly, this
                # will never actually emit any data. However, it will potentially throw
                # errors if a truncated or damaged data stream has been used.
                self.decompressor.finish()  # pragma: no cover
            return b""
        except brotli.error as exc:  # pragma: no cover
            raise DecodingError(str(exc)) from exc
141
+
142
+
143
class ZStandardDecoder(ContentDecoder):
    """
    Handle 'zstd' RFC 8878 decoding.

    Requires `pip install zstandard`.
    Can be installed as a dependency of httpx using `pip install httpx[zstd]`.
    """

    # inspired by the ZstdDecoder implementation in urllib3
    def __init__(self) -> None:
        if zstd is None:  # pragma: no cover
            raise ImportError(
                "Using 'ZStandardDecoder', ..."
                "Make sure to install httpx using `pip install httpx[zstd]`."
            ) from None

        self.decompressor = zstd.ZstdDecompressor().decompressobj()

    def decode(self, data: bytes) -> bytes:
        assert zstd is not None
        output = io.BytesIO()
        try:
            output.write(self.decompressor.decompress(data))
            # A body may contain several concatenated zstd frames; once one
            # frame ends (eof) with data left over, start a fresh
            # decompressor on the remainder.
            while self.decompressor.eof and self.decompressor.unused_data:
                unused_data = self.decompressor.unused_data
                self.decompressor = zstd.ZstdDecompressor().decompressobj()
                output.write(self.decompressor.decompress(unused_data))
        except zstd.ZstdError as exc:
            raise DecodingError(str(exc)) from exc
        return output.getvalue()

    def flush(self) -> bytes:
        ret = self.decompressor.flush()  # note: this is a no-op
        if not self.decompressor.eof:
            raise DecodingError("Zstandard data is incomplete")  # pragma: no cover
        return bytes(ret)
179
+
180
+
181
class MultiDecoder(ContentDecoder):
    """
    Handle stacked Content-Encoding values by chaining several decoders.
    """

    def __init__(self, children: typing.Sequence[ContentDecoder]) -> None:
        """
        'children' is the sequence of decoders in the order each encoding
        was applied. Decoding must run in the reverse order.
        """
        self.children = list(children)[::-1]

    def decode(self, data: bytes) -> bytes:
        # Pipe the data through each decoder in turn.
        for decoder in self.children:
            data = decoder.decode(data)
        return data

    def flush(self) -> bytes:
        # Each stage first decodes whatever the previous stage flushed,
        # then contributes its own flush output.
        data = b""
        for decoder in self.children:
            data = decoder.decode(data) + decoder.flush()
        return data
204
+
205
+
206
class ByteChunker:
    """
    Re-chunk incoming byte content into fixed-size pieces.

    With `chunk_size=None`, content passes straight through unchanged.
    """

    def __init__(self, chunk_size: int | None = None) -> None:
        self._buffer = io.BytesIO()
        self._chunk_size = chunk_size

    def decode(self, content: bytes) -> list[bytes]:
        if self._chunk_size is None:
            # No re-chunking requested.
            return [content] if content else []

        self._buffer.write(content)
        if self._buffer.tell() < self._chunk_size:
            # Not enough buffered yet to emit a full chunk.
            return []

        size = self._chunk_size
        value = self._buffer.getvalue()
        chunks = [value[i : i + size] for i in range(0, len(value), size)]
        self._buffer.seek(0)
        if len(chunks[-1]) == size:
            # Everything divided evenly: emit all chunks, clear the buffer.
            self._buffer.truncate()
            return chunks
        # Hold back the undersized final piece until more content arrives.
        self._buffer.write(chunks[-1])
        self._buffer.truncate()
        return chunks[:-1]

    def flush(self) -> list[bytes]:
        value = self._buffer.getvalue()
        self._buffer.seek(0)
        self._buffer.truncate()
        return [value] if value else []
243
+
244
+
245
class TextChunker:
    """
    Re-chunk incoming text content into fixed-size pieces.

    With `chunk_size=None`, content passes straight through unchanged.
    """

    def __init__(self, chunk_size: int | None = None) -> None:
        self._buffer = io.StringIO()
        self._chunk_size = chunk_size

    def decode(self, content: str) -> list[str]:
        if self._chunk_size is None:
            # No re-chunking requested.
            return [content] if content else []

        self._buffer.write(content)
        if self._buffer.tell() < self._chunk_size:
            # Not enough buffered yet to emit a full chunk.
            return []

        size = self._chunk_size
        value = self._buffer.getvalue()
        chunks = [value[i : i + size] for i in range(0, len(value), size)]
        self._buffer.seek(0)
        if len(chunks[-1]) == size:
            # Everything divided evenly: emit all chunks, clear the buffer.
            self._buffer.truncate()
            return chunks
        # Hold back the undersized final piece until more content arrives.
        self._buffer.write(chunks[-1])
        self._buffer.truncate()
        return chunks[:-1]

    def flush(self) -> list[str]:
        value = self._buffer.getvalue()
        self._buffer.seek(0)
        self._buffer.truncate()
        return [value] if value else []
282
+
283
+
284
class TextDecoder:
    """
    Incrementally decode a byte stream into text.
    """

    def __init__(self, encoding: str = "utf-8") -> None:
        # 'replace' substitutes U+FFFD for invalid byte sequences rather
        # than raising, so malformed input never aborts the stream.
        make_decoder = codecs.getincrementaldecoder(encoding)
        self.decoder = make_decoder(errors="replace")

    def decode(self, data: bytes) -> str:
        return self.decoder.decode(data)

    def flush(self) -> str:
        # Final call: emit anything buffered from a split multi-byte sequence.
        return self.decoder.decode(b"", True)
297
+
298
+
299
class LineDecoder:
    """
    Incrementally split text into lines.

    Matches the stdlib `str.splitlines` semantics, but consumes the input
    in arbitrarily-sized pieces.
    """

    def __init__(self) -> None:
        self.buffer: list[str] = []
        self.trailing_cr: bool = False

    def decode(self, text: str) -> list[str]:
        # The set of boundaries recognised by str.splitlines.
        # See https://docs.python.org/3/library/stdtypes.html#str.splitlines
        NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029"

        # A chunk ending in '\r' is ambiguous — the matching '\n' may arrive
        # in the next chunk — so defer a trailing '\r' to the next call.
        if self.trailing_cr:
            text = "\r" + text
            self.trailing_cr = False
        if text.endswith("\r"):
            self.trailing_cr = True
            text = text[:-1]

        if not text:
            # NOTE: the edge case input of empty text doesn't occur in practice,
            # because other httpx internals filter out this value
            return []  # pragma: no cover

        ends_on_newline = text[-1] in NEWLINE_CHARS
        lines = text.splitlines()

        if len(lines) == 1 and not ends_on_newline:
            # No complete line yet: buffer the fragment and continue.
            self.buffer.append(lines[0])
            return []

        if self.buffer:
            # Prepend any previously buffered fragment to the first line.
            lines[0] = "".join(self.buffer) + lines[0]
            self.buffer = []

        if not ends_on_newline:
            # The final segment is unterminated: start a new buffer with it.
            self.buffer = [lines.pop()]

        return lines

    def flush(self) -> list[str]:
        if not self.buffer and not self.trailing_cr:
            return []

        pending = ["".join(self.buffer)]
        self.buffer = []
        self.trailing_cr = False
        return pending
357
+
358
+
359
# Mapping of Content-Encoding token -> decoder class.
SUPPORTED_DECODERS = {
    "identity": IdentityDecoder,
    "gzip": GZipDecoder,
    "deflate": DeflateDecoder,
    "br": BrotliDecoder,
    "zstd": ZStandardDecoder,
}


# Remove entries whose optional third-party backend isn't installed.
if brotli is None:
    SUPPORTED_DECODERS.pop("br")  # pragma: no cover
if zstd is None:
    SUPPORTED_DECODERS.pop("zstd")  # pragma: no cover
lib/python3.10/site-packages/httpx/_exceptions.py ADDED
@@ -0,0 +1,379 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Our exception hierarchy:
3
+
4
+ * HTTPError
5
+ x RequestError
6
+ + TransportError
7
+ - TimeoutException
8
+ · ConnectTimeout
9
+ · ReadTimeout
10
+ · WriteTimeout
11
+ · PoolTimeout
12
+ - NetworkError
13
+ · ConnectError
14
+ · ReadError
15
+ · WriteError
16
+ · CloseError
17
+ - ProtocolError
18
+ · LocalProtocolError
19
+ · RemoteProtocolError
20
+ - ProxyError
21
+ - UnsupportedProtocol
22
+ + DecodingError
23
+ + TooManyRedirects
24
+ x HTTPStatusError
25
+ * InvalidURL
26
+ * CookieConflict
27
+ * StreamError
28
+ x StreamConsumed
29
+ x StreamClosed
30
+ x ResponseNotRead
31
+ x RequestNotRead
32
+ """
33
+
34
+ from __future__ import annotations
35
+
36
+ import contextlib
37
+ import typing
38
+
39
+ if typing.TYPE_CHECKING:
40
+ from ._models import Request, Response # pragma: no cover
41
+
42
+ __all__ = [
43
+ "CloseError",
44
+ "ConnectError",
45
+ "ConnectTimeout",
46
+ "CookieConflict",
47
+ "DecodingError",
48
+ "HTTPError",
49
+ "HTTPStatusError",
50
+ "InvalidURL",
51
+ "LocalProtocolError",
52
+ "NetworkError",
53
+ "PoolTimeout",
54
+ "ProtocolError",
55
+ "ProxyError",
56
+ "ReadError",
57
+ "ReadTimeout",
58
+ "RemoteProtocolError",
59
+ "RequestError",
60
+ "RequestNotRead",
61
+ "ResponseNotRead",
62
+ "StreamClosed",
63
+ "StreamConsumed",
64
+ "StreamError",
65
+ "TimeoutException",
66
+ "TooManyRedirects",
67
+ "TransportError",
68
+ "UnsupportedProtocol",
69
+ "WriteError",
70
+ "WriteTimeout",
71
+ ]
72
+
73
+
74
class HTTPError(Exception):
    """
    Base class for `RequestError` and `HTTPStatusError`.

    Useful for `try...except` blocks when issuing a request,
    and then calling `.raise_for_status()`.

    For example:

    ```
    try:
        response = httpx.get("https://www.example.com")
        response.raise_for_status()
    except httpx.HTTPError as exc:
        print(f"HTTP Exception for {exc.request.url} - {exc}")
    ```
    """

    def __init__(self, message: str) -> None:
        super().__init__(message)
        self._request: Request | None = None

    @property
    def request(self) -> Request:
        # The request is attached after the fact; guard against access
        # before it has been set.
        request = self._request
        if request is None:
            raise RuntimeError("The .request property has not been set.")
        return request

    @request.setter
    def request(self, request: Request) -> None:
        self._request = request
105
+
106
+
107
class RequestError(HTTPError):
    """
    Base class for all exceptions that may occur when issuing a `.request()`.
    """

    def __init__(self, message: str, *, request: Request | None = None) -> None:
        super().__init__(message)
        # At the point an exception is raised we won't typically have a request
        # instance to associate it with.
        #
        # The 'request_context' context manager is used within the Client and
        # Response methods in order to ensure that any raised exceptions
        # have a `.request` property set on them.
        self._request = request
121
+
122
+
123
# The following subclasses mirror the hierarchy laid out in the module
# docstring; each is docstring-only and exists purely for precise catching.
class TransportError(RequestError):
    """
    Base class for all exceptions that occur at the level of the Transport API.
    """


# Timeout exceptions...


class TimeoutException(TransportError):
    """
    The base class for timeout errors.

    An operation has timed out.
    """


class ConnectTimeout(TimeoutException):
    """
    Timed out while connecting to the host.
    """


class ReadTimeout(TimeoutException):
    """
    Timed out while receiving data from the host.
    """


class WriteTimeout(TimeoutException):
    """
    Timed out while sending data to the host.
    """


class PoolTimeout(TimeoutException):
    """
    Timed out waiting to acquire a connection from the pool.
    """


# Core networking exceptions...


class NetworkError(TransportError):
    """
    The base class for network-related errors.

    An error occurred while interacting with the network.
    """


class ReadError(NetworkError):
    """
    Failed to receive data from the network.
    """


class WriteError(NetworkError):
    """
    Failed to send data through the network.
    """


class ConnectError(NetworkError):
    """
    Failed to establish a connection.
    """


class CloseError(NetworkError):
    """
    Failed to close a connection.
    """


# Other transport exceptions...


class ProxyError(TransportError):
    """
    An error occurred while establishing a proxy connection.
    """


class UnsupportedProtocol(TransportError):
    """
    Attempted to make a request to an unsupported protocol.

    For example issuing a request to `ftp://www.example.com`.
    """


class ProtocolError(TransportError):
    """
    The protocol was violated.
    """


class LocalProtocolError(ProtocolError):
    """
    A protocol was violated by the client.

    For example if the user instantiated a `Request` instance explicitly,
    failed to include the mandatory `Host:` header, and then issued it directly
    using `client.send()`.
    """


class RemoteProtocolError(ProtocolError):
    """
    The protocol was violated by the server.

    For example, returning malformed HTTP.
    """
238
+
239
+
240
+ # Other request exceptions...
241
+
242
+
243
# Other request exceptions, raised above the transport layer.
class DecodingError(RequestError):
    """
    Decoding of the response failed, due to a malformed encoding.
    """


class TooManyRedirects(RequestError):
    """
    Too many redirects.
    """
253
+
254
+
255
+ # Client errors
256
+
257
+
258
class HTTPStatusError(HTTPError):
    """
    The response had an error HTTP status of 4xx or 5xx.

    May be raised when calling `response.raise_for_status()`
    """

    def __init__(self, message: str, *, request: Request, response: Response) -> None:
        super().__init__(message)
        # Unlike the base class, both the request and the response are
        # always available on this exception.
        self.request = request
        self.response = response
269
+
270
+
271
# Note: these derive directly from Exception, not from HTTPError.
class InvalidURL(Exception):
    """
    URL is improperly formed or cannot be parsed.
    """

    def __init__(self, message: str) -> None:
        super().__init__(message)


class CookieConflict(Exception):
    """
    Attempted to lookup a cookie by name, but multiple cookies existed.

    Can occur when calling `response.cookies.get(...)`.
    """

    def __init__(self, message: str) -> None:
        super().__init__(message)
289
+
290
+
291
+ # Stream exceptions...
292
+
293
+ # These may occur as the result of a programming error, by accessing
294
+ # the request/response stream in an invalid manner.
295
+
296
+
297
# Note: stream errors derive from RuntimeError, not HTTPError — they signal
# a programming error in how the stream was accessed, not an HTTP failure.
class StreamError(RuntimeError):
    """
    The base class for stream exceptions.

    The developer made an error in accessing the request stream in
    an invalid way.
    """

    def __init__(self, message: str) -> None:
        super().__init__(message)
307
+
308
+
309
class StreamConsumed(StreamError):
    """
    Attempted to read or stream content, but the content has already
    been streamed.
    """

    def __init__(self) -> None:
        # Fixed: the implicit string concatenation was missing a space
        # between "...authentication flow." and "For responses...",
        # rendering as "flow.For" in the user-facing message.
        message = (
            "Attempted to read or stream some content, but the content has "
            "already been streamed. For requests, this could be due to passing "
            "a generator as request content, and then receiving a redirect "
            "response or a secondary request as part of an authentication flow. "
            "For responses, this could be due to attempting to stream the response "
            "content more than once."
        )
        super().__init__(message)
325
+
326
+
327
class StreamClosed(StreamError):
    """
    Attempted to read or stream response content, but the request has been
    closed.
    """

    def __init__(self) -> None:
        message = "Attempted to read or stream content, but the stream has been closed."
        super().__init__(message)
338
+
339
+
340
class ResponseNotRead(StreamError):
    """
    Attempted to access streaming response content, without having called `read()`.
    """

    def __init__(self) -> None:
        # Fixed message: this error carries no per-instance context.
        message = (
            "Attempted to access streaming response content,"
            " without having called `read()`."
        )
        super().__init__(message)


class RequestNotRead(StreamError):
    """
    Attempted to access streaming request content, without having called `read()`.
    """

    def __init__(self) -> None:
        message = (
            "Attempted to access streaming request content,"
            " without having called `read()`."
        )
        super().__init__(message)
364
+
365
+
366
@contextlib.contextmanager
def request_context(
    request: Request | None = None,
) -> typing.Iterator[None]:
    """
    A context manager that can be used to attach the given request context
    to any `RequestError` exceptions that are raised within the block.
    """
    try:
        yield
    except RequestError as exc:
        if request is not None:
            # Uses the `HTTPError.request` property setter.
            exc.request = request
        raise exc
lib/python3.10/site-packages/httpx/_main.py ADDED
@@ -0,0 +1,509 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import functools
4
+ import json
5
+ import sys
6
+ import typing
7
+
8
+ import click
9
+ import httpcore
10
+ import pygments.lexers
11
+ import pygments.util
12
+ import rich.console
13
+ import rich.markup
14
+ import rich.progress
15
+ import rich.syntax
16
+ import rich.table
17
+
18
+ from ._client import Client
19
+ from ._exceptions import RequestError
20
+ from ._models import Response
21
+ from ._status_codes import codes
22
+
23
+
24
def print_help() -> None:
    """Render the command-line usage help to the console via rich."""
    console = rich.console.Console()

    console.print("[bold]HTTPX :butterfly:", justify="center")
    console.print()
    console.print("A next generation HTTP client.", justify="center")
    console.print()
    console.print(
        "Usage: [bold]httpx[/bold] [cyan]<URL> [OPTIONS][/cyan] ", justify="left"
    )
    console.print()

    # Two-column grid: option syntax on the left, description on the right.
    table = rich.table.Table.grid(padding=1, pad_edge=True)
    table.add_column("Parameter", no_wrap=True, justify="left", style="bold")
    table.add_column("Description")
    table.add_row(
        "-m, --method [cyan]METHOD",
        "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n"
        "[Default: GET, or POST if a request body is included]",
    )
    table.add_row(
        "-p, --params [cyan]<NAME VALUE> ...",
        "Query parameters to include in the request URL.",
    )
    table.add_row(
        "-c, --content [cyan]TEXT", "Byte content to include in the request body."
    )
    table.add_row(
        "-d, --data [cyan]<NAME VALUE> ...", "Form data to include in the request body."
    )
    table.add_row(
        "-f, --files [cyan]<NAME FILENAME> ...",
        "Form files to include in the request body.",
    )
    table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.")
    table.add_row(
        "-h, --headers [cyan]<NAME VALUE> ...",
        "Include additional HTTP headers in the request.",
    )
    table.add_row(
        "--cookies [cyan]<NAME VALUE> ...", "Cookies to include in the request."
    )
    table.add_row(
        "--auth [cyan]<USER PASS>",
        "Username and password to include in the request. Specify '-' for the password"
        " to use a password prompt. Note that using --verbose/-v will expose"
        " the Authorization header, including the password encoding"
        " in a trivially reversible format.",
    )

    table.add_row(
        "--proxy [cyan]URL",
        "Send the request via a proxy. Should be the URL giving the proxy address.",
    )

    table.add_row(
        "--timeout [cyan]FLOAT",
        "Timeout value to use for network operations, such as establishing the"
        " connection, reading some data, etc... [Default: 5.0]",
    )

    table.add_row("--follow-redirects", "Automatically follow redirects.")
    table.add_row("--no-verify", "Disable SSL verification.")
    table.add_row(
        "--http2", "Send the request using HTTP/2, if the remote server supports it."
    )

    table.add_row(
        "--download [cyan]FILE",
        "Save the response content as a file, rather than displaying it.",
    )

    table.add_row("-v, --verbose", "Verbose output. Show request as well as response.")
    table.add_row("--help", "Show this message and exit.")
    console.print(table)
99
+
100
+
101
def get_lexer_for_response(response: Response) -> str:
    """
    Return the pygments lexer name matching the response's Content-Type,
    or "" when no Content-Type is present or no lexer is registered for it.
    """
    content_type = response.headers.get("Content-Type")
    if content_type is None:
        return ""  # pragma: no cover
    # Strip any parameters, eg. "application/json; charset=utf-8".
    mime_type = content_type.partition(";")[0].strip()
    try:
        lexer = pygments.lexers.get_lexer_for_mimetype(mime_type)
    except pygments.util.ClassNotFound:  # pragma: no cover
        return ""
    return typing.cast(str, lexer.name)
112
+
113
+
114
def format_request_headers(request: httpcore.Request, http2: bool = False) -> str:
    """
    Render the request line plus all request headers as display text.

    Header names are lowercased when `http2=True`, matching the on-the-wire
    representation of HTTP/2 header fields.
    """
    version = "HTTP/2" if http2 else "HTTP/1.1"
    method = request.method.decode("ascii")
    target = request.url.target.decode("ascii")

    lines = [f"{method} {target} {version}"]
    for name, value in request.headers:
        display_name = name.lower() if http2 else name
        lines.append(f"{display_name.decode('ascii')}: {value.decode('ascii')}")
    return "\n".join(lines)
125
+
126
+
127
def format_response_headers(
    http_version: bytes,
    status: int,
    reason_phrase: bytes | None,
    headers: list[tuple[bytes, bytes]],
) -> str:
    """
    Render the status line plus all response headers as display text.

    When `reason_phrase` is None (eg. HTTP/2 carries no reason phrase),
    the canonical phrase for the status code is looked up instead.
    """
    if reason_phrase is None:
        reason = codes.get_reason_phrase(status)
    else:
        reason = reason_phrase.decode("ascii")

    status_line = f"{http_version.decode('ascii')} {status} {reason}"
    header_lines = [
        f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers
    ]
    return "\n".join([status_line, *header_lines])
143
+
144
+
145
def print_request_headers(request: httpcore.Request, http2: bool = False) -> None:
    """
    Print the outgoing request line and headers, followed by an empty
    syntax block acting as a separator.
    """
    console = rich.console.Console()
    for http_text in (format_request_headers(request, http2=http2), ""):
        syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True)
        console.print(syntax)
152
+
153
+
154
def print_response_headers(
    http_version: bytes,
    status: int,
    reason_phrase: bytes | None,
    headers: list[tuple[bytes, bytes]],
) -> None:
    """
    Print the response status line and headers, followed by an empty
    syntax block acting as a separator.
    """
    console = rich.console.Console()
    http_text = format_response_headers(http_version, status, reason_phrase, headers)
    for block in (http_text, ""):
        syntax = rich.syntax.Syntax(block, "http", theme="ansi_dark", word_wrap=True)
        console.print(syntax)
166
+
167
+
168
def print_response(response: Response) -> None:
    """
    Pretty-print the response body: syntax-highlighted when a lexer matches
    the Content-Type, pretty-printed for JSON, or a byte-count placeholder
    for binary content.
    """
    console = rich.console.Console()
    lexer_name = get_lexer_for_response(response)

    if not lexer_name:
        # No matching lexer — treat the body as binary and avoid dumping it.
        console.print(f"<{len(response.content)} bytes of binary data>")
        return

    if lexer_name.lower() == "json":
        try:
            text = json.dumps(response.json(), indent=4)
        except ValueError:  # pragma: no cover
            text = response.text
    else:
        text = response.text

    syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True)
    console.print(syntax)
185
+
186
+
187
# Shapes of the value returned by ssl.SSLSocket.getpeercert():
# an RDN is a tuple of (attribute, value) string pairs, and fields such as
# "subject"/"issuer" are tuples of RDNs. These aliases mirror the private
# names used in the stdlib `ssl` module's type stubs.
_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...]
_PCTRTTT = typing.Tuple[_PCTRTT, ...]
_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]]
190
+
191
+
192
def format_certificate(cert: _PeerCertRetDictType) -> str:  # pragma: no cover
    """
    Render a peer-certificate dict (as returned by `getpeercert()`) as
    '* key: value' display lines, flattening subject/issuer RDN tuples.
    """
    output: list[str] = []
    for field, value in cert.items():
        if not isinstance(value, (list, tuple)):
            # Scalar fields, eg. "version" or "notAfter".
            output.append(f"* {field}: {value!r}")
            continue

        output.append(f"* {field}:")
        for entry in value:
            if field in ("subject", "issuer"):
                # Each entry is an RDN: a tuple of (attribute, value) pairs.
                for pair in entry:
                    output.append(f"* {pair[0]}: {pair[1]!r}")
            elif isinstance(entry, tuple) and len(entry) == 2:
                output.append(f"* {entry[0]}: {entry[1]!r}")
            else:
                output.append(f"* {entry!r}")
    return "\n".join(output)
208
+
209
+
210
def trace(
    name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False
) -> None:
    """
    Trace-extension hook: pretty-print network events as they occur.

    `name` is the event identifier supplied by the transport's trace
    extension, and `info` carries the event payload (keys such as "host",
    "request" or "return_value", depending on the event). Connection-level
    and request-side events are only shown when `verbose=True`; response
    headers are always printed.
    """
    console = rich.console.Console()
    if name == "connection.connect_tcp.started" and verbose:
        host = info["host"]
        console.print(f"* Connecting to {host!r}")
    elif name == "connection.connect_tcp.complete" and verbose:
        stream = info["return_value"]
        server_addr = stream.get_extra_info("server_addr")
        console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}")
    elif name == "connection.start_tls.complete" and verbose:  # pragma: no cover
        # TLS details are read off the negotiated ssl.SSLObject.
        stream = info["return_value"]
        ssl_object = stream.get_extra_info("ssl_object")
        version = ssl_object.version()
        cipher = ssl_object.cipher()
        server_cert = ssl_object.getpeercert()
        alpn = ssl_object.selected_alpn_protocol()
        console.print(f"* SSL established using {version!r} / {cipher[0]!r}")
        console.print(f"* Selected ALPN protocol: {alpn!r}")
        if server_cert:
            console.print("* Server certificate:")
            console.print(format_certificate(server_cert))
    elif name == "http11.send_request_headers.started" and verbose:
        request = info["request"]
        print_request_headers(request, http2=False)
    elif name == "http2.send_request_headers.started" and verbose:  # pragma: no cover
        request = info["request"]
        print_request_headers(request, http2=True)
    elif name == "http11.receive_response_headers.complete":
        http_version, status, reason_phrase, headers = info["return_value"]
        print_response_headers(http_version, status, reason_phrase, headers)
    elif name == "http2.receive_response_headers.complete":  # pragma: no cover
        # HTTP/2 events omit the version and reason phrase, so fill them in.
        status, headers = info["return_value"]
        http_version = b"HTTP/2"
        reason_phrase = None
        print_response_headers(http_version, status, reason_phrase, headers)
247
+
248
+
249
def download_response(response: Response, download: typing.BinaryIO) -> None:
    """
    Stream the response body into `download`, rendering a progress bar.

    When the server supplies a Content-Length header the bar shows a
    percentage toward that total; otherwise the task is created unstarted
    (indeterminate progress).
    """
    console = rich.console.Console()
    console.print()
    content_length = response.headers.get("Content-Length")
    with rich.progress.Progress(
        "[progress.description]{task.description}",
        "[progress.percentage]{task.percentage:>3.0f}%",
        rich.progress.BarColumn(bar_width=None),
        rich.progress.DownloadColumn(),
        rich.progress.TransferSpeedColumn(),
    ) as progress:
        description = f"Downloading [bold]{rich.markup.escape(download.name)}"
        download_task = progress.add_task(
            description,
            # total falls back to 0 when Content-Length is absent or empty.
            total=int(content_length or 0),
            start=content_length is not None,
        )
        for chunk in response.iter_bytes():
            download.write(chunk)
            progress.update(download_task, completed=response.num_bytes_downloaded)
269
+
270
+
271
def validate_json(
    ctx: click.Context,
    param: click.Option | click.Parameter,
    value: typing.Any,
) -> typing.Any:
    """
    Click callback for --json: parse the option value as JSON,
    raising BadParameter on malformed input. None passes through.
    """
    if value is None:
        return None

    try:
        parsed = json.loads(value)
    except json.JSONDecodeError:  # pragma: no cover
        raise click.BadParameter("Not valid JSON")
    return parsed
283
+
284
+
285
def validate_auth(
    ctx: click.Context,
    param: click.Option | click.Parameter,
    value: typing.Any,
) -> typing.Any:
    """
    Click callback for --auth: normalise the (username, password) pair.

    The unset default (None, None) maps to None; a password of '-' is
    replaced by an interactive hidden prompt.
    """
    if value == (None, None):
        return None

    username, password = value
    if password == "-":  # pragma: no cover
        password = click.prompt("Password", hide_input=True)
    return username, password
297
+
298
+
299
def handle_help(
    ctx: click.Context,
    param: click.Option | click.Parameter,
    value: typing.Any,
) -> None:
    """
    Click callback for --help: print the custom usage table and exit.

    Does nothing when the flag is unset or when click is doing
    resilient parsing (eg. shell completion).
    """
    if value and not ctx.resilient_parsing:
        print_help()
        ctx.exit()
309
+
310
+
311
# Command-line entry point. The default click help is disabled in favour of
# the rich-formatted table produced by `print_help` (wired up via --help).
@click.command(add_help_option=False)
@click.argument("url", type=str)
@click.option(
    "--method",
    "-m",
    "method",
    type=str,
    help=(
        "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. "
        "[Default: GET, or POST if a request body is included]"
    ),
)
@click.option(
    "--params",
    "-p",
    "params",
    type=(str, str),
    multiple=True,
    help="Query parameters to include in the request URL.",
)
@click.option(
    "--content",
    "-c",
    "content",
    type=str,
    help="Byte content to include in the request body.",
)
@click.option(
    "--data",
    "-d",
    "data",
    type=(str, str),
    multiple=True,
    help="Form data to include in the request body.",
)
@click.option(
    "--files",
    "-f",
    "files",
    type=(str, click.File(mode="rb")),
    multiple=True,
    help="Form files to include in the request body.",
)
@click.option(
    "--json",
    "-j",
    "json",
    type=str,
    callback=validate_json,
    help="JSON data to include in the request body.",
)
@click.option(
    "--headers",
    "-h",
    "headers",
    type=(str, str),
    multiple=True,
    help="Include additional HTTP headers in the request.",
)
@click.option(
    "--cookies",
    "cookies",
    type=(str, str),
    multiple=True,
    help="Cookies to include in the request.",
)
@click.option(
    "--auth",
    "auth",
    type=(str, str),
    default=(None, None),
    callback=validate_auth,
    help=(
        "Username and password to include in the request. "
        "Specify '-' for the password to use a password prompt. "
        "Note that using --verbose/-v will expose the Authorization header, "
        "including the password encoding in a trivially reversible format."
    ),
)
@click.option(
    "--proxy",
    "proxy",
    type=str,
    default=None,
    help="Send the request via a proxy. Should be the URL giving the proxy address.",
)
@click.option(
    "--timeout",
    "timeout",
    type=float,
    default=5.0,
    help=(
        "Timeout value to use for network operations, such as establishing the "
        "connection, reading some data, etc... [Default: 5.0]"
    ),
)
@click.option(
    "--follow-redirects",
    "follow_redirects",
    is_flag=True,
    default=False,
    help="Automatically follow redirects.",
)
# NOTE: --no-verify is an inverted flag — passing it sets `verify=False`,
# while the parameter default remains True (verification enabled).
@click.option(
    "--no-verify",
    "verify",
    is_flag=True,
    default=True,
    help="Disable SSL verification.",
)
@click.option(
    "--http2",
    "http2",
    type=bool,
    is_flag=True,
    default=False,
    help="Send the request using HTTP/2, if the remote server supports it.",
)
@click.option(
    "--download",
    type=click.File("wb"),
    help="Save the response content as a file, rather than displaying it.",
)
@click.option(
    "--verbose",
    "-v",
    type=bool,
    is_flag=True,
    default=False,
    help="Verbose. Show request as well as response.",
)
@click.option(
    "--help",
    is_flag=True,
    is_eager=True,
    expose_value=False,
    callback=handle_help,
    help="Show this message and exit.",
)
def main(
    url: str,
    method: str,
    params: list[tuple[str, str]],
    content: str,
    data: list[tuple[str, str]],
    files: list[tuple[str, click.File]],
    json: str,
    headers: list[tuple[str, str]],
    cookies: list[tuple[str, str]],
    auth: tuple[str, str] | None,
    proxy: str,
    timeout: float,
    follow_redirects: bool,
    verify: bool,
    http2: bool,
    download: typing.BinaryIO | None,
    verbose: bool,
) -> None:
    """
    An HTTP command line client.
    Sends a request and displays the response.
    """
    # Default the method based on whether any request body was supplied.
    if not method:
        method = "POST" if content or data or files or json else "GET"

    try:
        with Client(
            proxy=proxy,
            timeout=timeout,
            verify=verify,
            http2=http2,
        ) as client:
            # Stream the response so large bodies and --download don't
            # require buffering everything in memory up front.
            with client.stream(
                method,
                url,
                params=list(params),
                content=content,
                data=dict(data),
                files=files,  # type: ignore
                json=json,
                headers=headers,
                cookies=dict(cookies),
                auth=auth,
                follow_redirects=follow_redirects,
                # The trace extension drives all console output for
                # request/response headers (see `trace` above).
                extensions={"trace": functools.partial(trace, verbose=verbose)},
            ) as response:
                if download is not None:
                    download_response(response, download)
                else:
                    response.read()
                    if response.content:
                        print_response(response)

    except RequestError as exc:
        console = rich.console.Console()
        console.print(f"[red]{type(exc).__name__}[/red]: {exc}")
        sys.exit(1)

    # Exit status mirrors HTTP success: non-2xx responses exit non-zero.
    sys.exit(0 if response.is_success else 1)
lib/python3.10/site-packages/httpx/_models.py ADDED
@@ -0,0 +1,1211 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import datetime
4
+ import email.message
5
+ import json as jsonlib
6
+ import typing
7
+ import urllib.request
8
+ from collections.abc import Mapping
9
+ from http.cookiejar import Cookie, CookieJar
10
+
11
+ from ._content import ByteStream, UnattachedStream, encode_request, encode_response
12
+ from ._decoders import (
13
+ SUPPORTED_DECODERS,
14
+ ByteChunker,
15
+ ContentDecoder,
16
+ IdentityDecoder,
17
+ LineDecoder,
18
+ MultiDecoder,
19
+ TextChunker,
20
+ TextDecoder,
21
+ )
22
+ from ._exceptions import (
23
+ CookieConflict,
24
+ HTTPStatusError,
25
+ RequestNotRead,
26
+ ResponseNotRead,
27
+ StreamClosed,
28
+ StreamConsumed,
29
+ request_context,
30
+ )
31
+ from ._multipart import get_multipart_boundary_from_content_type
32
+ from ._status_codes import codes
33
+ from ._types import (
34
+ AsyncByteStream,
35
+ CookieTypes,
36
+ HeaderTypes,
37
+ QueryParamTypes,
38
+ RequestContent,
39
+ RequestData,
40
+ RequestExtensions,
41
+ RequestFiles,
42
+ ResponseContent,
43
+ ResponseExtensions,
44
+ SyncByteStream,
45
+ )
46
+ from ._urls import URL
47
+ from ._utils import (
48
+ is_known_encoding,
49
+ normalize_header_key,
50
+ normalize_header_value,
51
+ obfuscate_sensitive_headers,
52
+ parse_content_type_charset,
53
+ parse_header_links,
54
+ )
55
+
56
+ __all__ = ["Cookies", "Headers", "Request", "Response"]
57
+
58
+
59
class Headers(typing.MutableMapping[str, str]):
    """
    HTTP headers, as a case-insensitive multi-dict.
    """

    def __init__(
        self,
        headers: HeaderTypes | None = None,
        encoding: str | None = None,
    ) -> None:
        # Internal storage is a list of (raw_key, lowercased_key, value) byte
        # triples: the raw key preserves the caller's casing for `.raw`, while
        # the lowercased key makes every lookup case-insensitive.
        if headers is None:
            self._list = []  # type: typing.List[typing.Tuple[bytes, bytes, bytes]]
        elif isinstance(headers, Headers):
            self._list = list(headers._list)
        elif isinstance(headers, Mapping):
            self._list = [
                (
                    normalize_header_key(k, lower=False, encoding=encoding),
                    normalize_header_key(k, lower=True, encoding=encoding),
                    normalize_header_value(v, encoding),
                )
                for k, v in headers.items()
            ]
        else:
            # Any other iterable of (key, value) pairs.
            self._list = [
                (
                    normalize_header_key(k, lower=False, encoding=encoding),
                    normalize_header_key(k, lower=True, encoding=encoding),
                    normalize_header_value(v, encoding),
                )
                for k, v in headers
            ]

        self._encoding = encoding

    @property
    def encoding(self) -> str:
        """
        Header encoding is mandated as ascii, but we allow fallbacks to utf-8
        or iso-8859-1.
        """
        if self._encoding is None:
            # Probe ascii then utf-8 against every stored key and value;
            # the first encoding that decodes everything wins.
            for encoding in ["ascii", "utf-8"]:
                for key, value in self.raw:
                    try:
                        key.decode(encoding)
                        value.decode(encoding)
                    except UnicodeDecodeError:
                        break
                else:
                    # The else block runs if 'break' did not occur, meaning
                    # all values fitted the encoding.
                    self._encoding = encoding
                    break
            else:
                # The ISO-8859-1 encoding covers all 256 code points in a byte,
                # so will never raise decode errors.
                self._encoding = "iso-8859-1"
        return self._encoding

    @encoding.setter
    def encoding(self, value: str) -> None:
        self._encoding = value

    @property
    def raw(self) -> list[tuple[bytes, bytes]]:
        """
        Returns a list of the raw header items, as byte pairs.
        """
        return [(raw_key, value) for raw_key, _, value in self._list]

    def keys(self) -> typing.KeysView[str]:
        # A dict (not a set) is used so repeated keys are de-duplicated
        # while insertion order is preserved.
        return {key.decode(self.encoding): None for _, key, value in self._list}.keys()

    def values(self) -> typing.ValuesView[str]:
        # Values of repeated keys are folded into one comma-separated string,
        # mirroring `items()` and `__getitem__`.
        values_dict: dict[str, str] = {}
        for _, key, value in self._list:
            str_key = key.decode(self.encoding)
            str_value = value.decode(self.encoding)
            if str_key in values_dict:
                values_dict[str_key] += f", {str_value}"
            else:
                values_dict[str_key] = str_value
        return values_dict.values()

    def items(self) -> typing.ItemsView[str, str]:
        """
        Return `(key, value)` items of headers. Concatenate headers
        into a single comma separated value when a key occurs multiple times.
        """
        values_dict: dict[str, str] = {}
        for _, key, value in self._list:
            str_key = key.decode(self.encoding)
            str_value = value.decode(self.encoding)
            if str_key in values_dict:
                values_dict[str_key] += f", {str_value}"
            else:
                values_dict[str_key] = str_value
        return values_dict.items()

    def multi_items(self) -> list[tuple[str, str]]:
        """
        Return a list of `(key, value)` pairs of headers. Allow multiple
        occurrences of the same key without concatenating into a single
        comma separated value.
        """
        return [
            (key.decode(self.encoding), value.decode(self.encoding))
            for _, key, value in self._list
        ]

    def get(self, key: str, default: typing.Any = None) -> typing.Any:
        """
        Return a header value. If multiple occurrences of the header occur
        then concatenate them together with commas.
        """
        try:
            return self[key]
        except KeyError:
            return default

    def get_list(self, key: str, split_commas: bool = False) -> list[str]:
        """
        Return a list of all header values for a given key.
        If `split_commas=True` is passed, then any comma separated header
        values are split into multiple return strings.
        """
        get_header_key = key.lower().encode(self.encoding)

        values = [
            item_value.decode(self.encoding)
            for _, item_key, item_value in self._list
            if item_key.lower() == get_header_key
        ]

        if not split_commas:
            return values

        split_values = []
        for value in values:
            split_values.extend([item.strip() for item in value.split(",")])
        return split_values

    def update(self, headers: HeaderTypes | None = None) -> None:  # type: ignore
        # Incoming keys replace all existing entries of the same name;
        # the new items are then appended at the end of the list.
        headers = Headers(headers)
        for key in headers.keys():
            if key in self:
                self.pop(key)
        self._list.extend(headers._list)

    def copy(self) -> Headers:
        return Headers(self, encoding=self.encoding)

    def __getitem__(self, key: str) -> str:
        """
        Return a single header value.

        If there are multiple headers with the same key, then we concatenate
        them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2
        """
        normalized_key = key.lower().encode(self.encoding)

        items = [
            header_value.decode(self.encoding)
            for _, header_key, header_value in self._list
            if header_key == normalized_key
        ]

        if items:
            return ", ".join(items)

        raise KeyError(key)

    def __setitem__(self, key: str, value: str) -> None:
        """
        Set the header `key` to `value`, removing any duplicate entries.
        Retains insertion order.
        """
        set_key = key.encode(self._encoding or "utf-8")
        set_value = value.encode(self._encoding or "utf-8")
        lookup_key = set_key.lower()

        found_indexes = [
            idx
            for idx, (_, item_key, _) in enumerate(self._list)
            if item_key == lookup_key
        ]

        # Drop every occurrence after the first, deleting from the end
        # backwards so earlier indexes stay valid.
        for idx in reversed(found_indexes[1:]):
            del self._list[idx]

        if found_indexes:
            # Overwrite the first occurrence in place, preserving its position.
            idx = found_indexes[0]
            self._list[idx] = (set_key, lookup_key, set_value)
        else:
            self._list.append((set_key, lookup_key, set_value))

    def __delitem__(self, key: str) -> None:
        """
        Remove the header `key`.
        """
        del_key = key.lower().encode(self.encoding)

        pop_indexes = [
            idx
            for idx, (_, item_key, _) in enumerate(self._list)
            if item_key.lower() == del_key
        ]

        if not pop_indexes:
            raise KeyError(key)

        # Delete from the end backwards so earlier indexes stay valid.
        for idx in reversed(pop_indexes):
            del self._list[idx]

    def __contains__(self, key: typing.Any) -> bool:
        header_key = key.lower().encode(self.encoding)
        return header_key in [key for _, key, _ in self._list]

    def __iter__(self) -> typing.Iterator[typing.Any]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self._list)

    def __eq__(self, other: typing.Any) -> bool:
        # Equality is order-insensitive and case-insensitive on keys
        # (comparison is over the lowercased keys).
        try:
            other_headers = Headers(other)
        except ValueError:
            return False

        self_list = [(key, value) for _, key, value in self._list]
        other_list = [(key, value) for _, key, value in other_headers._list]
        return sorted(self_list) == sorted(other_list)

    def __repr__(self) -> str:
        class_name = self.__class__.__name__

        encoding_str = ""
        if self.encoding != "ascii":
            encoding_str = f", encoding={self.encoding!r}"

        # Sensitive header values are masked before being displayed.
        as_list = list(obfuscate_sensitive_headers(self.multi_items()))
        as_dict = dict(as_list)

        # Use the compact dict form unless duplicate keys would be lost.
        no_duplicate_keys = len(as_dict) == len(as_list)
        if no_duplicate_keys:
            return f"{class_name}({as_dict!r}{encoding_str})"
        return f"{class_name}({as_list!r}{encoding_str})"
308
+
309
+
310
class Request:
    """
    An HTTP request, pairing a method/URL/headers with a body stream.
    """

    def __init__(
        self,
        method: str | bytes,
        url: URL | str,
        *,
        params: QueryParamTypes | None = None,
        headers: HeaderTypes | None = None,
        cookies: CookieTypes | None = None,
        content: RequestContent | None = None,
        data: RequestData | None = None,
        files: RequestFiles | None = None,
        json: typing.Any | None = None,
        stream: SyncByteStream | AsyncByteStream | None = None,
        extensions: RequestExtensions | None = None,
    ) -> None:
        # Method is normalised to an uppercase str regardless of input type.
        self.method = (
            method.decode("ascii").upper()
            if isinstance(method, bytes)
            else method.upper()
        )
        self.url = URL(url)
        if params is not None:
            self.url = self.url.copy_merge_params(params=params)
        self.headers = Headers(headers)
        self.extensions = {} if extensions is None else extensions

        if cookies:
            Cookies(cookies).set_cookie_header(self)

        if stream is None:
            # Encode the body from content/data/files/json, picking up any
            # multipart boundary already present in a user-supplied
            # Content-Type header.
            content_type: str | None = self.headers.get("content-type")
            headers, stream = encode_request(
                content=content,
                data=data,
                files=files,
                json=json,
                boundary=get_multipart_boundary_from_content_type(
                    content_type=content_type.encode(self.headers.encoding)
                    if content_type
                    else None
                ),
            )
            self._prepare(headers)
            self.stream = stream
            # Load the request body, except for streaming content.
            if isinstance(stream, ByteStream):
                self.read()
        else:
            # There's an important distinction between `Request(content=...)`,
            # and `Request(stream=...)`.
            #
            # Using `content=...` implies automatically populated `Host` and content
            # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
            #
            # Using `stream=...` will not automatically include *any*
            # auto-populated headers.
            #
            # As an end-user you don't really need `stream=...`. It's only
            # useful when:
            #
            # * Preserving the request stream when copying requests, eg for redirects.
            # * Creating request instances on the *server-side* of the transport API.
            self.stream = stream

    def _prepare(self, default_headers: dict[str, str]) -> None:
        # Merge the body-derived headers in without overriding anything the
        # caller set explicitly, then prepend auto-generated headers.
        for key, value in default_headers.items():
            # Ignore Transfer-Encoding if the Content-Length has been set explicitly.
            if key.lower() == "transfer-encoding" and "Content-Length" in self.headers:
                continue
            self.headers.setdefault(key, value)

        auto_headers: list[tuple[bytes, bytes]] = []

        has_host = "Host" in self.headers
        has_content_length = (
            "Content-Length" in self.headers or "Transfer-Encoding" in self.headers
        )

        if not has_host and self.url.host:
            auto_headers.append((b"Host", self.url.netloc))
        # Bodyless methods that conventionally carry a body still get an
        # explicit zero Content-Length.
        if not has_content_length and self.method in ("POST", "PUT", "PATCH"):
            auto_headers.append((b"Content-Length", b"0"))

        self.headers = Headers(auto_headers + self.headers.raw)

    @property
    def content(self) -> bytes:
        # Raises RequestNotRead until read()/aread() has buffered the body.
        if not hasattr(self, "_content"):
            raise RequestNotRead()
        return self._content

    def read(self) -> bytes:
        """
        Read and return the request content.
        """
        if not hasattr(self, "_content"):
            assert isinstance(self.stream, typing.Iterable)
            self._content = b"".join(self.stream)
            if not isinstance(self.stream, ByteStream):
                # If a streaming request has been read entirely into memory, then
                # we can replace the stream with a raw bytes implementation,
                # to ensure that any non-replayable streams can still be used.
                self.stream = ByteStream(self._content)
        return self._content

    async def aread(self) -> bytes:
        """
        Read and return the request content.
        """
        if not hasattr(self, "_content"):
            assert isinstance(self.stream, typing.AsyncIterable)
            self._content = b"".join([part async for part in self.stream])
            if not isinstance(self.stream, ByteStream):
                # If a streaming request has been read entirely into memory, then
                # we can replace the stream with a raw bytes implementation,
                # to ensure that any non-replayable streams can still be used.
                self.stream = ByteStream(self._content)
        return self._content

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        url = str(self.url)
        return f"<{class_name}({self.method!r}, {url!r})>"

    def __getstate__(self) -> dict[str, typing.Any]:
        # Pickling drops the stream and extensions, which may not be
        # serialisable; they are restored as placeholders on unpickle.
        return {
            name: value
            for name, value in self.__dict__.items()
            if name not in ["extensions", "stream"]
        }

    def __setstate__(self, state: dict[str, typing.Any]) -> None:
        for name, value in state.items():
            setattr(self, name, value)
        self.extensions = {}
        # UnattachedStream raises if iterated — the body is gone after pickling.
        self.stream = UnattachedStream()
447
+
448
+
449
+ class Response:
450
+ def __init__(
451
+ self,
452
+ status_code: int,
453
+ *,
454
+ headers: HeaderTypes | None = None,
455
+ content: ResponseContent | None = None,
456
+ text: str | None = None,
457
+ html: str | None = None,
458
+ json: typing.Any = None,
459
+ stream: SyncByteStream | AsyncByteStream | None = None,
460
+ request: Request | None = None,
461
+ extensions: ResponseExtensions | None = None,
462
+ history: list[Response] | None = None,
463
+ default_encoding: str | typing.Callable[[bytes], str] = "utf-8",
464
+ ) -> None:
465
+ self.status_code = status_code
466
+ self.headers = Headers(headers)
467
+
468
+ self._request: Request | None = request
469
+
470
+ # When follow_redirects=False and a redirect is received,
471
+ # the client will set `response.next_request`.
472
+ self.next_request: Request | None = None
473
+
474
+ self.extensions: ResponseExtensions = {} if extensions is None else extensions
475
+ self.history = [] if history is None else list(history)
476
+
477
+ self.is_closed = False
478
+ self.is_stream_consumed = False
479
+
480
+ self.default_encoding = default_encoding
481
+
482
+ if stream is None:
483
+ headers, stream = encode_response(content, text, html, json)
484
+ self._prepare(headers)
485
+ self.stream = stream
486
+ if isinstance(stream, ByteStream):
487
+ # Load the response body, except for streaming content.
488
+ self.read()
489
+ else:
490
+ # There's an important distinction between `Response(content=...)`,
491
+ # and `Response(stream=...)`.
492
+ #
493
+ # Using `content=...` implies automatically populated content headers,
494
+ # of either `Content-Length: ...` or `Transfer-Encoding: chunked`.
495
+ #
496
+ # Using `stream=...` will not automatically include any content headers.
497
+ #
498
+ # As an end-user you don't really need `stream=...`. It's only
499
+ # useful when creating response instances having received a stream
500
+ # from the transport API.
501
+ self.stream = stream
502
+
503
+ self._num_bytes_downloaded = 0
504
+
505
+ def _prepare(self, default_headers: dict[str, str]) -> None:
506
+ for key, value in default_headers.items():
507
+ # Ignore Transfer-Encoding if the Content-Length has been set explicitly.
508
+ if key.lower() == "transfer-encoding" and "content-length" in self.headers:
509
+ continue
510
+ self.headers.setdefault(key, value)
511
+
512
    @property
    def elapsed(self) -> datetime.timedelta:
        """
        Returns the time taken for the complete request/response
        cycle to complete.

        Only available after `_elapsed` has been assigned via the
        `elapsed` setter (done once the response is read or closed);
        accessing it earlier raises `RuntimeError`.
        """
        if not hasattr(self, "_elapsed"):
            raise RuntimeError(
                "'.elapsed' may only be accessed after the response "
                "has been read or closed."
            )
        return self._elapsed
524
+
525
    @elapsed.setter
    def elapsed(self, elapsed: datetime.timedelta) -> None:
        # Stored on a private attribute so the getter can detect whether
        # the value has been set yet.
        self._elapsed = elapsed
528
+
529
    @property
    def request(self) -> Request:
        """
        Returns the request instance associated to the current response.

        Raises `RuntimeError` if no request has been set.
        """
        if self._request is None:
            raise RuntimeError(
                "The request instance has not been set on this response."
            )
        return self._request
539
+
540
    @request.setter
    def request(self, value: Request) -> None:
        # Allows associating the request after construction.
        self._request = value
543
+
544
+ @property
545
+ def http_version(self) -> str:
546
+ try:
547
+ http_version: bytes = self.extensions["http_version"]
548
+ except KeyError:
549
+ return "HTTP/1.1"
550
+ else:
551
+ return http_version.decode("ascii", errors="ignore")
552
+
553
+ @property
554
+ def reason_phrase(self) -> str:
555
+ try:
556
+ reason_phrase: bytes = self.extensions["reason_phrase"]
557
+ except KeyError:
558
+ return codes.get_reason_phrase(self.status_code)
559
+ else:
560
+ return reason_phrase.decode("ascii", errors="ignore")
561
+
562
    @property
    def url(self) -> URL:
        """
        Returns the URL for which the request was made.

        Raises `RuntimeError` if no request has been set on the response.
        """
        return self.request.url
568
+
569
    @property
    def content(self) -> bytes:
        """
        The raw (decoded) response body as bytes.

        Raises `ResponseNotRead` if the body has not yet been loaded
        via `read()` / `aread()`.
        """
        if not hasattr(self, "_content"):
            raise ResponseNotRead()
        return self._content
574
+
575
    @property
    def text(self) -> str:
        """
        The response body decoded to text using `self.encoding`
        (falling back to UTF-8). Cached after the first access.
        """
        if not hasattr(self, "_text"):
            content = self.content
            if not content:
                self._text = ""
            else:
                decoder = TextDecoder(encoding=self.encoding or "utf-8")
                self._text = "".join([decoder.decode(self.content), decoder.flush()])
        return self._text
585
+
586
    @property
    def encoding(self) -> str | None:
        """
        Return an encoding to use for decoding the byte content into text.
        The priority for determining this is given by...

        * `.encoding = <>` has been set explicitly.
        * The encoding as specified by the charset parameter in the Content-Type header.
        * The encoding as determined by `default_encoding`, which may either be
        a string like "utf-8" indicating the encoding to use, or may be a callable
        which enables charset autodetection.

        The resolved value is cached on `_encoding`.
        """
        if not hasattr(self, "_encoding"):
            encoding = self.charset_encoding
            if encoding is None or not is_known_encoding(encoding):
                if isinstance(self.default_encoding, str):
                    encoding = self.default_encoding
                elif hasattr(self, "_content"):
                    # `default_encoding` is a callable: autodetect from the
                    # raw content, once it has been read.
                    encoding = self.default_encoding(self._content)
            self._encoding = encoding or "utf-8"
        return self._encoding
607
+
608
    @encoding.setter
    def encoding(self, value: str) -> None:
        """
        Set the encoding to use for decoding the byte content into text.

        If the `text` attribute has been accessed, attempting to set the
        encoding will throw a ValueError.
        """
        # Once `.text` has been decoded and cached, changing the encoding
        # would silently disagree with the cached value.
        if hasattr(self, "_text"):
            raise ValueError(
                "Setting encoding after `text` has been accessed is not allowed."
            )
        self._encoding = value
621
+
622
+ @property
623
+ def charset_encoding(self) -> str | None:
624
+ """
625
+ Return the encoding, as specified by the Content-Type header.
626
+ """
627
+ content_type = self.headers.get("Content-Type")
628
+ if content_type is None:
629
+ return None
630
+
631
+ return parse_content_type_charset(content_type)
632
+
633
    def _get_content_decoder(self) -> ContentDecoder:
        """
        Returns a decoder instance which can be used to decode the raw byte
        content, depending on the Content-Encoding used in the response.

        The decoder is built once from the Content-Encoding header and
        cached on `_decoder`.
        """
        if not hasattr(self, "_decoder"):
            decoders: list[ContentDecoder] = []
            values = self.headers.get_list("content-encoding", split_commas=True)
            for value in values:
                value = value.strip().lower()
                try:
                    decoder_cls = SUPPORTED_DECODERS[value]
                    decoders.append(decoder_cls())
                except KeyError:
                    # Unrecognised encodings are skipped rather than failing.
                    continue

            if len(decoders) == 1:
                self._decoder = decoders[0]
            elif len(decoders) > 1:
                # Multiple encodings are applied in sequence.
                self._decoder = MultiDecoder(children=decoders)
            else:
                self._decoder = IdentityDecoder()

        return self._decoder
657
+
658
    @property
    def is_informational(self) -> bool:
        """
        A property which is `True` for 1xx status codes, `False` otherwise.
        """
        # Range check is delegated to the `codes` enum helper.
        return codes.is_informational(self.status_code)
664
+
665
    @property
    def is_success(self) -> bool:
        """
        A property which is `True` for 2xx status codes, `False` otherwise.
        """
        # Range check is delegated to the `codes` enum helper.
        return codes.is_success(self.status_code)
671
+
672
    @property
    def is_redirect(self) -> bool:
        """
        A property which is `True` for 3xx status codes, `False` otherwise.

        Note that not all responses with a 3xx status code indicate a URL redirect.

        Use `response.has_redirect_location` to determine responses with a properly
        formed URL redirection.
        """
        return codes.is_redirect(self.status_code)
683
+
684
    @property
    def is_client_error(self) -> bool:
        """
        A property which is `True` for 4xx status codes, `False` otherwise.
        """
        # Range check is delegated to the `codes` enum helper.
        return codes.is_client_error(self.status_code)
690
+
691
    @property
    def is_server_error(self) -> bool:
        """
        A property which is `True` for 5xx status codes, `False` otherwise.
        """
        # Range check is delegated to the `codes` enum helper.
        return codes.is_server_error(self.status_code)
697
+
698
    @property
    def is_error(self) -> bool:
        """
        A property which is `True` for 4xx and 5xx status codes, `False` otherwise.
        """
        # Range check is delegated to the `codes` enum helper.
        return codes.is_error(self.status_code)
704
+
705
    @property
    def has_redirect_location(self) -> bool:
        """
        Returns True for 3xx responses with a properly formed URL redirection,
        `False` otherwise.

        Requires both a redirect status code AND a 'Location' header.
        """
        return (
            self.status_code
            in (
                # 301 (Cacheable redirect. Method may change to GET.)
                codes.MOVED_PERMANENTLY,
                # 302 (Uncacheable redirect. Method may change to GET.)
                codes.FOUND,
                # 303 (Client should make a GET or HEAD request.)
                codes.SEE_OTHER,
                # 307 (Equiv. 302, but retain method)
                codes.TEMPORARY_REDIRECT,
                # 308 (Equiv. 301, but retain method)
                codes.PERMANENT_REDIRECT,
            )
            and "Location" in self.headers
        )
727
+
728
    def raise_for_status(self) -> Response:
        """
        Raise the `HTTPStatusError` if one occurred.

        Returns the response itself for 2xx status codes, allowing call
        chaining. Raises `RuntimeError` if no request has been set.
        """
        request = self._request
        if request is None:
            raise RuntimeError(
                "Cannot call `raise_for_status` as the request "
                "instance has not been set on this response."
            )

        if self.is_success:
            return self

        # Include the redirect target in the message when one is present.
        if self.has_redirect_location:
            message = (
                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
                "Redirect location: '{0.headers[location]}'\n"
                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
            )
        else:
            message = (
                "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n"
                "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}"
            )

        status_class = self.status_code // 100
        error_types = {
            1: "Informational response",
            3: "Redirect response",
            4: "Client error",
            5: "Server error",
        }
        error_type = error_types.get(status_class, "Invalid status code")
        message = message.format(self, error_type=error_type)
        raise HTTPStatusError(message, request=request, response=self)
764
+
765
    def json(self, **kwargs: typing.Any) -> typing.Any:
        """Parse the response body as JSON. Extra kwargs pass to `json.loads`."""
        return jsonlib.loads(self.content, **kwargs)
767
+
768
    @property
    def cookies(self) -> Cookies:
        """Cookies set by this response, extracted lazily and then cached."""
        if not hasattr(self, "_cookies"):
            self._cookies = Cookies()
            self._cookies.extract_cookies(self)
        return self._cookies
774
+
775
    @property
    def links(self) -> dict[str | None, dict[str, str]]:
        """
        Returns the parsed header links of the response, if any
        """
        header = self.headers.get("link")
        if header is None:
            return {}

        # Key each link by its "rel" attribute, falling back to "url".
        return {
            (link.get("rel") or link.get("url")): link
            for link in parse_header_links(header)
        }
788
+
789
    @property
    def num_bytes_downloaded(self) -> int:
        """Number of raw bytes downloaded so far for this response."""
        return self._num_bytes_downloaded
792
+
793
    def __repr__(self) -> str:
        # e.g. "<Response [200 OK]>"
        return f"<Response [{self.status_code} {self.reason_phrase}]>"
795
+
796
+ def __getstate__(self) -> dict[str, typing.Any]:
797
+ return {
798
+ name: value
799
+ for name, value in self.__dict__.items()
800
+ if name not in ["extensions", "stream", "is_closed", "_decoder"]
801
+ }
802
+
803
    def __setstate__(self, state: dict[str, typing.Any]) -> None:
        """Pickle support: restore state with a closed, detached stream."""
        for name, value in state.items():
            setattr(self, name, value)
        self.is_closed = True
        self.extensions = {}
        # The transport stream cannot be pickled; unpickled responses are
        # therefore closed and cannot be re-streamed.
        self.stream = UnattachedStream()
809
+
810
    def read(self) -> bytes:
        """
        Read and return the response content.

        The content is cached on first read; subsequent calls return the
        cached bytes.
        """
        if not hasattr(self, "_content"):
            self._content = b"".join(self.iter_bytes())
        return self._content
817
+
818
    def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
        """
        A byte-iterator over the decoded response content.
        This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
        """
        if hasattr(self, "_content"):
            # Content already loaded: simply re-chunk the cached bytes.
            chunk_size = len(self._content) if chunk_size is None else chunk_size
            for i in range(0, len(self._content), max(chunk_size, 1)):
                yield self._content[i : i + chunk_size]
        else:
            # Stream from the transport, decoding Content-Encoding on the fly.
            decoder = self._get_content_decoder()
            chunker = ByteChunker(chunk_size=chunk_size)
            with request_context(request=self._request):
                for raw_bytes in self.iter_raw():
                    decoded = decoder.decode(raw_bytes)
                    for chunk in chunker.decode(decoded):
                        yield chunk
                decoded = decoder.flush()
                for chunk in chunker.decode(decoded):
                    yield chunk  # pragma: no cover
                for chunk in chunker.flush():
                    yield chunk
840
+
841
    def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]:
        """
        A str-iterator over the decoded response content
        that handles both gzip, deflate, etc but also detects the content's
        string encoding.
        """
        decoder = TextDecoder(encoding=self.encoding or "utf-8")
        # Re-chunks decoded text to roughly `chunk_size` characters.
        chunker = TextChunker(chunk_size=chunk_size)
        with request_context(request=self._request):
            for byte_content in self.iter_bytes():
                text_content = decoder.decode(byte_content)
                for chunk in chunker.decode(text_content):
                    yield chunk
            text_content = decoder.flush()
            for chunk in chunker.decode(text_content):
                yield chunk  # pragma: no cover
            for chunk in chunker.flush():
                yield chunk
859
+
860
    def iter_lines(self) -> typing.Iterator[str]:
        """A line-iterator over the decoded response text."""
        decoder = LineDecoder()
        with request_context(request=self._request):
            for text in self.iter_text():
                for line in decoder.decode(text):
                    yield line
            for line in decoder.flush():
                yield line
868
+
869
    def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]:
        """
        A byte-iterator over the raw response content.

        May only be iterated once; raises `StreamConsumed` / `StreamClosed`
        on reuse. Closes the response on completion.
        """
        if self.is_stream_consumed:
            raise StreamConsumed()
        if self.is_closed:
            raise StreamClosed()
        if not isinstance(self.stream, SyncByteStream):
            raise RuntimeError("Attempted to call a sync iterator on an async stream.")

        self.is_stream_consumed = True
        self._num_bytes_downloaded = 0
        chunker = ByteChunker(chunk_size=chunk_size)

        with request_context(request=self._request):
            for raw_stream_bytes in self.stream:
                self._num_bytes_downloaded += len(raw_stream_bytes)
                for chunk in chunker.decode(raw_stream_bytes):
                    yield chunk

            for chunk in chunker.flush():
                yield chunk

        self.close()
894
+
895
+ def close(self) -> None:
896
+ """
897
+ Close the response and release the connection.
898
+ Automatically called if the response body is read to completion.
899
+ """
900
+ if not isinstance(self.stream, SyncByteStream):
901
+ raise RuntimeError("Attempted to call an sync close on an async stream.")
902
+
903
+ if not self.is_closed:
904
+ self.is_closed = True
905
+ with request_context(request=self._request):
906
+ self.stream.close()
907
+
908
    async def aread(self) -> bytes:
        """
        Read and return the response content.

        The content is cached on first read; subsequent calls return the
        cached bytes.
        """
        if not hasattr(self, "_content"):
            self._content = b"".join([part async for part in self.aiter_bytes()])
        return self._content
915
+
916
    async def aiter_bytes(
        self, chunk_size: int | None = None
    ) -> typing.AsyncIterator[bytes]:
        """
        A byte-iterator over the decoded response content.
        This allows us to handle gzip, deflate, brotli, and zstd encoded responses.
        """
        if hasattr(self, "_content"):
            # Content already loaded: simply re-chunk the cached bytes.
            chunk_size = len(self._content) if chunk_size is None else chunk_size
            for i in range(0, len(self._content), max(chunk_size, 1)):
                yield self._content[i : i + chunk_size]
        else:
            # Stream from the transport, decoding Content-Encoding on the fly.
            decoder = self._get_content_decoder()
            chunker = ByteChunker(chunk_size=chunk_size)
            with request_context(request=self._request):
                async for raw_bytes in self.aiter_raw():
                    decoded = decoder.decode(raw_bytes)
                    for chunk in chunker.decode(decoded):
                        yield chunk
                decoded = decoder.flush()
                for chunk in chunker.decode(decoded):
                    yield chunk  # pragma: no cover
                for chunk in chunker.flush():
                    yield chunk
940
+
941
    async def aiter_text(
        self, chunk_size: int | None = None
    ) -> typing.AsyncIterator[str]:
        """
        A str-iterator over the decoded response content
        that handles both gzip, deflate, etc but also detects the content's
        string encoding.
        """
        decoder = TextDecoder(encoding=self.encoding or "utf-8")
        # Re-chunks decoded text to roughly `chunk_size` characters.
        chunker = TextChunker(chunk_size=chunk_size)
        with request_context(request=self._request):
            async for byte_content in self.aiter_bytes():
                text_content = decoder.decode(byte_content)
                for chunk in chunker.decode(text_content):
                    yield chunk
            text_content = decoder.flush()
            for chunk in chunker.decode(text_content):
                yield chunk  # pragma: no cover
            for chunk in chunker.flush():
                yield chunk
961
+
962
    async def aiter_lines(self) -> typing.AsyncIterator[str]:
        """A line-iterator over the decoded response text."""
        decoder = LineDecoder()
        with request_context(request=self._request):
            async for text in self.aiter_text():
                for line in decoder.decode(text):
                    yield line
            for line in decoder.flush():
                yield line
970
+
971
+ async def aiter_raw(
972
+ self, chunk_size: int | None = None
973
+ ) -> typing.AsyncIterator[bytes]:
974
+ """
975
+ A byte-iterator over the raw response content.
976
+ """
977
+ if self.is_stream_consumed:
978
+ raise StreamConsumed()
979
+ if self.is_closed:
980
+ raise StreamClosed()
981
+ if not isinstance(self.stream, AsyncByteStream):
982
+ raise RuntimeError("Attempted to call an async iterator on an sync stream.")
983
+
984
+ self.is_stream_consumed = True
985
+ self._num_bytes_downloaded = 0
986
+ chunker = ByteChunker(chunk_size=chunk_size)
987
+
988
+ with request_context(request=self._request):
989
+ async for raw_stream_bytes in self.stream:
990
+ self._num_bytes_downloaded += len(raw_stream_bytes)
991
+ for chunk in chunker.decode(raw_stream_bytes):
992
+ yield chunk
993
+
994
+ for chunk in chunker.flush():
995
+ yield chunk
996
+
997
+ await self.aclose()
998
+
999
+ async def aclose(self) -> None:
1000
+ """
1001
+ Close the response and release the connection.
1002
+ Automatically called if the response body is read to completion.
1003
+ """
1004
+ if not isinstance(self.stream, AsyncByteStream):
1005
+ raise RuntimeError("Attempted to call an async close on an sync stream.")
1006
+
1007
+ if not self.is_closed:
1008
+ self.is_closed = True
1009
+ with request_context(request=self._request):
1010
+ await self.stream.aclose()
1011
+
1012
+
1013
class Cookies(typing.MutableMapping[str, str]):
    """
    HTTP Cookies, as a mutable mapping.

    Wraps a standard library `http.cookiejar.CookieJar`, exposing a
    dict-like interface keyed by cookie name.
    """

    def __init__(self, cookies: CookieTypes | None = None) -> None:
        # Accepts a dict, a list of (name, value) pairs, another `Cookies`
        # instance (copied), or a raw `CookieJar` (used directly).
        if cookies is None or isinstance(cookies, dict):
            self.jar = CookieJar()
            if isinstance(cookies, dict):
                for key, value in cookies.items():
                    self.set(key, value)
        elif isinstance(cookies, list):
            self.jar = CookieJar()
            for key, value in cookies:
                self.set(key, value)
        elif isinstance(cookies, Cookies):
            self.jar = CookieJar()
            for cookie in cookies.jar:
                self.jar.set_cookie(cookie)
        else:
            self.jar = cookies

    def extract_cookies(self, response: Response) -> None:
        """
        Loads any cookies based on the response `Set-Cookie` headers.
        """
        # Adapt httpx request/response objects to the stdlib CookieJar API.
        urllib_response = self._CookieCompatResponse(response)
        urllib_request = self._CookieCompatRequest(response.request)

        self.jar.extract_cookies(urllib_response, urllib_request)  # type: ignore

    def set_cookie_header(self, request: Request) -> None:
        """
        Sets an appropriate 'Cookie:' HTTP header on the `Request`.
        """
        urllib_request = self._CookieCompatRequest(request)
        self.jar.add_cookie_header(urllib_request)

    def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None:
        """
        Set a cookie value by name. May optionally include domain and path.
        """
        # Build a full `http.cookiejar.Cookie` with sensible defaults for a
        # session (non-persistent, non-secure) cookie.
        kwargs = {
            "version": 0,
            "name": name,
            "value": value,
            "port": None,
            "port_specified": False,
            "domain": domain,
            "domain_specified": bool(domain),
            "domain_initial_dot": domain.startswith("."),
            "path": path,
            "path_specified": bool(path),
            "secure": False,
            "expires": None,
            "discard": True,
            "comment": None,
            "comment_url": None,
            "rest": {"HttpOnly": None},
            "rfc2109": False,
        }
        cookie = Cookie(**kwargs)  # type: ignore
        self.jar.set_cookie(cookie)

    def get(  # type: ignore
        self,
        name: str,
        default: str | None = None,
        domain: str | None = None,
        path: str | None = None,
    ) -> str | None:
        """
        Get a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to retrieve.

        Raises `CookieConflict` when more than one cookie matches.
        """
        value = None
        for cookie in self.jar:
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        # A second match is ambiguous, so refuse to guess.
                        if value is not None:
                            message = f"Multiple cookies exist with name={name}"
                            raise CookieConflict(message)
                        value = cookie.value

        if value is None:
            return default
        return value

    def delete(
        self,
        name: str,
        domain: str | None = None,
        path: str | None = None,
    ) -> None:
        """
        Delete a cookie by name. May optionally include domain and path
        in order to specify exactly which cookie to delete.
        """
        # When both domain and path are given, `CookieJar.clear` can do an
        # exact-match removal directly.
        if domain is not None and path is not None:
            return self.jar.clear(domain, path, name)

        remove = [
            cookie
            for cookie in self.jar
            if cookie.name == name
            and (domain is None or cookie.domain == domain)
            and (path is None or cookie.path == path)
        ]

        for cookie in remove:
            self.jar.clear(cookie.domain, cookie.path, cookie.name)

    def clear(self, domain: str | None = None, path: str | None = None) -> None:
        """
        Delete all cookies. Optionally include a domain and path in
        order to only delete a subset of all the cookies.
        """
        args = []
        if domain is not None:
            args.append(domain)
        if path is not None:
            # `CookieJar.clear` requires a domain whenever a path is given.
            assert domain is not None
            args.append(path)
        self.jar.clear(*args)

    def update(self, cookies: CookieTypes | None = None) -> None:  # type: ignore
        """Merge cookies from any supported cookie source into this jar."""
        cookies = Cookies(cookies)
        for cookie in cookies.jar:
            self.jar.set_cookie(cookie)

    def __setitem__(self, name: str, value: str) -> None:
        return self.set(name, value)

    def __getitem__(self, name: str) -> str:
        value = self.get(name)
        if value is None:
            raise KeyError(name)
        return value

    def __delitem__(self, name: str) -> None:
        return self.delete(name)

    def __len__(self) -> int:
        return len(self.jar)

    def __iter__(self) -> typing.Iterator[str]:
        return (cookie.name for cookie in self.jar)

    def __bool__(self) -> bool:
        # Truthy if the jar contains at least one cookie.
        for _ in self.jar:
            return True
        return False

    def __repr__(self) -> str:
        cookies_repr = ", ".join(
            [
                f"<Cookie {cookie.name}={cookie.value} for {cookie.domain} />"
                for cookie in self.jar
            ]
        )

        return f"<Cookies[{cookies_repr}]>"

    class _CookieCompatRequest(urllib.request.Request):
        """
        Wraps a `Request` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, request: Request) -> None:
            super().__init__(
                url=str(request.url),
                headers=dict(request.headers),
                method=request.method,
            )
            self.request = request

        def add_unredirected_header(self, key: str, value: str) -> None:
            # Mirror any header the CookieJar adds back onto the real request.
            super().add_unredirected_header(key, value)
            self.request.headers[key] = value

    class _CookieCompatResponse:
        """
        Wraps a `Response` instance up in a compatibility interface suitable
        for use with `CookieJar` operations.
        """

        def __init__(self, response: Response) -> None:
            self.response = response

        def info(self) -> email.message.Message:
            info = email.message.Message()
            for key, value in self.response.headers.multi_items():
                # Note that setting `info[key]` here is an "append" operation,
                # not a "replace" operation.
                # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__
                info[key] = value
            return info
lib/python3.10/site-packages/httpx/_multipart.py ADDED
@@ -0,0 +1,269 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import io
4
+ import os
5
+ import typing
6
+ from pathlib import Path
7
+
8
+ from ._types import (
9
+ AsyncByteStream,
10
+ FileContent,
11
+ FileTypes,
12
+ RequestData,
13
+ RequestFiles,
14
+ SyncByteStream,
15
+ )
16
+ from ._utils import (
17
+ format_form_param,
18
+ guess_content_type,
19
+ peek_filelike_length,
20
+ primitive_value_to_str,
21
+ to_bytes,
22
+ )
23
+
24
+
25
+ def get_multipart_boundary_from_content_type(
26
+ content_type: bytes | None,
27
+ ) -> bytes | None:
28
+ if not content_type or not content_type.startswith(b"multipart/form-data"):
29
+ return None
30
+ # parse boundary according to
31
+ # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1
32
+ if b";" in content_type:
33
+ for section in content_type.split(b";"):
34
+ if section.strip().lower().startswith(b"boundary="):
35
+ return section.strip()[len(b"boundary=") :].strip(b'"')
36
+ return None
37
+
38
+
39
class DataField:
    """
    A single form field item, within a multipart form field.
    """

    def __init__(self, name: str, value: str | bytes | int | float | None) -> None:
        if not isinstance(name, str):
            raise TypeError(
                f"Invalid type for name. Expected str, got {type(name)}: {name!r}"
            )
        if value is not None and not isinstance(value, (str, bytes, int, float)):
            raise TypeError(
                "Invalid type for value. Expected primitive type,"
                f" got {type(value)}: {value!r}"
            )
        self.name = name
        # Bytes pass through unchanged; other primitives are stringified.
        self.value: str | bytes = (
            value if isinstance(value, bytes) else primitive_value_to_str(value)
        )

    def render_headers(self) -> bytes:
        # Rendered headers are computed once and cached.
        if not hasattr(self, "_headers"):
            name = format_form_param("name", self.name)
            self._headers = b"".join(
                [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"]
            )

        return self._headers

    def render_data(self) -> bytes:
        # Rendered body bytes are computed once and cached.
        if not hasattr(self, "_data"):
            self._data = to_bytes(self.value)

        return self._data

    def get_length(self) -> int:
        """Total encoded length of this field (headers + data)."""
        headers = self.render_headers()
        data = self.render_data()
        return len(headers) + len(data)

    def render(self) -> typing.Iterator[bytes]:
        """Yield the full wire representation of this field."""
        yield self.render_headers()
        yield self.render_data()
82
+
83
+
84
class FileField:
    """
    A single file field item, within a multipart form field.
    """

    # Files are streamed out in chunks of this many bytes.
    CHUNK_SIZE = 64 * 1024

    def __init__(self, name: str, value: FileTypes) -> None:
        self.name = name

        fileobj: FileContent

        headers: dict[str, str] = {}
        content_type: str | None = None

        # This large tuple based API largely mirrors requests' API
        # It would be good to think of better APIs for this that we could
        # include in httpx 2.0 since variable length tuples (especially of
        # 4 elements) are quite unwieldy
        if isinstance(value, tuple):
            if len(value) == 2:
                # neither the 3rd parameter (content_type) nor the 4th (headers)
                # was included
                filename, fileobj = value
            elif len(value) == 3:
                filename, fileobj, content_type = value
            else:
                # all 4 parameters included
                filename, fileobj, content_type, headers = value  # type: ignore
        else:
            # A bare file object: derive the filename from its `.name` if any.
            filename = Path(str(getattr(value, "name", "upload"))).name
            fileobj = value

        if content_type is None:
            content_type = guess_content_type(filename)

        has_content_type_header = any("content-type" in key.lower() for key in headers)
        if content_type is not None and not has_content_type_header:
            # note that unlike requests, we ignore the content_type provided in the 3rd
            # tuple element if it is also included in the headers; requests does
            # the opposite (it overwrites the header with the 3rd tuple element)
            headers["Content-Type"] = content_type

        if isinstance(fileobj, io.StringIO):
            raise TypeError(
                "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'."
            )
        if isinstance(fileobj, io.TextIOBase):
            raise TypeError(
                "Multipart file uploads must be opened in binary mode, not text mode."
            )

        self.filename = filename
        self.file = fileobj
        self.headers = headers

    def get_length(self) -> int | None:
        """Total encoded length, or `None` if it cannot be known upfront."""
        headers = self.render_headers()

        if isinstance(self.file, (str, bytes)):
            return len(headers) + len(to_bytes(self.file))

        file_length = peek_filelike_length(self.file)

        # If we can't determine the filesize without reading it into memory,
        # then return `None` here, to indicate an unknown file length.
        if file_length is None:
            return None

        return len(headers) + file_length

    def render_headers(self) -> bytes:
        # Rendered headers are computed once and cached.
        if not hasattr(self, "_headers"):
            parts = [
                b"Content-Disposition: form-data; ",
                format_form_param("name", self.name),
            ]
            if self.filename:
                filename = format_form_param("filename", self.filename)
                parts.extend([b"; ", filename])
            for header_name, header_value in self.headers.items():
                key, val = f"\r\n{header_name}: ".encode(), header_value.encode()
                parts.extend([key, val])
            parts.append(b"\r\n\r\n")
            self._headers = b"".join(parts)

        return self._headers

    def render_data(self) -> typing.Iterator[bytes]:
        """Yield the file content in chunks of `CHUNK_SIZE` bytes."""
        if isinstance(self.file, (str, bytes)):
            yield to_bytes(self.file)
            return

        # Rewind so repeated renders (e.g. request retries) re-send the
        # full content, when the file object supports seeking.
        if hasattr(self.file, "seek"):
            try:
                self.file.seek(0)
            except io.UnsupportedOperation:
                pass

        chunk = self.file.read(self.CHUNK_SIZE)
        while chunk:
            yield to_bytes(chunk)
            chunk = self.file.read(self.CHUNK_SIZE)

    def render(self) -> typing.Iterator[bytes]:
        """Yield the full wire representation of this field."""
        yield self.render_headers()
        yield from self.render_data()
191
+
192
+
193
class MultipartStream(SyncByteStream, AsyncByteStream):
    """
    Request content as streaming multipart encoded form data.
    """

    def __init__(
        self,
        data: RequestData,
        files: RequestFiles,
        boundary: bytes | None = None,
    ) -> None:
        # Generate a random boundary when the caller didn't provide one.
        if boundary is None:
            boundary = os.urandom(16).hex().encode("ascii")

        self.boundary = boundary
        self.content_type = "multipart/form-data; boundary=%s" % boundary.decode(
            "ascii"
        )
        self.fields = list(self._iter_fields(data, files))

    def _iter_fields(
        self, data: RequestData, files: RequestFiles
    ) -> typing.Iterator[FileField | DataField]:
        # Plain data fields first; a list/tuple value becomes repeated fields.
        for name, value in data.items():
            if isinstance(value, (tuple, list)):
                for item in value:
                    yield DataField(name=name, value=item)
            else:
                yield DataField(name=name, value=value)

        # Then the file fields, accepting either a mapping or a sequence.
        file_items = files.items() if isinstance(files, typing.Mapping) else files
        for name, value in file_items:
            yield FileField(name=name, value=value)

    def iter_chunks(self) -> typing.Iterator[bytes]:
        """Yield the full multipart body, field by field."""
        for field in self.fields:
            yield b"--%s\r\n" % self.boundary
            yield from field.render()
            yield b"\r\n"
        yield b"--%s--\r\n" % self.boundary

    def get_content_length(self) -> int | None:
        """
        Return the length of the multipart encoded content, or `None` if
        any of the files have a length that cannot be determined upfront.
        """
        boundary_length = len(self.boundary)
        length = 0

        for field in self.fields:
            field_length = field.get_length()
            if field_length is None:
                return None

            length += 2 + boundary_length + 2  # b"--{boundary}\r\n"
            length += field_length
            length += 2  # b"\r\n"

        length += 2 + boundary_length + 4  # b"--{boundary}--\r\n"
        return length

    # Content stream interface.

    def get_headers(self) -> dict[str, str]:
        # Fall back to chunked transfer encoding when the total size is
        # unknown (e.g. a non-seekable file is included).
        content_length = self.get_content_length()
        content_type = self.content_type
        if content_length is None:
            return {"Transfer-Encoding": "chunked", "Content-Type": content_type}
        return {"Content-Length": str(content_length), "Content-Type": content_type}

    def __iter__(self) -> typing.Iterator[bytes]:
        for chunk in self.iter_chunks():
            yield chunk

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        # The content is generated synchronously either way; this simply
        # exposes it through the async iteration protocol.
        for chunk in self.iter_chunks():
            yield chunk
lib/python3.10/site-packages/httpx/_status_codes.py ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from enum import IntEnum
4
+
5
+ __all__ = ["codes"]
6
+
7
+
8
class codes(IntEnum):
    """HTTP status codes and reason phrases.

    Status codes from the following RFCs are all observed:

    * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
    * RFC 6585: Additional HTTP Status Codes
    * RFC 3229: Delta encoding in HTTP
    * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
    * RFC 5842: Binding Extensions to WebDAV
    * RFC 7238: Permanent Redirect
    * RFC 2295: Transparent Content Negotiation in HTTP
    * RFC 2774: An HTTP Extension Framework
    * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
    * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0)
    * RFC 7725: An HTTP Status Code to Report Legal Obstacles
    * RFC 8297: An HTTP Status Code for Indicating Hints
    * RFC 8470: Using Early Data in HTTP
    """

    def __new__(cls, value: int, phrase: str = "") -> codes:
        # Attach the reason phrase to each member alongside its integer value.
        obj = int.__new__(cls, value)
        obj._value_ = value

        obj.phrase = phrase  # type: ignore[attr-defined]
        return obj

    def __str__(self) -> str:
        # Render as the bare numeric code, e.g. "404".
        return str(int(self))

    @classmethod
    def get_reason_phrase(cls, value: int) -> str:
        """Return the reason phrase for `value`, or "" if it is unknown."""
        try:
            return cls(value).phrase  # type: ignore
        except ValueError:
            return ""

    @classmethod
    def is_informational(cls, value: int) -> bool:
        """
        Returns `True` for 1xx status codes, `False` otherwise.
        """
        return value in range(100, 200)

    @classmethod
    def is_success(cls, value: int) -> bool:
        """
        Returns `True` for 2xx status codes, `False` otherwise.
        """
        return value in range(200, 300)

    @classmethod
    def is_redirect(cls, value: int) -> bool:
        """
        Returns `True` for 3xx status codes, `False` otherwise.
        """
        return value in range(300, 400)

    @classmethod
    def is_client_error(cls, value: int) -> bool:
        """
        Returns `True` for 4xx status codes, `False` otherwise.
        """
        return value in range(400, 500)

    @classmethod
    def is_server_error(cls, value: int) -> bool:
        """
        Returns `True` for 5xx status codes, `False` otherwise.
        """
        return value in range(500, 600)

    @classmethod
    def is_error(cls, value: int) -> bool:
        """
        Returns `True` for 4xx or 5xx status codes, `False` otherwise.
        """
        return cls.is_client_error(value) or cls.is_server_error(value)

    # informational
    CONTINUE = 100, "Continue"
    SWITCHING_PROTOCOLS = 101, "Switching Protocols"
    PROCESSING = 102, "Processing"
    EARLY_HINTS = 103, "Early Hints"

    # success
    OK = 200, "OK"
    CREATED = 201, "Created"
    ACCEPTED = 202, "Accepted"
    NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information"
    NO_CONTENT = 204, "No Content"
    RESET_CONTENT = 205, "Reset Content"
    PARTIAL_CONTENT = 206, "Partial Content"
    MULTI_STATUS = 207, "Multi-Status"
    ALREADY_REPORTED = 208, "Already Reported"
    IM_USED = 226, "IM Used"

    # redirection
    MULTIPLE_CHOICES = 300, "Multiple Choices"
    MOVED_PERMANENTLY = 301, "Moved Permanently"
    FOUND = 302, "Found"
    SEE_OTHER = 303, "See Other"
    NOT_MODIFIED = 304, "Not Modified"
    USE_PROXY = 305, "Use Proxy"
    TEMPORARY_REDIRECT = 307, "Temporary Redirect"
    PERMANENT_REDIRECT = 308, "Permanent Redirect"

    # client error
    BAD_REQUEST = 400, "Bad Request"
    UNAUTHORIZED = 401, "Unauthorized"
    PAYMENT_REQUIRED = 402, "Payment Required"
    FORBIDDEN = 403, "Forbidden"
    NOT_FOUND = 404, "Not Found"
    METHOD_NOT_ALLOWED = 405, "Method Not Allowed"
    NOT_ACCEPTABLE = 406, "Not Acceptable"
    PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required"
    REQUEST_TIMEOUT = 408, "Request Timeout"
    CONFLICT = 409, "Conflict"
    GONE = 410, "Gone"
    LENGTH_REQUIRED = 411, "Length Required"
    PRECONDITION_FAILED = 412, "Precondition Failed"
    REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large"
    REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long"
    UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type"
    REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable"
    EXPECTATION_FAILED = 417, "Expectation Failed"
    IM_A_TEAPOT = 418, "I'm a teapot"
    MISDIRECTED_REQUEST = 421, "Misdirected Request"
    UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity"
    LOCKED = 423, "Locked"
    FAILED_DEPENDENCY = 424, "Failed Dependency"
    TOO_EARLY = 425, "Too Early"
    UPGRADE_REQUIRED = 426, "Upgrade Required"
    PRECONDITION_REQUIRED = 428, "Precondition Required"
    TOO_MANY_REQUESTS = 429, "Too Many Requests"
    REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large"
    UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons"

    # server errors
    INTERNAL_SERVER_ERROR = 500, "Internal Server Error"
    NOT_IMPLEMENTED = 501, "Not Implemented"
    BAD_GATEWAY = 502, "Bad Gateway"
    SERVICE_UNAVAILABLE = 503, "Service Unavailable"
    GATEWAY_TIMEOUT = 504, "Gateway Timeout"
    HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported"
    VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates"
    INSUFFICIENT_STORAGE = 507, "Insufficient Storage"
    LOOP_DETECTED = 508, "Loop Detected"
    NOT_EXTENDED = 510, "Not Extended"
    NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required"


# Include lower-case styles for `requests` compatibility.
for _member in codes:
    setattr(codes, _member._name_.lower(), int(_member))
lib/python3.10/site-packages/httpx/_transports/__init__.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .asgi import *
2
+ from .base import *
3
+ from .default import *
4
+ from .mock import *
5
+ from .wsgi import *
6
+
7
+ __all__ = [
8
+ "ASGITransport",
9
+ "AsyncBaseTransport",
10
+ "BaseTransport",
11
+ "AsyncHTTPTransport",
12
+ "HTTPTransport",
13
+ "MockTransport",
14
+ "WSGITransport",
15
+ ]
lib/python3.10/site-packages/httpx/_transports/asgi.py ADDED
@@ -0,0 +1,174 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+
5
+ import sniffio
6
+
7
+ from .._models import Request, Response
8
+ from .._types import AsyncByteStream
9
+ from .base import AsyncBaseTransport
10
+
11
+ if typing.TYPE_CHECKING: # pragma: no cover
12
+ import asyncio
13
+
14
+ import trio
15
+
16
+ Event = typing.Union[asyncio.Event, trio.Event]
17
+
18
+
19
+ _Message = typing.MutableMapping[str, typing.Any]
20
+ _Receive = typing.Callable[[], typing.Awaitable[_Message]]
21
+ _Send = typing.Callable[
22
+ [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None]
23
+ ]
24
+ _ASGIApp = typing.Callable[
25
+ [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None]
26
+ ]
27
+
28
+ __all__ = ["ASGITransport"]
29
+
30
+
31
def create_event() -> Event:
    """Return a new Event matching the running async framework.

    Uses sniffio to detect whether we are running under trio or asyncio,
    and constructs the corresponding Event type.
    """
    backend = sniffio.current_async_library()
    if backend == "trio":
        import trio

        return trio.Event()
    else:
        import asyncio

        return asyncio.Event()
40
+
41
+
42
class ASGIResponseStream(AsyncByteStream):
    """An async byte stream over a response body collected from an ASGI app."""

    def __init__(self, body: list[bytes]) -> None:
        # The accumulated body chunks; joined lazily on iteration.
        self._body = body

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        # The whole body is emitted as a single chunk.
        yield b"".join(self._body)
48
+
49
+
50
class ASGITransport(AsyncBaseTransport):
    """
    A custom AsyncTransport that handles sending requests directly to an ASGI app.

    ```python
    transport = httpx.ASGITransport(
        app=app,
        root_path="/submount",
        client=("1.2.3.4", 123)
    )
    client = httpx.AsyncClient(transport=transport)
    ```

    Arguments:

    * `app` - The ASGI application.
    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
       should be raised. Default to `True`. Can be set to `False` for use cases
       such as testing the content of a client 500 response.
    * `root_path` - The root path on which the ASGI application should be mounted.
    * `client` - A two-tuple indicating the client IP and port of incoming requests.
    """

    def __init__(
        self,
        app: _ASGIApp,
        raise_app_exceptions: bool = True,
        root_path: str = "",
        client: tuple[str, int] = ("127.0.0.1", 123),
    ) -> None:
        self.app = app
        self.raise_app_exceptions = raise_app_exceptions
        self.root_path = root_path
        self.client = client

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        """Invoke the ASGI app in-process and collect its response.

        The request body is fed to the app via `receive`, and the app's
        `send` events are accumulated into a buffered `Response`.
        """
        assert isinstance(request.stream, AsyncByteStream)

        # ASGI scope.
        scope = {
            "type": "http",
            "asgi": {"version": "3.0"},
            "http_version": "1.1",
            "method": request.method,
            "headers": [(k.lower(), v) for (k, v) in request.headers.raw],
            "scheme": request.url.scheme,
            "path": request.url.path,
            "raw_path": request.url.raw_path.split(b"?")[0],
            "query_string": request.url.query,
            "server": (request.url.host, request.url.port),
            "client": self.client,
            "root_path": self.root_path,
        }

        # Request.
        request_body_chunks = request.stream.__aiter__()
        request_complete = False

        # Response state, filled in by the `send` callable below.
        status_code = None
        response_headers = None
        body_parts = []
        response_started = False
        response_complete = create_event()

        # ASGI callables.

        async def receive() -> dict[str, typing.Any]:
            nonlocal request_complete

            # Once the body is exhausted, block until the response finishes,
            # then signal disconnection to the app.
            if request_complete:
                await response_complete.wait()
                return {"type": "http.disconnect"}

            try:
                body = await request_body_chunks.__anext__()
            except StopAsyncIteration:
                request_complete = True
                return {"type": "http.request", "body": b"", "more_body": False}
            return {"type": "http.request", "body": body, "more_body": True}

        async def send(message: typing.MutableMapping[str, typing.Any]) -> None:
            nonlocal status_code, response_headers, response_started

            if message["type"] == "http.response.start":
                # The app must send exactly one "start" event.
                assert not response_started

                status_code = message["status"]
                response_headers = message.get("headers", [])
                response_started = True

            elif message["type"] == "http.response.body":
                assert not response_complete.is_set()
                body = message.get("body", b"")
                more_body = message.get("more_body", False)

                # HEAD responses carry no body, per HTTP semantics.
                if body and request.method != "HEAD":
                    body_parts.append(body)

                if not more_body:
                    response_complete.set()

        try:
            await self.app(scope, receive, send)
        except Exception:  # noqa: PIE-786
            if self.raise_app_exceptions:
                raise

            # App crashed but exceptions are suppressed: synthesize a bare
            # 500 response from whatever was collected before the failure.
            response_complete.set()
            if status_code is None:
                status_code = 500
            if response_headers is None:
                response_headers = {}

        assert response_complete.is_set()
        assert status_code is not None
        assert response_headers is not None

        stream = ASGIResponseStream(body_parts)

        return Response(status_code, headers=response_headers, stream=stream)
lib/python3.10/site-packages/httpx/_transports/base.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+ from types import TracebackType
5
+
6
+ from .._models import Request, Response
7
+
8
+ T = typing.TypeVar("T", bound="BaseTransport")
9
+ A = typing.TypeVar("A", bound="AsyncBaseTransport")
10
+
11
+ __all__ = ["AsyncBaseTransport", "BaseTransport"]
12
+
13
+
14
class BaseTransport:
    """Abstract base class for synchronous transports."""

    def __enter__(self: T) -> T:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        # Leaving the context releases any transport resources.
        self.close()

    def handle_request(self, request: Request) -> Response:
        """
        Send a single HTTP request and return a response.

        Developers shouldn't typically ever need to call into this API directly,
        since the Client class provides all the higher level user-facing API
        niceties.

        In order to properly release any network resources, the response
        stream should *either* be consumed immediately, with a call to
        `response.stream.read()`, or else the `handle_request` call should
        be followed with a try/finally block to ensure the stream is
        always closed.

        Example usage:

            with httpx.HTTPTransport() as transport:
                req = httpx.Request(
                    method=b"GET",
                    url=(b"https", b"www.example.com", 443, b"/"),
                    headers=[(b"Host", b"www.example.com")],
                )
                resp = transport.handle_request(req)
                body = resp.stream.read()
                print(resp.status_code, resp.headers, body)

        Takes a `Request` instance as the only argument.

        Returns a `Response` instance.
        """
        raise NotImplementedError(
            "The 'handle_request' method must be implemented."
        )  # pragma: no cover

    def close(self) -> None:
        """Release any resources held by the transport. No-op by default."""
63
+
64
+
65
class AsyncBaseTransport:
    """Abstract base class for asynchronous transports."""

    async def __aenter__(self: A) -> A:
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        # Leaving the context releases any transport resources.
        await self.aclose()

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        """Send a single HTTP request and return a response.

        Subclasses must override this method.
        """
        raise NotImplementedError(
            "The 'handle_async_request' method must be implemented."
        )  # pragma: no cover

    async def aclose(self) -> None:
        """Release any resources held by the transport. No-op by default."""
lib/python3.10/site-packages/httpx/_transports/default.py ADDED
@@ -0,0 +1,389 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Custom transports, with nicely configured defaults.
3
+
4
+ The following additional keyword arguments are currently supported by httpcore...
5
+
6
+ * uds: str
7
+ * local_address: str
8
+ * retries: int
9
+
10
+ Example usages...
11
+
12
+ # Disable HTTP/2 on a single specific domain.
13
+ mounts = {
14
+ "all://": httpx.HTTPTransport(http2=True),
15
+ "all://*example.org": httpx.HTTPTransport()
16
+ }
17
+
18
+ # Using advanced httpcore configuration, with connection retries.
19
+ transport = httpx.HTTPTransport(retries=1)
20
+ client = httpx.Client(transport=transport)
21
+
22
+ # Using advanced httpcore configuration, with unix domain sockets.
23
+ transport = httpx.HTTPTransport(uds="socket.uds")
24
+ client = httpx.Client(transport=transport)
25
+ """
26
+
27
+ from __future__ import annotations
28
+
29
+ import contextlib
30
+ import typing
31
+ from types import TracebackType
32
+
33
+ import httpcore
34
+
35
+ from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context
36
+ from .._exceptions import (
37
+ ConnectError,
38
+ ConnectTimeout,
39
+ LocalProtocolError,
40
+ NetworkError,
41
+ PoolTimeout,
42
+ ProtocolError,
43
+ ProxyError,
44
+ ReadError,
45
+ ReadTimeout,
46
+ RemoteProtocolError,
47
+ TimeoutException,
48
+ UnsupportedProtocol,
49
+ WriteError,
50
+ WriteTimeout,
51
+ )
52
+ from .._models import Request, Response
53
+ from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream, VerifyTypes
54
+ from .._urls import URL
55
+ from .base import AsyncBaseTransport, BaseTransport
56
+
57
+ T = typing.TypeVar("T", bound="HTTPTransport")
58
+ A = typing.TypeVar("A", bound="AsyncHTTPTransport")
59
+
60
+ SOCKET_OPTION = typing.Union[
61
+ typing.Tuple[int, int, int],
62
+ typing.Tuple[int, int, typing.Union[bytes, bytearray]],
63
+ typing.Tuple[int, int, None, int],
64
+ ]
65
+
66
+ __all__ = ["AsyncHTTPTransport", "HTTPTransport"]
67
+
68
+
69
@contextlib.contextmanager
def map_httpcore_exceptions() -> typing.Iterator[None]:
    """
    Re-raise exceptions from `httpcore` as their `httpx` equivalents.

    The most specific matching httpx exception type wins: e.g. an
    `httpcore.ReadTimeout` maps to `httpx.ReadTimeout`, not just to the
    broader `httpx.TimeoutException`.
    """
    try:
        yield
    except Exception as exc:
        target = None

        for source_type, target_type in HTTPCORE_EXC_MAP.items():
            if not isinstance(exc, source_type):
                continue
            # Prefer the narrowest applicable httpx exception class.
            if target is None or issubclass(target_type, target):
                target = target_type

        if target is None:  # pragma: no cover
            raise

        raise target(str(exc)) from exc
90
+
91
+
92
# Mapping of httpcore exception types to their httpx equivalents. Consumed by
# `map_httpcore_exceptions()`, which walks all entries and selects the most
# specific match, so base classes and subclasses may both appear here.
HTTPCORE_EXC_MAP = {
    httpcore.TimeoutException: TimeoutException,
    httpcore.ConnectTimeout: ConnectTimeout,
    httpcore.ReadTimeout: ReadTimeout,
    httpcore.WriteTimeout: WriteTimeout,
    httpcore.PoolTimeout: PoolTimeout,
    httpcore.NetworkError: NetworkError,
    httpcore.ConnectError: ConnectError,
    httpcore.ReadError: ReadError,
    httpcore.WriteError: WriteError,
    httpcore.ProxyError: ProxyError,
    httpcore.UnsupportedProtocol: UnsupportedProtocol,
    httpcore.ProtocolError: ProtocolError,
    httpcore.LocalProtocolError: LocalProtocolError,
    httpcore.RemoteProtocolError: RemoteProtocolError,
}
108
+
109
+
110
class ResponseStream(SyncByteStream):
    """Sync byte stream wrapping an httpcore response stream."""

    def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None:
        self._httpcore_stream = httpcore_stream

    def __iter__(self) -> typing.Iterator[bytes]:
        # Translate httpcore errors raised mid-stream into httpx errors.
        with map_httpcore_exceptions():
            yield from self._httpcore_stream

    def close(self) -> None:
        # Some httpcore streams expose close(); call it when present.
        if hasattr(self._httpcore_stream, "close"):
            self._httpcore_stream.close()
122
+
123
+
124
class HTTPTransport(BaseTransport):
    # The default synchronous transport: maps httpx requests onto an
    # `httpcore` connection pool (direct, HTTP proxy, or SOCKS5 proxy).
    def __init__(
        self,
        verify: VerifyTypes = True,
        cert: CertTypes | None = None,
        http1: bool = True,
        http2: bool = False,
        limits: Limits = DEFAULT_LIMITS,
        trust_env: bool = True,
        proxy: ProxyTypes | None = None,
        uds: str | None = None,
        local_address: str | None = None,
        retries: int = 0,
        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
    ) -> None:
        ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env)
        # Normalize a plain URL/string proxy argument into a `Proxy` instance.
        proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy

        if proxy is None:
            # No proxy: plain connection pool.
            self._pool = httpcore.ConnectionPool(
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                uds=uds,
                local_address=local_address,
                retries=retries,
                socket_options=socket_options,
            )
        elif proxy.url.scheme in ("http", "https"):
            # HTTP(S) forward/CONNECT proxy.
            self._pool = httpcore.HTTPProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                proxy_headers=proxy.headers.raw,
                ssl_context=ssl_context,
                proxy_ssl_context=proxy.ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                socket_options=socket_options,
            )
        elif proxy.url.scheme == "socks5":
            # SOCKS5 requires the optional 'socksio' extra.
            try:
                import socksio  # noqa
            except ImportError:  # pragma: no cover
                raise ImportError(
                    "Using SOCKS proxy, but the 'socksio' package is not installed. "
                    "Make sure to install httpx using `pip install httpx[socks]`."
                ) from None

            self._pool = httpcore.SOCKSProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
            )
        else:  # pragma: no cover
            raise ValueError(
                "Proxy protocol must be either 'http', 'https', or 'socks5',"
                f" but got {proxy.url.scheme!r}."
            )

    def __enter__(self: T) -> T:  # Use generics for subclass support.
        self._pool.__enter__()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        with map_httpcore_exceptions():
            self._pool.__exit__(exc_type, exc_value, traceback)

    def handle_request(
        self,
        request: Request,
    ) -> Response:
        # Translate the httpx Request into an httpcore Request, dispatch it
        # through the pool, and wrap the result back into an httpx Response.
        assert isinstance(request.stream, SyncByteStream)

        req = httpcore.Request(
            method=request.method,
            url=httpcore.URL(
                scheme=request.url.raw_scheme,
                host=request.url.raw_host,
                port=request.url.port,
                target=request.url.raw_path,
            ),
            headers=request.headers.raw,
            content=request.stream,
            extensions=request.extensions,
        )
        with map_httpcore_exceptions():
            resp = self._pool.handle_request(req)

        assert isinstance(resp.stream, typing.Iterable)

        return Response(
            status_code=resp.status,
            headers=resp.headers,
            stream=ResponseStream(resp.stream),
            extensions=resp.extensions,
        )

    def close(self) -> None:
        # Close the underlying connection pool.
        self._pool.close()
249
+
250
+
251
class AsyncResponseStream(AsyncByteStream):
    """Async byte stream wrapping an httpcore response stream."""

    def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None:
        self._httpcore_stream = httpcore_stream

    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
        # Translate httpcore errors raised mid-stream into httpx errors.
        with map_httpcore_exceptions():
            async for chunk in self._httpcore_stream:
                yield chunk

    async def aclose(self) -> None:
        # Some httpcore streams expose aclose(); call it when present.
        if hasattr(self._httpcore_stream, "aclose"):
            await self._httpcore_stream.aclose()
263
+
264
+
265
class AsyncHTTPTransport(AsyncBaseTransport):
    # The default asynchronous transport: maps httpx requests onto an
    # `httpcore` async connection pool (direct, HTTP proxy, or SOCKS5 proxy).
    def __init__(
        self,
        verify: VerifyTypes = True,
        cert: CertTypes | None = None,
        http1: bool = True,
        http2: bool = False,
        limits: Limits = DEFAULT_LIMITS,
        trust_env: bool = True,
        proxy: ProxyTypes | None = None,
        uds: str | None = None,
        local_address: str | None = None,
        retries: int = 0,
        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
    ) -> None:
        ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env)
        # Normalize a plain URL/string proxy argument into a `Proxy` instance.
        proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy

        if proxy is None:
            # No proxy: plain async connection pool.
            self._pool = httpcore.AsyncConnectionPool(
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                uds=uds,
                local_address=local_address,
                retries=retries,
                socket_options=socket_options,
            )
        elif proxy.url.scheme in ("http", "https"):
            # HTTP(S) forward/CONNECT proxy.
            self._pool = httpcore.AsyncHTTPProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                proxy_headers=proxy.headers.raw,
                proxy_ssl_context=proxy.ssl_context,
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
                socket_options=socket_options,
            )
        elif proxy.url.scheme == "socks5":
            # SOCKS5 requires the optional 'socksio' extra.
            try:
                import socksio  # noqa
            except ImportError:  # pragma: no cover
                raise ImportError(
                    "Using SOCKS proxy, but the 'socksio' package is not installed. "
                    "Make sure to install httpx using `pip install httpx[socks]`."
                ) from None

            self._pool = httpcore.AsyncSOCKSProxy(
                proxy_url=httpcore.URL(
                    scheme=proxy.url.raw_scheme,
                    host=proxy.url.raw_host,
                    port=proxy.url.port,
                    target=proxy.url.raw_path,
                ),
                proxy_auth=proxy.raw_auth,
                ssl_context=ssl_context,
                max_connections=limits.max_connections,
                max_keepalive_connections=limits.max_keepalive_connections,
                keepalive_expiry=limits.keepalive_expiry,
                http1=http1,
                http2=http2,
            )
        else:  # pragma: no cover
            # Fix: this message previously lacked the `f` prefix, so the
            # literal text "{proxy.url.scheme!r}" was shown instead of the
            # actual scheme. The sync HTTPTransport interpolates correctly;
            # this brings the two transports into agreement.
            raise ValueError(
                "Proxy protocol must be either 'http', 'https', or 'socks5',"
                f" but got {proxy.url.scheme!r}."
            )

    async def __aenter__(self: A) -> A:  # Use generics for subclass support.
        await self._pool.__aenter__()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None = None,
        exc_value: BaseException | None = None,
        traceback: TracebackType | None = None,
    ) -> None:
        with map_httpcore_exceptions():
            await self._pool.__aexit__(exc_type, exc_value, traceback)

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        # Translate the httpx Request into an httpcore Request, dispatch it
        # through the pool, and wrap the result back into an httpx Response.
        assert isinstance(request.stream, AsyncByteStream)

        req = httpcore.Request(
            method=request.method,
            url=httpcore.URL(
                scheme=request.url.raw_scheme,
                host=request.url.raw_host,
                port=request.url.port,
                target=request.url.raw_path,
            ),
            headers=request.headers.raw,
            content=request.stream,
            extensions=request.extensions,
        )
        with map_httpcore_exceptions():
            resp = await self._pool.handle_async_request(req)

        assert isinstance(resp.stream, typing.AsyncIterable)

        return Response(
            status_code=resp.status,
            headers=resp.headers,
            stream=AsyncResponseStream(resp.stream),
            extensions=resp.extensions,
        )

    async def aclose(self) -> None:
        # Close the underlying connection pool.
        await self._pool.aclose()
lib/python3.10/site-packages/httpx/_transports/mock.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+
5
+ from .._models import Request, Response
6
+ from .base import AsyncBaseTransport, BaseTransport
7
+
8
+ SyncHandler = typing.Callable[[Request], Response]
9
+ AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]]
10
+
11
+
12
+ __all__ = ["MockTransport"]
13
+
14
+
15
class MockTransport(AsyncBaseTransport, BaseTransport):
    """A transport that dispatches every request to a user-supplied handler."""

    def __init__(self, handler: SyncHandler | AsyncHandler) -> None:
        self.handler = handler

    def handle_request(
        self,
        request: Request,
    ) -> Response:
        # Ensure the request body is fully loaded before invoking the handler.
        request.read()
        result = self.handler(request)
        if not isinstance(result, Response):  # pragma: no cover
            raise TypeError("Cannot use an async handler in a sync Client")
        return result

    async def handle_async_request(
        self,
        request: Request,
    ) -> Response:
        await request.aread()
        result = self.handler(request)

        # The handler may optionally be an `async` function, in which case
        # the call above produced a coroutine that must be awaited to obtain
        # the actual response.
        if not isinstance(result, Response):
            result = await result

        return result
lib/python3.10/site-packages/httpx/_transports/wsgi.py ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import io
4
+ import itertools
5
+ import sys
6
+ import typing
7
+
8
+ from .._models import Request, Response
9
+ from .._types import SyncByteStream
10
+ from .base import BaseTransport
11
+
12
+ if typing.TYPE_CHECKING:
13
+ from _typeshed import OptExcInfo # pragma: no cover
14
+ from _typeshed.wsgi import WSGIApplication # pragma: no cover
15
+
16
+ _T = typing.TypeVar("_T")
17
+
18
+
19
+ __all__ = ["WSGITransport"]
20
+
21
+
22
def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]:
    """Drop falsy leading chunks; return the remainder (empty if all are falsy)."""
    iterator = iter(body)
    for chunk in iterator:
        if chunk:
            # Re-attach the first truthy chunk in front of the rest.
            return itertools.chain([chunk], iterator)
    return []
28
+
29
+
30
class WSGIByteStream(SyncByteStream):
    """Sync byte stream over a WSGI application's result iterable."""

    def __init__(self, result: typing.Iterable[bytes]) -> None:
        # Capture the iterable's close() (if any) so resources are released.
        self._close = getattr(result, "close", None)
        # WSGI apps may emit leading b"" chunks before the first real data.
        self._result = _skip_leading_empty_chunks(result)

    def __iter__(self) -> typing.Iterator[bytes]:
        yield from self._result

    def close(self) -> None:
        if self._close is not None:
            self._close()
42
+
43
+
44
class WSGITransport(BaseTransport):
    """
    A custom transport that handles sending requests directly to an WSGI app.
    The simplest way to use this functionality is to use the `app` argument.

    ```
    client = httpx.Client(app=app)
    ```

    Alternatively, you can setup the transport instance explicitly.
    This allows you to include any additional configuration arguments specific
    to the WSGITransport class:

    ```
    transport = httpx.WSGITransport(
        app=app,
        script_name="/submount",
        remote_addr="1.2.3.4"
    )
    client = httpx.Client(transport=transport)
    ```

    Arguments:

    * `app` - The WSGI application.
    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
       should be raised. Default to `True`. Can be set to `False` for use cases
       such as testing the content of a client 500 response.
    * `script_name` - The root path on which the WSGI application should be mounted.
    * `remote_addr` - A string indicating the client IP of incoming requests.
    """

    def __init__(
        self,
        app: WSGIApplication,
        raise_app_exceptions: bool = True,
        script_name: str = "",
        remote_addr: str = "127.0.0.1",
        wsgi_errors: typing.TextIO | None = None,
    ) -> None:
        self.app = app
        self.raise_app_exceptions = raise_app_exceptions
        self.script_name = script_name
        self.remote_addr = remote_addr
        self.wsgi_errors = wsgi_errors

    def handle_request(self, request: Request) -> Response:
        """Invoke the WSGI app in-process and build an httpx Response.

        Builds a PEP 3333 `environ` dict from the request, calls the app,
        and captures its `start_response` arguments.
        """
        request.read()
        wsgi_input = io.BytesIO(request.content)

        # Default ports per scheme when the URL carries no explicit port.
        port = request.url.port or {"http": 80, "https": 443}[request.url.scheme]
        environ = {
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": request.url.scheme,
            "wsgi.input": wsgi_input,
            "wsgi.errors": self.wsgi_errors or sys.stderr,
            "wsgi.multithread": True,
            "wsgi.multiprocess": False,
            "wsgi.run_once": False,
            "REQUEST_METHOD": request.method,
            "SCRIPT_NAME": self.script_name,
            "PATH_INFO": request.url.path,
            "QUERY_STRING": request.url.query.decode("ascii"),
            "SERVER_NAME": request.url.host,
            "SERVER_PORT": str(port),
            "SERVER_PROTOCOL": "HTTP/1.1",
            "REMOTE_ADDR": self.remote_addr,
        }
        # Map request headers into CGI-style HTTP_* environ keys; per PEP 3333
        # Content-Type and Content-Length keep their un-prefixed names.
        for header_key, header_value in request.headers.raw:
            key = header_key.decode("ascii").upper().replace("-", "_")
            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
                key = "HTTP_" + key
            environ[key] = header_value.decode("ascii")

        # Captured from the app's start_response() call.
        seen_status = None
        seen_response_headers = None
        seen_exc_info = None

        def start_response(
            status: str,
            response_headers: list[tuple[str, str]],
            exc_info: OptExcInfo | None = None,
        ) -> typing.Callable[[bytes], typing.Any]:
            nonlocal seen_status, seen_response_headers, seen_exc_info
            seen_status = status
            seen_response_headers = response_headers
            seen_exc_info = exc_info
            # The returned legacy write() callable is accepted but ignored.
            return lambda _: None

        result = self.app(environ, start_response)

        stream = WSGIByteStream(result)

        assert seen_status is not None
        assert seen_response_headers is not None
        if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions:
            raise seen_exc_info[1]

        # Status line is e.g. "200 OK"; only the numeric code is needed.
        status_code = int(seen_status.split()[0])
        headers = [
            (key.encode("ascii"), value.encode("ascii"))
            for key, value in seen_response_headers
        ]

        return Response(status_code, headers=headers, stream=stream)
lib/python3.10/site-packages/httpx/_types.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Type definitions for type checking purposes.
3
+ """
4
+
5
+ import ssl
6
+ from http.cookiejar import CookieJar
7
+ from typing import (
8
+ IO,
9
+ TYPE_CHECKING,
10
+ Any,
11
+ AsyncIterable,
12
+ AsyncIterator,
13
+ Callable,
14
+ Dict,
15
+ Iterable,
16
+ Iterator,
17
+ List,
18
+ Mapping,
19
+ MutableMapping,
20
+ NamedTuple,
21
+ Optional,
22
+ Sequence,
23
+ Tuple,
24
+ Union,
25
+ )
26
+
27
+ if TYPE_CHECKING: # pragma: no cover
28
+ from ._auth import Auth # noqa: F401
29
+ from ._config import Proxy, Timeout # noqa: F401
30
+ from ._models import Cookies, Headers, Request # noqa: F401
31
+ from ._urls import URL, QueryParams # noqa: F401
32
+
33
+
34
# Values accepted for an individual querystring parameter. These are
# converted to their string representation when the query is rendered.
PrimitiveData = Optional[Union[str, int, float, bool]]

# A pre-parsed URL decomposed into its raw ASCII byte components.
RawURL = NamedTuple(
    "RawURL",
    [
        ("raw_scheme", bytes),
        ("raw_host", bytes),
        ("port", Optional[int]),
        ("raw_path", bytes),
    ],
)

# Anything accepted wherever a URL is expected.
URLTypes = Union["URL", str]

# Anything accepted wherever query parameters are expected.
QueryParamTypes = Union[
    "QueryParams",
    Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]],
    List[Tuple[str, PrimitiveData]],
    Tuple[Tuple[str, PrimitiveData], ...],
    str,
    bytes,
]

# Anything accepted wherever HTTP headers are expected.
HeaderTypes = Union[
    "Headers",
    Mapping[str, str],
    Mapping[bytes, bytes],
    Sequence[Tuple[str, str]],
    Sequence[Tuple[bytes, bytes]],
]

# Anything accepted wherever cookies are expected.
CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]]

# Client-side certificate configuration.
CertTypes = Union[
    # certfile
    str,
    # (certfile, keyfile)
    Tuple[str, Optional[str]],
    # (certfile, keyfile, password)
    Tuple[str, Optional[str], Optional[str]],
]
# SSL verification: a CA bundle path, an on/off flag, or a full SSL context.
VerifyTypes = Union[str, bool, ssl.SSLContext]
# Timeout configuration: a single value applied to all operations, a
# (connect, read, write, pool) tuple, or a Timeout instance.
TimeoutTypes = Union[
    Optional[float],
    Tuple[Optional[float], Optional[float], Optional[float], Optional[float]],
    "Timeout",
]
ProxyTypes = Union["URL", str, "Proxy"]
# Either a single proxy for all requests, or a mapping of URL patterns to
# per-pattern proxies (a value of None disables proxying for that pattern).
ProxiesTypes = Union[ProxyTypes, Dict[Union["URL", str], Union[None, ProxyTypes]]]

# Authentication: a (username, password) pair, a request hook, or an
# Auth instance.
AuthTypes = Union[
    Tuple[Union[str, bytes], Union[str, bytes]],
    Callable[["Request"], "Request"],
    "Auth",
]

# Content accepted for request/response bodies: text, raw bytes, or a
# (sync or async) iterable of byte chunks.
RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]]
ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]]
ResponseExtensions = MutableMapping[str, Any]

# Form data, as a mapping of field names to values.
RequestData = Mapping[str, Any]

# File uploads: a bare file/bytes value, or a tuple adding filename,
# content type, and per-part headers.
FileContent = Union[IO[bytes], bytes, str]
FileTypes = Union[
    # file (or bytes)
    FileContent,
    # (filename, file (or bytes))
    Tuple[Optional[str], FileContent],
    # (filename, file (or bytes), content_type)
    Tuple[Optional[str], FileContent, Optional[str]],
    # (filename, file (or bytes), content_type, headers)
    Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]],
]
RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]]

RequestExtensions = MutableMapping[str, Any]

# Only the byte-stream base classes form the runtime public API here;
# everything above is purely for type-checking purposes.
__all__ = ["AsyncByteStream", "SyncByteStream"]
112
+
113
+
114
class SyncByteStream:
    """
    Base class for synchronous request/response byte streams.

    Subclasses implement `__iter__` to yield the raw content in chunks.
    """

    def __iter__(self) -> Iterator[bytes]:
        raise NotImplementedError(
            "The '__iter__' method must be implemented."
        )  # pragma: no cover
        # The unreachable `yield` marks this method as a generator, so the
        # declared Iterator[bytes] interface is accurate for type checkers.
        yield b""  # pragma: no cover

    def close(self) -> None:
        """
        Subclasses can override this method to release any network resources
        after a request/response cycle is complete.
        """
126
+
127
+
128
class AsyncByteStream:
    """
    Base class for asynchronous request/response byte streams.

    Subclasses implement `__aiter__` to yield the raw content in chunks.
    """

    async def __aiter__(self) -> AsyncIterator[bytes]:
        raise NotImplementedError(
            "The '__aiter__' method must be implemented."
        )  # pragma: no cover
        # The unreachable `yield` makes this an async generator, so the
        # declared AsyncIterator[bytes] interface is accurate for type checkers.
        yield b""  # pragma: no cover

    async def aclose(self) -> None:
        # Subclasses can override this method to release any network
        # resources after a request/response cycle is complete.
        pass
lib/python3.10/site-packages/httpx/_urlparse.py ADDED
@@ -0,0 +1,505 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ An implementation of `urlparse` that provides URL validation and normalization
3
+ as described by RFC3986.
4
+
5
+ We rely on this implementation rather than the one in Python's stdlib, because:
6
+
7
+ * It provides more complete URL validation.
8
+ * It properly differentiates between an empty querystring and an absent querystring,
9
+ to distinguish URLs with a trailing '?'.
10
+ * It handles scheme, hostname, port, and path normalization.
11
+ * It supports IDNA hostnames, normalizing them to their encoded form.
12
+ * The API supports passing individual components, as well as the complete URL string.
13
+
14
+ Previously we relied on the excellent `rfc3986` package to handle URL parsing and
15
+ validation, but this module provides a simpler alternative, with less indirection
16
+ required.
17
+ """
18
+
19
+ from __future__ import annotations
20
+
21
+ import ipaddress
22
+ import re
23
+ import typing
24
+
25
+ import idna
26
+
27
+ from ._exceptions import InvalidURL
28
+
29
# Hard upper bound on the length of a complete URL, and also on the length
# of each individual component passed as a keyword argument.
MAX_URL_LENGTH = 65536

# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3
UNRESERVED_CHARACTERS = (
    "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"
)
SUB_DELIMS = "!$&'()*+,;="

# Matches a single existing percent-escape sequence, e.g. "%2F".
PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}")


# Top-level URL structure:
# {scheme}:        (optional)
# //{authority}    (optional)
# {path}
# ?{query}         (optional)
# #{fragment}      (optional)
URL_REGEX = re.compile(
    (
        r"(?:(?P<scheme>{scheme}):)?"
        r"(?://(?P<authority>{authority}))?"
        r"(?P<path>{path})"
        r"(?:\?(?P<query>{query}))?"
        r"(?:#(?P<fragment>{fragment}))?"
    ).format(
        scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?",
        authority="[^/?#]*",
        path="[^?#]*",
        query="[^#]*",
        fragment=".*",
    )
)

# Authority structure:
# {userinfo}@      (optional)
# {host}
# :{port}          (optional)
AUTHORITY_REGEX = re.compile(
    (
        r"(?:(?P<userinfo>{userinfo})@)?" r"(?P<host>{host})" r":?(?P<port>{port})?"
    ).format(
        userinfo=".*",  # Any character sequence.
        host="(\\[.*\\]|[^:@]*)",  # Either any character sequence excluding ':' or '@',
        # or an IPv6 address enclosed within square brackets.
        port=".*",  # Any character sequence.
    )
)


# If we call urlparse with an individual component, then we need to regex
# validate that component individually.
# Note that we're duplicating the same strings as above. Shock! Horror!!
COMPONENT_REGEX = {
    "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"),
    "authority": re.compile("[^/?#]*"),
    "path": re.compile("[^?#]*"),
    "query": re.compile("[^#]*"),
    "fragment": re.compile(".*"),
    "userinfo": re.compile("[^@]*"),
    "host": re.compile("(\\[.*\\]|[^:]*)"),
    "port": re.compile(".*"),
}


# We use these simple regexs as a first pass before handing off to
# the stdlib 'ipaddress' module for IP address validation.
IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")
IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$")
95
+
96
+
97
class ParseResult(typing.NamedTuple):
    """
    The canonical parsed form of a URL: plain-ASCII string components,
    with the port held as an integer (or None when absent or default).
    """

    scheme: str
    userinfo: str
    host: str
    port: int | None
    path: str
    query: str | None
    fragment: str | None

    @property
    def authority(self) -> str:
        """The userinfo/host/port portion, e.g. "user@example.com:8080"."""
        parts = []
        if self.userinfo:
            parts.append(f"{self.userinfo}@")
        # IPv6 literals are re-wrapped in square brackets when rendered.
        parts.append(f"[{self.host}]" if ":" in self.host else self.host)
        if self.port is not None:
            parts.append(f":{self.port}")
        return "".join(parts)

    @property
    def netloc(self) -> str:
        """Like `authority`, but always excluding the userinfo portion."""
        bracketed_host = f"[{self.host}]" if ":" in self.host else self.host
        if self.port is None:
            return bracketed_host
        return f"{bracketed_host}:{self.port}"

    def copy_with(self, **kwargs: str | None) -> ParseResult:
        """Return a new ParseResult with the given components replaced."""
        if not kwargs:
            return self

        components = {
            "scheme": self.scheme,
            "authority": self.authority,
            "path": self.path,
            "query": self.query,
            "fragment": self.fragment,
        }
        components.update(kwargs)
        return urlparse("", **components)

    def __str__(self) -> str:
        """Render the complete URL string from the parsed components."""
        authority = self.authority
        parts = []
        if self.scheme:
            parts.append(f"{self.scheme}:")
        if authority:
            parts.append(f"//{authority}")
        parts.append(self.path)
        # A non-None empty query/fragment still renders its "?"/"#" prefix.
        if self.query is not None:
            parts.append(f"?{self.query}")
        if self.fragment is not None:
            parts.append(f"#{self.fragment}")
        return "".join(parts)
150
+
151
+
152
def urlparse(url: str = "", **kwargs: str | None) -> ParseResult:
    """
    Parse a URL string into a normalized, validated `ParseResult`.

    Individual components may also be supplied (or overridden) as keyword
    arguments: 'scheme', 'authority', 'userinfo', 'host', 'port', 'path',
    'query', 'fragment', plus the convenience keywords 'netloc',
    'username', 'password', and 'raw_path'.

    Raises `InvalidURL` if the URL or any supplied component is invalid.
    """
    # Initial basic checks on allowable URLs.
    # ---------------------------------------

    # Hard limit the maximum allowable URL length.
    if len(url) > MAX_URL_LENGTH:
        raise InvalidURL("URL too long")

    # If a URL includes any ASCII control characters including \t, \r, \n,
    # then treat it as invalid.
    if any(char.isascii() and not char.isprintable() for char in url):
        char = next(char for char in url if char.isascii() and not char.isprintable())
        idx = url.find(char)
        error = (
            f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}."
        )
        raise InvalidURL(error)

    # Some keyword arguments require special handling.
    # ------------------------------------------------

    # Coerce "port" to a string, if it is provided as an integer.
    if "port" in kwargs:
        port = kwargs["port"]
        kwargs["port"] = str(port) if isinstance(port, int) else port

    # Replace "netloc" with "host" and "port".
    if "netloc" in kwargs:
        netloc = kwargs.pop("netloc") or ""
        kwargs["host"], _, kwargs["port"] = netloc.partition(":")

    # Replace "username" and/or "password" with "userinfo".
    if "username" in kwargs or "password" in kwargs:
        username = quote(kwargs.pop("username", "") or "")
        password = quote(kwargs.pop("password", "") or "")
        kwargs["userinfo"] = f"{username}:{password}" if password else username

    # Replace "raw_path" with "path" and "query".
    if "raw_path" in kwargs:
        raw_path = kwargs.pop("raw_path") or ""
        kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?")
        # No "?" in raw_path means no query component at all (not an empty one).
        if not seperator:
            kwargs["query"] = None

    # Ensure that IPv6 "host" addresses are always escaped with "[...]".
    if "host" in kwargs:
        host = kwargs.get("host") or ""
        if ":" in host and not (host.startswith("[") and host.endswith("]")):
            kwargs["host"] = f"[{host}]"

    # If any keyword arguments are provided, ensure they are valid.
    # -------------------------------------------------------------

    for key, value in kwargs.items():
        if value is not None:
            if len(value) > MAX_URL_LENGTH:
                raise InvalidURL(f"URL component '{key}' too long")

            # If a component includes any ASCII control characters including
            # \t, \r, \n, then treat it as invalid.
            if any(char.isascii() and not char.isprintable() for char in value):
                char = next(
                    char for char in value if char.isascii() and not char.isprintable()
                )
                idx = value.find(char)
                error = (
                    f"Invalid non-printable ASCII character in URL {key} component, "
                    f"{char!r} at position {idx}."
                )
                raise InvalidURL(error)

            # Ensure that keyword arguments match as a valid regex.
            if not COMPONENT_REGEX[key].fullmatch(value):
                raise InvalidURL(f"Invalid URL component '{key}'")

    # The URL_REGEX will always match, but may have empty components.
    url_match = URL_REGEX.match(url)
    assert url_match is not None
    url_dict = url_match.groupdict()

    # * 'scheme', 'authority', and 'path' may be empty strings.
    # * 'query' may be 'None', indicating no trailing "?" portion.
    #   Any string including the empty string, indicates a trailing "?".
    # * 'fragment' may be 'None', indicating no trailing "#" portion.
    #   Any string including the empty string, indicates a trailing "#".
    scheme = kwargs.get("scheme", url_dict["scheme"]) or ""
    authority = kwargs.get("authority", url_dict["authority"]) or ""
    path = kwargs.get("path", url_dict["path"]) or ""
    query = kwargs.get("query", url_dict["query"])
    fragment = kwargs.get("fragment", url_dict["fragment"])

    # The AUTHORITY_REGEX will always match, but may have empty components.
    authority_match = AUTHORITY_REGEX.match(authority)
    assert authority_match is not None
    authority_dict = authority_match.groupdict()

    # * 'userinfo' and 'host' may be empty strings.
    # * 'port' may be 'None'.
    userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or ""
    host = kwargs.get("host", authority_dict["host"]) or ""
    port = kwargs.get("port", authority_dict["port"])

    # Normalize and validate each component.
    # We end up with a parsed representation of the URL,
    # with components that are plain ASCII bytestrings.
    parsed_scheme: str = scheme.lower()
    parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":")
    parsed_host: str = encode_host(host)
    parsed_port: int | None = normalize_port(port, scheme)

    has_scheme = parsed_scheme != ""
    has_authority = (
        parsed_userinfo != "" or parsed_host != "" or parsed_port is not None
    )
    validate_path(path, has_scheme=has_scheme, has_authority=has_authority)
    if has_scheme or has_authority:
        path = normalize_path(path)

    # The GEN_DELIMS set is... : / ? # [ ] @
    # These do not need to be percent-quoted unless they serve as delimiters for the
    # specific component.
    WHATWG_SAFE = '`{}%|^\\"'

    # For 'path' we need to drop ? and # from the GEN_DELIMS set.
    parsed_path: str = quote(path, safe=SUB_DELIMS + WHATWG_SAFE + ":/[]@")
    # For 'query' we need to drop '#' from the GEN_DELIMS set.
    parsed_query: str | None = (
        None
        if query is None
        else quote(query, safe=SUB_DELIMS + WHATWG_SAFE + ":/?[]@")
    )
    # For 'fragment' we can include all of the GEN_DELIMS set.
    parsed_fragment: str | None = (
        None
        if fragment is None
        else quote(fragment, safe=SUB_DELIMS + WHATWG_SAFE + ":/?#[]@")
    )

    # The parsed ASCII bytestrings are our canonical form.
    # All properties of the URL are derived from these.
    return ParseResult(
        parsed_scheme,
        parsed_userinfo,
        parsed_host,
        parsed_port,
        parsed_path,
        parsed_query,
        parsed_fragment,
    )
301
+
302
+
303
def encode_host(host: str) -> str:
    """
    Normalize and validate a URL hostname, returning its encoded ASCII form.

    Handles the empty host, IPv4 literals, bracketed IPv6 literals, plain
    ASCII registered names, and internationalized (IDNA) hostnames.
    Raises `InvalidURL` when the host fails validation.
    """
    if not host:
        return ""

    if IPv4_STYLE_HOSTNAME.match(host):
        # Validate IPv4 hostnames like #.#.#.#
        #
        # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2
        #
        # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
        try:
            ipaddress.IPv4Address(host)
        except ipaddress.AddressValueError:
            raise InvalidURL(f"Invalid IPv4 address: {host!r}")
        return host

    if IPv6_STYLE_HOSTNAME.match(host):
        # Validate IPv6 hostnames like [...]
        #
        # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2:
        # square brackets are the only place IPv6 literals are allowed in
        # URI syntax. The returned value has the brackets stripped.
        inner = host[1:-1]
        try:
            ipaddress.IPv6Address(inner)
        except ipaddress.AddressValueError:
            raise InvalidURL(f"Invalid IPv6 address: {host!r}")
        return inner

    if host.isascii():
        # Regular ASCII hostnames.
        #
        # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2
        #
        # reg-name = *( unreserved / pct-encoded / sub-delims )
        whatwg_safe = '"`{}%|\\'
        return quote(host.lower(), safe=SUB_DELIMS + whatwg_safe)

    # IDNA hostnames are normalized to their punycode-encoded form.
    try:
        return idna.encode(host.lower()).decode("ascii")
    except idna.IDNAError:
        raise InvalidURL(f"Invalid IDNA hostname: {host!r}")
348
+
349
+
350
def normalize_port(port: str | int | None, scheme: str) -> int | None:
    """
    Coerce a port to an integer, returning None for an absent/empty port
    or for the scheme's default port.

    Raises `InvalidURL` if the port cannot be parsed as an integer.
    """
    # From https://tools.ietf.org/html/rfc3986#section-3.2.3
    #
    # "URI producers and normalizers should omit the port component and its
    # ":" delimiter if port is empty or if its value would be the same as
    # that of the scheme's default."
    if port in (None, ""):
        return None

    try:
        numeric_port = int(port)
    except ValueError:
        raise InvalidURL(f"Invalid port: {port!r}")

    # Default ports per https://url.spec.whatwg.org/#url-miscellaneous
    default_ports = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}
    return None if numeric_port == default_ports.get(scheme) else numeric_port
375
+
376
+
377
def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None:
    """
    Apply the path validation rules of RFC 3986 section 3.3, which depend
    on whether the URL carries a scheme and/or an authority component.

    Raises `InvalidURL` on violation; returns None when the path is valid.

    See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3
    """
    if has_authority:
        # "If a URI contains an authority component, then the path component
        # must either be empty or begin with a slash ('/') character."
        if path and not path.startswith("/"):
            raise InvalidURL("For absolute URLs, path must be empty or begin with '/'")
        return

    if has_scheme:
        # No further restrictions apply when a scheme is present
        # without an authority.
        return

    # A URI with neither scheme nor authority is a relative reference,
    # which carries two extra restrictions on its leading path segment:
    #
    # It cannot begin with two slash characters ("//")...
    if path.startswith("//"):
        raise InvalidURL("Relative URLs cannot have a path starting with '//'")

    # ...and its first segment cannot contain a colon (":") character.
    if path.startswith(":"):
        raise InvalidURL("Relative URLs cannot have a path starting with ':'")
400
+
401
+
402
def normalize_path(path: str) -> str:
    """
    Remove "." and ".." segments from a URL path.

    For example:

        normalize_path("/path/./to/somewhere/..") == "/path/to"

    See https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4
    """
    # Fast path: nothing to do unless a '.' appears somewhere.
    if "." not in path:
        return path

    segments = path.split("/")

    # Fast path: no '.' or '..' appears as a complete segment.
    if "." not in segments and ".." not in segments:
        return path

    resolved: list[str] = []
    for segment in segments:
        if segment == ".":
            continue
        if segment == "..":
            # Pop the previous segment, but never remove the leading empty
            # segment that marks an absolute path.
            if resolved and resolved != [""]:
                resolved.pop()
        else:
            resolved.append(segment)
    return "/".join(resolved)
431
+
432
+
433
+ def PERCENT(string: str) -> str:
434
+ return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")])
435
+
436
+
437
def percent_encoded(string: str, safe: str = "/") -> str:
    """
    Percent-encode *string*, leaving unreserved characters and any
    characters listed in *safe* untouched.
    """
    unescaped = UNRESERVED_CHARACTERS + safe

    # Fast path: return the string unchanged when every character is safe.
    if all(char in unescaped for char in string):
        return string

    return "".join(
        char if char in unescaped else PERCENT(char) for char in string
    )
450
+
451
+
452
def quote(string: str, safe: str = "/") -> str:
    """
    Percent-encode a string, passing any existing '%xx' escape sequences
    through untouched rather than double-escaping them.

    See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1

    * `string`: The string to be percent-escaped.
    * `safe`: Characters that may be treated as safe and left unescaped.
      Unreserved characters are always treated as safe.
      See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3
    """
    parts: list[str] = []
    cursor = 0
    for match in PERCENT_ENCODED_REGEX.finditer(string):
        # Escape the text leading up to this existing '%xx' sequence...
        if match.start() > cursor:
            parts.append(percent_encoded(string[cursor : match.start()], safe=safe))
        # ...then keep the '%xx' sequence itself as-is.
        parts.append(match.group(0))
        cursor = match.end()

    # Escape any remaining text after the final '%xx' sequence.
    if cursor < len(string):
        parts.append(percent_encoded(string[cursor:], safe=safe))

    return "".join(parts)
483
+
484
+
485
def urlencode(items: list[tuple[str, str]]) -> str:
    """
    Render (key, value) pairs as a form-encoded query string.

    A much simpler version of the stdlib `urlencode`, since we don't need
    to handle a bunch of different typing cases, such as bytes vs str.

    https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926

    Note that we use '%20' encoding for spaces, and '%2F' for '/'.
    This is slightly different than `requests`, but is the behaviour that
    browsers use.

    See
    - https://github.com/encode/httpx/issues/2536
    - https://github.com/encode/httpx/issues/2721
    - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
    """
    pairs = [
        f"{percent_encoded(key, safe='')}={percent_encoded(value, safe='')}"
        for key, value in items
    ]
    return "&".join(pairs)
lib/python3.10/site-packages/httpx/_urls.py ADDED
@@ -0,0 +1,648 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import typing
4
+ from urllib.parse import parse_qs, unquote
5
+
6
+ import idna
7
+
8
+ from ._types import QueryParamTypes, RawURL
9
+ from ._urlparse import urlencode, urlparse
10
+ from ._utils import primitive_value_to_str
11
+
12
+ __all__ = ["URL", "QueryParams"]
13
+
14
+
15
+ class URL:
16
+ """
17
+ url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink")
18
+
19
+ assert url.scheme == "https"
20
+ assert url.username == "jo@email.com"
21
+ assert url.password == "a secret"
22
+ assert url.userinfo == b"jo%40email.com:a%20secret"
23
+ assert url.host == "müller.de"
24
+ assert url.raw_host == b"xn--mller-kva.de"
25
+ assert url.port == 1234
26
+ assert url.netloc == b"xn--mller-kva.de:1234"
27
+ assert url.path == "/pa th"
28
+ assert url.query == b"?search=ab"
29
+ assert url.raw_path == b"/pa%20th?search=ab"
30
+ assert url.fragment == "anchorlink"
31
+
32
+ The components of a URL are broken down like this:
33
+
34
+ https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink
35
+ [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment]
36
+ [ userinfo ] [ netloc ][ raw_path ]
37
+
38
+ Note that:
39
+
40
+ * `url.scheme` is normalized to always be lowercased.
41
+
42
+ * `url.host` is normalized to always be lowercased. Internationalized domain
43
+ names are represented in unicode, without IDNA encoding applied. For instance:
44
+
45
+ url = httpx.URL("http://中国.icom.museum")
46
+ assert url.host == "中国.icom.museum"
47
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
48
+ assert url.host == "中国.icom.museum"
49
+
50
+ * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded.
51
+
52
+ url = httpx.URL("http://中国.icom.museum")
53
+ assert url.raw_host == b"xn--fiqs8s.icom.museum"
54
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
55
+ assert url.raw_host == b"xn--fiqs8s.icom.museum"
56
+
57
+ * `url.port` is either None or an integer. URLs that include the default port for
58
+ "http", "https", "ws", "wss", and "ftp" schemes have their port
59
+ normalized to `None`.
60
+
61
+ assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80")
62
+ assert httpx.URL("http://example.com").port is None
63
+ assert httpx.URL("http://example.com:80").port is None
64
+
65
+ * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work
66
+ with `url.username` and `url.password` instead, which handle the URL escaping.
67
+
68
+ * `url.raw_path` is raw bytes of both the path and query, without URL escaping.
69
+ This portion is used as the target when constructing HTTP requests. Usually you'll
70
+ want to work with `url.path` instead.
71
+
72
+ * `url.query` is raw bytes, without URL escaping. A URL query string portion can
73
+ only be properly URL escaped when decoding the parameter names and values
74
+ themselves.
75
+ """
76
+
77
+ def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None:
78
+ if kwargs:
79
+ allowed = {
80
+ "scheme": str,
81
+ "username": str,
82
+ "password": str,
83
+ "userinfo": bytes,
84
+ "host": str,
85
+ "port": int,
86
+ "netloc": bytes,
87
+ "path": str,
88
+ "query": bytes,
89
+ "raw_path": bytes,
90
+ "fragment": str,
91
+ "params": object,
92
+ }
93
+
94
+ # Perform type checking for all supported keyword arguments.
95
+ for key, value in kwargs.items():
96
+ if key not in allowed:
97
+ message = f"{key!r} is an invalid keyword argument for URL()"
98
+ raise TypeError(message)
99
+ if value is not None and not isinstance(value, allowed[key]):
100
+ expected = allowed[key].__name__
101
+ seen = type(value).__name__
102
+ message = f"Argument {key!r} must be {expected} but got {seen}"
103
+ raise TypeError(message)
104
+ if isinstance(value, bytes):
105
+ kwargs[key] = value.decode("ascii")
106
+
107
+ if "params" in kwargs:
108
+ # Replace any "params" keyword with the raw "query" instead.
109
+ #
110
+ # Ensure that empty params use `kwargs["query"] = None` rather
111
+ # than `kwargs["query"] = ""`, so that generated URLs do not
112
+ # include an empty trailing "?".
113
+ params = kwargs.pop("params")
114
+ kwargs["query"] = None if not params else str(QueryParams(params))
115
+
116
+ if isinstance(url, str):
117
+ self._uri_reference = urlparse(url, **kwargs)
118
+ elif isinstance(url, URL):
119
+ self._uri_reference = url._uri_reference.copy_with(**kwargs)
120
+ else:
121
+ raise TypeError(
122
+ "Invalid type for url. Expected str or httpx.URL,"
123
+ f" got {type(url)}: {url!r}"
124
+ )
125
+
126
    @property
    def scheme(self) -> str:
        """
        The URL scheme, such as "http", "https".
        Always normalised to lowercase (lowercasing is applied at parse time).
        """
        return self._uri_reference.scheme
133
+
134
    @property
    def raw_scheme(self) -> bytes:
        """
        The raw bytes representation of the URL scheme, such as b"http", b"https".
        Always normalised to lowercase.
        """
        # Parsed components are plain ASCII strings, so this encode cannot fail.
        return self._uri_reference.scheme.encode("ascii")
141
+
142
    @property
    def userinfo(self) -> bytes:
        """
        The URL userinfo as a raw bytestring, without URL decoding applied.
        For example: b"jo%40email.com:a%20secret".

        See `username` and `password` for the decoded string forms.
        """
        return self._uri_reference.userinfo.encode("ascii")
149
+
150
+ @property
151
+ def username(self) -> str:
152
+ """
153
+ The URL username as a string, with URL decoding applied.
154
+ For example: "jo@email.com"
155
+ """
156
+ userinfo = self._uri_reference.userinfo
157
+ return unquote(userinfo.partition(":")[0])
158
+
159
+ @property
160
+ def password(self) -> str:
161
+ """
162
+ The URL password as a string, with URL decoding applied.
163
+ For example: "a secret"
164
+ """
165
+ userinfo = self._uri_reference.userinfo
166
+ return unquote(userinfo.partition(":")[2])
167
+
168
+ @property
169
+ def host(self) -> str:
170
+ """
171
+ The URL host as a string.
172
+ Always normalized to lowercase, with IDNA hosts decoded into unicode.
173
+
174
+ Examples:
175
+
176
+ url = httpx.URL("http://www.EXAMPLE.org")
177
+ assert url.host == "www.example.org"
178
+
179
+ url = httpx.URL("http://中国.icom.museum")
180
+ assert url.host == "中国.icom.museum"
181
+
182
+ url = httpx.URL("http://xn--fiqs8s.icom.museum")
183
+ assert url.host == "中国.icom.museum"
184
+
185
+ url = httpx.URL("https://[::ffff:192.168.0.1]")
186
+ assert url.host == "::ffff:192.168.0.1"
187
+ """
188
+ host: str = self._uri_reference.host
189
+
190
+ if host.startswith("xn--"):
191
+ host = idna.decode(host)
192
+
193
+ return host
194
+
195
    @property
    def raw_host(self) -> bytes:
        """
        The raw bytes representation of the URL host.
        Always normalized to lowercase, and IDNA encoded.
        (See `host` for the IDNA-decoded string form.)

        Examples:

            url = httpx.URL("http://www.EXAMPLE.org")
            assert url.raw_host == b"www.example.org"

            url = httpx.URL("http://中国.icom.museum")
            assert url.raw_host == b"xn--fiqs8s.icom.museum"

            url = httpx.URL("http://xn--fiqs8s.icom.museum")
            assert url.raw_host == b"xn--fiqs8s.icom.museum"

            url = httpx.URL("https://[::ffff:192.168.0.1]")
            assert url.raw_host == b"::ffff:192.168.0.1"
        """
        return self._uri_reference.host.encode("ascii")
216
+
217
    @property
    def port(self) -> int | None:
        """
        The URL port as an integer, or None when absent.

        Note that the URL class performs port normalization as per the WHATWG spec.
        Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always
        treated as `None`.

        For example:

            assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80")
            assert httpx.URL("http://www.example.com:80").port is None
        """
        return self._uri_reference.port
232
+
233
+ @property
234
+ def netloc(self) -> bytes:
235
+ """
236
+ Either `<host>` or `<host>:<port>` as bytes.
237
+ Always normalized to lowercase, and IDNA encoded.
238
+
239
+ This property may be used for generating the value of a request
240
+ "Host" header.
241
+ """
242
+ return self._uri_reference.netloc.encode("ascii")
243
+
244
+ @property
245
+ def path(self) -> str:
246
+ """
247
+ The URL path as a string. Excluding the query string, and URL decoded.
248
+
249
+ For example:
250
+
251
+ url = httpx.URL("https://example.com/pa%20th")
252
+ assert url.path == "/pa th"
253
+ """
254
+ path = self._uri_reference.path or "/"
255
+ return unquote(path)
256
+
257
+ @property
258
+ def query(self) -> bytes:
259
+ """
260
+ The URL query string, as raw bytes, excluding the leading b"?".
261
+
262
+ This is necessarily a bytewise interface, because we cannot
263
+ perform URL decoding of this representation until we've parsed
264
+ the keys and values into a QueryParams instance.
265
+
266
+ For example:
267
+
268
+ url = httpx.URL("https://example.com/?filter=some%20search%20terms")
269
+ assert url.query == b"filter=some%20search%20terms"
270
+ """
271
+ query = self._uri_reference.query or ""
272
+ return query.encode("ascii")
273
+
274
+ @property
275
+ def params(self) -> QueryParams:
276
+ """
277
+ The URL query parameters, neatly parsed and packaged into an immutable
278
+ multidict representation.
279
+ """
280
+ return QueryParams(self._uri_reference.query)
281
+
282
+ @property
283
+ def raw_path(self) -> bytes:
284
+ """
285
+ The complete URL path and query string as raw bytes.
286
+ Used as the target when constructing HTTP requests.
287
+
288
+ For example:
289
+
290
+ GET /users?search=some%20text HTTP/1.1
291
+ Host: www.example.org
292
+ Connection: close
293
+ """
294
+ path = self._uri_reference.path or "/"
295
+ if self._uri_reference.query is not None:
296
+ path += "?" + self._uri_reference.query
297
+ return path.encode("ascii")
298
+
299
+ @property
300
+ def fragment(self) -> str:
301
+ """
302
+ The URL fragments, as used in HTML anchors.
303
+ As a string, without the leading '#'.
304
+ """
305
+ return unquote(self._uri_reference.fragment or "")
306
+
307
+ @property
308
+ def raw(self) -> RawURL:
309
+ """
310
+ Provides the (scheme, host, port, target) for the outgoing request.
311
+
312
+ In older versions of `httpx` this was used in the low-level transport API.
313
+ We no longer use `RawURL`, and this property will be deprecated
314
+ in a future release.
315
+ """
316
+ return RawURL(
317
+ self.raw_scheme,
318
+ self.raw_host,
319
+ self.port,
320
+ self.raw_path,
321
+ )
322
+
323
+ @property
324
+ def is_absolute_url(self) -> bool:
325
+ """
326
+ Return `True` for absolute URLs such as 'http://example.com/path',
327
+ and `False` for relative URLs such as '/path'.
328
+ """
329
+ # We don't use `.is_absolute` from `rfc3986` because it treats
330
+ # URLs with a fragment portion as not absolute.
331
+ # What we actually care about is if the URL provides
332
+ # a scheme and hostname to which connections should be made.
333
+ return bool(self._uri_reference.scheme and self._uri_reference.host)
334
+
335
+ @property
336
+ def is_relative_url(self) -> bool:
337
+ """
338
+ Return `False` for absolute URLs such as 'http://example.com/path',
339
+ and `True` for relative URLs such as '/path'.
340
+ """
341
+ return not self.is_absolute_url
342
+
343
+ def copy_with(self, **kwargs: typing.Any) -> URL:
344
+ """
345
+ Copy this URL, returning a new URL with some components altered.
346
+ Accepts the same set of parameters as the components that are made
347
+ available via properties on the `URL` class.
348
+
349
+ For example:
350
+
351
+ url = httpx.URL("https://www.example.com").copy_with(
352
+ username="jo@gmail.com", password="a secret"
353
+ )
354
+ assert url == "https://jo%40email.com:a%20secret@www.example.com"
355
+ """
356
+ return URL(self, **kwargs)
357
+
358
+ def copy_set_param(self, key: str, value: typing.Any = None) -> URL:
359
+ return self.copy_with(params=self.params.set(key, value))
360
+
361
+ def copy_add_param(self, key: str, value: typing.Any = None) -> URL:
362
+ return self.copy_with(params=self.params.add(key, value))
363
+
364
+ def copy_remove_param(self, key: str) -> URL:
365
+ return self.copy_with(params=self.params.remove(key))
366
+
367
+ def copy_merge_params(self, params: QueryParamTypes) -> URL:
368
+ return self.copy_with(params=self.params.merge(params))
369
+
370
+ def join(self, url: URL | str) -> URL:
371
+ """
372
+ Return an absolute URL, using this URL as the base.
373
+
374
+ Eg.
375
+
376
+ url = httpx.URL("https://www.example.com/test")
377
+ url = url.join("/new/path")
378
+ assert url == "https://www.example.com/new/path"
379
+ """
380
+ from urllib.parse import urljoin
381
+
382
+ return URL(urljoin(str(self), str(URL(url))))
383
+
384
+ def __hash__(self) -> int:
385
+ return hash(str(self))
386
+
387
+ def __eq__(self, other: typing.Any) -> bool:
388
+ return isinstance(other, (URL, str)) and str(self) == str(URL(other))
389
+
390
+ def __str__(self) -> str:
391
+ return str(self._uri_reference)
392
+
393
+ def __repr__(self) -> str:
394
+ scheme, userinfo, host, port, path, query, fragment = self._uri_reference
395
+
396
+ if ":" in userinfo:
397
+ # Mask any password component.
398
+ userinfo = f'{userinfo.split(":")[0]}:[secure]'
399
+
400
+ authority = "".join(
401
+ [
402
+ f"{userinfo}@" if userinfo else "",
403
+ f"[{host}]" if ":" in host else host,
404
+ f":{port}" if port is not None else "",
405
+ ]
406
+ )
407
+ url = "".join(
408
+ [
409
+ f"{self.scheme}:" if scheme else "",
410
+ f"//{authority}" if authority else "",
411
+ path,
412
+ f"?{query}" if query is not None else "",
413
+ f"#{fragment}" if fragment is not None else "",
414
+ ]
415
+ )
416
+
417
+ return f"{self.__class__.__name__}({url!r})"
418
+
419
+
420
+ class QueryParams(typing.Mapping[str, str]):
421
+ """
422
+ URL query parameters, as a multi-dict.
423
+ """
424
+
425
+ def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None:
426
+ assert len(args) < 2, "Too many arguments."
427
+ assert not (args and kwargs), "Cannot mix named and unnamed arguments."
428
+
429
+ value = args[0] if args else kwargs
430
+
431
+ if value is None or isinstance(value, (str, bytes)):
432
+ value = value.decode("ascii") if isinstance(value, bytes) else value
433
+ self._dict = parse_qs(value, keep_blank_values=True)
434
+ elif isinstance(value, QueryParams):
435
+ self._dict = {k: list(v) for k, v in value._dict.items()}
436
+ else:
437
+ dict_value: dict[typing.Any, list[typing.Any]] = {}
438
+ if isinstance(value, (list, tuple)):
439
+ # Convert list inputs like:
440
+ # [("a", "123"), ("a", "456"), ("b", "789")]
441
+ # To a dict representation, like:
442
+ # {"a": ["123", "456"], "b": ["789"]}
443
+ for item in value:
444
+ dict_value.setdefault(item[0], []).append(item[1])
445
+ else:
446
+ # Convert dict inputs like:
447
+ # {"a": "123", "b": ["456", "789"]}
448
+ # To dict inputs where values are always lists, like:
449
+ # {"a": ["123"], "b": ["456", "789"]}
450
+ dict_value = {
451
+ k: list(v) if isinstance(v, (list, tuple)) else [v]
452
+ for k, v in value.items()
453
+ }
454
+
455
+ # Ensure that keys and values are neatly coerced to strings.
456
+ # We coerce values `True` and `False` to JSON-like "true" and "false"
457
+ # representations, and coerce `None` values to the empty string.
458
+ self._dict = {
459
+ str(k): [primitive_value_to_str(item) for item in v]
460
+ for k, v in dict_value.items()
461
+ }
462
+
463
+ def keys(self) -> typing.KeysView[str]:
464
+ """
465
+ Return all the keys in the query params.
466
+
467
+ Usage:
468
+
469
+ q = httpx.QueryParams("a=123&a=456&b=789")
470
+ assert list(q.keys()) == ["a", "b"]
471
+ """
472
+ return self._dict.keys()
473
+
474
+ def values(self) -> typing.ValuesView[str]:
475
+ """
476
+ Return all the values in the query params. If a key occurs more than once
477
+ only the first item for that key is returned.
478
+
479
+ Usage:
480
+
481
+ q = httpx.QueryParams("a=123&a=456&b=789")
482
+ assert list(q.values()) == ["123", "789"]
483
+ """
484
+ return {k: v[0] for k, v in self._dict.items()}.values()
485
+
486
+ def items(self) -> typing.ItemsView[str, str]:
487
+ """
488
+ Return all items in the query params. If a key occurs more than once
489
+ only the first item for that key is returned.
490
+
491
+ Usage:
492
+
493
+ q = httpx.QueryParams("a=123&a=456&b=789")
494
+ assert list(q.items()) == [("a", "123"), ("b", "789")]
495
+ """
496
+ return {k: v[0] for k, v in self._dict.items()}.items()
497
+
498
+ def multi_items(self) -> list[tuple[str, str]]:
499
+ """
500
+ Return all items in the query params. Allow duplicate keys to occur.
501
+
502
+ Usage:
503
+
504
+ q = httpx.QueryParams("a=123&a=456&b=789")
505
+ assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")]
506
+ """
507
+ multi_items: list[tuple[str, str]] = []
508
+ for k, v in self._dict.items():
509
+ multi_items.extend([(k, i) for i in v])
510
+ return multi_items
511
+
512
+ def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any:
513
+ """
514
+ Get a value from the query param for a given key. If the key occurs
515
+ more than once, then only the first value is returned.
516
+
517
+ Usage:
518
+
519
+ q = httpx.QueryParams("a=123&a=456&b=789")
520
+ assert q.get("a") == "123"
521
+ """
522
+ if key in self._dict:
523
+ return self._dict[str(key)][0]
524
+ return default
525
+
526
+ def get_list(self, key: str) -> list[str]:
527
+ """
528
+ Get all values from the query param for a given key.
529
+
530
+ Usage:
531
+
532
+ q = httpx.QueryParams("a=123&a=456&b=789")
533
+ assert q.get_list("a") == ["123", "456"]
534
+ """
535
+ return list(self._dict.get(str(key), []))
536
+
537
+ def set(self, key: str, value: typing.Any = None) -> QueryParams:
538
+ """
539
+ Return a new QueryParams instance, setting the value of a key.
540
+
541
+ Usage:
542
+
543
+ q = httpx.QueryParams("a=123")
544
+ q = q.set("a", "456")
545
+ assert q == httpx.QueryParams("a=456")
546
+ """
547
+ q = QueryParams()
548
+ q._dict = dict(self._dict)
549
+ q._dict[str(key)] = [primitive_value_to_str(value)]
550
+ return q
551
+
552
+ def add(self, key: str, value: typing.Any = None) -> QueryParams:
553
+ """
554
+ Return a new QueryParams instance, setting or appending the value of a key.
555
+
556
+ Usage:
557
+
558
+ q = httpx.QueryParams("a=123")
559
+ q = q.add("a", "456")
560
+ assert q == httpx.QueryParams("a=123&a=456")
561
+ """
562
+ q = QueryParams()
563
+ q._dict = dict(self._dict)
564
+ q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)]
565
+ return q
566
+
567
+ def remove(self, key: str) -> QueryParams:
568
+ """
569
+ Return a new QueryParams instance, removing the value of a key.
570
+
571
+ Usage:
572
+
573
+ q = httpx.QueryParams("a=123")
574
+ q = q.remove("a")
575
+ assert q == httpx.QueryParams("")
576
+ """
577
+ q = QueryParams()
578
+ q._dict = dict(self._dict)
579
+ q._dict.pop(str(key), None)
580
+ return q
581
+
582
+ def merge(self, params: QueryParamTypes | None = None) -> QueryParams:
583
+ """
584
+ Return a new QueryParams instance, updated with.
585
+
586
+ Usage:
587
+
588
+ q = httpx.QueryParams("a=123")
589
+ q = q.merge({"b": "456"})
590
+ assert q == httpx.QueryParams("a=123&b=456")
591
+
592
+ q = httpx.QueryParams("a=123")
593
+ q = q.merge({"a": "456", "b": "789"})
594
+ assert q == httpx.QueryParams("a=456&b=789")
595
+ """
596
+ q = QueryParams(params)
597
+ q._dict = {**self._dict, **q._dict}
598
+ return q
599
+
600
+ def __getitem__(self, key: typing.Any) -> str:
601
+ return self._dict[key][0]
602
+
603
+ def __contains__(self, key: typing.Any) -> bool:
604
+ return key in self._dict
605
+
606
+ def __iter__(self) -> typing.Iterator[typing.Any]:
607
+ return iter(self.keys())
608
+
609
+ def __len__(self) -> int:
610
+ return len(self._dict)
611
+
612
+ def __bool__(self) -> bool:
613
+ return bool(self._dict)
614
+
615
+ def __hash__(self) -> int:
616
+ return hash(str(self))
617
+
618
+ def __eq__(self, other: typing.Any) -> bool:
619
+ if not isinstance(other, self.__class__):
620
+ return False
621
+ return sorted(self.multi_items()) == sorted(other.multi_items())
622
+
623
+ def __str__(self) -> str:
624
+ """
625
+ Note that we use '%20' encoding for spaces, and treat '/' as a safe
626
+ character.
627
+
628
+ See https://github.com/encode/httpx/issues/2536 and
629
+ https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode
630
+ """
631
+ return urlencode(self.multi_items())
632
+
633
+ def __repr__(self) -> str:
634
+ class_name = self.__class__.__name__
635
+ query_string = str(self)
636
+ return f"{class_name}({query_string!r})"
637
+
638
+ def update(self, params: QueryParamTypes | None = None) -> None:
639
+ raise RuntimeError(
640
+ "QueryParams are immutable since 0.18.0. "
641
+ "Use `q = q.merge(...)` to create an updated copy."
642
+ )
643
+
644
+ def __setitem__(self, key: str, value: str) -> None:
645
+ raise RuntimeError(
646
+ "QueryParams are immutable since 0.18.0. "
647
+ "Use `q = q.set(key, value)` to create an updated copy."
648
+ )
lib/python3.10/site-packages/httpx/_utils.py ADDED
@@ -0,0 +1,440 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import codecs
4
+ import email.message
5
+ import ipaddress
6
+ import mimetypes
7
+ import os
8
+ import re
9
+ import time
10
+ import typing
11
+ from pathlib import Path
12
+ from urllib.request import getproxies
13
+
14
+ import sniffio
15
+
16
+ from ._types import PrimitiveData
17
+
18
+ if typing.TYPE_CHECKING: # pragma: no cover
19
+ from ._urls import URL
20
+
21
+
22
+ _HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"}
23
+ _HTML5_FORM_ENCODING_REPLACEMENTS.update(
24
+ {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B}
25
+ )
26
+ _HTML5_FORM_ENCODING_RE = re.compile(
27
+ r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()])
28
+ )
29
+
30
+
31
+ def normalize_header_key(
32
+ value: str | bytes,
33
+ lower: bool,
34
+ encoding: str | None = None,
35
+ ) -> bytes:
36
+ """
37
+ Coerce str/bytes into a strictly byte-wise HTTP header key.
38
+ """
39
+ if isinstance(value, bytes):
40
+ bytes_value = value
41
+ else:
42
+ bytes_value = value.encode(encoding or "ascii")
43
+
44
+ return bytes_value.lower() if lower else bytes_value
45
+
46
+
47
+ def normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes:
48
+ """
49
+ Coerce str/bytes into a strictly byte-wise HTTP header value.
50
+ """
51
+ if isinstance(value, bytes):
52
+ return value
53
+ return value.encode(encoding or "ascii")
54
+
55
+
56
+ def primitive_value_to_str(value: PrimitiveData) -> str:
57
+ """
58
+ Coerce a primitive data type into a string value.
59
+
60
+ Note that we prefer JSON-style 'true'/'false' for boolean values here.
61
+ """
62
+ if value is True:
63
+ return "true"
64
+ elif value is False:
65
+ return "false"
66
+ elif value is None:
67
+ return ""
68
+ return str(value)
69
+
70
+
71
+ def is_known_encoding(encoding: str) -> bool:
72
+ """
73
+ Return `True` if `encoding` is a known codec.
74
+ """
75
+ try:
76
+ codecs.lookup(encoding)
77
+ except LookupError:
78
+ return False
79
+ return True
80
+
81
+
82
+ def format_form_param(name: str, value: str) -> bytes:
83
+ """
84
+ Encode a name/value pair within a multipart form.
85
+ """
86
+
87
+ def replacer(match: typing.Match[str]) -> str:
88
+ return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)]
89
+
90
+ value = _HTML5_FORM_ENCODING_RE.sub(replacer, value)
91
+ return f'{name}="{value}"'.encode()
92
+
93
+
94
+ def get_ca_bundle_from_env() -> str | None:
95
+ if "SSL_CERT_FILE" in os.environ:
96
+ ssl_file = Path(os.environ["SSL_CERT_FILE"])
97
+ if ssl_file.is_file():
98
+ return str(ssl_file)
99
+ if "SSL_CERT_DIR" in os.environ:
100
+ ssl_path = Path(os.environ["SSL_CERT_DIR"])
101
+ if ssl_path.is_dir():
102
+ return str(ssl_path)
103
+ return None
104
+
105
+
106
+ def parse_header_links(value: str) -> list[dict[str, str]]:
107
+ """
108
+ Returns a list of parsed link headers, for more info see:
109
+ https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link
110
+ The generic syntax of those is:
111
+ Link: < uri-reference >; param1=value1; param2="value2"
112
+ So for instance:
113
+ Link; '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;'
114
+ would return
115
+ [
116
+ {"url": "http:/.../front.jpeg", "type": "image/jpeg"},
117
+ {"url": "http://.../back.jpeg"},
118
+ ]
119
+ :param value: HTTP Link entity-header field
120
+ :return: list of parsed link headers
121
+ """
122
+ links: list[dict[str, str]] = []
123
+ replace_chars = " '\""
124
+ value = value.strip(replace_chars)
125
+ if not value:
126
+ return links
127
+ for val in re.split(", *<", value):
128
+ try:
129
+ url, params = val.split(";", 1)
130
+ except ValueError:
131
+ url, params = val, ""
132
+ link = {"url": url.strip("<> '\"")}
133
+ for param in params.split(";"):
134
+ try:
135
+ key, value = param.split("=")
136
+ except ValueError:
137
+ break
138
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
139
+ links.append(link)
140
+ return links
141
+
142
+
143
+ def parse_content_type_charset(content_type: str) -> str | None:
144
+ # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery.
145
+ # See: https://peps.python.org/pep-0594/#cgi
146
+ msg = email.message.Message()
147
+ msg["content-type"] = content_type
148
+ return msg.get_content_charset(failobj=None)
149
+
150
+
151
+ SENSITIVE_HEADERS = {"authorization", "proxy-authorization"}
152
+
153
+
154
+ def obfuscate_sensitive_headers(
155
+ items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]],
156
+ ) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]:
157
+ for k, v in items:
158
+ if to_str(k.lower()) in SENSITIVE_HEADERS:
159
+ v = to_bytes_or_str("[secure]", match_type_of=v)
160
+ yield k, v
161
+
162
+
163
+ def port_or_default(url: URL) -> int | None:
164
+ if url.port is not None:
165
+ return url.port
166
+ return {"http": 80, "https": 443}.get(url.scheme)
167
+
168
+
169
+ def same_origin(url: URL, other: URL) -> bool:
170
+ """
171
+ Return 'True' if the given URLs share the same origin.
172
+ """
173
+ return (
174
+ url.scheme == other.scheme
175
+ and url.host == other.host
176
+ and port_or_default(url) == port_or_default(other)
177
+ )
178
+
179
+
180
+ def is_https_redirect(url: URL, location: URL) -> bool:
181
+ """
182
+ Return 'True' if 'location' is a HTTPS upgrade of 'url'
183
+ """
184
+ if url.host != location.host:
185
+ return False
186
+
187
+ return (
188
+ url.scheme == "http"
189
+ and port_or_default(url) == 80
190
+ and location.scheme == "https"
191
+ and port_or_default(location) == 443
192
+ )
193
+
194
+
195
+ def get_environment_proxies() -> dict[str, str | None]:
196
+ """Gets proxy information from the environment"""
197
+
198
+ # urllib.request.getproxies() falls back on System
199
+ # Registry and Config for proxies on Windows and macOS.
200
+ # We don't want to propagate non-HTTP proxies into
201
+ # our configuration such as 'TRAVIS_APT_PROXY'.
202
+ proxy_info = getproxies()
203
+ mounts: dict[str, str | None] = {}
204
+
205
+ for scheme in ("http", "https", "all"):
206
+ if proxy_info.get(scheme):
207
+ hostname = proxy_info[scheme]
208
+ mounts[f"{scheme}://"] = (
209
+ hostname if "://" in hostname else f"http://{hostname}"
210
+ )
211
+
212
+ no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")]
213
+ for hostname in no_proxy_hosts:
214
+ # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details
215
+ # on how names in `NO_PROXY` are handled.
216
+ if hostname == "*":
217
+ # If NO_PROXY=* is used or if "*" occurs as any one of the comma
218
+ # separated hostnames, then we should just bypass any information
219
+ # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore
220
+ # proxies.
221
+ return {}
222
+ elif hostname:
223
+ # NO_PROXY=.google.com is marked as "all://*.google.com,
224
+ # which disables "www.google.com" but not "google.com"
225
+ # NO_PROXY=google.com is marked as "all://*google.com,
226
+ # which disables "www.google.com" and "google.com".
227
+ # (But not "wwwgoogle.com")
228
+ # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost"
229
+ # NO_PROXY=example.com,::1,localhost,192.168.0.0/16
230
+ if "://" in hostname:
231
+ mounts[hostname] = None
232
+ elif is_ipv4_hostname(hostname):
233
+ mounts[f"all://{hostname}"] = None
234
+ elif is_ipv6_hostname(hostname):
235
+ mounts[f"all://[{hostname}]"] = None
236
+ elif hostname.lower() == "localhost":
237
+ mounts[f"all://{hostname}"] = None
238
+ else:
239
+ mounts[f"all://*{hostname}"] = None
240
+
241
+ return mounts
242
+
243
+
244
+ def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes:
245
+ return value.encode(encoding) if isinstance(value, str) else value
246
+
247
+
248
+ def to_str(value: str | bytes, encoding: str = "utf-8") -> str:
249
+ return value if isinstance(value, str) else value.decode(encoding)
250
+
251
+
252
+ def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr:
253
+ return value if isinstance(match_type_of, str) else value.encode()
254
+
255
+
256
+ def unquote(value: str) -> str:
257
+ return value[1:-1] if value[0] == value[-1] == '"' else value
258
+
259
+
260
+ def guess_content_type(filename: str | None) -> str | None:
261
+ if filename:
262
+ return mimetypes.guess_type(filename)[0] or "application/octet-stream"
263
+ return None
264
+
265
+
266
+ def peek_filelike_length(stream: typing.Any) -> int | None:
267
+ """
268
+ Given a file-like stream object, return its length in number of bytes
269
+ without reading it into memory.
270
+ """
271
+ try:
272
+ # Is it an actual file?
273
+ fd = stream.fileno()
274
+ # Yup, seems to be an actual file.
275
+ length = os.fstat(fd).st_size
276
+ except (AttributeError, OSError):
277
+ # No... Maybe it's something that supports random access, like `io.BytesIO`?
278
+ try:
279
+ # Assuming so, go to end of stream to figure out its length,
280
+ # then put it back in place.
281
+ offset = stream.tell()
282
+ length = stream.seek(0, os.SEEK_END)
283
+ stream.seek(offset)
284
+ except (AttributeError, OSError):
285
+ # Not even that? Sorry, we're doomed...
286
+ return None
287
+
288
+ return length
289
+
290
+
291
+ class Timer:
292
+ async def _get_time(self) -> float:
293
+ library = sniffio.current_async_library()
294
+ if library == "trio":
295
+ import trio
296
+
297
+ return trio.current_time()
298
+ else:
299
+ import asyncio
300
+
301
+ return asyncio.get_event_loop().time()
302
+
303
+ def sync_start(self) -> None:
304
+ self.started = time.perf_counter()
305
+
306
+ async def async_start(self) -> None:
307
+ self.started = await self._get_time()
308
+
309
+ def sync_elapsed(self) -> float:
310
+ now = time.perf_counter()
311
+ return now - self.started
312
+
313
+ async def async_elapsed(self) -> float:
314
+ now = await self._get_time()
315
+ return now - self.started
316
+
317
+
318
+ class URLPattern:
319
+ """
320
+ A utility class currently used for making lookups against proxy keys...
321
+
322
+ # Wildcard matching...
323
+ >>> pattern = URLPattern("all://")
324
+ >>> pattern.matches(httpx.URL("http://example.com"))
325
+ True
326
+
327
+ # Witch scheme matching...
328
+ >>> pattern = URLPattern("https://")
329
+ >>> pattern.matches(httpx.URL("https://example.com"))
330
+ True
331
+ >>> pattern.matches(httpx.URL("http://example.com"))
332
+ False
333
+
334
+ # With domain matching...
335
+ >>> pattern = URLPattern("https://example.com")
336
+ >>> pattern.matches(httpx.URL("https://example.com"))
337
+ True
338
+ >>> pattern.matches(httpx.URL("http://example.com"))
339
+ False
340
+ >>> pattern.matches(httpx.URL("https://other.com"))
341
+ False
342
+
343
+ # Wildcard scheme, with domain matching...
344
+ >>> pattern = URLPattern("all://example.com")
345
+ >>> pattern.matches(httpx.URL("https://example.com"))
346
+ True
347
+ >>> pattern.matches(httpx.URL("http://example.com"))
348
+ True
349
+ >>> pattern.matches(httpx.URL("https://other.com"))
350
+ False
351
+
352
+ # With port matching...
353
+ >>> pattern = URLPattern("https://example.com:1234")
354
+ >>> pattern.matches(httpx.URL("https://example.com:1234"))
355
+ True
356
+ >>> pattern.matches(httpx.URL("https://example.com"))
357
+ False
358
+ """
359
+
360
+ def __init__(self, pattern: str) -> None:
361
+ from ._urls import URL
362
+
363
+ if pattern and ":" not in pattern:
364
+ raise ValueError(
365
+ f"Proxy keys should use proper URL forms rather "
366
+ f"than plain scheme strings. "
367
+ f'Instead of "{pattern}", use "{pattern}://"'
368
+ )
369
+
370
+ url = URL(pattern)
371
+ self.pattern = pattern
372
+ self.scheme = "" if url.scheme == "all" else url.scheme
373
+ self.host = "" if url.host == "*" else url.host
374
+ self.port = url.port
375
+ if not url.host or url.host == "*":
376
+ self.host_regex: typing.Pattern[str] | None = None
377
+ elif url.host.startswith("*."):
378
+ # *.example.com should match "www.example.com", but not "example.com"
379
+ domain = re.escape(url.host[2:])
380
+ self.host_regex = re.compile(f"^.+\\.{domain}$")
381
+ elif url.host.startswith("*"):
382
+ # *example.com should match "www.example.com" and "example.com"
383
+ domain = re.escape(url.host[1:])
384
+ self.host_regex = re.compile(f"^(.+\\.)?{domain}$")
385
+ else:
386
+ # example.com should match "example.com" but not "www.example.com"
387
+ domain = re.escape(url.host)
388
+ self.host_regex = re.compile(f"^{domain}$")
389
+
390
+ def matches(self, other: URL) -> bool:
391
+ if self.scheme and self.scheme != other.scheme:
392
+ return False
393
+ if (
394
+ self.host
395
+ and self.host_regex is not None
396
+ and not self.host_regex.match(other.host)
397
+ ):
398
+ return False
399
+ if self.port is not None and self.port != other.port:
400
+ return False
401
+ return True
402
+
403
+ @property
404
+ def priority(self) -> tuple[int, int, int]:
405
+ """
406
+ The priority allows URLPattern instances to be sortable, so that
407
+ we can match from most specific to least specific.
408
+ """
409
+ # URLs with a port should take priority over URLs without a port.
410
+ port_priority = 0 if self.port is not None else 1
411
+ # Longer hostnames should match first.
412
+ host_priority = -len(self.host)
413
+ # Longer schemes should match first.
414
+ scheme_priority = -len(self.scheme)
415
+ return (port_priority, host_priority, scheme_priority)
416
+
417
+ def __hash__(self) -> int:
418
+ return hash(self.pattern)
419
+
420
+ def __lt__(self, other: URLPattern) -> bool:
421
+ return self.priority < other.priority
422
+
423
+ def __eq__(self, other: typing.Any) -> bool:
424
+ return isinstance(other, URLPattern) and self.pattern == other.pattern
425
+
426
+
427
+ def is_ipv4_hostname(hostname: str) -> bool:
428
+ try:
429
+ ipaddress.IPv4Address(hostname.split("/")[0])
430
+ except Exception:
431
+ return False
432
+ return True
433
+
434
+
435
+ def is_ipv6_hostname(hostname: str) -> bool:
436
+ try:
437
+ ipaddress.IPv6Address(hostname.split("/")[0])
438
+ except Exception:
439
+ return False
440
+ return True
lib/python3.10/site-packages/httpx/py.typed ADDED
File without changes
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/INSTALLER ADDED
@@ -0,0 +1 @@
 
 
1
+ uv
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/LICENSE.txt ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Unless stated otherwise in the source files, all code is copyright 2010 David
2
+ Wolever <david@wolever.net>. All rights reserved.
3
+
4
+ Redistribution and use in source and binary forms, with or without
5
+ modification, are permitted provided that the following conditions are met:
6
+
7
+ 1. Redistributions of source code must retain the above copyright notice,
8
+ this list of conditions and the following disclaimer.
9
+
10
+ 2. Redistributions in binary form must reproduce the above copyright notice,
11
+ this list of conditions and the following disclaimer in the documentation
12
+ and/or other materials provided with the distribution.
13
+
14
+ THIS SOFTWARE IS PROVIDED BY DAVID WOLEVER ``AS IS'' AND ANY EXPRESS OR
15
+ IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
16
+ MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
17
+ EVENT SHALL DAVID WOLEVER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
18
+ INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
19
+ BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
20
+ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
21
+ LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
22
+ OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
23
+ ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
24
+
25
+ The views and conclusions contained in the software and documentation are those
26
+ of the authors and should not be interpreted as representing official policies,
27
+ either expressed or implied, of David Wolever.
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/METADATA ADDED
@@ -0,0 +1,669 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Metadata-Version: 2.1
2
+ Name: parameterized
3
+ Version: 0.9.0
4
+ Summary: Parameterized testing with any Python test framework
5
+ Author-email: David Wolever <david@wolever.net>
6
+ License: FreeBSD
7
+ Project-URL: Homepage, https://github.com/wolever/parameterized
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: License :: OSI Approved :: BSD License
10
+ Requires-Python: >=3.7
11
+ Description-Content-Type: text/x-rst
12
+ License-File: LICENSE.txt
13
+ Provides-Extra: dev
14
+ Requires-Dist: jinja2 ; extra == 'dev'
15
+
16
+ Parameterized testing with any Python test framework
17
+ ====================================================
18
+
19
+ .. image:: https://img.shields.io/pypi/v/parameterized
20
+ :alt: PyPI
21
+ :target: https://pypi.org/project/parameterized/
22
+
23
+ .. image:: https://img.shields.io/pypi/dm/parameterized
24
+ :alt: PyPI - Downloads
25
+ :target: https://pypi.org/project/parameterized/
26
+
27
+ .. image:: https://circleci.com/gh/wolever/parameterized.svg?style=svg
28
+ :alt: Circle CI
29
+ :target: https://circleci.com/gh/wolever/parameterized
30
+
31
+
32
+ Parameterized testing in Python sucks.
33
+
34
+ ``parameterized`` fixes that. For everything. Parameterized testing for nose,
35
+ parameterized testing for py.test, parameterized testing for unittest.
36
+
37
+ .. code:: python
38
+
39
+ # test_math.py
40
+ from nose.tools import assert_equal
41
+ from parameterized import parameterized, parameterized_class
42
+
43
+ import unittest
44
+ import math
45
+
46
+ @parameterized([
47
+ (2, 2, 4),
48
+ (2, 3, 8),
49
+ (1, 9, 1),
50
+ (0, 9, 0),
51
+ ])
52
+ def test_pow(base, exponent, expected):
53
+ assert_equal(math.pow(base, exponent), expected)
54
+
55
+ class TestMathUnitTest(unittest.TestCase):
56
+ @parameterized.expand([
57
+ ("negative", -1.5, -2.0),
58
+ ("integer", 1, 1.0),
59
+ ("large fraction", 1.6, 1),
60
+ ])
61
+ def test_floor(self, name, input, expected):
62
+ assert_equal(math.floor(input), expected)
63
+
64
+ @parameterized_class(('a', 'b', 'expected_sum', 'expected_product'), [
65
+ (1, 2, 3, 2),
66
+ (5, 5, 10, 25),
67
+ ])
68
+ class TestMathClass(unittest.TestCase):
69
+ def test_add(self):
70
+ assert_equal(self.a + self.b, self.expected_sum)
71
+
72
+ def test_multiply(self):
73
+ assert_equal(self.a * self.b, self.expected_product)
74
+
75
+ @parameterized_class([
76
+ { "a": 3, "expected": 2 },
77
+ { "b": 5, "expected": -4 },
78
+ ])
79
+ class TestMathClassDict(unittest.TestCase):
80
+ a = 1
81
+ b = 1
82
+
83
+ def test_subtract(self):
84
+ assert_equal(self.a - self.b, self.expected)
85
+
86
+
87
+ With nose (and nose2)::
88
+
89
+ $ nosetests -v test_math.py
90
+ test_floor_0_negative (test_math.TestMathUnitTest) ... ok
91
+ test_floor_1_integer (test_math.TestMathUnitTest) ... ok
92
+ test_floor_2_large_fraction (test_math.TestMathUnitTest) ... ok
93
+ test_math.test_pow(2, 2, 4, {}) ... ok
94
+ test_math.test_pow(2, 3, 8, {}) ... ok
95
+ test_math.test_pow(1, 9, 1, {}) ... ok
96
+ test_math.test_pow(0, 9, 0, {}) ... ok
97
+ test_add (test_math.TestMathClass_0) ... ok
98
+ test_multiply (test_math.TestMathClass_0) ... ok
99
+ test_add (test_math.TestMathClass_1) ... ok
100
+ test_multiply (test_math.TestMathClass_1) ... ok
101
+ test_subtract (test_math.TestMathClassDict_0) ... ok
102
+
103
+ ----------------------------------------------------------------------
104
+ Ran 12 tests in 0.015s
105
+
106
+ OK
107
+
108
+ As the package name suggests, nose is best supported and will be used for all
109
+ further examples.
110
+
111
+
112
+ With py.test (version 2.0 and above)::
113
+
114
+ $ py.test -v test_math.py
115
+ ============================= test session starts ==============================
116
+ platform darwin -- Python 3.6.1, pytest-3.1.3, py-1.4.34, pluggy-0.4.0
117
+ collecting ... collected 13 items
118
+
119
+ test_math.py::test_pow::[0] PASSED
120
+ test_math.py::test_pow::[1] PASSED
121
+ test_math.py::test_pow::[2] PASSED
122
+ test_math.py::test_pow::[3] PASSED
123
+ test_math.py::TestMathUnitTest::test_floor_0_negative PASSED
124
+ test_math.py::TestMathUnitTest::test_floor_1_integer PASSED
125
+ test_math.py::TestMathUnitTest::test_floor_2_large_fraction PASSED
126
+ test_math.py::TestMathClass_0::test_add PASSED
127
+ test_math.py::TestMathClass_0::test_multiply PASSED
128
+ test_math.py::TestMathClass_1::test_add PASSED
129
+ test_math.py::TestMathClass_1::test_multiply PASSED
130
+ test_math.py::TestMathClassDict_0::test_subtract PASSED
131
+ ==================== 12 passed, 4 warnings in 0.16 seconds =====================
132
+
133
+ With unittest (and unittest2)::
134
+
135
+ $ python -m unittest -v test_math
136
+ test_floor_0_negative (test_math.TestMathUnitTest) ... ok
137
+ test_floor_1_integer (test_math.TestMathUnitTest) ... ok
138
+ test_floor_2_large_fraction (test_math.TestMathUnitTest) ... ok
139
+ test_add (test_math.TestMathClass_0) ... ok
140
+ test_multiply (test_math.TestMathClass_0) ... ok
141
+ test_add (test_math.TestMathClass_1) ... ok
142
+ test_multiply (test_math.TestMathClass_1) ... ok
143
+ test_subtract (test_math.TestMathClassDict_0) ... ok
144
+
145
+ ----------------------------------------------------------------------
146
+ Ran 8 tests in 0.001s
147
+
148
+ OK
149
+
150
+ (note: because unittest does not support test decorators, only tests created
151
+ with ``@parameterized.expand`` will be executed)
152
+
153
+ With green::
154
+
155
+ $ green test_math.py -vvv
156
+ test_math
157
+ TestMathClass_1
158
+ . test_method_a
159
+ . test_method_b
160
+ TestMathClass_2
161
+ . test_method_a
162
+ . test_method_b
163
+ TestMathClass_3
164
+ . test_method_a
165
+ . test_method_b
166
+ TestMathUnitTest
167
+ . test_floor_0_negative
168
+ . test_floor_1_integer
169
+ . test_floor_2_large_fraction
170
+ TestMathClass_0
171
+ . test_add
172
+ . test_multiply
173
+ TestMathClass_1
174
+ . test_add
175
+ . test_multiply
176
+ TestMathClassDict_0
177
+ . test_subtract
178
+
179
+ Ran 12 tests in 0.121s
180
+
181
+ OK (passes=9)
182
+
183
+
184
+ Installation
185
+ ------------
186
+
187
+ ::
188
+
189
+ $ pip install parameterized
190
+
191
+
192
+ Compatibility
193
+ -------------
194
+
195
+ `Yes`__ (mostly).
196
+
197
+ __ https://app.circleci.com/pipelines/github/wolever/parameterized?branch=master
198
+
199
+ .. list-table::
200
+ :header-rows: 1
201
+ :stub-columns: 1
202
+
203
+ * -
204
+ - Py3.7
205
+ - Py3.8
206
+ - Py3.9
207
+ - Py3.10
208
+ - Py3.11
209
+ - PyPy3
210
+ - ``@mock.patch``
211
+ * - nose
212
+ - yes
213
+ - yes
214
+ - yes
215
+ - yes
216
+ - no§
217
+ - no§
218
+ - yes
219
+ * - nose2
220
+ - yes
221
+ - yes
222
+ - yes
223
+ - yes
224
+ - yes
225
+ - yes
226
+ - yes
227
+ * - py.test 2
228
+ - no*
229
+ - no*
230
+ - no*
231
+ - no*
232
+ - no*
233
+ - no*
234
+ - no*
235
+ * - py.test 3
236
+ - yes
237
+ - yes
238
+ - yes
239
+ - yes
240
+ - no*
241
+ - no*
242
+ - yes
243
+ * - py.test 4
244
+ - no**
245
+ - no**
246
+ - no**
247
+ - no**
248
+ - no**
249
+ - no**
250
+ - no**
251
+ * - py.test fixtures
252
+ - no†
253
+ - no†
254
+ - no†
255
+ - no†
256
+ - no†
257
+ - no†
258
+ - no†
259
+ * - | unittest
260
+ | (``@parameterized.expand``)
261
+ - yes
262
+ - yes
263
+ - yes
264
+ - yes
265
+ - yes
266
+ - yes
267
+ - yes
268
+ * - | unittest2
269
+ | (``@parameterized.expand``)
270
+ - yes
271
+ - yes
272
+ - yes
273
+ - yes
274
+ - no§
275
+ - no§
276
+ - yes
277
+
278
+ §: nose and unittest2 - both of which were last updated in 2015 - sadly do not
279
+ appear to support Python 3.10 or 3.11.
280
+
281
+ \*: `py.test 2 does not appear to work under Python 3 (#71)`__, and
282
+ `py.test 3 does not appear to work under Python 3.10 or 3.11 (#154)`__.
283
+
284
+ \*\*: py.test 4 is not yet supported (but coming!) in `issue #34`__
285
+
286
+ †: py.test fixture support is documented in `issue #81`__
287
+
288
+
289
+ __ https://github.com/wolever/parameterized/issues/71
290
+ __ https://github.com/wolever/parameterized/issues/154
291
+ __ https://github.com/wolever/parameterized/issues/34
292
+ __ https://github.com/wolever/parameterized/issues/81
293
+
294
+ Dependencies
295
+ ------------
296
+
297
+ (this section left intentionally blank)
298
+
299
+
300
+ Exhaustive Usage Examples
301
+ --------------------------
302
+
303
+ The ``@parameterized`` and ``@parameterized.expand`` decorators accept a list
304
+ or iterable of tuples or ``param(...)``, or a callable which returns a list or
305
+ iterable:
306
+
307
+ .. code:: python
308
+
309
+ from parameterized import parameterized, param
310
+
311
+ # A list of tuples
312
+ @parameterized([
313
+ (2, 3, 5),
314
+ (3, 5, 8),
315
+ ])
316
+ def test_add(a, b, expected):
317
+ assert_equal(a + b, expected)
318
+
319
+ # A list of params
320
+ @parameterized([
321
+ param("10", 10),
322
+ param("10", 16, base=16),
323
+ ])
324
+ def test_int(str_val, expected, base=10):
325
+ assert_equal(int(str_val, base=base), expected)
326
+
327
+ # An iterable of params
328
+ @parameterized(
329
+ param.explicit(*json.loads(line))
330
+ for line in open("testcases.jsons")
331
+ )
332
+ def test_from_json_file(...):
333
+ ...
334
+
335
+ # A callable which returns a list of tuples
336
+ def load_test_cases():
337
+ return [
338
+ ("test1", ),
339
+ ("test2", ),
340
+ ]
341
+ @parameterized(load_test_cases)
342
+ def test_from_function(name):
343
+ ...
344
+
345
+ .. **
346
+
347
+ Note that, when using an iterator or a generator, all the items will be loaded
348
+ into memory before the start of the test run (we do this explicitly to ensure
349
+ that generators are exhausted exactly once in multi-process or multi-threaded
350
+ testing environments).
351
+
352
+ The ``@parameterized`` decorator can be used test class methods, and standalone
353
+ functions:
354
+
355
+ .. code:: python
356
+
357
+ from parameterized import parameterized
358
+
359
+ class AddTest(object):
360
+ @parameterized([
361
+ (2, 3, 5),
362
+ ])
363
+ def test_add(self, a, b, expected):
364
+ assert_equal(a + b, expected)
365
+
366
+ @parameterized([
367
+ (2, 3, 5),
368
+ ])
369
+ def test_add(a, b, expected):
370
+ assert_equal(a + b, expected)
371
+
372
+
373
+ And ``@parameterized.expand`` can be used to generate test methods in
374
+ situations where test generators cannot be used (for example, when the test
375
+ class is a subclass of ``unittest.TestCase``):
376
+
377
+ .. code:: python
378
+
379
+ import unittest
380
+ from parameterized import parameterized
381
+
382
+ class AddTestCase(unittest.TestCase):
383
+ @parameterized.expand([
384
+ ("2 and 3", 2, 3, 5),
385
+ ("3 and 5", 3, 5, 8),
386
+ ])
387
+ def test_add(self, _, a, b, expected):
388
+ assert_equal(a + b, expected)
389
+
390
+ Will create the test cases::
391
+
392
+ $ nosetests example.py
393
+ test_add_0_2_and_3 (example.AddTestCase) ... ok
394
+ test_add_1_3_and_5 (example.AddTestCase) ... ok
395
+
396
+ ----------------------------------------------------------------------
397
+ Ran 2 tests in 0.001s
398
+
399
+ OK
400
+
401
+ Note that ``@parameterized.expand`` works by creating new methods on the test
402
+ class. If the first parameter is a string, that string will be added to the end
403
+ of the method name. For example, the test case above will generate the methods
404
+ ``test_add_0_2_and_3`` and ``test_add_1_3_and_5``.
405
+
406
+ The names of the test cases generated by ``@parameterized.expand`` can be
407
+ customized using the ``name_func`` keyword argument. The value should
408
+ be a function which accepts three arguments: ``testcase_func``, ``param_num``,
409
+ and ``params``, and it should return the name of the test case.
410
+ ``testcase_func`` will be the function to be tested, ``param_num`` will be the
411
+ index of the test case parameters in the list of parameters, and ``param``
412
+ (an instance of ``param``) will be the parameters which will be used.
413
+
414
+ .. code:: python
415
+
416
+ import unittest
417
+ from parameterized import parameterized
418
+
419
+ def custom_name_func(testcase_func, param_num, param):
420
+ return "%s_%s" %(
421
+ testcase_func.__name__,
422
+ parameterized.to_safe_name("_".join(str(x) for x in param.args)),
423
+ )
424
+
425
+ class AddTestCase(unittest.TestCase):
426
+ @parameterized.expand([
427
+ (2, 3, 5),
428
+ (2, 3, 5),
429
+ ], name_func=custom_name_func)
430
+ def test_add(self, a, b, expected):
431
+ assert_equal(a + b, expected)
432
+
433
+ Will create the test cases::
434
+
435
+ $ nosetests example.py
436
+ test_add_1_2_3 (example.AddTestCase) ... ok
437
+ test_add_2_3_5 (example.AddTestCase) ... ok
438
+
439
+ ----------------------------------------------------------------------
440
+ Ran 2 tests in 0.001s
441
+
442
+ OK
443
+
444
+
445
+ The ``param(...)`` helper class stores the parameters for one specific test
446
+ case. It can be used to pass keyword arguments to test cases:
447
+
448
+ .. code:: python
449
+
450
+ from parameterized import parameterized, param
451
+
452
+ @parameterized([
453
+ param("10", 10),
454
+ param("10", 16, base=16),
455
+ ])
456
+ def test_int(str_val, expected, base=10):
457
+ assert_equal(int(str_val, base=base), expected)
458
+
459
+
460
+ If test cases have a docstring, the parameters for that test case will be
461
+ appended to the first line of the docstring. This behavior can be controlled
462
+ with the ``doc_func`` argument:
463
+
464
+ .. code:: python
465
+
466
+ from parameterized import parameterized
467
+
468
+ @parameterized([
469
+ (1, 2, 3),
470
+ (4, 5, 9),
471
+ ])
472
+ def test_add(a, b, expected):
473
+ """ Test addition. """
474
+ assert_equal(a + b, expected)
475
+
476
+ def my_doc_func(func, num, param):
477
+ return "%s: %s with %s" %(num, func.__name__, param)
478
+
479
+ @parameterized([
480
+ (5, 4, 1),
481
+ (9, 6, 3),
482
+ ], doc_func=my_doc_func)
483
+ def test_subtraction(a, b, expected):
484
+ assert_equal(a - b, expected)
485
+
486
+ ::
487
+
488
+ $ nosetests example.py
489
+ Test addition. [with a=1, b=2, expected=3] ... ok
490
+ Test addition. [with a=4, b=5, expected=9] ... ok
491
+ 0: test_subtraction with param(*(5, 4, 1)) ... ok
492
+ 1: test_subtraction with param(*(9, 6, 3)) ... ok
493
+
494
+ ----------------------------------------------------------------------
495
+ Ran 4 tests in 0.001s
496
+
497
+ OK
498
+
499
+ Finally ``@parameterized_class`` parameterizes an entire class, using
500
+ either a list of attributes, or a list of dicts that will be applied to the
501
+ class:
502
+
503
+ .. code:: python
504
+
505
+ from yourapp.models import User
506
+ from parameterized import parameterized_class
507
+
508
+ @parameterized_class([
509
+ { "username": "user_1", "access_level": 1 },
510
+ { "username": "user_2", "access_level": 2, "expected_status_code": 404 },
511
+ ])
512
+ class TestUserAccessLevel(TestCase):
513
+ expected_status_code = 200
514
+
515
+ def setUp(self):
516
+ self.client.force_login(User.objects.get(username=self.username)[0])
517
+
518
+ def test_url_a(self):
519
+ response = self.client.get('/url')
520
+ self.assertEqual(response.status_code, self.expected_status_code)
521
+
522
+ def tearDown(self):
523
+ self.client.logout()
524
+
525
+
526
+ @parameterized_class(("username", "access_level", "expected_status_code"), [
527
+ ("user_1", 1, 200),
528
+ ("user_2", 2, 404)
529
+ ])
530
+ class TestUserAccessLevel(TestCase):
531
+ def setUp(self):
532
+ self.client.force_login(User.objects.get(username=self.username)[0])
533
+
534
+ def test_url_a(self):
535
+ response = self.client.get("/url")
536
+ self.assertEqual(response.status_code, self.expected_status_code)
537
+
538
+ def tearDown(self):
539
+ self.client.logout()
540
+
541
+
542
+ The ``@parameterized_class`` decorator accepts a ``class_name_func`` argument,
543
+ which controls the name of the parameterized classes generated by
544
+ ``@parameterized_class``:
545
+
546
+ .. code:: python
547
+
548
+ from parameterized import parameterized, parameterized_class
549
+
550
+ def get_class_name(cls, num, params_dict):
551
+ # By default the generated class named includes either the "name"
552
+ # parameter (if present), or the first string value. This example shows
553
+ # multiple parameters being included in the generated class name:
554
+ return "%s_%s_%s%s" %(
555
+ cls.__name__,
556
+ num,
557
+ parameterized.to_safe_name(params_dict['a']),
558
+ parameterized.to_safe_name(params_dict['b']),
559
+ )
560
+
561
+ @parameterized_class([
562
+ { "a": "hello", "b": " world!", "expected": "hello world!" },
563
+ { "a": "say ", "b": " cheese :)", "expected": "say cheese :)" },
564
+ ], class_name_func=get_class_name)
565
+ class TestConcatenation(TestCase):
566
+ def test_concat(self):
567
+ self.assertEqual(self.a + self.b, self.expected)
568
+
569
+ ::
570
+
571
+ $ nosetests -v test_math.py
572
+ test_concat (test_concat.TestConcatenation_0_hello_world_) ... ok
573
+ test_concat (test_concat.TestConcatenation_0_say_cheese__) ... ok
574
+
575
+
576
+
577
+ Using with Single Parameters
578
+ ............................
579
+
580
+ If a test function only accepts one parameter and the value is not iterable,
581
+ then it is possible to supply a list of values without wrapping each one in a
582
+ tuple:
583
+
584
+ .. code:: python
585
+
586
+ @parameterized([1, 2, 3])
587
+ def test_greater_than_zero(value):
588
+ assert value > 0
589
+
590
+ Note, however, that if the single parameter *is* iterable (such as a list or
591
+ tuple), then it *must* be wrapped in a tuple, list, or the ``param(...)``
592
+ helper:
593
+
594
+ .. code:: python
595
+
596
+ @parameterized([
597
+ ([1, 2, 3], ),
598
+ ([3, 3], ),
599
+ ([6], ),
600
+ ])
601
+ def test_sums_to_6(numbers):
602
+ assert sum(numbers) == 6
603
+
604
+ (note, also, that Python requires single element tuples to be defined with a
605
+ trailing comma: ``(foo, )``)
606
+
607
+
608
+ Using with ``@mock.patch``
609
+ ..........................
610
+
611
+ ``parameterized`` can be used with ``mock.patch``, but the argument ordering
612
+ can be confusing. The ``@mock.patch(...)`` decorator must come *below* the
613
+ ``@parameterized(...)``, and the mocked parameters must come *last*:
614
+
615
+ .. code:: python
616
+
617
+ @mock.patch("os.getpid")
618
+ class TestOS(object):
619
+ @parameterized(...)
620
+ @mock.patch("os.fdopen")
621
+ @mock.patch("os.umask")
622
+ def test_method(self, param1, param2, ..., mock_umask, mock_fdopen, mock_getpid):
623
+ ...
624
+
625
+ Note: the same holds true when using ``@parameterized.expand``.
626
+
627
+
628
+ Migrating from ``nose-parameterized`` to ``parameterized``
629
+ ----------------------------------------------------------
630
+
631
+ To migrate a codebase from ``nose-parameterized`` to ``parameterized``:
632
+
633
+ 1. Update your requirements file, replacing ``nose-parameterized`` with
634
+ ``parameterized``.
635
+
636
+ 2. Replace all references to ``nose_parameterized`` with ``parameterized``::
637
+
638
+ $ perl -pi -e 's/nose_parameterized/parameterized/g' your-codebase/
639
+
640
+ 3. You're done!
641
+
642
+
643
+ FAQ
644
+ ---
645
+
646
+ What happened to Python 2.X, 3.5, and 3.6 support?
647
+ As of version 0.9.0, ``parameterized`` no longer supports Python 2.X, 3.5,
648
+ or 3.6. Previous versions of ``parameterized`` - 0.8.1 being the latest -
649
+ will continue to work, but will not receive any new features or bug fixes.
650
+
651
+ What do you mean when you say "nose is best supported"?
652
+ There are small caveates with ``py.test`` and ``unittest``: ``py.test``
653
+ does not show the parameter values (ex, it will show ``test_add[0]``
654
+ instead of ``test_add[1, 2, 3]``), and ``unittest``/``unittest2`` do not
655
+ support test generators so ``@parameterized.expand`` must be used.
656
+
657
+ Why not use ``@pytest.mark.parametrize``?
658
+ Because spelling is difficult. Also, ``parameterized`` doesn't require you
659
+ to repeat argument names, and (using ``param``) it supports optional
660
+ keyword arguments.
661
+
662
+ Why do I get an ``AttributeError: 'function' object has no attribute 'expand'`` with ``@parameterized.expand``?
663
+ You've likely installed the ``parametrized`` (note the missing *e*)
664
+ package. Use ``parameterized`` (with the *e*) instead and you'll be all
665
+ set.
666
+
667
+ What happened to ``nose-parameterized``?
668
+ Originally only nose was supported. But now everything is supported, and it
669
+ only made sense to change the name!
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/RECORD ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ parameterized-0.9.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2
2
+ parameterized-0.9.0.dist-info/LICENSE.txt,sha256=Aeb_aptOwAmK50RKx02snQgwzCm2T3GkGh650rsS4i4,1558
3
+ parameterized-0.9.0.dist-info/METADATA,sha256=2v3-4en7GIKOJAvstR3gjYIyQ_mnXNdbO3x997_a6ag,18918
4
+ parameterized-0.9.0.dist-info/RECORD,,
5
+ parameterized-0.9.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ parameterized-0.9.0.dist-info/WHEEL,sha256=a-zpFRIJzOq5QfuhBzbhiA1eHTzNCJn8OdRvhdNX0Rk,110
7
+ parameterized-0.9.0.dist-info/top_level.txt,sha256=FmnwidrcOm0vumnYZSgKXqAcpDIeC3Oz1mjMW006-3s,14
8
+ parameterized/__init__.py,sha256=uSoz1WwU2FP4DZwxyEZQSNys4GX27hfXU74ogSMvO6w,92
9
+ parameterized/parameterized.py,sha256=Ee8Sc8sFXZ98jEHuVa1mQLUICmphxtMBHovgBdprL_M,26511
10
+ parameterized/test.py,sha256=wCjfzDqiZy0_zdrBkvfAyyLXC6CsjVvTbq73CJlDDqk,23914
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/REQUESTED ADDED
File without changes
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/WHEEL ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ Wheel-Version: 1.0
2
+ Generator: bdist_wheel (0.40.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py2-none-any
5
+ Tag: py3-none-any
6
+
lib/python3.10/site-packages/parameterized-0.9.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ parameterized
lib/python3.10/site-packages/pyasn1_modules/rfc2315.py ADDED
@@ -0,0 +1,294 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # This file is part of pyasn1-modules software.
3
+ #
4
+ # Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
5
+ # License: http://snmplabs.com/pyasn1/license.html
6
+ #
7
+ # PKCS#7 message syntax
8
+ #
9
+ # ASN.1 source from:
10
+ # https://opensource.apple.com/source/Security/Security-55179.1/libsecurity_asn1/asn1/pkcs7.asn.auto.html
11
+ #
12
+ # Sample captures from:
13
+ # openssl crl2pkcs7 -nocrl -certfile cert1.cer -out outfile.p7b
14
+ #
15
+ from pyasn1_modules.rfc2459 import *
16
+
17
+
18
+ class Attribute(univ.Sequence):
19
+ componentType = namedtype.NamedTypes(
20
+ namedtype.NamedType('type', AttributeType()),
21
+ namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue()))
22
+ )
23
+
24
+
25
+ class AttributeValueAssertion(univ.Sequence):
26
+ componentType = namedtype.NamedTypes(
27
+ namedtype.NamedType('attributeType', AttributeType()),
28
+ namedtype.NamedType('attributeValue', AttributeValue(),
29
+ openType=opentype.OpenType('type', certificateAttributesMap))
30
+ )
31
+
32
+
33
+ pkcs_7 = univ.ObjectIdentifier('1.2.840.113549.1.7')
34
+ data = univ.ObjectIdentifier('1.2.840.113549.1.7.1')
35
+ signedData = univ.ObjectIdentifier('1.2.840.113549.1.7.2')
36
+ envelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.3')
37
+ signedAndEnvelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.4')
38
+ digestedData = univ.ObjectIdentifier('1.2.840.113549.1.7.5')
39
+ encryptedData = univ.ObjectIdentifier('1.2.840.113549.1.7.6')
40
+
41
+
42
+ class ContentType(univ.ObjectIdentifier):
43
+ pass
44
+
45
+
46
+ class ContentEncryptionAlgorithmIdentifier(AlgorithmIdentifier):
47
+ pass
48
+
49
+
50
+ class EncryptedContent(univ.OctetString):
51
+ pass
52
+
53
+
54
+ contentTypeMap = {}
55
+
56
+
57
+ class EncryptedContentInfo(univ.Sequence):
58
+ componentType = namedtype.NamedTypes(
59
+ namedtype.NamedType('contentType', ContentType()),
60
+ namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()),
61
+ namedtype.OptionalNamedType(
62
+ 'encryptedContent', EncryptedContent().subtype(
63
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)
64
+ ),
65
+ openType=opentype.OpenType('contentType', contentTypeMap)
66
+ )
67
+ )
68
+
69
+
70
+ class Version(univ.Integer): # overrides x509.Version
71
+ pass
72
+
73
+
74
+ class EncryptedData(univ.Sequence):
75
+ componentType = namedtype.NamedTypes(
76
+ namedtype.NamedType('version', Version()),
77
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo())
78
+ )
79
+
80
+
81
+ class DigestAlgorithmIdentifier(AlgorithmIdentifier):
82
+ pass
83
+
84
+
85
+ class DigestAlgorithmIdentifiers(univ.SetOf):
86
+ componentType = DigestAlgorithmIdentifier()
87
+
88
+
89
+ class Digest(univ.OctetString):
90
+ pass
91
+
92
+
93
+ class ContentInfo(univ.Sequence):
94
+ componentType = namedtype.NamedTypes(
95
+ namedtype.NamedType('contentType', ContentType()),
96
+ namedtype.OptionalNamedType(
97
+ 'content',
98
+ univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)),
99
+ openType=opentype.OpenType('contentType', contentTypeMap)
100
+ )
101
+ )
102
+
103
+
104
+ class DigestedData(univ.Sequence):
105
+ componentType = namedtype.NamedTypes(
106
+ namedtype.NamedType('version', Version()),
107
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
108
+ namedtype.NamedType('contentInfo', ContentInfo()),
109
+ namedtype.NamedType('digest', Digest())
110
+ )
111
+
112
+
113
+ class IssuerAndSerialNumber(univ.Sequence):
114
+ componentType = namedtype.NamedTypes(
115
+ namedtype.NamedType('issuer', Name()),
116
+ namedtype.NamedType('serialNumber', CertificateSerialNumber())
117
+ )
118
+
119
+
120
+ class KeyEncryptionAlgorithmIdentifier(AlgorithmIdentifier):
121
+ pass
122
+
123
+
124
+ class EncryptedKey(univ.OctetString):
125
+ pass
126
+
127
+
128
+ class RecipientInfo(univ.Sequence):
129
+ componentType = namedtype.NamedTypes(
130
+ namedtype.NamedType('version', Version()),
131
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
132
+ namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()),
133
+ namedtype.NamedType('encryptedKey', EncryptedKey())
134
+ )
135
+
136
+
137
+ class RecipientInfos(univ.SetOf):
138
+ componentType = RecipientInfo()
139
+
140
+
141
+ class Attributes(univ.SetOf):
142
+ componentType = Attribute()
143
+
144
+
145
+ class ExtendedCertificateInfo(univ.Sequence):
146
+ componentType = namedtype.NamedTypes(
147
+ namedtype.NamedType('version', Version()),
148
+ namedtype.NamedType('certificate', Certificate()),
149
+ namedtype.NamedType('attributes', Attributes())
150
+ )
151
+
152
+
153
+ class SignatureAlgorithmIdentifier(AlgorithmIdentifier):
154
+ pass
155
+
156
+
157
+ class Signature(univ.BitString):
158
+ pass
159
+
160
+
161
+ class ExtendedCertificate(univ.Sequence):
162
+ componentType = namedtype.NamedTypes(
163
+ namedtype.NamedType('extendedCertificateInfo', ExtendedCertificateInfo()),
164
+ namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()),
165
+ namedtype.NamedType('signature', Signature())
166
+ )
167
+
168
+
169
+ class ExtendedCertificateOrCertificate(univ.Choice):
170
+ componentType = namedtype.NamedTypes(
171
+ namedtype.NamedType('certificate', Certificate()),
172
+ namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(
173
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
174
+ )
175
+
176
+
177
+ class ExtendedCertificatesAndCertificates(univ.SetOf):
178
+ componentType = ExtendedCertificateOrCertificate()
179
+
180
+
181
+ class SerialNumber(univ.Integer):
182
+ pass
183
+
184
+
185
+ class CRLEntry(univ.Sequence):
186
+ componentType = namedtype.NamedTypes(
187
+ namedtype.NamedType('userCertificate', SerialNumber()),
188
+ namedtype.NamedType('revocationDate', useful.UTCTime())
189
+ )
190
+
191
+
192
+ class TBSCertificateRevocationList(univ.Sequence):
193
+ componentType = namedtype.NamedTypes(
194
+ namedtype.NamedType('signature', AlgorithmIdentifier()),
195
+ namedtype.NamedType('issuer', Name()),
196
+ namedtype.NamedType('lastUpdate', useful.UTCTime()),
197
+ namedtype.NamedType('nextUpdate', useful.UTCTime()),
198
+ namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=CRLEntry()))
199
+ )
200
+
201
+
202
+ class CertificateRevocationList(univ.Sequence):
203
+ componentType = namedtype.NamedTypes(
204
+ namedtype.NamedType('tbsCertificateRevocationList', TBSCertificateRevocationList()),
205
+ namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
206
+ namedtype.NamedType('signature', univ.BitString())
207
+ )
208
+
209
+
210
+ class CertificateRevocationLists(univ.SetOf):
211
+ componentType = CertificateRevocationList()
212
+
213
+
214
+ class DigestEncryptionAlgorithmIdentifier(AlgorithmIdentifier):
215
+ pass
216
+
217
+
218
+ class EncryptedDigest(univ.OctetString):
219
+ pass
220
+
221
+
222
+ class SignerInfo(univ.Sequence):
223
+ componentType = namedtype.NamedTypes(
224
+ namedtype.NamedType('version', Version()),
225
+ namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()),
226
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
227
+ namedtype.OptionalNamedType('authenticatedAttributes', Attributes().subtype(
228
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
229
+ namedtype.NamedType('digestEncryptionAlgorithm', DigestEncryptionAlgorithmIdentifier()),
230
+ namedtype.NamedType('encryptedDigest', EncryptedDigest()),
231
+ namedtype.OptionalNamedType('unauthenticatedAttributes', Attributes().subtype(
232
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)))
233
+ )
234
+
235
+
236
+ class SignerInfos(univ.SetOf):
237
+ componentType = SignerInfo()
238
+
239
+
240
+ class SignedAndEnvelopedData(univ.Sequence):
241
+ componentType = namedtype.NamedTypes(
242
+ namedtype.NamedType('version', Version()),
243
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
244
+ namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
245
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()),
246
+ namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(
247
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
248
+ namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(
249
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
250
+ namedtype.NamedType('signerInfos', SignerInfos())
251
+ )
252
+
253
+
254
+ class EnvelopedData(univ.Sequence):
255
+ componentType = namedtype.NamedTypes(
256
+ namedtype.NamedType('version', Version()),
257
+ namedtype.NamedType('recipientInfos', RecipientInfos()),
258
+ namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo())
259
+ )
260
+
261
+
262
+ class DigestInfo(univ.Sequence):
263
+ componentType = namedtype.NamedTypes(
264
+ namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()),
265
+ namedtype.NamedType('digest', Digest())
266
+ )
267
+
268
+
269
+ class SignedData(univ.Sequence):
270
+ componentType = namedtype.NamedTypes(
271
+ namedtype.NamedType('version', Version()),
272
+ namedtype.OptionalNamedType('digestAlgorithms', DigestAlgorithmIdentifiers()),
273
+ namedtype.NamedType('contentInfo', ContentInfo()),
274
+ namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(
275
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))),
276
+ namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(
277
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
278
+ namedtype.OptionalNamedType('signerInfos', SignerInfos())
279
+ )
280
+
281
+
282
+ class Data(univ.OctetString):
283
+ pass
284
+
285
+ _contentTypeMapUpdate = {
286
+ data: Data(),
287
+ signedData: SignedData(),
288
+ envelopedData: EnvelopedData(),
289
+ signedAndEnvelopedData: SignedAndEnvelopedData(),
290
+ digestedData: DigestedData(),
291
+ encryptedData: EncryptedData()
292
+ }
293
+
294
+ contentTypeMap.update(_contentTypeMapUpdate)
lib/python3.10/site-packages/pyasn1_modules/rfc2986.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# coding: utf-8
#
# This file is part of pyasn1-modules software.
#
# Created by Joel Johnson with asn1ate tool.
# Modified by Russ Housley to add support for opentypes by importing
# definitions from rfc5280 so that the same maps are used.
#
# Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
# PKCS #10: Certification Request Syntax Specification
#
# ASN.1 source from:
# https://www.rfc-editor.org/rfc/rfc2986.txt
#
from pyasn1.type import namedtype
from pyasn1.type import tag
from pyasn1.type import univ

from pyasn1_modules import rfc5280

# Conventional upper bound used in ASN.1 size constraints.
MAX = float('inf')


# Re-export the shared X.501/X.509 building blocks from rfc5280 so both
# modules use a single set of type definitions (and shared opentype maps).
AttributeType = rfc5280.AttributeType

AttributeValue = rfc5280.AttributeValue

AttributeTypeAndValue = rfc5280.AttributeTypeAndValue

Attribute = rfc5280.Attribute

RelativeDistinguishedName = rfc5280.RelativeDistinguishedName

RDNSequence = rfc5280.RDNSequence

Name = rfc5280.Name

AlgorithmIdentifier = rfc5280.AlgorithmIdentifier

SubjectPublicKeyInfo = rfc5280.SubjectPublicKeyInfo


class Attributes(univ.SetOf):
    # Attributes ::= SET OF Attribute
    pass


Attributes.componentType = Attribute()


class CertificationRequestInfo(univ.Sequence):
    # The to-be-signed portion of a PKCS #10 request: version, subject
    # name, public key, and [0] IMPLICIT-tagged attributes.
    pass


CertificationRequestInfo.componentType = namedtype.NamedTypes(
    namedtype.NamedType('version', univ.Integer()),
    namedtype.NamedType('subject', Name()),
    namedtype.NamedType('subjectPKInfo', SubjectPublicKeyInfo()),
    namedtype.NamedType('attributes',
        Attributes().subtype(implicitTag=tag.Tag(
            tag.tagClassContext, tag.tagFormatSimple, 0))
        )
    )


class CertificationRequest(univ.Sequence):
    # Complete PKCS #10 request: the info block, the signature algorithm,
    # and the signature computed over the DER-encoded info block.
    pass


CertificationRequest.componentType = namedtype.NamedTypes(
    namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()),
    namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()),
    namedtype.NamedType('signature', univ.BitString())
)
lib/python3.10/site-packages/pyasn1_modules/rfc3709.py ADDED
@@ -0,0 +1,207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#
# This file is part of pyasn1-modules software.
#
# Created by Russ Housley with assistance from asn1ate v.0.6.0.
# Modified by Russ Housley to add maps for use with opentypes.
#
# Copyright (c) 2019, Vigil Security, LLC
# License: http://snmplabs.com/pyasn1/license.html
#
# Logotypes in X.509 Certificates
#
# ASN.1 source from:
# https://www.rfc-editor.org/rfc/rfc3709.txt
#

from pyasn1.type import char
from pyasn1.type import constraint
from pyasn1.type import namedtype
from pyasn1.type import namedval
from pyasn1.type import tag
from pyasn1.type import univ

from pyasn1_modules import rfc5280
from pyasn1_modules import rfc6170

# Conventional upper bound used in ASN.1 size constraints.
MAX = float('inf')


class HashAlgAndValue(univ.Sequence):
    # A hash value together with the algorithm that produced it.
    pass

HashAlgAndValue.componentType = namedtype.NamedTypes(
    namedtype.NamedType('hashAlg', rfc5280.AlgorithmIdentifier()),
    namedtype.NamedType('hashValue', univ.OctetString())
)


class LogotypeDetails(univ.Sequence):
    # Where to fetch a logotype (one or more URIs) and how to verify it
    # (one or more hashes); both lists are SIZE (1..MAX).
    pass

LogotypeDetails.componentType = namedtype.NamedTypes(
    namedtype.NamedType('mediaType', char.IA5String()),
    namedtype.NamedType('logotypeHash', univ.SequenceOf(
        componentType=HashAlgAndValue()).subtype(
            sizeSpec=constraint.ValueSizeConstraint(1, MAX))),
    namedtype.NamedType('logotypeURI', univ.SequenceOf(
        componentType=char.IA5String()).subtype(
            sizeSpec=constraint.ValueSizeConstraint(1, MAX)))
)


class LogotypeAudioInfo(univ.Sequence):
    # Optional metadata about an audio logotype; 'sampleRate' and
    # 'language' carry context IMPLICIT tags [3] and [4].
    pass

LogotypeAudioInfo.componentType = namedtype.NamedTypes(
    namedtype.NamedType('fileSize', univ.Integer()),
    namedtype.NamedType('playTime', univ.Integer()),
    namedtype.NamedType('channels', univ.Integer()),
    namedtype.OptionalNamedType('sampleRate', univ.Integer().subtype(
        implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
    namedtype.OptionalNamedType('language', char.IA5String().subtype(
        implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
)


class LogotypeAudio(univ.Sequence):
    pass

LogotypeAudio.componentType = namedtype.NamedTypes(
    namedtype.NamedType('audioDetails', LogotypeDetails()),
    namedtype.OptionalNamedType('audioInfo', LogotypeAudioInfo())
)


class LogotypeImageType(univ.Integer):
    # grayScale(0) | color(1)
    pass

LogotypeImageType.namedValues = namedval.NamedValues(
    ('grayScale', 0),
    ('color', 1)
)


class LogotypeImageResolution(univ.Choice):
    # Either bits-per-pixel [1] or palette size [2].
    pass

LogotypeImageResolution.componentType = namedtype.NamedTypes(
    namedtype.NamedType('numBits',
        univ.Integer().subtype(implicitTag=tag.Tag(
            tag.tagClassContext, tag.tagFormatSimple, 1))),
    namedtype.NamedType('tableSize',
        univ.Integer().subtype(implicitTag=tag.Tag(
            tag.tagClassContext, tag.tagFormatSimple, 2)))
)


class LogotypeImageInfo(univ.Sequence):
    # Optional metadata about an image logotype; 'type' defaults to
    # 'color' when absent.
    pass

LogotypeImageInfo.componentType = namedtype.NamedTypes(
    namedtype.DefaultedNamedType('type', LogotypeImageType().subtype(
        implicitTag=tag.Tag(tag.tagClassContext,
            tag.tagFormatSimple, 0)).subtype(value='color')),
    namedtype.NamedType('fileSize', univ.Integer()),
    namedtype.NamedType('xSize', univ.Integer()),
    namedtype.NamedType('ySize', univ.Integer()),
    namedtype.OptionalNamedType('resolution', LogotypeImageResolution()),
    namedtype.OptionalNamedType('language', char.IA5String().subtype(
        implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
)


class LogotypeImage(univ.Sequence):
    pass

LogotypeImage.componentType = namedtype.NamedTypes(
    namedtype.NamedType('imageDetails', LogotypeDetails()),
    namedtype.OptionalNamedType('imageInfo', LogotypeImageInfo())
)


class LogotypeData(univ.Sequence):
    # A logotype expressed as images (untagged) and/or audio ([1] IMPLICIT).
    pass

LogotypeData.componentType = namedtype.NamedTypes(
    namedtype.OptionalNamedType('image', univ.SequenceOf(
        componentType=LogotypeImage())),
    namedtype.OptionalNamedType('audio', univ.SequenceOf(
        componentType=LogotypeAudio()).subtype(
            implicitTag=tag.Tag(tag.tagClassContext,
                tag.tagFormatSimple, 1)))
)


class LogotypeReference(univ.Sequence):
    # Indirect logotype: hashes and URIs of an external LogotypeData.
    pass

LogotypeReference.componentType = namedtype.NamedTypes(
    namedtype.NamedType('refStructHash', univ.SequenceOf(
        componentType=HashAlgAndValue()).subtype(
            sizeSpec=constraint.ValueSizeConstraint(1, MAX))),
    namedtype.NamedType('refStructURI', univ.SequenceOf(
        componentType=char.IA5String()).subtype(
            sizeSpec=constraint.ValueSizeConstraint(1, MAX)))
)


class LogotypeInfo(univ.Choice):
    # direct [0]: the logotype data is embedded;
    # indirect [1]: only a hash/URI reference is embedded.
    pass

LogotypeInfo.componentType = namedtype.NamedTypes(
    namedtype.NamedType('direct',
        LogotypeData().subtype(implicitTag=tag.Tag(tag.tagClassContext,
            tag.tagFormatConstructed, 0))),
    namedtype.NamedType('indirect', LogotypeReference().subtype(
        implicitTag=tag.Tag(tag.tagClassContext,
            tag.tagFormatConstructed, 1)))
)

# Other logotype type and associated object identifiers

id_logo_background = univ.ObjectIdentifier('1.3.6.1.5.5.7.20.2')

id_logo_loyalty = univ.ObjectIdentifier('1.3.6.1.5.5.7.20.1')

id_logo_certImage = rfc6170.id_logo_certImage


class OtherLogotypeInfo(univ.Sequence):
    # A logotype of a type identified by OID rather than a fixed field.
    pass

OtherLogotypeInfo.componentType = namedtype.NamedTypes(
    namedtype.NamedType('logotypeType', univ.ObjectIdentifier()),
    namedtype.NamedType('info', LogotypeInfo())
)


# Logotype Certificate Extension

id_pe_logotype = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.12')


class LogotypeExtn(univ.Sequence):
    # The extension payload: all four fields are optional and carry
    # context EXPLICIT tags [0]..[3].
    pass

LogotypeExtn.componentType = namedtype.NamedTypes(
    namedtype.OptionalNamedType('communityLogos', univ.SequenceOf(
        componentType=LogotypeInfo()).subtype(
            explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
    namedtype.OptionalNamedType('issuerLogo', LogotypeInfo().subtype(
        explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))),
    namedtype.OptionalNamedType('subjectLogo', LogotypeInfo().subtype(
        explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))),
    namedtype.OptionalNamedType('otherLogos', univ.SequenceOf(
        componentType=OtherLogotypeInfo()).subtype(explicitTag=tag.Tag(
            tag.tagClassContext, tag.tagFormatSimple, 3)))
)


# Map of Certificate Extension OIDs to Extensions added to the
# ones that are in rfc5280.py

_certificateExtensionsMapUpdate = {
    id_pe_logotype: LogotypeExtn(),
}

rfc5280.certificateExtensionsMap.update(_certificateExtensionsMapUpdate)
lib/python3.10/site-packages/pyasn1_modules/rfc3739.py ADDED
@@ -0,0 +1,203 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#
# This file is part of pyasn1-modules software.
#
# Created by Russ Housley with assistance from asn1ate v.0.6.0.
# Modified by Russ Housley to add WithComponentsConstraints to
# enforce the requirements that are indicated in comments.
#
# Copyright (c) 2019, Vigil Security, LLC
# License: http://snmplabs.com/pyasn1/license.html
#
# Qualified Certificates
#
# ASN.1 source from:
# https://www.rfc-editor.org/rfc/rfc3739.txt
#

from pyasn1.type import char
from pyasn1.type import constraint
from pyasn1.type import namedtype
from pyasn1.type import namedval
from pyasn1.type import opentype
from pyasn1.type import univ
from pyasn1.type import useful

from pyasn1_modules import rfc5280

# Conventional upper bound used in ASN.1 size constraints.
MAX = float('inf')


# Initialize the qcStatement map; other modules register their
# statementId -> statementInfo types here.

qcStatementMap = { }


# Imports from RFC 5280

AlgorithmIdentifier = rfc5280.AlgorithmIdentifier

AttributeType = rfc5280.AttributeType

DirectoryString = rfc5280.DirectoryString

GeneralName = rfc5280.GeneralName

id_pkix = rfc5280.id_pkix

id_pe = rfc5280.id_pe


# Arc for QC personal data attributes

id_pda = id_pkix + (9, )


# Arc for QC statements

id_qcs = id_pkix + (11, )


# Personal data attributes

id_pda_dateOfBirth = id_pda + (1, )

class DateOfBirth(useful.GeneralizedTime):
    pass


id_pda_placeOfBirth = id_pda + (2, )

class PlaceOfBirth(DirectoryString):
    pass


id_pda_gender = id_pda + (3, )

class Gender(char.PrintableString):
    # Exactly one of the characters M, F, m, or f.
    subtypeSpec = constraint.ConstraintsIntersection(
        constraint.ValueSizeConstraint(1, 1),
        constraint.SingleValueConstraint('M', 'F', 'm', 'f')
    )


id_pda_countryOfCitizenship = id_pda + (4, )

class CountryOfCitizenship(char.PrintableString):
    subtypeSpec = constraint.ValueSizeConstraint(2, 2)
    # ISO 3166 Country Code


id_pda_countryOfResidence = id_pda + (5, )

class CountryOfResidence(char.PrintableString):
    subtypeSpec = constraint.ValueSizeConstraint(2, 2)
    # ISO 3166 Country Code


# Biometric info certificate extension

id_pe_biometricInfo = id_pe + (2, )


class PredefinedBiometricType(univ.Integer):
    # picture(0) | handwritten-signature(1)
    namedValues = namedval.NamedValues(
        ('picture', 0),
        ('handwritten-signature', 1)
    )
    subtypeSpec = constraint.SingleValueConstraint(0, 1)


class TypeOfBiometricData(univ.Choice):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('predefinedBiometricType', PredefinedBiometricType()),
        namedtype.NamedType('biometricDataOid', univ.ObjectIdentifier())
    )


class BiometricData(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('typeOfBiometricData', TypeOfBiometricData()),
        namedtype.NamedType('hashAlgorithm', AlgorithmIdentifier()),
        namedtype.NamedType('biometricDataHash', univ.OctetString()),
        namedtype.OptionalNamedType('sourceDataUri', char.IA5String())
    )


class BiometricSyntax(univ.SequenceOf):
    componentType = BiometricData()


# QC Statements certificate extension
# NOTE: This extension does not allow to mix critical and
# non-critical Qualified Certificate Statements. Either all
# statements must be critical or all statements must be
# non-critical.

id_pe_qcStatements = id_pe + (3, )


class NameRegistrationAuthorities(univ.SequenceOf):
    componentType = GeneralName()
    subtypeSpec=constraint.ValueSizeConstraint(1, MAX)


class QCStatement(univ.Sequence):
    # 'statementInfo' is an open type resolved through qcStatementMap
    # using the accompanying 'statementId' OID.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('statementId', univ.ObjectIdentifier()),
        namedtype.OptionalNamedType('statementInfo', univ.Any(),
            openType=opentype.OpenType('statementId', qcStatementMap))
    )


class QCStatements(univ.SequenceOf):
    componentType = QCStatement()


class SemanticsInformation(univ.Sequence):
    # NOTE(review): 'semanticsIndentifier' (sic) matches the upstream
    # pyasn1-modules spelling; it is a wire-visible component name and
    # must not be "corrected" without coordinating with callers.
    # The constraint below requires at least one of the two components
    # to be present.
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType('semanticsIndentifier',
            univ.ObjectIdentifier()),
        namedtype.OptionalNamedType('nameRegistrationAuthorities',
            NameRegistrationAuthorities())
    )
    subtypeSpec = constraint.ConstraintsUnion(
        constraint.WithComponentsConstraint(
            ('semanticsIndentifier', constraint.ComponentPresentConstraint())),
        constraint.WithComponentsConstraint(
            ('nameRegistrationAuthorities', constraint.ComponentPresentConstraint()))
    )


# NOTE(review): id_qcs is also assigned (identically) near the top of
# this module; this repeat is redundant but harmless.
id_qcs = id_pkix + (11, )


id_qcs_pkixQCSyntax_v1 = id_qcs + (1, )


id_qcs_pkixQCSyntax_v2 = id_qcs + (2, )


# Map of Certificate Extension OIDs to Extensions
# To be added to the ones that are in rfc5280.py

_certificateExtensionsMap = {
    id_pe_biometricInfo: BiometricSyntax(),
    id_pe_qcStatements: QCStatements(),
}

rfc5280.certificateExtensionsMap.update(_certificateExtensionsMap)


# Map of AttributeType OIDs to AttributeValue added to the
# ones that are in rfc5280.py

_certificateAttributesMapUpdate = {
    id_pda_dateOfBirth: DateOfBirth(),
    id_pda_placeOfBirth: PlaceOfBirth(),
    id_pda_gender: Gender(),
    id_pda_countryOfCitizenship: CountryOfCitizenship(),
    id_pda_countryOfResidence: CountryOfResidence(),
}

rfc5280.certificateAttributesMap.update(_certificateAttributesMapUpdate)
lib/python3.10/site-packages/pyasn1_modules/rfc5084.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# This file is being contributed to pyasn1-modules software.
#
# Created by Russ Housley with assistance from the asn1ate tool, with manual
# changes to AES_CCM_ICVlen.subtypeSpec and added comments
#
# Copyright (c) 2018-2019, Vigil Security, LLC
# License: http://snmplabs.com/pyasn1/license.html
#
# AES-CCM and AES-GCM Algorithms for use with the Authenticated-Enveloped-Data
# protecting content type for the Cryptographic Message Syntax (CMS)
#
# ASN.1 source from:
# https://www.rfc-editor.org/rfc/rfc5084.txt

from pyasn1.type import constraint
from pyasn1.type import namedtype
from pyasn1.type import univ

from pyasn1_modules import rfc5280


def _OID(*components):
    """Build an ObjectIdentifier, splicing in any OID components in-place.

    Each component may be an existing ObjectIdentifier (its arcs are
    expanded) or anything convertible with int().
    """
    output = []
    for x in tuple(components):
        if isinstance(x, univ.ObjectIdentifier):
            output.extend(list(x))
        else:
            output.append(int(x))

    return univ.ObjectIdentifier(output)


class AES_CCM_ICVlen(univ.Integer):
    # Integrity check value length for AES-CCM, in octets.
    pass


class AES_GCM_ICVlen(univ.Integer):
    # Integrity check value length for AES-GCM, in octets.
    pass


AES_CCM_ICVlen.subtypeSpec = constraint.SingleValueConstraint(4, 6, 8, 10, 12, 14, 16)

AES_GCM_ICVlen.subtypeSpec = constraint.ValueRangeConstraint(12, 16)


class CCMParameters(univ.Sequence):
    pass


CCMParameters.componentType = namedtype.NamedTypes(
    namedtype.NamedType('aes-nonce', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(7, 13))),
    # The aes-nonce parameter contains 15-L octets, where L is the size of the length field. L=8 is RECOMMENDED.
    # Within the scope of any content-authenticated-encryption key, the nonce value MUST be unique.
    namedtype.DefaultedNamedType('aes-ICVlen', AES_CCM_ICVlen().subtype(value=12))
)


class GCMParameters(univ.Sequence):
    pass


GCMParameters.componentType = namedtype.NamedTypes(
    namedtype.NamedType('aes-nonce', univ.OctetString()),
    # The aes-nonce may have any number of bits between 8 and 2^64, but it MUST be a multiple of 8 bits.
    # Within the scope of any content-authenticated-encryption key, the nonce value MUST be unique.
    # A nonce value of 12 octets can be processed more efficiently, so that length is RECOMMENDED.
    namedtype.DefaultedNamedType('aes-ICVlen', AES_GCM_ICVlen().subtype(value=12))
)

# NIST AES arc and the CCM/GCM mode OIDs beneath it.
aes = _OID(2, 16, 840, 1, 101, 3, 4, 1)

id_aes128_CCM = _OID(aes, 7)

id_aes128_GCM = _OID(aes, 6)

id_aes192_CCM = _OID(aes, 27)

id_aes192_GCM = _OID(aes, 26)

id_aes256_CCM = _OID(aes, 47)

id_aes256_GCM = _OID(aes, 46)


# Map of Algorithm Identifier OIDs to Parameters is added to the
# ones in rfc5280.py

_algorithmIdentifierMapUpdate = {
    id_aes128_CCM: CCMParameters(),
    id_aes128_GCM: GCMParameters(),
    id_aes192_CCM: CCMParameters(),
    id_aes192_GCM: GCMParameters(),
    id_aes256_CCM: CCMParameters(),
    id_aes256_GCM: GCMParameters(),
}

rfc5280.algorithmIdentifierMap.update(_algorithmIdentifierMapUpdate)
lib/python3.10/site-packages/pyasn1_modules/rfc5126.py ADDED
@@ -0,0 +1,577 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#
# This file is part of pyasn1-modules software.
#
# Created by Russ Housley with assistance from asn1ate v.0.6.0.
#
# Copyright (c) 2019, Vigil Security, LLC
# License: http://snmplabs.com/pyasn1/license.html
#
# CMS Advanced Electronic Signatures (CAdES)
#
# ASN.1 source from:
# https://www.rfc-editor.org/rfc/rfc5126.txt
#

from pyasn1.type import char
from pyasn1.type import constraint
from pyasn1.type import namedtype
from pyasn1.type import opentype
from pyasn1.type import tag
from pyasn1.type import useful
from pyasn1.type import univ

from pyasn1_modules import rfc5280
from pyasn1_modules import rfc5652
from pyasn1_modules import rfc5035
from pyasn1_modules import rfc5755
from pyasn1_modules import rfc6960
from pyasn1_modules import rfc3161

# Conventional upper bound used in ASN.1 size constraints.
MAX = float('inf')


# Maps for OpenTypes; other modules may register qualifier/reference
# types here by OID.

commitmentQualifierMap = { }

sigQualifiersMap = { }

otherRevRefMap = { }

otherRevValMap = { }


# Imports from RFC 5652

ContentInfo = rfc5652.ContentInfo

ContentType = rfc5652.ContentType

SignedData = rfc5652.SignedData

EncapsulatedContentInfo = rfc5652.EncapsulatedContentInfo

SignerInfo = rfc5652.SignerInfo

MessageDigest = rfc5652.MessageDigest

SigningTime = rfc5652.SigningTime

Countersignature = rfc5652.Countersignature

id_data = rfc5652.id_data

id_signedData = rfc5652.id_signedData

id_contentType= rfc5652.id_contentType

id_messageDigest = rfc5652.id_messageDigest

id_signingTime = rfc5652.id_signingTime

id_countersignature = rfc5652.id_countersignature


# Imports from RFC 5035

SigningCertificate = rfc5035.SigningCertificate

IssuerSerial = rfc5035.IssuerSerial

ContentReference = rfc5035.ContentReference

ContentIdentifier = rfc5035.ContentIdentifier

id_aa_contentReference = rfc5035.id_aa_contentReference

id_aa_contentIdentifier = rfc5035.id_aa_contentIdentifier

id_aa_signingCertificate = rfc5035.id_aa_signingCertificate

id_aa_signingCertificateV2 = rfc5035.id_aa_signingCertificateV2


# Imports from RFC 5280

Certificate = rfc5280.Certificate

AlgorithmIdentifier = rfc5280.AlgorithmIdentifier

CertificateList = rfc5280.CertificateList

Name = rfc5280.Name

Attribute = rfc5280.Attribute

GeneralNames = rfc5280.GeneralNames

GeneralName = rfc5280.GeneralName

PolicyInformation = rfc5280.PolicyInformation

DirectoryString = rfc5280.DirectoryString


# Imports from RFC 5755

AttributeCertificate = rfc5755.AttributeCertificate


# Imports from RFC 6960

BasicOCSPResponse = rfc6960.BasicOCSPResponse

ResponderID = rfc6960.ResponderID


# Imports from RFC 3161

TimeStampToken = rfc3161.TimeStampToken


# OID used referencing electronic signature mechanisms

id_etsi_es_IDUP_Mechanism_v1 = univ.ObjectIdentifier('0.4.0.1733.1.4.1')


# OtherSigningCertificate - deprecated

id_aa_ets_otherSigCert = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.19')


class OtherHashValue(univ.OctetString):
    pass


class OtherHashAlgAndValue(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('hashAlgorithm', AlgorithmIdentifier()),
        namedtype.NamedType('hashValue', OtherHashValue())
    )


class OtherHash(univ.Choice):
    # Either a bare SHA-1 value or an algorithm-qualified hash.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('sha1Hash', OtherHashValue()),
        namedtype.NamedType('otherHash', OtherHashAlgAndValue())
    )


class OtherCertID(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('otherCertHash', OtherHash()),
        namedtype.OptionalNamedType('issuerSerial', IssuerSerial())
    )


class OtherSigningCertificate(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('certs',
            univ.SequenceOf(componentType=OtherCertID())),
        namedtype.OptionalNamedType('policies',
            univ.SequenceOf(componentType=PolicyInformation()))
    )


# Signature Policy Identifier

id_aa_ets_sigPolicyId = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.15')


class SigPolicyId(univ.ObjectIdentifier):
    pass


class SigPolicyHash(OtherHashAlgAndValue):
    pass


class SigPolicyQualifierId(univ.ObjectIdentifier):
    pass


class SigPolicyQualifierInfo(univ.Sequence):
    # 'sigQualifier' is an open type resolved through sigQualifiersMap
    # using the accompanying 'sigPolicyQualifierId' OID.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('sigPolicyQualifierId', SigPolicyQualifierId()),
        namedtype.NamedType('sigQualifier', univ.Any(),
            openType=opentype.OpenType('sigPolicyQualifierId', sigQualifiersMap))
    )


class SignaturePolicyId(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('sigPolicyId', SigPolicyId()),
        namedtype.NamedType('sigPolicyHash', SigPolicyHash()),
        namedtype.OptionalNamedType('sigPolicyQualifiers',
            univ.SequenceOf(componentType=SigPolicyQualifierInfo()).subtype(
                subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
    )


class SignaturePolicyImplied(univ.Null):
    pass


class SignaturePolicy(univ.Choice):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('signaturePolicyId', SignaturePolicyId()),
        namedtype.NamedType('signaturePolicyImplied', SignaturePolicyImplied())
    )


id_spq_ets_unotice = univ.ObjectIdentifier('1.2.840.113549.1.9.16.5.2')


class DisplayText(univ.Choice):
    # Human-readable text, 1..200 characters in any of three encodings.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('visibleString', char.VisibleString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
        namedtype.NamedType('bmpString', char.BMPString().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, 200))),
        namedtype.NamedType('utf8String', char.UTF8String().subtype(
            subtypeSpec=constraint.ValueSizeConstraint(1, 200)))
    )


class NoticeReference(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('organization', DisplayText()),
        namedtype.NamedType('noticeNumbers',
            univ.SequenceOf(componentType=univ.Integer()))
    )

class SPUserNotice(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType('noticeRef', NoticeReference()),
        namedtype.OptionalNamedType('explicitText', DisplayText())
    )


# Prototype qualifier instance: a user notice attached to a policy.
noticeToUser = SigPolicyQualifierInfo()
noticeToUser['sigPolicyQualifierId'] = id_spq_ets_unotice
noticeToUser['sigQualifier'] = SPUserNotice()


id_spq_ets_uri = univ.ObjectIdentifier('1.2.840.113549.1.9.16.5.1')


class SPuri(char.IA5String):
    pass


# Prototype qualifier instance: a URI pointing at the policy document.
pointerToSigPolSpec = SigPolicyQualifierInfo()
pointerToSigPolSpec['sigPolicyQualifierId'] = id_spq_ets_uri
pointerToSigPolSpec['sigQualifier'] = SPuri()


# Commitment Type

id_aa_ets_commitmentType = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.16')


class CommitmentTypeIdentifier(univ.ObjectIdentifier):
    pass


class CommitmentTypeQualifier(univ.Sequence):
    # 'qualifier' is an open type resolved through commitmentQualifierMap.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('commitmentTypeIdentifier',
            CommitmentTypeIdentifier()),
        namedtype.NamedType('qualifier', univ.Any(),
            openType=opentype.OpenType('commitmentTypeIdentifier',
                commitmentQualifierMap))
    )


class CommitmentTypeIndication(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('commitmentTypeId', CommitmentTypeIdentifier()),
        namedtype.OptionalNamedType('commitmentTypeQualifier',
            univ.SequenceOf(componentType=CommitmentTypeQualifier()).subtype(
                subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
    )


id_cti_ets_proofOfOrigin = univ.ObjectIdentifier('1.2.840.113549.1.9.16.6.1')

id_cti_ets_proofOfReceipt = univ.ObjectIdentifier('1.2.840.113549.1.9.16.6.2')

id_cti_ets_proofOfDelivery = univ.ObjectIdentifier('1.2.840.113549.1.9.16.6.3')

id_cti_ets_proofOfSender = univ.ObjectIdentifier('1.2.840.113549.1.9.16.6.4')

id_cti_ets_proofOfApproval = univ.ObjectIdentifier('1.2.840.113549.1.9.16.6.5')

id_cti_ets_proofOfCreation = univ.ObjectIdentifier('1.2.840.113549.1.9.16.6.6')


# Signer Location

id_aa_ets_signerLocation = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.17')


class PostalAddress(univ.SequenceOf):
    # 1..6 address lines.
    componentType = DirectoryString()
    subtypeSpec = constraint.ValueSizeConstraint(1, 6)


class SignerLocation(univ.Sequence):
    # NOTE(review): 'postalAdddress' (sic) matches the upstream
    # pyasn1-modules spelling of this component name.
    componentType = namedtype.NamedTypes(
        namedtype.OptionalNamedType('countryName',
            DirectoryString().subtype(explicitTag=tag.Tag(
                tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.OptionalNamedType('localityName',
            DirectoryString().subtype(explicitTag=tag.Tag(
                tag.tagClassContext, tag.tagFormatSimple, 1))),
        namedtype.OptionalNamedType('postalAdddress',
            PostalAddress().subtype(explicitTag=tag.Tag(
                tag.tagClassContext, tag.tagFormatSimple, 2)))
    )


# Signature Timestamp

id_aa_signatureTimeStampToken = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.14')


class SignatureTimeStampToken(TimeStampToken):
    pass


# Content Timestamp

id_aa_ets_contentTimestamp = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.20')


class ContentTimestamp(TimeStampToken):
    pass


# Signer Attributes

id_aa_ets_signerAttr = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.18')


class ClaimedAttributes(univ.SequenceOf):
    componentType = Attribute()


class CertifiedAttributes(AttributeCertificate):
    pass


class SignerAttribute(univ.SequenceOf):
    # Each element is either a claimed [0] or certified [1] attribute set.
    componentType = univ.Choice(componentType=namedtype.NamedTypes(
        namedtype.NamedType('claimedAttributes',
            ClaimedAttributes().subtype(explicitTag=tag.Tag(
                tag.tagClassContext, tag.tagFormatSimple, 0))),
        namedtype.NamedType('certifiedAttributes',
            CertifiedAttributes().subtype(explicitTag=tag.Tag(
                tag.tagClassContext, tag.tagFormatSimple, 1)))
    ))


# Complete Certificate Refs

id_aa_ets_certificateRefs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.21')


class CompleteCertificateRefs(univ.SequenceOf):
    componentType = OtherCertID()


# Complete Revocation Refs

id_aa_ets_revocationRefs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.22')


class CrlIdentifier(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('crlissuer', Name()),
        namedtype.NamedType('crlIssuedTime', useful.UTCTime()),
        namedtype.OptionalNamedType('crlNumber', univ.Integer())
    )


class CrlValidatedID(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('crlHash', OtherHash()),
        namedtype.OptionalNamedType('crlIdentifier', CrlIdentifier())
    )


class CRLListID(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('crls',
            univ.SequenceOf(componentType=CrlValidatedID()))
    )


class OcspIdentifier(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('ocspResponderID', ResponderID()),
        namedtype.NamedType('producedAt', useful.GeneralizedTime())
    )


class OcspResponsesID(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('ocspIdentifier', OcspIdentifier()),
        namedtype.OptionalNamedType('ocspRepHash', OtherHash())
    )


class OcspListID(univ.Sequence):
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('ocspResponses',
            univ.SequenceOf(componentType=OcspResponsesID()))
    )


class OtherRevRefType(univ.ObjectIdentifier):
    pass


class OtherRevRefs(univ.Sequence):
    # 'otherRevRefs' is an open type resolved through otherRevRefMap.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('otherRevRefType', OtherRevRefType()),
        namedtype.NamedType('otherRevRefs', univ.Any(),
            openType=opentype.OpenType('otherRevRefType', otherRevRefMap))
    )
441
+
442
+
443
+ class CrlOcspRef(univ.Sequence):
444
+ componentType = namedtype.NamedTypes(
445
+ namedtype.OptionalNamedType('crlids',
446
+ CRLListID().subtype(explicitTag=tag.Tag(
447
+ tag.tagClassContext, tag.tagFormatConstructed, 0))),
448
+ namedtype.OptionalNamedType('ocspids',
449
+ OcspListID().subtype(explicitTag=tag.Tag(
450
+ tag.tagClassContext, tag.tagFormatConstructed, 1))),
451
+ namedtype.OptionalNamedType('otherRev',
452
+ OtherRevRefs().subtype(explicitTag=tag.Tag(
453
+ tag.tagClassContext, tag.tagFormatConstructed, 2)))
454
+ )
455
+
456
+
457
+ class CompleteRevocationRefs(univ.SequenceOf):
458
+ componentType = CrlOcspRef()
459
+
460
+
461
+ # Certificate Values
462
+
463
+ id_aa_ets_certValues = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.23')
464
+
465
+
466
+ class CertificateValues(univ.SequenceOf):
467
+ componentType = Certificate()
468
+
469
+
470
+ # Certificate Revocation Values
471
+
472
+ id_aa_ets_revocationValues = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.24')
473
+
474
+
475
+ class OtherRevValType(univ.ObjectIdentifier):
476
+ pass
477
+
478
+
479
+ class OtherRevVals(univ.Sequence):
480
+ componentType = namedtype.NamedTypes(
481
+ namedtype.NamedType('otherRevValType', OtherRevValType()),
482
+ namedtype.NamedType('otherRevVals', univ.Any(),
483
+ openType=opentype.OpenType('otherRevValType', otherRevValMap))
484
+ )
485
+
486
+
487
+ class RevocationValues(univ.Sequence):
488
+ componentType = namedtype.NamedTypes(
489
+ namedtype.OptionalNamedType('crlVals',
490
+ univ.SequenceOf(componentType=CertificateList()).subtype(
491
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
492
+ namedtype.OptionalNamedType('ocspVals',
493
+ univ.SequenceOf(componentType=BasicOCSPResponse()).subtype(
494
+ explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
495
+ namedtype.OptionalNamedType('otherRevVals',
496
+ OtherRevVals().subtype(explicitTag=tag.Tag(
497
+ tag.tagClassContext, tag.tagFormatConstructed, 2)))
498
+ )
499
+
500
+
501
+ # CAdES-C Timestamp
502
+
503
+ id_aa_ets_escTimeStamp = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.25')
504
+
505
+
506
+ class ESCTimeStampToken(TimeStampToken):
507
+ pass
508
+
509
+
510
+ # Time-Stamped Certificates and CRLs
511
+
512
+ id_aa_ets_certCRLTimestamp = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.26')
513
+
514
+
515
+ class TimestampedCertsCRLs(TimeStampToken):
516
+ pass
517
+
518
+
519
+ # Archive Timestamp
520
+
521
+ id_aa_ets_archiveTimestampV2 = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.48')
522
+
523
+
524
+ class ArchiveTimeStampToken(TimeStampToken):
525
+ pass
526
+
527
+
528
+ # Attribute certificate references
529
+
530
+ id_aa_ets_attrCertificateRefs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.44')
531
+
532
+
533
+ class AttributeCertificateRefs(univ.SequenceOf):
534
+ componentType = OtherCertID()
535
+
536
+
537
+ # Attribute revocation references
538
+
539
+ id_aa_ets_attrRevocationRefs = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.45')
540
+
541
+
542
+ class AttributeRevocationRefs(univ.SequenceOf):
543
+ componentType = CrlOcspRef()
544
+
545
+
546
+ # Update the sigQualifiersMap
547
+
548
+ _sigQualifiersMapUpdate = {
549
+ id_spq_ets_unotice: SPUserNotice(),
550
+ id_spq_ets_uri: SPuri(),
551
+ }
552
+
553
+ sigQualifiersMap.update(_sigQualifiersMapUpdate)
554
+
555
+
556
+ # Update the CMS Attribute Map in rfc5652.py
557
+
558
+ _cmsAttributesMapUpdate = {
559
+ id_aa_ets_otherSigCert: OtherSigningCertificate(),
560
+ id_aa_ets_sigPolicyId: SignaturePolicy(),
561
+ id_aa_ets_commitmentType: CommitmentTypeIndication(),
562
+ id_aa_ets_signerLocation: SignerLocation(),
563
+ id_aa_signatureTimeStampToken: SignatureTimeStampToken(),
564
+ id_aa_ets_contentTimestamp: ContentTimestamp(),
565
+ id_aa_ets_signerAttr: SignerAttribute(),
566
+ id_aa_ets_certificateRefs: CompleteCertificateRefs(),
567
+ id_aa_ets_revocationRefs: CompleteRevocationRefs(),
568
+ id_aa_ets_certValues: CertificateValues(),
569
+ id_aa_ets_revocationValues: RevocationValues(),
570
+ id_aa_ets_escTimeStamp: ESCTimeStampToken(),
571
+ id_aa_ets_certCRLTimestamp: TimestampedCertsCRLs(),
572
+ id_aa_ets_archiveTimestampV2: ArchiveTimeStampToken(),
573
+ id_aa_ets_attrCertificateRefs: AttributeCertificateRefs(),
574
+ id_aa_ets_attrRevocationRefs: AttributeRevocationRefs(),
575
+ }
576
+
577
+ rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate)
lib/python3.10/site-packages/pyasn1_modules/rfc5208.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # This file is part of pyasn1-modules software.
3
+ #
4
+ # Copyright (c) 2005-2020, Ilya Etingof <etingof@gmail.com>
5
+ # License: http://snmplabs.com/pyasn1/license.html
6
+ #
7
+ # PKCS#8 syntax
8
+ #
9
+ # ASN.1 source from:
10
+ # http://tools.ietf.org/html/rfc5208
11
+ #
12
+ # Sample captures could be obtained with "openssl pkcs8 -topk8" command
13
+ #
14
+ from pyasn1_modules import rfc2251
15
+ from pyasn1_modules.rfc2459 import *
16
+
17
+
18
+ class KeyEncryptionAlgorithms(AlgorithmIdentifier):
19
+ pass
20
+
21
+
22
+ class PrivateKeyAlgorithms(AlgorithmIdentifier):
23
+ pass
24
+
25
+
26
+ class EncryptedData(univ.OctetString):
27
+ pass
28
+
29
+
30
+ class EncryptedPrivateKeyInfo(univ.Sequence):
31
+ componentType = namedtype.NamedTypes(
32
+ namedtype.NamedType('encryptionAlgorithm', AlgorithmIdentifier()),
33
+ namedtype.NamedType('encryptedData', EncryptedData())
34
+ )
35
+
36
+
37
+ class PrivateKey(univ.OctetString):
38
+ pass
39
+
40
+
41
+ class Attributes(univ.SetOf):
42
+ componentType = rfc2251.Attribute()
43
+
44
+
45
+ class Version(univ.Integer):
46
+ namedValues = namedval.NamedValues(('v1', 0), ('v2', 1))
47
+
48
+
49
+ class PrivateKeyInfo(univ.Sequence):
50
+ componentType = namedtype.NamedTypes(
51
+ namedtype.NamedType('version', Version()),
52
+ namedtype.NamedType('privateKeyAlgorithm', AlgorithmIdentifier()),
53
+ namedtype.NamedType('privateKey', PrivateKey()),
54
+ namedtype.OptionalNamedType('attributes', Attributes().subtype(
55
+ implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0)))
56
+ )
lib/python3.10/site-packages/pyasn1_modules/rfc5649.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is being contributed to pyasn1-modules software.
2
+ #
3
+ # Created by Russ Housley.
4
+ #
5
+ # Copyright (c) 2019, Vigil Security, LLC
6
+ # License: http://snmplabs.com/pyasn1/license.html
7
+ #
8
+ # AES Key Wrap with Padding
9
+ #
10
+ # ASN.1 source from:
11
+ # https://www.rfc-editor.org/rfc/rfc5649.txt
12
+
13
+ from pyasn1.type import univ
14
+
15
+ from pyasn1_modules import rfc5280
16
+
17
+
18
+ class AlgorithmIdentifier(rfc5280.AlgorithmIdentifier):
19
+ pass
20
+
21
+
22
+ id_aes128_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.5')
23
+
24
+ id_aes192_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.25')
25
+
26
+ id_aes256_wrap = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.45')
27
+
28
+
29
+ id_aes128_wrap_pad = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.8')
30
+
31
+ id_aes192_wrap_pad = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.28')
32
+
33
+ id_aes256_wrap_pad = univ.ObjectIdentifier('2.16.840.1.101.3.4.1.48')
lib/python3.10/site-packages/pyasn1_modules/rfc6010.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #
2
+ # This file is part of pyasn1-modules software.
3
+ #
4
+ # Created by Russ Housley with assistance from asn1ate v.0.6.0.
5
+ # Modified by Russ Housley to add maps for use with opentypes.
6
+ #
7
+ # Copyright (c) 2019, Vigil Security, LLC
8
+ # License: http://snmplabs.com/pyasn1/license.html
9
+ #
10
+ # Certificate Extension for CMS Content Constraints (CCC)
11
+ #
12
+ # ASN.1 source from:
13
+ # https://www.rfc-editor.org/rfc/rfc6010.txt
14
+ #
15
+
16
+ from pyasn1.type import constraint
17
+ from pyasn1.type import namedtype
18
+ from pyasn1.type import namedval
19
+ from pyasn1.type import univ
20
+
21
+ from pyasn1_modules import rfc5280
22
+
23
+ MAX = float('inf')
24
+
25
+
26
+ AttributeType = rfc5280.AttributeType
27
+
28
+ AttributeValue = rfc5280.AttributeValue
29
+
30
+
31
+ id_ct_anyContentType = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.0')
32
+
33
+
34
+ class AttrConstraint(univ.Sequence):
35
+ pass
36
+
37
+ AttrConstraint.componentType = namedtype.NamedTypes(
38
+ namedtype.NamedType('attrType', AttributeType()),
39
+ namedtype.NamedType('attrValues', univ.SetOf(
40
+ componentType=AttributeValue()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX)))
41
+ )
42
+
43
+
44
+ class AttrConstraintList(univ.SequenceOf):
45
+ pass
46
+
47
+ AttrConstraintList.componentType = AttrConstraint()
48
+ AttrConstraintList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
49
+
50
+
51
+ class ContentTypeGeneration(univ.Enumerated):
52
+ pass
53
+
54
+ ContentTypeGeneration.namedValues = namedval.NamedValues(
55
+ ('canSource', 0),
56
+ ('cannotSource', 1)
57
+ )
58
+
59
+
60
+ class ContentTypeConstraint(univ.Sequence):
61
+ pass
62
+
63
+ ContentTypeConstraint.componentType = namedtype.NamedTypes(
64
+ namedtype.NamedType('contentType', univ.ObjectIdentifier()),
65
+ namedtype.DefaultedNamedType('canSource', ContentTypeGeneration().subtype(value='canSource')),
66
+ namedtype.OptionalNamedType('attrConstraints', AttrConstraintList())
67
+ )
68
+
69
+
70
+ # CMS Content Constraints (CCC) Extension and Object Identifier
71
+
72
+ id_pe_cmsContentConstraints = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.18')
73
+
74
+ class CMSContentConstraints(univ.SequenceOf):
75
+ pass
76
+
77
+ CMSContentConstraints.componentType = ContentTypeConstraint()
78
+ CMSContentConstraints.subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
79
+
80
+
81
+ # Map of Certificate Extension OIDs to Extensions
82
+ # To be added to the ones that are in rfc5280.py
83
+
84
+ _certificateExtensionsMap = {
85
+ id_pe_cmsContentConstraints: CMSContentConstraints(),
86
+ }
87
+
88
+ rfc5280.certificateExtensionsMap.update(_certificateExtensionsMap)
lib/python3.10/site-packages/pyasn1_modules/rfc6019.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is being contributed to pyasn1-modules software.
2
+ #
3
+ # Created by Russ Housley.
4
+ # Modified by Russ Housley to add a map for use with opentypes.
5
+ #
6
+ # Copyright (c) 2019, Vigil Security, LLC
7
+ # License: http://snmplabs.com/pyasn1/license.html
8
+ #
9
+ # BinaryTime: An Alternate Format for Representing Date and Time
10
+ #
11
+ # ASN.1 source from:
12
+ # https://www.rfc-editor.org/rfc/rfc6019.txt
13
+
14
+ from pyasn1.type import constraint
15
+ from pyasn1.type import univ
16
+
17
+ from pyasn1_modules import rfc5652
18
+
19
+ MAX = float('inf')
20
+
21
+
22
+ # BinaryTime: Represent date and time as an integer
23
+
24
+ class BinaryTime(univ.Integer):
25
+ pass
26
+
27
+ BinaryTime.subtypeSpec = constraint.ValueRangeConstraint(0, MAX)
28
+
29
+
30
+ # CMS Attribute for representing signing time in BinaryTime
31
+
32
+ id_aa_binarySigningTime = univ.ObjectIdentifier('1.2.840.113549.1.9.16.2.46')
33
+
34
+ class BinarySigningTime(BinaryTime):
35
+ pass
36
+
37
+
38
+ # Map of Attribute Type OIDs to Attributes ia added to the
39
+ # ones that are in rfc5652.py
40
+
41
+ _cmsAttributesMapUpdate = {
42
+ id_aa_binarySigningTime: BinarySigningTime(),
43
+ }
44
+
45
+ rfc5652.cmsAttributesMap.update(_cmsAttributesMapUpdate)