Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- parrot/lib/libtinfow.so +3 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/payload_streamer.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/aiohttp/_cparser.pxd +158 -0
- parrot/lib/python3.10/site-packages/aiohttp/_find_header.pxd +2 -0
- parrot/lib/python3.10/site-packages/aiohttp/_http_parser.pyx +841 -0
- parrot/lib/python3.10/site-packages/aiohttp/client_reqrep.py +1274 -0
- parrot/lib/python3.10/site-packages/aiohttp/client_ws.py +398 -0
- parrot/lib/python3.10/site-packages/aiohttp/compression_utils.py +159 -0
- parrot/lib/python3.10/site-packages/aiohttp/connector.py +1594 -0
.gitattributes
CHANGED
|
@@ -99,3 +99,4 @@ parrot/lib/python3.10/site-packages/cv2/qt/fonts/DejaVuSans-ExtraLight.ttf filte
|
|
| 99 |
parrot/lib/libncursesw.a filter=lfs diff=lfs merge=lfs -text
|
| 100 |
parrot/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
| 101 |
parrot/lib/libncursesw.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 99 |
parrot/lib/libncursesw.a filter=lfs diff=lfs merge=lfs -text
|
| 100 |
parrot/lib/libncurses.so filter=lfs diff=lfs merge=lfs -text
|
| 101 |
parrot/lib/libncursesw.so.6.4 filter=lfs diff=lfs merge=lfs -text
|
| 102 |
+
parrot/lib/libtinfow.so filter=lfs diff=lfs merge=lfs -text
|
parrot/lib/libtinfow.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7ff9b333bc4b796b31c188c2dadd7840788cb963dbf4f34567deb3f326326b02
|
| 3 |
+
size 287080
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyi.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
6682a22524b9d4fc442e123672622be7bdfb6238d9709b7b15b2113b7ca6d52b /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyi
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_helpers.pyx.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
19d98f08efd55a40c99b2fc4c8341da7ee5cc143b1a59181014c3f43a3e95423 /home/runner/work/aiohttp/aiohttp/aiohttp/_helpers.pyx
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
e2d962e51a183b6e2723c1cb97b9f11c795bedc7093ae1eb038a7040dd8f4d70 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
6881c0a7c838655e646c645d99971efaf5e310bc3633a7c62b226e39d81842ac /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/_websocket.pyx.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
d57b8e48d0c26f20ebcc5e6e300da2b2a6aeb12b3c9768d64cb0e53432ccf48a /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket.pyx
|
parrot/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
bb39f96a09ff8d789dda1fa4cba63464043c06b3de4c62c31abfb07a231cb6ca /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc
ADDED
|
Binary file (9.3 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc
ADDED
|
Binary file (33.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_exceptions.cpython-310.pyc
ADDED
|
Binary file (12.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc
ADDED
|
Binary file (7.28 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc
ADDED
|
Binary file (31.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc
ADDED
|
Binary file (11.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc
ADDED
|
Binary file (5.2 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc
ADDED
|
Binary file (39.3 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc
ADDED
|
Binary file (11.8 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc
ADDED
|
Binary file (4.83 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc
ADDED
|
Binary file (5.18 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc
ADDED
|
Binary file (30 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc
ADDED
|
Binary file (1.45 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc
ADDED
|
Binary file (4.42 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc
ADDED
|
Binary file (5.69 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc
ADDED
|
Binary file (15.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/payload_streamer.cpython-310.pyc
ADDED
|
Binary file (3.48 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc
ADDED
|
Binary file (10.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc
ADDED
|
Binary file (4.53 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc
ADDED
|
Binary file (18.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc
ADDED
|
Binary file (22.1 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc
ADDED
|
Binary file (10.9 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc
ADDED
|
Binary file (11.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-310.pyc
ADDED
|
Binary file (7.74 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc
ADDED
|
Binary file (7.36 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc
ADDED
|
Binary file (3.84 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc
ADDED
|
Binary file (17.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc
ADDED
|
Binary file (24.8 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc
ADDED
|
Binary file (21.6 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc
ADDED
|
Binary file (7.64 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc
ADDED
|
Binary file (3.52 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc
ADDED
|
Binary file (43.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc
ADDED
|
Binary file (16.5 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/aiohttp/_cparser.pxd
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
cdef extern from "../vendor/llhttp/build/llhttp.h":
|
| 5 |
+
|
| 6 |
+
struct llhttp__internal_s:
|
| 7 |
+
int32_t _index
|
| 8 |
+
void* _span_pos0
|
| 9 |
+
void* _span_cb0
|
| 10 |
+
int32_t error
|
| 11 |
+
const char* reason
|
| 12 |
+
const char* error_pos
|
| 13 |
+
void* data
|
| 14 |
+
void* _current
|
| 15 |
+
uint64_t content_length
|
| 16 |
+
uint8_t type
|
| 17 |
+
uint8_t method
|
| 18 |
+
uint8_t http_major
|
| 19 |
+
uint8_t http_minor
|
| 20 |
+
uint8_t header_state
|
| 21 |
+
uint8_t lenient_flags
|
| 22 |
+
uint8_t upgrade
|
| 23 |
+
uint8_t finish
|
| 24 |
+
uint16_t flags
|
| 25 |
+
uint16_t status_code
|
| 26 |
+
void* settings
|
| 27 |
+
|
| 28 |
+
ctypedef llhttp__internal_s llhttp__internal_t
|
| 29 |
+
ctypedef llhttp__internal_t llhttp_t
|
| 30 |
+
|
| 31 |
+
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
|
| 32 |
+
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
|
| 33 |
+
|
| 34 |
+
struct llhttp_settings_s:
|
| 35 |
+
llhttp_cb on_message_begin
|
| 36 |
+
llhttp_data_cb on_url
|
| 37 |
+
llhttp_data_cb on_status
|
| 38 |
+
llhttp_data_cb on_header_field
|
| 39 |
+
llhttp_data_cb on_header_value
|
| 40 |
+
llhttp_cb on_headers_complete
|
| 41 |
+
llhttp_data_cb on_body
|
| 42 |
+
llhttp_cb on_message_complete
|
| 43 |
+
llhttp_cb on_chunk_header
|
| 44 |
+
llhttp_cb on_chunk_complete
|
| 45 |
+
|
| 46 |
+
llhttp_cb on_url_complete
|
| 47 |
+
llhttp_cb on_status_complete
|
| 48 |
+
llhttp_cb on_header_field_complete
|
| 49 |
+
llhttp_cb on_header_value_complete
|
| 50 |
+
|
| 51 |
+
ctypedef llhttp_settings_s llhttp_settings_t
|
| 52 |
+
|
| 53 |
+
enum llhttp_errno:
|
| 54 |
+
HPE_OK,
|
| 55 |
+
HPE_INTERNAL,
|
| 56 |
+
HPE_STRICT,
|
| 57 |
+
HPE_LF_EXPECTED,
|
| 58 |
+
HPE_UNEXPECTED_CONTENT_LENGTH,
|
| 59 |
+
HPE_CLOSED_CONNECTION,
|
| 60 |
+
HPE_INVALID_METHOD,
|
| 61 |
+
HPE_INVALID_URL,
|
| 62 |
+
HPE_INVALID_CONSTANT,
|
| 63 |
+
HPE_INVALID_VERSION,
|
| 64 |
+
HPE_INVALID_HEADER_TOKEN,
|
| 65 |
+
HPE_INVALID_CONTENT_LENGTH,
|
| 66 |
+
HPE_INVALID_CHUNK_SIZE,
|
| 67 |
+
HPE_INVALID_STATUS,
|
| 68 |
+
HPE_INVALID_EOF_STATE,
|
| 69 |
+
HPE_INVALID_TRANSFER_ENCODING,
|
| 70 |
+
HPE_CB_MESSAGE_BEGIN,
|
| 71 |
+
HPE_CB_HEADERS_COMPLETE,
|
| 72 |
+
HPE_CB_MESSAGE_COMPLETE,
|
| 73 |
+
HPE_CB_CHUNK_HEADER,
|
| 74 |
+
HPE_CB_CHUNK_COMPLETE,
|
| 75 |
+
HPE_PAUSED,
|
| 76 |
+
HPE_PAUSED_UPGRADE,
|
| 77 |
+
HPE_USER
|
| 78 |
+
|
| 79 |
+
ctypedef llhttp_errno llhttp_errno_t
|
| 80 |
+
|
| 81 |
+
enum llhttp_flags:
|
| 82 |
+
F_CHUNKED,
|
| 83 |
+
F_CONTENT_LENGTH
|
| 84 |
+
|
| 85 |
+
enum llhttp_type:
|
| 86 |
+
HTTP_REQUEST,
|
| 87 |
+
HTTP_RESPONSE,
|
| 88 |
+
HTTP_BOTH
|
| 89 |
+
|
| 90 |
+
enum llhttp_method:
|
| 91 |
+
HTTP_DELETE,
|
| 92 |
+
HTTP_GET,
|
| 93 |
+
HTTP_HEAD,
|
| 94 |
+
HTTP_POST,
|
| 95 |
+
HTTP_PUT,
|
| 96 |
+
HTTP_CONNECT,
|
| 97 |
+
HTTP_OPTIONS,
|
| 98 |
+
HTTP_TRACE,
|
| 99 |
+
HTTP_COPY,
|
| 100 |
+
HTTP_LOCK,
|
| 101 |
+
HTTP_MKCOL,
|
| 102 |
+
HTTP_MOVE,
|
| 103 |
+
HTTP_PROPFIND,
|
| 104 |
+
HTTP_PROPPATCH,
|
| 105 |
+
HTTP_SEARCH,
|
| 106 |
+
HTTP_UNLOCK,
|
| 107 |
+
HTTP_BIND,
|
| 108 |
+
HTTP_REBIND,
|
| 109 |
+
HTTP_UNBIND,
|
| 110 |
+
HTTP_ACL,
|
| 111 |
+
HTTP_REPORT,
|
| 112 |
+
HTTP_MKACTIVITY,
|
| 113 |
+
HTTP_CHECKOUT,
|
| 114 |
+
HTTP_MERGE,
|
| 115 |
+
HTTP_MSEARCH,
|
| 116 |
+
HTTP_NOTIFY,
|
| 117 |
+
HTTP_SUBSCRIBE,
|
| 118 |
+
HTTP_UNSUBSCRIBE,
|
| 119 |
+
HTTP_PATCH,
|
| 120 |
+
HTTP_PURGE,
|
| 121 |
+
HTTP_MKCALENDAR,
|
| 122 |
+
HTTP_LINK,
|
| 123 |
+
HTTP_UNLINK,
|
| 124 |
+
HTTP_SOURCE,
|
| 125 |
+
HTTP_PRI,
|
| 126 |
+
HTTP_DESCRIBE,
|
| 127 |
+
HTTP_ANNOUNCE,
|
| 128 |
+
HTTP_SETUP,
|
| 129 |
+
HTTP_PLAY,
|
| 130 |
+
HTTP_PAUSE,
|
| 131 |
+
HTTP_TEARDOWN,
|
| 132 |
+
HTTP_GET_PARAMETER,
|
| 133 |
+
HTTP_SET_PARAMETER,
|
| 134 |
+
HTTP_REDIRECT,
|
| 135 |
+
HTTP_RECORD,
|
| 136 |
+
HTTP_FLUSH
|
| 137 |
+
|
| 138 |
+
ctypedef llhttp_method llhttp_method_t;
|
| 139 |
+
|
| 140 |
+
void llhttp_settings_init(llhttp_settings_t* settings)
|
| 141 |
+
void llhttp_init(llhttp_t* parser, llhttp_type type,
|
| 142 |
+
const llhttp_settings_t* settings)
|
| 143 |
+
|
| 144 |
+
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
|
| 145 |
+
|
| 146 |
+
int llhttp_should_keep_alive(const llhttp_t* parser)
|
| 147 |
+
|
| 148 |
+
void llhttp_resume_after_upgrade(llhttp_t* parser)
|
| 149 |
+
|
| 150 |
+
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
|
| 151 |
+
const char* llhttp_get_error_reason(const llhttp_t* parser)
|
| 152 |
+
const char* llhttp_get_error_pos(const llhttp_t* parser)
|
| 153 |
+
|
| 154 |
+
const char* llhttp_method_name(llhttp_method_t method)
|
| 155 |
+
|
| 156 |
+
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
|
| 157 |
+
void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
|
| 158 |
+
void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
|
parrot/lib/python3.10/site-packages/aiohttp/_find_header.pxd
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "_find_header.h":
|
| 2 |
+
int find_header(char *, int)
|
parrot/lib/python3.10/site-packages/aiohttp/_http_parser.pyx
ADDED
|
@@ -0,0 +1,841 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#cython: language_level=3
|
| 2 |
+
#
|
| 3 |
+
# Based on https://github.com/MagicStack/httptools
|
| 4 |
+
#
|
| 5 |
+
|
| 6 |
+
from cpython cimport (
|
| 7 |
+
Py_buffer,
|
| 8 |
+
PyBUF_SIMPLE,
|
| 9 |
+
PyBuffer_Release,
|
| 10 |
+
PyBytes_AsString,
|
| 11 |
+
PyBytes_AsStringAndSize,
|
| 12 |
+
PyObject_GetBuffer,
|
| 13 |
+
)
|
| 14 |
+
from cpython.mem cimport PyMem_Free, PyMem_Malloc
|
| 15 |
+
from libc.limits cimport ULLONG_MAX
|
| 16 |
+
from libc.string cimport memcpy
|
| 17 |
+
|
| 18 |
+
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
|
| 19 |
+
from yarl import URL as _URL
|
| 20 |
+
|
| 21 |
+
from aiohttp import hdrs
|
| 22 |
+
from aiohttp.helpers import DEBUG, set_exception
|
| 23 |
+
|
| 24 |
+
from .http_exceptions import (
|
| 25 |
+
BadHttpMessage,
|
| 26 |
+
BadStatusLine,
|
| 27 |
+
ContentLengthError,
|
| 28 |
+
InvalidHeader,
|
| 29 |
+
InvalidURLError,
|
| 30 |
+
LineTooLong,
|
| 31 |
+
PayloadEncodingError,
|
| 32 |
+
TransferEncodingError,
|
| 33 |
+
)
|
| 34 |
+
from .http_parser import DeflateBuffer as _DeflateBuffer
|
| 35 |
+
from .http_writer import (
|
| 36 |
+
HttpVersion as _HttpVersion,
|
| 37 |
+
HttpVersion10 as _HttpVersion10,
|
| 38 |
+
HttpVersion11 as _HttpVersion11,
|
| 39 |
+
)
|
| 40 |
+
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
|
| 41 |
+
|
| 42 |
+
cimport cython
|
| 43 |
+
|
| 44 |
+
from aiohttp cimport _cparser as cparser
|
| 45 |
+
|
| 46 |
+
include "_headers.pxi"
|
| 47 |
+
|
| 48 |
+
from aiohttp cimport _find_header
|
| 49 |
+
|
| 50 |
+
ALLOWED_UPGRADES = frozenset({"websocket"})
|
| 51 |
+
DEF DEFAULT_FREELIST_SIZE = 250
|
| 52 |
+
|
| 53 |
+
cdef extern from "Python.h":
|
| 54 |
+
int PyByteArray_Resize(object, Py_ssize_t) except -1
|
| 55 |
+
Py_ssize_t PyByteArray_Size(object) except -1
|
| 56 |
+
char* PyByteArray_AsString(object)
|
| 57 |
+
|
| 58 |
+
__all__ = ('HttpRequestParser', 'HttpResponseParser',
|
| 59 |
+
'RawRequestMessage', 'RawResponseMessage')
|
| 60 |
+
|
| 61 |
+
cdef object URL = _URL
|
| 62 |
+
cdef object URL_build = URL.build
|
| 63 |
+
cdef object CIMultiDict = _CIMultiDict
|
| 64 |
+
cdef object CIMultiDictProxy = _CIMultiDictProxy
|
| 65 |
+
cdef object HttpVersion = _HttpVersion
|
| 66 |
+
cdef object HttpVersion10 = _HttpVersion10
|
| 67 |
+
cdef object HttpVersion11 = _HttpVersion11
|
| 68 |
+
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
|
| 69 |
+
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
|
| 70 |
+
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
|
| 71 |
+
cdef object StreamReader = _StreamReader
|
| 72 |
+
cdef object DeflateBuffer = _DeflateBuffer
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
cdef inline object extend(object buf, const char* at, size_t length):
|
| 76 |
+
cdef Py_ssize_t s
|
| 77 |
+
cdef char* ptr
|
| 78 |
+
s = PyByteArray_Size(buf)
|
| 79 |
+
PyByteArray_Resize(buf, s + length)
|
| 80 |
+
ptr = PyByteArray_AsString(buf)
|
| 81 |
+
memcpy(ptr + s, at, length)
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
DEF METHODS_COUNT = 46;
|
| 85 |
+
|
| 86 |
+
cdef list _http_method = []
|
| 87 |
+
|
| 88 |
+
for i in range(METHODS_COUNT):
|
| 89 |
+
_http_method.append(
|
| 90 |
+
cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
cdef inline str http_method_str(int i):
|
| 94 |
+
if i < METHODS_COUNT:
|
| 95 |
+
return <str>_http_method[i]
|
| 96 |
+
else:
|
| 97 |
+
return "<unknown>"
|
| 98 |
+
|
| 99 |
+
cdef inline object find_header(bytes raw_header):
|
| 100 |
+
cdef Py_ssize_t size
|
| 101 |
+
cdef char *buf
|
| 102 |
+
cdef int idx
|
| 103 |
+
PyBytes_AsStringAndSize(raw_header, &buf, &size)
|
| 104 |
+
idx = _find_header.find_header(buf, size)
|
| 105 |
+
if idx == -1:
|
| 106 |
+
return raw_header.decode('utf-8', 'surrogateescape')
|
| 107 |
+
return headers[idx]
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
| 111 |
+
cdef class RawRequestMessage:
|
| 112 |
+
cdef readonly str method
|
| 113 |
+
cdef readonly str path
|
| 114 |
+
cdef readonly object version # HttpVersion
|
| 115 |
+
cdef readonly object headers # CIMultiDict
|
| 116 |
+
cdef readonly object raw_headers # tuple
|
| 117 |
+
cdef readonly object should_close
|
| 118 |
+
cdef readonly object compression
|
| 119 |
+
cdef readonly object upgrade
|
| 120 |
+
cdef readonly object chunked
|
| 121 |
+
cdef readonly object url # yarl.URL
|
| 122 |
+
|
| 123 |
+
def __init__(self, method, path, version, headers, raw_headers,
|
| 124 |
+
should_close, compression, upgrade, chunked, url):
|
| 125 |
+
self.method = method
|
| 126 |
+
self.path = path
|
| 127 |
+
self.version = version
|
| 128 |
+
self.headers = headers
|
| 129 |
+
self.raw_headers = raw_headers
|
| 130 |
+
self.should_close = should_close
|
| 131 |
+
self.compression = compression
|
| 132 |
+
self.upgrade = upgrade
|
| 133 |
+
self.chunked = chunked
|
| 134 |
+
self.url = url
|
| 135 |
+
|
| 136 |
+
def __repr__(self):
|
| 137 |
+
info = []
|
| 138 |
+
info.append(("method", self.method))
|
| 139 |
+
info.append(("path", self.path))
|
| 140 |
+
info.append(("version", self.version))
|
| 141 |
+
info.append(("headers", self.headers))
|
| 142 |
+
info.append(("raw_headers", self.raw_headers))
|
| 143 |
+
info.append(("should_close", self.should_close))
|
| 144 |
+
info.append(("compression", self.compression))
|
| 145 |
+
info.append(("upgrade", self.upgrade))
|
| 146 |
+
info.append(("chunked", self.chunked))
|
| 147 |
+
info.append(("url", self.url))
|
| 148 |
+
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
| 149 |
+
return '<RawRequestMessage(' + sinfo + ')>'
|
| 150 |
+
|
| 151 |
+
def _replace(self, **dct):
|
| 152 |
+
cdef RawRequestMessage ret
|
| 153 |
+
ret = _new_request_message(self.method,
|
| 154 |
+
self.path,
|
| 155 |
+
self.version,
|
| 156 |
+
self.headers,
|
| 157 |
+
self.raw_headers,
|
| 158 |
+
self.should_close,
|
| 159 |
+
self.compression,
|
| 160 |
+
self.upgrade,
|
| 161 |
+
self.chunked,
|
| 162 |
+
self.url)
|
| 163 |
+
if "method" in dct:
|
| 164 |
+
ret.method = dct["method"]
|
| 165 |
+
if "path" in dct:
|
| 166 |
+
ret.path = dct["path"]
|
| 167 |
+
if "version" in dct:
|
| 168 |
+
ret.version = dct["version"]
|
| 169 |
+
if "headers" in dct:
|
| 170 |
+
ret.headers = dct["headers"]
|
| 171 |
+
if "raw_headers" in dct:
|
| 172 |
+
ret.raw_headers = dct["raw_headers"]
|
| 173 |
+
if "should_close" in dct:
|
| 174 |
+
ret.should_close = dct["should_close"]
|
| 175 |
+
if "compression" in dct:
|
| 176 |
+
ret.compression = dct["compression"]
|
| 177 |
+
if "upgrade" in dct:
|
| 178 |
+
ret.upgrade = dct["upgrade"]
|
| 179 |
+
if "chunked" in dct:
|
| 180 |
+
ret.chunked = dct["chunked"]
|
| 181 |
+
if "url" in dct:
|
| 182 |
+
ret.url = dct["url"]
|
| 183 |
+
return ret
|
| 184 |
+
|
| 185 |
+
cdef _new_request_message(str method,
|
| 186 |
+
str path,
|
| 187 |
+
object version,
|
| 188 |
+
object headers,
|
| 189 |
+
object raw_headers,
|
| 190 |
+
bint should_close,
|
| 191 |
+
object compression,
|
| 192 |
+
bint upgrade,
|
| 193 |
+
bint chunked,
|
| 194 |
+
object url):
|
| 195 |
+
cdef RawRequestMessage ret
|
| 196 |
+
ret = RawRequestMessage.__new__(RawRequestMessage)
|
| 197 |
+
ret.method = method
|
| 198 |
+
ret.path = path
|
| 199 |
+
ret.version = version
|
| 200 |
+
ret.headers = headers
|
| 201 |
+
ret.raw_headers = raw_headers
|
| 202 |
+
ret.should_close = should_close
|
| 203 |
+
ret.compression = compression
|
| 204 |
+
ret.upgrade = upgrade
|
| 205 |
+
ret.chunked = chunked
|
| 206 |
+
ret.url = url
|
| 207 |
+
return ret
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
| 211 |
+
cdef class RawResponseMessage:
|
| 212 |
+
cdef readonly object version # HttpVersion
|
| 213 |
+
cdef readonly int code
|
| 214 |
+
cdef readonly str reason
|
| 215 |
+
cdef readonly object headers # CIMultiDict
|
| 216 |
+
cdef readonly object raw_headers # tuple
|
| 217 |
+
cdef readonly object should_close
|
| 218 |
+
cdef readonly object compression
|
| 219 |
+
cdef readonly object upgrade
|
| 220 |
+
cdef readonly object chunked
|
| 221 |
+
|
| 222 |
+
def __init__(self, version, code, reason, headers, raw_headers,
|
| 223 |
+
should_close, compression, upgrade, chunked):
|
| 224 |
+
self.version = version
|
| 225 |
+
self.code = code
|
| 226 |
+
self.reason = reason
|
| 227 |
+
self.headers = headers
|
| 228 |
+
self.raw_headers = raw_headers
|
| 229 |
+
self.should_close = should_close
|
| 230 |
+
self.compression = compression
|
| 231 |
+
self.upgrade = upgrade
|
| 232 |
+
self.chunked = chunked
|
| 233 |
+
|
| 234 |
+
def __repr__(self):
|
| 235 |
+
info = []
|
| 236 |
+
info.append(("version", self.version))
|
| 237 |
+
info.append(("code", self.code))
|
| 238 |
+
info.append(("reason", self.reason))
|
| 239 |
+
info.append(("headers", self.headers))
|
| 240 |
+
info.append(("raw_headers", self.raw_headers))
|
| 241 |
+
info.append(("should_close", self.should_close))
|
| 242 |
+
info.append(("compression", self.compression))
|
| 243 |
+
info.append(("upgrade", self.upgrade))
|
| 244 |
+
info.append(("chunked", self.chunked))
|
| 245 |
+
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
| 246 |
+
return '<RawResponseMessage(' + sinfo + ')>'
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    """Fast-path constructor for ``RawResponseMessage``.

    Allocates via ``__new__`` and assigns fields directly, skipping
    ``__init__`` argument handling — used on the parser hot path.
    """
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
@cython.internal
cdef class HttpParser:
    """Base wrapper around the llhttp C parser.

    Owns the heap-allocated ``llhttp_t``/``llhttp_settings_t`` structs,
    accumulates header bytes from llhttp callbacks, and turns completed
    message heads into (message, payload) pairs collected in
    ``_messages``.  Subclasses select request vs response mode via
    ``_init`` and override ``_on_status_complete``.
    """

    cdef:
        cparser.llhttp_t* _cparser  # heap-allocated llhttp parser state
        cparser.llhttp_settings_t* _csettings  # llhttp callback table

        # accumulators for the header field/value currently being parsed
        # (llhttp may deliver each in several chunks)
        bytearray _raw_name
        bytearray _raw_value
        bint _has_value  # a value chunk has arrived for _raw_name

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started  # a message head is being parsed
        object _url
        bytearray _buf  # accumulates the URL (request) / reason (response)
        str _path
        str _reason
        object _headers  # CIMultiDict under construction
        list _raw_headers  # list of (bytes, bytes) pairs
        bint _upgraded
        list _messages  # completed (msg, payload) pairs awaiting pickup
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error  # exception captured inside a C callback
        bint _auto_decompress
        int _limit

        str _content_encoding  # last seen Content-Encoding header value

        Py_buffer py_buf  # buffer view of the data passed to feed_data

    def __cinit__(self):
        # Allocate the llhttp structs on the heap; freed in __dealloc__.
        self._cparser = <cparser.llhttp_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_t))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.llhttp_settings_t*> \
            PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(
        self, cparser.llhttp_type mode,
        object protocol, object loop, int limit,
        object timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        """Initialize llhttp in *mode* (request/response) and reset all
        Python-side state; wires the module-level cb_* callbacks."""
        cparser.llhttp_settings_init(self._csettings)
        cparser.llhttp_init(self._cparser, mode, self._csettings)
        # Stash a back-pointer so the C callbacks can reach this object.
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        """Flush the accumulated name/value pair into _headers/_raw_headers."""
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            # Canonical header-name lookup; value decoded leniently so
            # non-UTF-8 bytes survive via surrogateescape.
            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            # 'is' works here because find_header returns interned constants
            # for known headers — NOTE(review): assumes CONTENT_ENCODING is
            # one of those; verify against find_header's implementation.
            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        """Append a header-name chunk; a preceding complete pair is flushed."""
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            # A new field starts, so the previous name/value pair is done.
            self._process_header()

        # Grow the bytearray in place and memcpy the chunk in.
        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        """Append a header-value chunk for the name collected so far."""
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        """Finalize the message head: build the message object, decide the
        payload strategy, and queue (msg, payload) on _messages."""
        self._process_header()

        should_close = not cparser.llhttp_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if self._cparser.type == cparser.HTTP_REQUEST:
            # Requests: only honour Upgrade for whitelisted protocols,
            # or a CONNECT tunnel.
            allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES
            if allowed or self._cparser.method == cparser.HTTP_CONNECT:
                self._upgraded = True
        else:
            # Responses: upgrade only on 101 Switching Protocols.
            if upgrade and self._cparser.status_code == 101:
                self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            # Only encodings we can decompress are surfaced.
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            method = http_method_str(self._cparser.method)
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        # A real body is expected when: a finite positive content-length,
        # chunked framing, a CONNECT tunnel, or (responses without a
        # declared length) read-until-EOF mode.
        if (
            ULLONG_MAX > self._cparser.content_length > 0 or chunked or
            self._cparser.method == cparser.HTTP_CONNECT or
            (self._cparser.status_code >= 199 and
             self._cparser.content_length == 0 and
             self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            # Body bytes are fed through the decompressor; note the
            # *wrapped* reader is what receives parser data.
            self._payload = DeflateBuffer(payload, encoding)

        if not self._response_with_body:
            # Head-only mode (e.g. HEAD): consumer sees an empty payload
            # even though the parser still tracks the real one.
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        # Hook: subclasses decode the accumulated status/URL buffer here.
        pass

    cdef inline http_version(self):
        """Return the parsed HTTP version, reusing the 1.0/1.1 singletons."""
        cdef cparser.llhttp_t* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        """Signal end of input.

        Raises a framing error if a body was still being read; otherwise
        forwards EOF to the payload, or — if only a head was in progress —
        finalizes it and returns the last parsed message.
        """
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
                desc = cparser.llhttp_get_error_reason(self._cparser)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        """Run *data* through llhttp.

        Returns ``(messages, upgraded, tail)`` where *tail* is the unparsed
        remainder after a protocol upgrade (empty bytes otherwise).
        """
        cdef:
            size_t data_len
            size_t nb
        cdef cparser.llhttp_errno_t errno

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        errno = cparser.llhttp_execute(
            self._cparser,
            <char*>self.py_buf.buf,
            data_len)

        if errno is cparser.HPE_PAUSED_UPGRADE:
            # llhttp pauses at the upgrade boundary; resume so error_pos
            # reflects how much of the buffer was actually consumed.
            cparser.llhttp_resume_after_upgrade(self._cparser)

        # Number of bytes consumed (error position == stop position).
        nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

        PyBuffer_Release(&self.py_buf)

        if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
            if self._payload_error == 0:
                if self._last_error is not None:
                    # An exception raised inside a cb_* callback wins over
                    # llhttp's own error code.
                    ex = self._last_error
                    self._last_error = None
                else:
                    # Build a one-line context snippet around the error
                    # position with a caret pointer.
                    # NOTE(review): self.py_buf is read here *after*
                    # PyBuffer_Release above — the struct fields still hold
                    # their old values, but this relies on that; confirm.
                    after = cparser.llhttp_get_error_pos(self._cparser)
                    before = data[:after - <char*>self.py_buf.buf]
                    after_b = after.split(b"\r\n", 1)[0]
                    before = before.rsplit(b"\r\n", 1)[-1]
                    data = before + after_b
                    pointer = " " * (len(repr(before))-1) + "^"
                    ex = parser_error_from_errno(self._cparser, data, pointer)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            # Hand back the bytes llhttp did not consume so the caller can
            # switch protocols.
            return messages, True, data[nb:]
        else:
            return messages, False, b""

    def set_upgraded(self, val):
        # Externally toggle upgrade mode (e.g. after CONNECT handling).
        self._upgraded = val
|
| 578 |
+
|
| 579 |
+
|
| 580 |
+
cdef class HttpRequestParser(HttpParser):
    """HTTP *request* parser: llhttp in HTTP_REQUEST mode plus
    request-target (RFC 7230 §5.3) parsing into a yarl URL."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        """Decode the buffered request-target into ``_path`` and ``_url``.

        The buffer is always cleared, even if URL parsing raises.
        """
        cdef int idx1, idx2
        if not self._buf:
            return
        # Lenient decode: raw bytes survive via surrogateescape.
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx3 = len(self._path)
            if self._cparser.method == cparser.HTTP_CONNECT:
                # authority-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
                self._url = URL.build(authority=self._path, encoded=True)
            elif idx3 > 1 and self._path[0] == '/':
                # origin-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
                # Split path / query / fragment by hand on '?' and '#'.
                idx1 = self._path.find("?")
                if idx1 == -1:
                    query = ""
                    idx2 = self._path.find("#")
                    if idx2 == -1:
                        path = self._path
                        fragment = ""
                    else:
                        path = self._path[0: idx2]
                        fragment = self._path[idx2+1:]

                else:
                    path = self._path[0:idx1]
                    idx1 += 1
                    # Fragment can only appear after the query started.
                    idx2 = self._path.find("#", idx1+1)
                    if idx2 == -1:
                        query = self._path[idx1:]
                        fragment = ""
                    else:
                        query = self._path[idx1: idx2]
                        fragment = self._path[idx2+1:]

                self._url = URL.build(
                    path=path,
                    query_string=query,
                    fragment=fragment,
                    encoded=True,
                )
            else:
                # absolute-form for proxy maybe,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
                # Note: a one-character target such as "/" or "*" also
                # lands here (idx3 > 1 excludes it from origin-form above).
                self._url = URL(self._path, encoded=True)
        finally:
            PyByteArray_Resize(self._buf, 0)
|
| 642 |
+
|
| 643 |
+
|
| 644 |
+
cdef class HttpResponseParser(HttpParser):
    """HTTP *response* parser: llhttp in HTTP_RESPONSE mode; the buffered
    status text becomes the reason phrase."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True
    ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)
        # Strict parsing only in DEBUG mode so developers notice broken
        # servers; in normal mode enable llhttp's lenient options to
        # tolerate common real-world violations.
        if not DEBUG:
            cparser.llhttp_set_lenient_headers(self._cparser, 1)
            cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
            cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)

    cdef object _on_status_complete(self):
        """Decode the buffered status text into ``_reason`` ('' if absent)."""
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''
|
| 669 |
+
|
| 670 |
+
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
    # llhttp callback: a new message starts.  Reset all per-message state
    # on the owning HttpParser (recovered from parser.data).
    cdef HttpParser owner = <HttpParser>parser.data

    owner._path = None
    owner._reason = None
    owner._headers = CIMultiDict()
    owner._raw_headers = []
    PyByteArray_Resize(owner._buf, 0)
    owner._started = True
    return 0
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
cdef int cb_on_url(cparser.llhttp_t* parser,
                   const char *at, size_t length) except -1:
    """llhttp callback: a chunk of the request-target arrived.

    Appends it to the parser's ``_buf`` after a size check; any Python
    exception is stashed on ``_last_error`` and signalled to llhttp by
    returning -1.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
| 695 |
+
|
| 696 |
+
|
| 697 |
+
cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    """llhttp callback: a chunk of the status/reason text arrived.

    Same buffering-and-size-check scheme as ``cb_on_url``; errors are
    stashed on ``_last_error`` and -1 tells llhttp to stop.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef str reason
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
| 711 |
+
|
| 712 |
+
|
| 713 |
+
cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp callback: a chunk of a header *name* arrived.

    First header field implies the status/URL line is complete, hence the
    ``_on_status_complete`` call.  Enforces ``_max_field_size`` on the
    accumulated name length.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        pyparser._on_status_complete()
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
| 729 |
+
|
| 730 |
+
|
| 731 |
+
cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp callback: a chunk of a header *value* arrived.

    Enforces ``_max_field_size`` on the accumulated value length, then
    delegates buffering to ``_on_header_value``.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: the full message head has been parsed.

    Finalizes status line and headers.  Returning 2 tells llhttp there is
    no conventional body to parse (upgrade/CONNECT takes over the
    connection); 0 means parse the body normally; -1 reports an error.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:
            return 2
        else:
            return 0
|
| 761 |
+
|
| 762 |
+
|
| 763 |
+
cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    """llhttp callback: a chunk of body bytes arrived.

    Copies the chunk into a Python ``bytes`` and feeds the payload stream.
    On failure the (optionally wrapped) exception is set on the payload so
    the consumer sees it, ``_payload_error`` suppresses the generic parser
    error later, and -1 aborts llhttp.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as underlying_exc:
        reraised_exc = underlying_exc
        if pyparser._payload_exception is not None:
            # Wrap in the caller-provided exception type, keeping the
            # original message text.
            reraised_exc = pyparser._payload_exception(str(underlying_exc))

        set_exception(pyparser._payload, reraised_exc, underlying_exc)

        pyparser._payload_error = 1
        return -1
    else:
        return 0
|
| 780 |
+
|
| 781 |
+
|
| 782 |
+
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    # llhttp callback: the message (head + body) is fully parsed.
    # Clear the in-progress flag and let the payload emit EOF; any Python
    # error is stashed on _last_error and reported to llhttp as -1.
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._started = False
        owner._on_message_complete()
    except BaseException as err:
        owner._last_error = err
        return -1
    return 0
|
| 792 |
+
|
| 793 |
+
|
| 794 |
+
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    # llhttp callback: a chunked-transfer chunk header was parsed.
    # Forward to the payload; stash errors on _last_error and return -1.
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_chunk_header()
    except BaseException as err:
        owner._last_error = err
        return -1
    return 0
|
| 803 |
+
|
| 804 |
+
|
| 805 |
+
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    # llhttp callback: the current chunked-transfer chunk has ended.
    # Forward to the payload; stash errors on _last_error and return -1.
    cdef HttpParser owner = <HttpParser>parser.data
    try:
        owner._on_chunk_complete()
    except BaseException as err:
        owner._last_error = err
        return -1
    return 0
|
| 814 |
+
|
| 815 |
+
|
| 816 |
+
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
    """Map llhttp's current errno to an aiohttp exception.

    *data* is the context snippet around the error and *pointer* the caret
    line; both are embedded in the message.  Unknown errnos fall through
    to ``BadHttpMessage``.
    """
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)

    # Callback failures and malformed-message errnos -> generic bad message.
    if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
                 cparser.HPE_CB_HEADERS_COMPLETE,
                 cparser.HPE_CB_MESSAGE_COMPLETE,
                 cparser.HPE_CB_CHUNK_HEADER,
                 cparser.HPE_CB_CHUNK_COMPLETE,
                 cparser.HPE_INVALID_CONSTANT,
                 cparser.HPE_INVALID_HEADER_TOKEN,
                 cparser.HPE_INVALID_CONTENT_LENGTH,
                 cparser.HPE_INVALID_CHUNK_SIZE,
                 cparser.HPE_INVALID_EOF_STATE,
                 cparser.HPE_INVALID_TRANSFER_ENCODING}:
        return BadHttpMessage(err_msg)
    # Status-line specific errnos.
    elif errno in {cparser.HPE_INVALID_STATUS,
                   cparser.HPE_INVALID_METHOD,
                   cparser.HPE_INVALID_VERSION}:
        return BadStatusLine(error=err_msg)
    elif errno == cparser.HPE_INVALID_URL:
        return InvalidURLError(err_msg)

    return BadHttpMessage(err_msg)
|
parrot/lib/python3.10/site-packages/aiohttp/client_reqrep.py
ADDED
|
@@ -0,0 +1,1274 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import codecs
|
| 3 |
+
import contextlib
|
| 4 |
+
import functools
|
| 5 |
+
import io
|
| 6 |
+
import re
|
| 7 |
+
import sys
|
| 8 |
+
import traceback
|
| 9 |
+
import warnings
|
| 10 |
+
from hashlib import md5, sha1, sha256
|
| 11 |
+
from http.cookies import CookieError, Morsel, SimpleCookie
|
| 12 |
+
from types import MappingProxyType, TracebackType
|
| 13 |
+
from typing import (
|
| 14 |
+
TYPE_CHECKING,
|
| 15 |
+
Any,
|
| 16 |
+
Callable,
|
| 17 |
+
Dict,
|
| 18 |
+
Iterable,
|
| 19 |
+
List,
|
| 20 |
+
Mapping,
|
| 21 |
+
Optional,
|
| 22 |
+
Tuple,
|
| 23 |
+
Type,
|
| 24 |
+
Union,
|
| 25 |
+
cast,
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
import attr
|
| 29 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
|
| 30 |
+
from yarl import URL, __version__ as yarl_version
|
| 31 |
+
|
| 32 |
+
from . import hdrs, helpers, http, multipart, payload
|
| 33 |
+
from .abc import AbstractStreamWriter
|
| 34 |
+
from .client_exceptions import (
|
| 35 |
+
ClientConnectionError,
|
| 36 |
+
ClientOSError,
|
| 37 |
+
ClientResponseError,
|
| 38 |
+
ContentTypeError,
|
| 39 |
+
InvalidURL,
|
| 40 |
+
ServerFingerprintMismatch,
|
| 41 |
+
)
|
| 42 |
+
from .compression_utils import HAS_BROTLI
|
| 43 |
+
from .formdata import FormData
|
| 44 |
+
from .helpers import (
|
| 45 |
+
BaseTimerContext,
|
| 46 |
+
BasicAuth,
|
| 47 |
+
HeadersMixin,
|
| 48 |
+
TimerNoop,
|
| 49 |
+
basicauth_from_netrc,
|
| 50 |
+
netrc_from_env,
|
| 51 |
+
noop,
|
| 52 |
+
reify,
|
| 53 |
+
set_exception,
|
| 54 |
+
set_result,
|
| 55 |
+
)
|
| 56 |
+
from .http import (
|
| 57 |
+
SERVER_SOFTWARE,
|
| 58 |
+
HttpVersion,
|
| 59 |
+
HttpVersion10,
|
| 60 |
+
HttpVersion11,
|
| 61 |
+
StreamWriter,
|
| 62 |
+
)
|
| 63 |
+
from .log import client_logger
|
| 64 |
+
from .streams import StreamReader
|
| 65 |
+
from .typedefs import (
|
| 66 |
+
DEFAULT_JSON_DECODER,
|
| 67 |
+
JSONDecoder,
|
| 68 |
+
LooseCookies,
|
| 69 |
+
LooseHeaders,
|
| 70 |
+
Query,
|
| 71 |
+
RawHeaders,
|
| 72 |
+
)
|
| 73 |
+
|
| 74 |
+
try:
|
| 75 |
+
import ssl
|
| 76 |
+
from ssl import SSLContext
|
| 77 |
+
except ImportError: # pragma: no cover
|
| 78 |
+
ssl = None # type: ignore[assignment]
|
| 79 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
if TYPE_CHECKING:
|
| 86 |
+
from .client import ClientSession
|
| 87 |
+
from .connector import Connection
|
| 88 |
+
from .tracing import Trace
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
|
| 92 |
+
_YARL_SUPPORTS_EXTEND_QUERY = tuple(map(int, yarl_version.split(".")[:2])) >= (1, 11)
|
| 93 |
+
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def _gen_default_accept_encoding() -> str:
    """Build the default ``Accept-Encoding`` value.

    Advertises brotli in addition to gzip/deflate when the optional
    brotli codec is installed.
    """
    if HAS_BROTLI:
        return "gzip, deflate, br"
    return "gzip, deflate"
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    """Parsed representation of a ``Content-Disposition`` response header."""

    # Disposition type, e.g. "attachment" or "inline"; None when absent.
    type: Optional[str]
    # Remaining header parameters, exposed as a read-only mapping.
    parameters: "MappingProxyType[str, str]"
    # Filename extracted from the parameters, if any.
    filename: Optional[str]
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
    """Immutable snapshot of an outgoing request.

    Attached to responses and client errors so callers can inspect what
    was actually sent.
    """

    # Request URL with the fragment stripped.
    url: URL
    method: str
    headers: "CIMultiDictProxy[str]"
    # URL as originally supplied by the caller (fragment preserved).
    real_url: URL = attr.ib()

    @real_url.default
    def real_url_default(self) -> URL:
        # When no original URL is given, fall back to the fragment-less one.
        return self.url
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
class Fingerprint:
    """Pin a server certificate by its digest.

    Only SHA-256 digests (32 bytes) are accepted; md5 and sha1 are
    recognised by length solely to produce a helpful error.
    """

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if not hashfunc:
            raise ValueError("fingerprint has invalid length")
        if hashfunc is md5 or hashfunc is sha1:
            raise ValueError(
                "md5 and sha1 are insecure and not supported. Use sha256."
            )
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        """The expected certificate digest, as raw bytes."""
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Verify the peer certificate of *transport* against the pinned digest.

        No-op for plain (non-TLS) transports; raises
        ServerFingerprintMismatch when the digests differ.
        """
        if not transport.get_extra_info("sslcontext"):
            return
        sslobj = transport.get_extra_info("ssl_object")
        cert = sslobj.getpeercert(binary_form=True)
        digest = self._hashfunc(cert).digest()
        if digest != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, digest, host, port)
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
if ssl is not None:
|
| 154 |
+
SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
|
| 155 |
+
else: # pragma: no cover
|
| 156 |
+
SSL_ALLOWED_TYPES = (bool, type(None))
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint]:
    """Fold the deprecated TLS keyword arguments into the single ``ssl`` value.

    Each legacy argument emits a DeprecationWarning and is rejected if
    ``ssl`` was also set to a non-default value.
    """
    if ssl is None:
        ssl = True  # Double check for backwards compatibility
    # Same message for every mutually-exclusive combination.
    mutually_exclusive = (
        "verify_ssl, ssl_context, fingerprint and ssl "
        "parameters are mutually exclusive"
    )
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive)
        ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive)
        ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive)
        ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl
|
| 212 |
+
|
| 213 |
+
|
| 214 |
+
@attr.s(auto_attribs=True, slots=True, frozen=True, cache_hash=True)
class ConnectionKey:
    """Hashable key identifying a reusable pooled connection."""

    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
def _is_expected_content_type(
|
| 228 |
+
response_content_type: str, expected_content_type: str
|
| 229 |
+
) -> bool:
|
| 230 |
+
if expected_content_type == "application/json":
|
| 231 |
+
return json_re.match(response_content_type) is not None
|
| 232 |
+
return expected_content_type in response_content_type
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
class ClientRequest:
    """Build and transmit a single HTTP request over an established connection."""

    # Methods that by default carry no body.
    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
    }

    # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
    body: Any = b""
    auth = None
    response = None

    __writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.
|
| 262 |
+
|
| 263 |
+
def __init__(
|
| 264 |
+
self,
|
| 265 |
+
method: str,
|
| 266 |
+
url: URL,
|
| 267 |
+
*,
|
| 268 |
+
params: Query = None,
|
| 269 |
+
headers: Optional[LooseHeaders] = None,
|
| 270 |
+
skip_auto_headers: Optional[Iterable[str]] = None,
|
| 271 |
+
data: Any = None,
|
| 272 |
+
cookies: Optional[LooseCookies] = None,
|
| 273 |
+
auth: Optional[BasicAuth] = None,
|
| 274 |
+
version: http.HttpVersion = http.HttpVersion11,
|
| 275 |
+
compress: Union[str, bool, None] = None,
|
| 276 |
+
chunked: Optional[bool] = None,
|
| 277 |
+
expect100: bool = False,
|
| 278 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 279 |
+
response_class: Optional[Type["ClientResponse"]] = None,
|
| 280 |
+
proxy: Optional[URL] = None,
|
| 281 |
+
proxy_auth: Optional[BasicAuth] = None,
|
| 282 |
+
timer: Optional[BaseTimerContext] = None,
|
| 283 |
+
session: Optional["ClientSession"] = None,
|
| 284 |
+
ssl: Union[SSLContext, bool, Fingerprint] = True,
|
| 285 |
+
proxy_headers: Optional[LooseHeaders] = None,
|
| 286 |
+
traces: Optional[List["Trace"]] = None,
|
| 287 |
+
trust_env: bool = False,
|
| 288 |
+
server_hostname: Optional[str] = None,
|
| 289 |
+
):
|
| 290 |
+
if loop is None:
|
| 291 |
+
loop = asyncio.get_event_loop()
|
| 292 |
+
|
| 293 |
+
match = _CONTAINS_CONTROL_CHAR_RE.search(method)
|
| 294 |
+
if match:
|
| 295 |
+
raise ValueError(
|
| 296 |
+
f"Method cannot contain non-token characters {method!r} "
|
| 297 |
+
"(found at least {match.group()!r})"
|
| 298 |
+
)
|
| 299 |
+
|
| 300 |
+
assert isinstance(url, URL), url
|
| 301 |
+
assert isinstance(proxy, (URL, type(None))), proxy
|
| 302 |
+
# FIXME: session is None in tests only, need to fix tests
|
| 303 |
+
# assert session is not None
|
| 304 |
+
self._session = cast("ClientSession", session)
|
| 305 |
+
if params:
|
| 306 |
+
if _YARL_SUPPORTS_EXTEND_QUERY:
|
| 307 |
+
url = url.extend_query(params)
|
| 308 |
+
else:
|
| 309 |
+
q = MultiDict(url.query)
|
| 310 |
+
url2 = url.with_query(params)
|
| 311 |
+
q.extend(url2.query)
|
| 312 |
+
url = url.with_query(q)
|
| 313 |
+
self.original_url = url
|
| 314 |
+
self.url = url.with_fragment(None)
|
| 315 |
+
self.method = method.upper()
|
| 316 |
+
self.chunked = chunked
|
| 317 |
+
self.compress = compress
|
| 318 |
+
self.loop = loop
|
| 319 |
+
self.length = None
|
| 320 |
+
if response_class is None:
|
| 321 |
+
real_response_class = ClientResponse
|
| 322 |
+
else:
|
| 323 |
+
real_response_class = response_class
|
| 324 |
+
self.response_class: Type[ClientResponse] = real_response_class
|
| 325 |
+
self._timer = timer if timer is not None else TimerNoop()
|
| 326 |
+
self._ssl = ssl if ssl is not None else True
|
| 327 |
+
self.server_hostname = server_hostname
|
| 328 |
+
|
| 329 |
+
if loop.get_debug():
|
| 330 |
+
self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
| 331 |
+
|
| 332 |
+
self.update_version(version)
|
| 333 |
+
self.update_host(url)
|
| 334 |
+
self.update_headers(headers)
|
| 335 |
+
self.update_auto_headers(skip_auto_headers)
|
| 336 |
+
self.update_cookies(cookies)
|
| 337 |
+
self.update_content_encoding(data)
|
| 338 |
+
self.update_auth(auth, trust_env)
|
| 339 |
+
self.update_proxy(proxy, proxy_auth, proxy_headers)
|
| 340 |
+
|
| 341 |
+
self.update_body_from_data(data)
|
| 342 |
+
if data is not None or self.method not in self.GET_METHODS:
|
| 343 |
+
self.update_transfer_encoding()
|
| 344 |
+
self.update_expect_continue(expect100)
|
| 345 |
+
if traces is None:
|
| 346 |
+
traces = []
|
| 347 |
+
self._traces = traces
|
| 348 |
+
|
| 349 |
+
    def __reset_writer(self, _: object = None) -> None:
        # Done-callback target; name-mangled so subclasses can't clash.
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The task currently streaming the request body, if any."""
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        # Detach the callback from the previous task so it cannot fire later
        # and clear a newly-assigned writer.
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is None:
            return
        if writer.done():
            # The writer is already done, so we can reset it immediately.
            self.__reset_writer()
        else:
            writer.add_done_callback(self.__reset_writer)
|
| 368 |
+
|
| 369 |
+
    def is_ssl(self) -> bool:
        """True when the target URL uses a TLS scheme (https/wss)."""
        return self.url.scheme in ("https", "wss")

    @property
    def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
        """The TLS configuration supplied for this request."""
        return self._ssl

    @property
    def connection_key(self) -> ConnectionKey:
        """Key used by the connector pool to match reusable connections."""
        proxy_headers = self.proxy_headers
        if proxy_headers:
            # Headers are unhashable; fold them into an int so the key stays
            # hashable while still distinguishing different proxy headers.
            h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
        else:
            h = None
        return ConnectionKey(
            self.host,
            self.port,
            self.is_ssl(),
            self.ssl,
            self.proxy,
            self.proxy_auth,
            h,
        )
|
| 392 |
+
|
| 393 |
+
    @property
    def host(self) -> str:
        """Target host, guaranteed non-None (validated in update_host)."""
        ret = self.url.raw_host
        assert ret is not None
        return ret

    @property
    def port(self) -> Optional[int]:
        """Target port, or None when the URL carries none."""
        return self.url.port

    @property
    def request_info(self) -> RequestInfo:
        """Immutable snapshot of this request for responses and errors."""
        headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
        return RequestInfo(self.url, self.method, headers, self.original_url)
|
| 407 |
+
|
| 408 |
+
    def update_host(self, url: URL) -> None:
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info embedded in the URL (user:pass@host)
        username, password = url.user, url.password
        if username or password:
            self.auth = helpers.BasicAuth(username or "", password or "")
|
| 418 |
+
|
| 419 |
+
def update_version(self, version: Union[http.HttpVersion, str]) -> None:
|
| 420 |
+
"""Convert request version to two elements tuple.
|
| 421 |
+
|
| 422 |
+
parser HTTP version '1.1' => (1, 1)
|
| 423 |
+
"""
|
| 424 |
+
if isinstance(version, str):
|
| 425 |
+
v = [part.strip() for part in version.split(".", 1)]
|
| 426 |
+
try:
|
| 427 |
+
version = http.HttpVersion(int(v[0]), int(v[1]))
|
| 428 |
+
except ValueError:
|
| 429 |
+
raise ValueError(
|
| 430 |
+
f"Can not parse http version number: {version}"
|
| 431 |
+
) from None
|
| 432 |
+
self.version = version
|
| 433 |
+
|
| 434 |
+
    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers."""
        self.headers: CIMultiDict[str] = CIMultiDict()

        # add host
        netloc = cast(str, self.url.raw_host)
        if helpers.is_ipv6_address(netloc):
            netloc = f"[{netloc}]"
        # See https://github.com/aio-libs/aiohttp/issues/3636.
        netloc = netloc.rstrip(".")
        # Only append the port when it is non-default for the scheme.
        if self.url.port is not None and not self.url.is_default_port():
            netloc += ":" + str(self.url.port)
        self.headers[hdrs.HOST] = netloc

        if headers:
            if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
                headers = headers.items()

            for key, value in headers:  # type: ignore[misc]
                # A special case for Host header: replace the computed value
                # instead of adding a second Host line.
                if key.lower() == "host":
                    self.headers[key] = value
                else:
                    self.headers.add(key, value)
|
| 458 |
+
|
| 459 |
+
    def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
        """Fill in default headers, honouring the caller's skip list."""
        if skip_auto_headers is not None:
            self.skip_auto_headers = CIMultiDict(
                (hdr, None) for hdr in sorted(skip_auto_headers)
            )
            used_headers = self.headers.copy()
            used_headers.extend(self.skip_auto_headers)  # type: ignore[arg-type]
        else:
            # Fast path when there are no headers to skip
            # which is the most common case.
            self.skip_auto_headers = CIMultiDict()
            used_headers = self.headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
|
| 478 |
+
|
| 479 |
+
    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        # Merge with any Cookie header set explicitly by the caller.
        if hdrs.COOKIE in self.headers:
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
|
| 503 |
+
|
| 504 |
+
    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if not data:
            # Don't compress an empty body.
            self.compress = None
            return

        if self.headers.get(hdrs.CONTENT_ENCODING):
            if self.compress:
                raise ValueError(
                    "compress can not be set " "if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                # compress=True selects deflate by default.
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length
|
| 521 |
+
|
| 522 |
+
    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            # Caller set the header themselves; the chunked= argument would
            # be redundant/conflicting.
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            # Chunked framing and Content-Length are mutually exclusive.
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set " "if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
|
| 543 |
+
|
| 544 |
+
    def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
        """Set basic auth.

        Resolution order: explicit argument, credentials from the URL
        (stored by update_host), then netrc when trust_env is enabled.
        """
        if auth is None:
            auth = self.auth
        if auth is None and trust_env and self.url.host is not None:
            netrc_obj = netrc_from_env()
            with contextlib.suppress(LookupError):
                auth = basicauth_from_netrc(netrc_obj, self.url.host)
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()
|
| 559 |
+
|
| 560 |
+
    def update_body_from_data(self, body: Any) -> None:
        """Wrap *body* in a Payload and derive length/encoding headers."""
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            # No registered payload type; fall back to form encoding.
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed
        if not self.chunked:
            if hdrs.CONTENT_LENGTH not in self.headers:
                size = body.size
                if size is None:
                    # Unknown size forces chunked transfer.
                    self.chunked = True
                else:
                    # NOTE(review): this inner check repeats the one above and
                    # looks redundant — kept as-is to preserve behavior.
                    if hdrs.CONTENT_LENGTH not in self.headers:
                        self.headers[hdrs.CONTENT_LENGTH] = str(size)

        # copy payload headers
        assert body.headers
        for key, value in body.headers.items():
            # Caller-supplied and explicitly skipped headers win over the
            # payload's own headers.
            if key in self.headers or key in self.skip_auto_headers:
                continue
            self.headers[key] = value
|
| 591 |
+
|
| 592 |
+
def update_expect_continue(self, expect: bool = False) -> None:
|
| 593 |
+
if expect:
|
| 594 |
+
self.headers[hdrs.EXPECT] = "100-continue"
|
| 595 |
+
elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
|
| 596 |
+
expect = True
|
| 597 |
+
|
| 598 |
+
if expect:
|
| 599 |
+
self._continue = self.loop.create_future()
|
| 600 |
+
|
| 601 |
+
def update_proxy(
|
| 602 |
+
self,
|
| 603 |
+
proxy: Optional[URL],
|
| 604 |
+
proxy_auth: Optional[BasicAuth],
|
| 605 |
+
proxy_headers: Optional[LooseHeaders],
|
| 606 |
+
) -> None:
|
| 607 |
+
if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
|
| 608 |
+
raise ValueError("proxy_auth must be None or BasicAuth() tuple")
|
| 609 |
+
self.proxy = proxy
|
| 610 |
+
self.proxy_auth = proxy_auth
|
| 611 |
+
if proxy_headers is not None and not isinstance(
|
| 612 |
+
proxy_headers, (MultiDict, MultiDictProxy)
|
| 613 |
+
):
|
| 614 |
+
proxy_headers = CIMultiDict(proxy_headers)
|
| 615 |
+
self.proxy_headers = proxy_headers
|
| 616 |
+
|
| 617 |
+
def keep_alive(self) -> bool:
|
| 618 |
+
if self.version < HttpVersion10:
|
| 619 |
+
# keep alive not supported at all
|
| 620 |
+
return False
|
| 621 |
+
if self.version == HttpVersion10:
|
| 622 |
+
if self.headers.get(hdrs.CONNECTION) == "keep-alive":
|
| 623 |
+
return True
|
| 624 |
+
else: # no headers means we close for Http 1.0
|
| 625 |
+
return False
|
| 626 |
+
elif self.headers.get(hdrs.CONNECTION) == "close":
|
| 627 |
+
return False
|
| 628 |
+
|
| 629 |
+
return True
|
| 630 |
+
|
| 631 |
+
    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            # Flush headers and wait for the server's "100 Continue"
            # before transmitting the body.
            await writer.drain()
            await self._continue

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                # Normalize a single bytes-like body into an iterable of chunks.
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    await writer.write(chunk)
        except OSError as underlying_exc:
            reraised_exc = underlying_exc

            # A TimeoutError with no errno is passed through unchanged;
            # every other OS error is wrapped in ClientOSError.
            exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
                underlying_exc, asyncio.TimeoutError
            )
            if exc_is_not_timeout:
                reraised_exc = ClientOSError(
                    underlying_exc.errno,
                    f"Can not write request body for {self.url !s}",
                )

            set_exception(protocol, reraised_exc, underlying_exc)
        except asyncio.CancelledError:
            # Body hasn't been fully sent, so connection can't be reused.
            conn.close()
            raise
        except Exception as underlying_exc:
            set_exception(
                protocol,
                ClientConnectionError(
                    f"Failed to send bytes into the underlying connection {conn !s}",
                ),
                underlying_exc,
            )
        else:
            await writer.write_eof()
            protocol.start_timeout()
|
| 679 |
+
|
| 680 |
+
    async def send(self, conn: "Connection") -> "ClientResponse":
        """Write the request line, headers and body to *conn*.

        Returns the ClientResponse object; the response itself has not yet
        been read at this point.
        """
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.raw_host
            assert connect_host is not None
            if helpers.is_ipv6_address(connect_host):
                connect_host = f"[{connect_host}]"
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path
            if self.url.raw_query_string:
                path += "?" + self.url.raw_query_string

        protocol = conn.protocol
        assert protocol is not None
        writer = StreamWriter(
            protocol,
            self.loop,
            # Trace callbacks are only wired up when tracing is active.
            on_chunk_sent=(
                functools.partial(self._on_chunk_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
            on_headers_sent=(
                functools.partial(self._on_headers_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)  # type: ignore[arg-type]

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and hdrs.CONTENT_TYPE not in self.skip_auto_headers
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        # set the connection header
        connection = self.headers.get(hdrs.CONNECTION)
        if not connection:
            if self.keep_alive():
                if self.version == HttpVersion10:
                    # HTTP/1.0 requires explicit opt-in to keep-alive.
                    connection = "keep-alive"
            else:
                if self.version == HttpVersion11:
                    # HTTP/1.1 defaults to keep-alive, so opt out explicitly.
                    connection = "close"

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection

        # status + headers
        v = self.version
        status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
        await writer.write_headers(status_line, self.headers)
        coro = self.write_bytes(writer, conn)

        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to write
            # bytes immediately to avoid having to schedule
            # the task on the event loop.
            task = asyncio.Task(coro, loop=self.loop, eager_start=True)
        else:
            task = self.loop.create_task(coro)

        self._writer = task
        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=self._writer,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response
|
| 771 |
+
|
| 772 |
+
    async def close(self) -> None:
        """Wait for the body writer task to finish.

        A CancelledError is swallowed unless this coroutine itself is being
        cancelled (checked via ``task.cancelling()`` on 3.11+), so an
        internally-cancelled writer does not leak cancellation to callers.
        """
        if self._writer is not None:
            try:
                await self._writer
            except asyncio.CancelledError:
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
|
| 783 |
+
|
| 784 |
+
    def terminate(self) -> None:
        """Cancel the body writer task immediately and detach it."""
        if self._writer is not None:
            # Cancelling on a closed loop would raise; just drop the task.
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer.remove_done_callback(self.__reset_writer)
            self._writer = None
|
| 790 |
+
|
| 791 |
+
    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        # Fan a body-chunk-sent event out to every registered trace.
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)

    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        # Fan a headers-sent event out to every registered trace.
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)
|
| 800 |
+
|
| 801 |
+
|
| 802 |
+
class ClientResponse(HeadersMixin):
    """HTTP response object returned by ClientSession requests."""

    # Some of these attributes are None when created,
    # but will be set by the start() method.
    # As the end user will likely never see the None values, we cheat the types below.
    # from the Status-Line of the response
    version: Optional[HttpVersion] = None  # HTTP-Version
    status: int = None  # type: ignore[assignment]  # Status-Code
    reason: Optional[str] = None  # Reason-Phrase

    content: StreamReader = None  # type: ignore[assignment]  # Payload stream
    _headers: CIMultiDictProxy[str] = None  # type: ignore[assignment]
    _raw_headers: RawHeaders = None  # type: ignore[assignment]

    _connection = None  # current connection
    _source_traceback: Optional[traceback.StackSummary] = None
    # set up by ClientRequest after ClientResponse object creation
    # post-init stage allows to not change ctor signature
    _closed = True  # to allow __del__ for non-initialized properly response
    _released = False
    _in_context = False
    __writer = None
|
| 824 |
+
|
| 825 |
+
    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "asyncio.Task[None]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        """Initialize response state; headers/body arrive later via start()."""
        assert isinstance(url, URL)

        self.method = method
        self.cookies = SimpleCookie()

        self._real_url = url
        # Fragment is client-side only; strip it from the effective URL.
        self._url = url.with_fragment(None)
        self._body: Any = None
        self._writer: Optional[asyncio.Task[None]] = writer
        self._continue = continue100  # None by default
        self._closed = True
        self._history: Tuple[ClientResponse, ...] = ()
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache: Dict[str, Any] = {}
        self._traces = traces
        self._loop = loop
        # store a reference to session #1985
        self._session: Optional[ClientSession] = session
        # Save reference to _resolve_charset, so that get_encoding() will still
        # work after the response has finished reading the body.
        if session is None:
            # TODO: Fix session=None in tests (see ClientRequest.__init__).
            self._resolve_charset: Callable[["ClientResponse", bytes], str] = (
                lambda *_: "utf-8"
            )
        else:
            self._resolve_charset = session._resolve_charset
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
| 868 |
+
|
| 869 |
+
    def __reset_writer(self, _: object = None) -> None:
        # Done-callback target; name-mangled so subclasses can't clash.
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The request-body writer task associated with this response."""
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        # Detach the callback from the previous task so it cannot fire later
        # and clear a newly-assigned writer.
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is None:
            return
        if writer.done():
            # The writer is already done, so we can reset it immediately.
            self.__reset_writer()
        else:
            writer.add_done_callback(self.__reset_writer)
|
| 888 |
+
|
| 889 |
+
@reify
|
| 890 |
+
def url(self) -> URL:
|
| 891 |
+
return self._url
|
| 892 |
+
|
| 893 |
+
@reify
def url_obj(self) -> URL:
    """Deprecated alias for :attr:`url`; emits a DeprecationWarning."""
    warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
    return self._url
|
| 897 |
+
|
| 898 |
+
@reify
def real_url(self) -> URL:
    """Return ``self._real_url`` (kept separate from :attr:`url`)."""
    return self._real_url
|
| 901 |
+
|
| 902 |
+
@reify
def host(self) -> str:
    """Host component of the response URL (never None here)."""
    host = self._url.host
    assert host is not None
    return host
|
| 906 |
+
|
| 907 |
+
@reify
def headers(self) -> "CIMultiDictProxy[str]":
    """Parsed response headers as a case-insensitive multidict proxy."""
    return self._headers
|
| 910 |
+
|
| 911 |
+
@reify
def raw_headers(self) -> RawHeaders:
    """Unparsed header bytes pairs as received (set in ``start()``)."""
    return self._raw_headers
|
| 914 |
+
|
| 915 |
+
@reify
def request_info(self) -> RequestInfo:
    """Information about the request that produced this response."""
    return self._request_info
|
| 918 |
+
|
| 919 |
+
@reify
def content_disposition(self) -> Optional[ContentDisposition]:
    """Parsed ``Content-Disposition`` header, or None when absent."""
    raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
    if raw is None:
        return None
    dtype, params_dict = multipart.parse_content_disposition(raw)
    # Freeze the parameter mapping so callers cannot mutate the cache.
    frozen_params = MappingProxyType(params_dict)
    filename = multipart.content_disposition_filename(frozen_params)
    return ContentDisposition(dtype, frozen_params, filename)
|
| 928 |
+
|
| 929 |
+
def __del__(self, _warnings: Any = warnings) -> None:
    """Finalizer: release resources and warn about an unclosed response.

    ``_warnings`` is bound as a default argument so the module stays
    reachable even during interpreter shutdown.
    """
    if self._closed:
        return

    if self._connection is not None:
        self._connection.release()
        self._cleanup_writer()

    if self._loop.get_debug():
        # In debug mode, surface the leak both as a ResourceWarning and
        # through the loop's exception handler (with the creation
        # traceback when available).
        kwargs = {"source": self}
        _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
        context = {"client_response": self, "message": "Unclosed response"}
        if self._source_traceback:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)
|
| 944 |
+
|
| 945 |
+
def __repr__(self) -> str:
    """Debug representation: status line followed by all headers."""
    if self.reason:
        # Keep the representation pure-ASCII regardless of the reason text.
        reason = self.reason.encode("ascii", "backslashreplace").decode("ascii")
    else:
        reason = "None"
    status_line = f"<ClientResponse({self.url}) [{self.status} {reason}]>"
    return f"{status_line}\n{self.headers}\n"
|
| 962 |
+
|
| 963 |
+
@property
def connection(self) -> Optional["Connection"]:
    """Underlying connection, or None once released/closed."""
    return self._connection
|
| 966 |
+
|
| 967 |
+
@reify
def history(self) -> Tuple["ClientResponse", ...]:
    """A sequence of responses, if redirects occurred."""
    return self._history
|
| 971 |
+
|
| 972 |
+
@reify
def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
    """Parsed ``Link`` headers, keyed by ``rel`` (or by URL when no rel)."""
    # Multiple Link headers are equivalent to one comma-joined header.
    links_str = ", ".join(self.headers.getall("link", []))

    if not links_str:
        return MultiDictProxy(MultiDict())

    links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

    # Split on commas that start a new "<url>" entry only, so commas
    # inside parameter values are left alone.
    for val in re.split(r",(?=\s*<)", links_str):
        match = re.match(r"\s*<(.*)>(.*)", val)
        if match is None:  # pragma: no cover
            # the check exists to suppress mypy error
            continue
        url, params_str = match.groups()
        params = params_str.split(";")[1:]

        link: MultiDict[Union[str, URL]] = MultiDict()

        for param in params:
            # key=value with optional single/double quoting (\2 backref).
            match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            key, _, value, _ = match.groups()

            link.add(key, value)

        key = link.get("rel", url)

        # Resolve the target relative to the response URL.
        link.add("url", self.url.join(URL(url)))

        links.add(str(key), MultiDictProxy(link))

    return MultiDictProxy(links)
|
| 1007 |
+
|
| 1008 |
+
async def start(self, connection: "Connection") -> "ClientResponse":
    """Start response processing.

    Reads the status line and headers from the connection, skipping
    interim 1xx messages (101 is treated as final), resolves the
    pending ``100-continue`` future if any, and loads response cookies.
    """
    self._closed = False
    self._protocol = connection.protocol
    self._connection = connection

    with self._timer:
        while True:
            # read response
            try:
                protocol = self._protocol
                message, payload = await protocol.read()  # type: ignore[union-attr]
            except http.HttpProcessingError as exc:
                raise ClientResponseError(
                    self.request_info,
                    self.history,
                    status=exc.code,
                    message=exc.message,
                    headers=exc.headers,
                ) from exc

            # Stop on any non-1xx response; 101 (Switching Protocols)
            # is also final even though it is in the 1xx range.
            if message.code < 100 or message.code > 199 or message.code == 101:
                break

            # An interim response unblocks a pending `Expect: 100-continue`.
            if self._continue is not None:
                set_result(self._continue, True)
                self._continue = None

    # payload eof handler
    payload.on_eof(self._response_eof)

    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason

    # headers
    self._headers = message.headers  # type is CIMultiDictProxy
    self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

    # payload
    self.content = payload

    # cookies
    for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
        try:
            self.cookies.load(hdr)
        except CookieError as exc:
            # A malformed cookie must not fail the whole response.
            client_logger.warning("Can not load response cookies: %s", exc)
    return self
|
| 1058 |
+
|
| 1059 |
+
def _response_eof(self) -> None:
    """Payload EOF callback: finish the response and recycle resources."""
    if self._closed:
        return

    # protocol could be None because connection could be detached
    protocol = self._connection and self._connection.protocol
    if protocol is not None and protocol.upgraded:
        # An upgraded connection (e.g. after 101) outlives the HTTP
        # response body, so do not release it here.
        return

    self._closed = True
    self._cleanup_writer()
    self._release_connection()
|
| 1071 |
+
|
| 1072 |
+
@property
def closed(self) -> bool:
    """True once the response has been closed or fully released."""
    return self._closed
|
| 1075 |
+
|
| 1076 |
+
def close(self) -> None:
    """Hard-close the response and its underlying connection."""
    if not self._released:
        # Wake any pending content readers with a connection error.
        self._notify_content()

    self._closed = True
    if self._loop is None or self._loop.is_closed():
        # Nothing more can be done safely without a live event loop.
        return

    self._cleanup_writer()
    if self._connection is not None:
        # close() discards the connection instead of returning it to
        # the pool (contrast with release()).
        self._connection.close()
        self._connection = None
|
| 1088 |
+
|
| 1089 |
+
def release(self) -> Any:
    """Finish with the response: cancel the writer and release the connection.

    Returns a ``noop()`` awaitable for backward compatibility with
    callers that ``await`` the result.
    """
    if not self._released:
        self._notify_content()

    self._closed = True

    self._cleanup_writer()
    self._release_connection()
    return noop()
|
| 1098 |
+
|
| 1099 |
+
@property
def ok(self) -> bool:
    """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

    This is **not** a check for ``200 OK`` but a check that the response
    status is under 400.
    """
    return self.status < 400
|
| 1107 |
+
|
| 1108 |
+
def raise_for_status(self) -> None:
    """Raise :exc:`ClientResponseError` when the status is 400 or above."""
    if not self.ok:
        # reason should always be not None for a started response
        assert self.reason is not None

        # If we're in a context we can rely on __aexit__() to release as the
        # exception propagates.
        if not self._in_context:
            self.release()

        raise ClientResponseError(
            self.request_info,
            self.history,
            status=self.status,
            message=self.reason,
            headers=self.headers,
        )
|
| 1125 |
+
|
| 1126 |
+
def _release_connection(self) -> None:
    """Return the connection to the connector once the writer is done."""
    if self._connection is not None:
        if self._writer is None:
            self._connection.release()
            self._connection = None
        else:
            # Writer still running: retry when it completes.
            self._writer.add_done_callback(lambda f: self._release_connection())
|
| 1133 |
+
|
| 1134 |
+
async def _wait_released(self) -> None:
    """Await the body writer, then release the connection.

    A ``CancelledError`` from the writer is swallowed unless this task
    itself is being cancelled (detectable via ``cancelling()`` on
    Python 3.11+), in which case cancellation must propagate.
    """
    if self._writer is not None:
        try:
            await self._writer
        except asyncio.CancelledError:
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise
    self._release_connection()
|
| 1146 |
+
|
| 1147 |
+
def _cleanup_writer(self) -> None:
    """Cancel the body writer task (if any) and drop the session reference."""
    if self._writer is not None:
        self._writer.cancel()
    self._session = None
|
| 1151 |
+
|
| 1152 |
+
def _notify_content(self) -> None:
    """Mark the response released and fail pending content readers."""
    content = self.content
    if content and content.exception() is None:
        # Wake readers blocked on the payload with a terminal error.
        set_exception(content, ClientConnectionError("Connection closed"))
    self._released = True
|
| 1157 |
+
|
| 1158 |
+
async def wait_for_close(self) -> None:
    """Await the body writer, then release the response.

    NOTE(review): mirrors ``_wait_released`` (same cancellation-shielding
    pattern) but ends with ``release()`` instead of
    ``_release_connection()``.
    """
    if self._writer is not None:
        try:
            await self._writer
        except asyncio.CancelledError:
            # Propagate only when this task itself is being cancelled
            # (Python 3.11+ exposes that via cancelling()).
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise
    self.release()
|
| 1170 |
+
|
| 1171 |
+
async def read(self) -> bytes:
    """Read response payload.

    The body is read once and cached in ``self._body``; subsequent
    calls return the cached bytes. Raises ``ClientConnectionError``
    when the response was explicitly released before reading.
    """
    if self._body is None:
        try:
            self._body = await self.content.read()
            for trace in self._traces:
                await trace.send_response_chunk_received(
                    self.method, self.url, self._body
                )
        except BaseException:
            # Any failure (including cancellation) leaves the stream in
            # an unknown state, so close hard instead of releasing.
            self.close()
            raise
    elif self._released:  # Response explicitly released
        raise ClientConnectionError("Connection closed")

    protocol = self._connection and self._connection.protocol
    if protocol is None or not protocol.upgraded:
        await self._wait_released()  # Underlying connection released
    return self._body  # type: ignore[no-any-return]
|
| 1190 |
+
|
| 1191 |
+
def get_encoding(self) -> str:
    """Best-effort body charset.

    Resolution order: ``charset`` parameter of ``Content-Type`` (when it
    names a codec Python knows), UTF-8 for JSON/RDAP media types, then
    the session's fallback charset resolver applied to the read body.

    Raises RuntimeError when the fallback is needed but the body has
    not been read yet.
    """
    ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
    mimetype = helpers.parse_mimetype(ctype)

    encoding = mimetype.parameters.get("charset")
    if encoding:
        # Ignore unknown/invalid charset names and fall through.
        with contextlib.suppress(LookupError, ValueError):
            return codecs.lookup(encoding).name

    if mimetype.type == "application" and (
        mimetype.subtype == "json" or mimetype.subtype == "rdap"
    ):
        # RFC 7159 states that the default encoding is UTF-8.
        # RFC 7483 defines application/rdap+json
        return "utf-8"

    if self._body is None:
        raise RuntimeError(
            "Cannot compute fallback encoding of a not yet read body"
        )

    return self._resolve_charset(self, self._body)
|
| 1213 |
+
|
| 1214 |
+
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
    """Read response payload and decode.

    When ``encoding`` is None it is resolved via :meth:`get_encoding`.
    """
    if self._body is None:
        await self.read()

    if encoding is None:
        encoding = self.get_encoding()

    return self._body.decode(  # type: ignore[no-any-return,union-attr]
        encoding, errors=errors
    )
|
| 1225 |
+
|
| 1226 |
+
async def json(
    self,
    *,
    encoding: Optional[str] = None,
    loads: JSONDecoder = DEFAULT_JSON_DECODER,
    content_type: Optional[str] = "application/json",
) -> Any:
    """Read and decode JSON response.

    ``content_type`` enables strict mimetype validation; pass None to
    skip it. An empty (or whitespace-only) body decodes to None.
    """
    if self._body is None:
        await self.read()

    if content_type:
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        if not _is_expected_content_type(ctype, content_type):
            raise ContentTypeError(
                self.request_info,
                self.history,
                status=self.status,
                message=(
                    "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                ),
                headers=self.headers,
            )

    stripped = self._body.strip()  # type: ignore[union-attr]
    if not stripped:
        return None

    if encoding is None:
        encoding = self.get_encoding()

    return loads(stripped.decode(encoding))
|
| 1258 |
+
|
| 1259 |
+
async def __aenter__(self) -> "ClientResponse":
    """Enter ``async with``: remember we are in a context (see raise_for_status)."""
    self._in_context = True
    return self
|
| 1262 |
+
|
| 1263 |
+
async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc_val: Optional[BaseException],
    exc_tb: Optional[TracebackType],
) -> None:
    """Leave ``async with``: release the response and wait for the writer."""
    self._in_context = False
    # similar to _RequestContextManager, we do not need to check
    # for exceptions, response object can close connection
    # if state is broken
    self.release()
    await self.wait_for_close()
|
parrot/lib/python3.10/site-packages/aiohttp/client_ws.py
ADDED
|
@@ -0,0 +1,398 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""WebSocket client for asyncio."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import sys
|
| 5 |
+
from types import TracebackType
|
| 6 |
+
from typing import Any, Optional, Type, cast
|
| 7 |
+
|
| 8 |
+
from .client_exceptions import ClientError, ServerTimeoutError
|
| 9 |
+
from .client_reqrep import ClientResponse
|
| 10 |
+
from .helpers import calculate_timeout_when, set_result
|
| 11 |
+
from .http import (
|
| 12 |
+
WS_CLOSED_MESSAGE,
|
| 13 |
+
WS_CLOSING_MESSAGE,
|
| 14 |
+
WebSocketError,
|
| 15 |
+
WSCloseCode,
|
| 16 |
+
WSMessage,
|
| 17 |
+
WSMsgType,
|
| 18 |
+
)
|
| 19 |
+
from .http_websocket import WebSocketWriter # WSMessage
|
| 20 |
+
from .streams import EofStream, FlowControlDataQueue
|
| 21 |
+
from .typedefs import (
|
| 22 |
+
DEFAULT_JSON_DECODER,
|
| 23 |
+
DEFAULT_JSON_ENCODER,
|
| 24 |
+
JSONDecoder,
|
| 25 |
+
JSONEncoder,
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
if sys.version_info >= (3, 11):
|
| 29 |
+
import asyncio as async_timeout
|
| 30 |
+
else:
|
| 31 |
+
import async_timeout
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
class ClientWebSocketResponse:
    """Client-side WebSocket connection.

    Wraps a reader queue and a ``WebSocketWriter`` on top of an upgraded
    ``ClientResponse``. Manages optional heartbeat pings (with a pong
    deadline of ``heartbeat / 2``), auto-ping replies, auto-close on a
    received CLOSE frame, and exposes async-iterator / async-context-
    manager interfaces.
    """

    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol  # negotiated subprotocol, if any
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout  # used while draining frames in close()
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        self._heartbeat_when: float = 0.0
        if heartbeat is not None:
            # A pong must arrive within half a heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: bool = False  # True while a receive() is in flight
        self._close_wait: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover
        self._ping_task: Optional[asyncio.Task[None]] = None

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        """Cancel heartbeat/pong timers and any in-flight ping task."""
        self._cancel_pong_response_cb()
        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None
        if self._ping_task is not None:
            self._ping_task.cancel()
            self._ping_task = None

    def _cancel_pong_response_cb(self) -> None:
        """Cancel the pending pong-deadline timer, if armed."""
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

    def _reset_heartbeat(self) -> None:
        """(Re)arm the next heartbeat after activity on the connection."""
        if self._heartbeat is None:
            return
        self._cancel_pong_response_cb()
        loop = self._loop
        assert loop is not None
        conn = self._conn
        timeout_ceil_threshold = (
            conn._connector._timeout_ceil_threshold if conn is not None else 5
        )
        now = loop.time()
        when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)
        self._heartbeat_when = when
        if self._heartbeat_cb is None:
            # We do not cancel the previous heartbeat_cb here because
            # it generates a significant amount of TimerHandle churn
            # which causes asyncio to rebuild the heap frequently.
            # Instead _send_heartbeat() will reschedule the next
            # heartbeat if it fires too early.
            self._heartbeat_cb = loop.call_at(when, self._send_heartbeat)

    def _send_heartbeat(self) -> None:
        """Timer callback: send a ping and arm the pong deadline."""
        self._heartbeat_cb = None
        loop = self._loop
        now = loop.time()
        if now < self._heartbeat_when:
            # Heartbeat fired too early, reschedule
            self._heartbeat_cb = loop.call_at(
                self._heartbeat_when, self._send_heartbeat
            )
            return

        conn = self._conn
        timeout_ceil_threshold = (
            conn._connector._timeout_ceil_threshold if conn is not None else 5
        )
        when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)
        self._cancel_pong_response_cb()
        self._pong_response_cb = loop.call_at(when, self._pong_not_received)

        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to send the ping
            # immediately to avoid having to schedule
            # the task on the event loop.
            ping_task = asyncio.Task(self._writer.ping(), loop=loop, eager_start=True)
        else:
            ping_task = loop.create_task(self._writer.ping())

        if not ping_task.done():
            self._ping_task = ping_task
            ping_task.add_done_callback(self._ping_task_done)
        else:
            self._ping_task_done(ping_task)

    def _ping_task_done(self, task: "asyncio.Task[None]") -> None:
        """Callback for when the ping task completes."""
        if not task.cancelled() and (exc := task.exception()):
            self._handle_ping_pong_exception(exc)
        self._ping_task = None

    def _pong_not_received(self) -> None:
        """Pong deadline expired: treat as a server timeout."""
        self._handle_ping_pong_exception(ServerTimeoutError())

    def _handle_ping_pong_exception(self, exc: BaseException) -> None:
        """Handle exceptions raised during ping/pong processing."""
        if self._closed:
            return
        self._set_closed()
        self._close_code = WSCloseCode.ABNORMAL_CLOSURE
        self._exception = exc
        self._response.close()
        if self._waiting and not self._closing:
            # Unblock a pending receive() with an ERROR message.
            self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None))

    def _set_closed(self) -> None:
        """Set the connection to closed.

        Cancel any heartbeat timers and set the closed flag.
        """
        self._closed = True
        self._cancel_heartbeat()

    def _set_closing(self) -> None:
        """Set the connection to closing.

        Cancel any heartbeat timers and set the closing flag.
        """
        self._closing = True
        self._cancel_heartbeat()

    @property
    def closed(self) -> bool:
        """True once the WebSocket has been fully closed."""
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        """Close code received/recorded, or None while the socket is open."""
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        """Negotiated WebSocket subprotocol, if any."""
        return self._protocol

    @property
    def compress(self) -> int:
        """Negotiated per-message compression setting (0 when disabled)."""
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        """Negotiated ``client_no_context_takeover`` flag."""
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        """Last error recorded on this WebSocket, or None."""
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        """Send a PING frame with an optional payload."""
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        """Send a PONG frame with an optional payload."""
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        """Send a TEXT frame; ``data`` must be str."""
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        """Send a BINARY frame; ``data`` must be bytes-like."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        """Serialize ``data`` with ``dumps`` and send it as a TEXT frame."""
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Close the WebSocket handshake-style.

        Returns True when this call performed the close, False when the
        socket was already closed.
        """
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting and not self._closing:
            assert self._loop is not None
            self._close_wait = self._loop.create_future()
            self._set_closing()
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._close_wait

        if not self._closed:
            self._set_closed()
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._close_code:
                self._response.close()
                return True

            # Drain frames until the peer's CLOSE arrives (bounded by
            # self._timeout per read).
            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type is WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, handling control frames in-line.

        PING/PONG are auto-handled when ``autoping`` is set; a CLOSE
        frame triggers auto-close when ``autoclose`` is set. Concurrent
        calls are rejected with RuntimeError.
        """
        receive_timeout = timeout or self._receive_timeout

        while True:
            if self._waiting:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                self._waiting = True
                try:
                    if receive_timeout:
                        # Entering the context manager and creating
                        # Timeout() object can take almost 50% of the
                        # run time in this loop so we avoid it if
                        # there is no read timeout.
                        async with async_timeout.timeout(receive_timeout):
                            msg = await self._reader.read()
                    else:
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    self._waiting = False
                    if self._close_wait:
                        # Unblock a close() waiting for this receive().
                        set_result(self._close_wait, None)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                # Likely ServerDisconnectedError when connection is lost
                self._set_closed()
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._set_closing()
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type is WSMsgType.CLOSE:
                self._set_closing()
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type is WSMsgType.CLOSING:
                self._set_closing()
            elif msg.type is WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type is WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        """Receive the next message, requiring it to be TEXT."""
        msg = await self.receive(timeout)
        if msg.type is not WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        """Receive the next message, requiring it to be BINARY."""
        msg = await self.receive(timeout)
        if msg.type is not WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        """Receive a TEXT message and decode it with ``loads``."""
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        """Async iteration stops on any close-related message."""
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg

    async def __aenter__(self) -> "ClientWebSocketResponse":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        await self.close()
|
parrot/lib/python3.10/site-packages/aiohttp/compression_utils.py
ADDED
|
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import zlib
|
| 3 |
+
from concurrent.futures import Executor
|
| 4 |
+
from typing import Optional, cast
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
try:
|
| 8 |
+
import brotlicffi as brotli
|
| 9 |
+
except ImportError:
|
| 10 |
+
import brotli
|
| 11 |
+
|
| 12 |
+
HAS_BROTLI = True
|
| 13 |
+
except ImportError: # pragma: no cover
|
| 14 |
+
HAS_BROTLI = False
|
| 15 |
+
|
| 16 |
+
# Payloads up to this many bytes are (de)compressed inline; larger ones
# are offloaded to an executor so the event loop is not blocked.
MAX_SYNC_CHUNK_SIZE = 1024
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def encoding_to_mode(
    encoding: Optional[str] = None,
    suppress_deflate_header: bool = False,
) -> int:
    """Map a content-encoding name onto the matching zlib ``wbits`` value.

    ``"gzip"`` selects gzip framing (``16 + MAX_WBITS``).  Anything else
    selects deflate: raw/headerless (``-MAX_WBITS``) when
    *suppress_deflate_header* is true, otherwise zlib-wrapped
    (``MAX_WBITS``).
    """
    if encoding == "gzip":
        return 16 + zlib.MAX_WBITS
    if suppress_deflate_header:
        return -zlib.MAX_WBITS
    return zlib.MAX_WBITS
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ZlibBaseHandler:
    """Shared state for the zlib compressor/decompressor wrappers.

    Holds the zlib window mode, the executor used for offloading large
    payloads, and the size threshold above which offloading happens
    (``None`` disables offloading).
    """

    def __init__(
        self,
        mode: int,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        self._mode = mode
        self._executor = executor
        self._max_sync_chunk_size = max_sync_chunk_size
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class ZLibCompressor(ZlibBaseHandler):
    """Streaming zlib/deflate/gzip compressor.

    Chunks larger than ``max_sync_chunk_size`` are compressed in an
    executor so the event loop stays responsive; an asyncio lock keeps
    the output stream consistent when several coroutines write
    concurrently.
    """

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        level: Optional[int] = None,
        wbits: Optional[int] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # An explicitly supplied wbits wins; otherwise derive the window
        # mode from the requested content encoding.
        if wbits is None:
            mode = encoding_to_mode(encoding, suppress_deflate_header)
        else:
            mode = wbits
        super().__init__(
            mode=mode,
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        if level is None:
            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
        else:
            self._compressor = zlib.compressobj(
                wbits=self._mode, strategy=strategy, level=level
            )
        self._compress_lock = asyncio.Lock()

    def compress_sync(self, data: bytes) -> bytes:
        """Compress *data* synchronously on the calling thread."""
        return self._compressor.compress(data)

    async def compress(self, data: bytes) -> bytes:
        """Compress *data*, offloading large payloads to the executor.

        The lock serializes concurrent writers so that the deflate stream
        they share stays internally consistent.
        """
        async with self._compress_lock:
            small = (
                self._max_sync_chunk_size is None
                or len(data) <= self._max_sync_chunk_size
            )
            if small:
                return self.compress_sync(data)
            return await asyncio.get_event_loop().run_in_executor(
                self._executor, self.compress_sync, data
            )

    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
        """Flush buffered output; ``Z_FINISH`` (the default) ends the stream."""
        return self._compressor.flush(mode)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class ZLibDecompressor(ZlibBaseHandler):
    """Streaming zlib/deflate/gzip decompressor.

    Mirrors ZLibCompressor: chunks larger than ``max_sync_chunk_size``
    are decompressed in an executor to keep the event loop responsive.
    """

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        self._decompressor = zlib.decompressobj(wbits=self._mode)

    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data* synchronously; *max_length* of 0 means no cap."""
        return self._decompressor.decompress(data, max_length)

    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data*, offloading large payloads to the executor."""
        inline = (
            self._max_sync_chunk_size is None
            or len(data) <= self._max_sync_chunk_size
        )
        if inline:
            return self.decompress_sync(data, max_length)
        return await asyncio.get_event_loop().run_in_executor(
            self._executor, self.decompress_sync, data, max_length
        )

    def flush(self, length: int = 0) -> bytes:
        """Return any remaining buffered output."""
        if length > 0:
            return self._decompressor.flush(length)
        return self._decompressor.flush()

    @property
    def eof(self) -> bool:
        """True once the end of the compressed stream has been reached."""
        return self._decompressor.eof

    @property
    def unconsumed_tail(self) -> bytes:
        """Input left unconsumed because of a ``max_length`` cap."""
        return self._decompressor.unconsumed_tail

    @property
    def unused_data(self) -> bytes:
        """Bytes found past the end of the compressed stream."""
        return self._decompressor.unused_data
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class BrotliDecompressor:
    """Adapter over the 'brotlipy' and 'Brotli' packages.

    Both packages share the ``brotli`` import name but expose different
    method names on their Decompressor; this class probes for the right
    one at call time.
    """

    def __init__(self) -> None:
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        """Feed *data* to the decompressor and return the available output."""
        # 'brotlipy' exposes .decompress(); 'Brotli' exposes .process().
        decompress = getattr(self._obj, "decompress", None)
        if decompress is not None:
            return cast(bytes, decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        """Flush buffered output; 'Brotli' has no flush, so yield b''."""
        if hasattr(self._obj, "flush"):
            return cast(bytes, self._obj.flush())
        return b""
|
parrot/lib/python3.10/site-packages/aiohttp/connector.py
ADDED
|
@@ -0,0 +1,1594 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import functools
|
| 3 |
+
import random
|
| 4 |
+
import socket
|
| 5 |
+
import sys
|
| 6 |
+
import traceback
|
| 7 |
+
import warnings
|
| 8 |
+
from collections import defaultdict, deque
|
| 9 |
+
from contextlib import suppress
|
| 10 |
+
from http import HTTPStatus
|
| 11 |
+
from http.cookies import SimpleCookie
|
| 12 |
+
from itertools import cycle, islice
|
| 13 |
+
from time import monotonic
|
| 14 |
+
from types import TracebackType
|
| 15 |
+
from typing import (
|
| 16 |
+
TYPE_CHECKING,
|
| 17 |
+
Any,
|
| 18 |
+
Awaitable,
|
| 19 |
+
Callable,
|
| 20 |
+
DefaultDict,
|
| 21 |
+
Dict,
|
| 22 |
+
Iterator,
|
| 23 |
+
List,
|
| 24 |
+
Literal,
|
| 25 |
+
Optional,
|
| 26 |
+
Sequence,
|
| 27 |
+
Set,
|
| 28 |
+
Tuple,
|
| 29 |
+
Type,
|
| 30 |
+
Union,
|
| 31 |
+
cast,
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
import aiohappyeyeballs
|
| 35 |
+
import attr
|
| 36 |
+
|
| 37 |
+
from . import hdrs, helpers
|
| 38 |
+
from .abc import AbstractResolver, ResolveResult
|
| 39 |
+
from .client_exceptions import (
|
| 40 |
+
ClientConnectionError,
|
| 41 |
+
ClientConnectorCertificateError,
|
| 42 |
+
ClientConnectorError,
|
| 43 |
+
ClientConnectorSSLError,
|
| 44 |
+
ClientHttpProxyError,
|
| 45 |
+
ClientProxyConnectionError,
|
| 46 |
+
ServerFingerprintMismatch,
|
| 47 |
+
UnixClientConnectorError,
|
| 48 |
+
cert_errors,
|
| 49 |
+
ssl_errors,
|
| 50 |
+
)
|
| 51 |
+
from .client_proto import ResponseHandler
|
| 52 |
+
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
|
| 53 |
+
from .helpers import ceil_timeout, is_ip_address, noop, sentinel
|
| 54 |
+
from .locks import EventResultOrError
|
| 55 |
+
from .resolver import DefaultResolver
|
| 56 |
+
|
| 57 |
+
try:
|
| 58 |
+
import ssl
|
| 59 |
+
|
| 60 |
+
SSLContext = ssl.SSLContext
|
| 61 |
+
except ImportError: # pragma: no cover
|
| 62 |
+
ssl = None # type: ignore[assignment]
|
| 63 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
EMPTY_SCHEMA_SET = frozenset({""})
|
| 67 |
+
HTTP_SCHEMA_SET = frozenset({"http", "https"})
|
| 68 |
+
WS_SCHEMA_SET = frozenset({"ws", "wss"})
|
| 69 |
+
|
| 70 |
+
HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET
|
| 71 |
+
HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
if TYPE_CHECKING:
|
| 78 |
+
from .client import ClientTimeout
|
| 79 |
+
from .client_reqrep import ConnectionKey
|
| 80 |
+
from .tracing import Trace
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class _DeprecationWaiter:
    """Wrap an awaitable and warn if it is garbage-collected un-awaited.

    Lets legacy callers of ``connector.close()`` keep working while a
    DeprecationWarning nudges them toward ``await connector.close()``.
    """

    __slots__ = ("_awaitable", "_awaited")

    def __init__(self, awaitable: Awaitable[Any]) -> None:
        self._awaitable = awaitable
        self._awaited = False

    def __await__(self) -> Any:
        # Flag before delegating so __del__ stays silent for awaited uses.
        self._awaited = True
        return self._awaitable.__await__()

    def __del__(self) -> None:
        if self._awaited:
            return
        warnings.warn(
            "Connector.close() is a coroutine, "
            "please use await connector.close()",
            DeprecationWarning,
        )
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class Connection:
    """A single client connection checked out of a BaseConnector pool.

    Ties a ResponseHandler protocol to the owning connector; close() and
    release() hand the protocol back to the connector, and __del__ warns
    when a connection leaks without being released.
    """

    # Populated in __init__ only when the event loop runs in debug mode.
    _source_traceback = None
    _transport = None

    def __init__(
        self,
        connector: "BaseConnector",
        key: "ConnectionKey",
        protocol: ResponseHandler,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._key = key
        self._connector = connector
        self._loop = loop
        self._protocol: Optional[ResponseHandler] = protocol
        self._callbacks: List[Callable[[], None]] = []

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __repr__(self) -> str:
        return f"Connection<{self._key}>"

    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer safety net: a protocol still attached here means the
        # connection was never released/closed — warn and force-return it
        # to the connector (unless the loop itself is already gone).
        if self._protocol is not None:
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
            if self._loop.is_closed():
                return

            self._connector._release(self._key, self._protocol, should_close=True)

            context = {"client_connection": self, "message": "Unclosed connection"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    def __bool__(self) -> Literal[True]:
        """Force subclasses to not be falsy, to make checks simpler."""
        return True

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        # Deprecated accessor kept only for backward compatibility.
        warnings.warn(
            "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """The underlying transport, or None once the connection is released."""
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def protocol(self) -> Optional[ResponseHandler]:
        """The attached protocol, or None once released or closed."""
        return self._protocol

    def add_callback(self, callback: Callable[[], None]) -> None:
        """Register a callback to run when the connection is released/closed."""
        if callback is not None:
            self._callbacks.append(callback)

    def _notify_release(self) -> None:
        # Swap the list out first so callbacks registered while notifying
        # are kept for the next release instead of firing now.
        callbacks, self._callbacks = self._callbacks[:], []

        for cb in callbacks:
            # Callback failures must not break the release path.
            with suppress(Exception):
                cb()

    def close(self) -> None:
        """Return the connection to the connector, forcing it closed."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(self._key, self._protocol, should_close=True)
            self._protocol = None

    def release(self) -> None:
        """Return the connection for possible reuse (keep-alive permitting)."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(
                self._key, self._protocol, should_close=self._protocol.should_close
            )
            self._protocol = None

    @property
    def closed(self) -> bool:
        """True when no usable (connected) protocol is attached anymore."""
        return self._protocol is None or not self._protocol.is_connected()
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
class _TransportPlaceholder:
    """placeholder for BaseConnector.connect function"""

    # Stands in for a real transport while a connection is still being
    # established; close() is deliberately a no-op.
    def close(self) -> None:
        pass
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class BaseConnector:
|
| 202 |
+
"""Base connector class.
|
| 203 |
+
|
| 204 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
| 205 |
+
force_close - Set to True to force close and do reconnect
|
| 206 |
+
after each request (and between redirects).
|
| 207 |
+
limit - The total number of simultaneous connections.
|
| 208 |
+
limit_per_host - Number of simultaneous connections to one host.
|
| 209 |
+
enable_cleanup_closed - Enables clean-up closed ssl transports.
|
| 210 |
+
Disabled by default.
|
| 211 |
+
timeout_ceil_threshold - Trigger ceiling of timeout values when
|
| 212 |
+
it's above timeout_ceil_threshold.
|
| 213 |
+
loop - Optional event loop.
|
| 214 |
+
"""
|
| 215 |
+
|
| 216 |
+
_closed = True # prevent AttributeError in __del__ if ctor was failed
|
| 217 |
+
_source_traceback = None
|
| 218 |
+
|
| 219 |
+
# abort transport after 2 seconds (cleanup broken connections)
|
| 220 |
+
_cleanup_closed_period = 2.0
|
| 221 |
+
|
| 222 |
+
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET
|
| 223 |
+
|
| 224 |
+
    def __init__(
        self,
        *,
        keepalive_timeout: Union[object, None, float] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
    ) -> None:

        # keep-alive and force_close are mutually exclusive; when
        # keep-alive is in effect the timeout defaults to 15 seconds.
        if force_close:
            if keepalive_timeout is not None and keepalive_timeout is not sentinel:
                raise ValueError(
                    "keepalive_timeout cannot " "be set if force_close is True"
                )
        else:
            if keepalive_timeout is sentinel:
                keepalive_timeout = 15.0

        loop = loop or asyncio.get_running_loop()
        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._closed = False
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Idle keep-alive connections, keyed by connection key, with the
        # time each protocol was last used.
        self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
        self._limit = limit
        self._limit_per_host = limit_per_host
        # Protocols currently checked out of the pool.
        self._acquired: Set[ResponseHandler] = set()
        self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = (
            defaultdict(set)
        )
        self._keepalive_timeout = cast(float, keepalive_timeout)
        self._force_close = force_close

        # {host_key: FIFO list of waiters}
        self._waiters = defaultdict(deque)  # type: ignore[var-annotated]

        self._loop = loop
        self._factory = functools.partial(ResponseHandler, loop=loop)

        self.cookies = SimpleCookie()

        # start keep-alive connection cleanup task
        self._cleanup_handle: Optional[asyncio.TimerHandle] = None

        # start cleanup closed transports task
        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
        self._cleanup_closed_disabled = not enable_cleanup_closed
        self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
        self._cleanup_closed()
|
| 278 |
+
|
| 279 |
+
    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer safety net: warn about, and best-effort close, a
        # connector that was garbage-collected without close().
        if self._closed:
            return
        if not self._conns:
            return

        # Capture repr() of the leaked connection lists before _close()
        # clears them, so the exception-handler context stays useful.
        conns = [repr(c) for c in self._conns.values()]

        self._close()

        kwargs = {"source": self}
        _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
        context = {
            "connector": self,
            "connections": conns,
            "message": "Unclosed connector",
        }
        if self._source_traceback is not None:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)
|
| 299 |
+
|
| 300 |
+
    def __enter__(self) -> "BaseConnector":
        # Deprecated synchronous context-manager entry; kept for backward
        # compatibility — use "async with Connector():" instead.
        warnings.warn(
            '"with Connector():" is deprecated, '
            'use "async with Connector():" instead',
            DeprecationWarning,
        )
        return self
|
| 307 |
+
|
| 308 |
+
    def __exit__(self, *exc: Any) -> None:
        # Deprecated synchronous context-manager exit: close synchronously.
        self._close()
|
| 310 |
+
|
| 311 |
+
    async def __aenter__(self) -> "BaseConnector":
        """Enter the async context manager; no setup is needed."""
        return self
|
| 313 |
+
|
| 314 |
+
    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        exc_traceback: Optional[TracebackType] = None,
    ) -> None:
        """Close the connector when leaving the async context."""
        await self.close()
|
| 321 |
+
|
| 322 |
+
    @property
    def force_close(self) -> bool:
        """Ultimately close connection on releasing if True.

        When set, keep-alive is disabled and every connection is closed
        as soon as it is released back to the pool.
        """
        return self._force_close
|
| 326 |
+
|
| 327 |
+
    @property
    def limit(self) -> int:
        """The total number of simultaneous connections.

        If limit is 0 the connector has no limit.
        The default limit size is 100.
        """
        return self._limit
|
| 335 |
+
|
| 336 |
+
    @property
    def limit_per_host(self) -> int:
        """The limit for simultaneous connections to the same endpoint.

        Endpoints are the same if they have an equal
        (host, port, is_ssl) triple.
        """
        return self._limit_per_host
|
| 344 |
+
|
| 345 |
+
    def _cleanup(self) -> None:
        """Cleanup unused transports.

        Drops keep-alive connections whose idle time exceeded the
        keep-alive timeout, then re-arms itself while any pooled
        connections remain.
        """
        if self._cleanup_handle:
            self._cleanup_handle.cancel()
            # _cleanup_handle should be unset, otherwise _release() will not
            # recreate it ever!
            self._cleanup_handle = None

        now = self._loop.time()
        timeout = self._keepalive_timeout

        if self._conns:
            connections = {}
            deadline = now - timeout
            for key, conns in self._conns.items():
                alive = []
                for proto, use_time in conns:
                    if proto.is_connected():
                        if use_time - deadline < 0:
                            # Idle longer than the keep-alive timeout: drop.
                            transport = proto.transport
                            proto.close()
                            if key.is_ssl and not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(transport)
                        else:
                            alive.append((proto, use_time))
                    else:
                        # Peer already disconnected: make sure the transport
                        # is torn down too.
                        transport = proto.transport
                        proto.close()
                        if key.is_ssl and not self._cleanup_closed_disabled:
                            self._cleanup_closed_transports.append(transport)

                if alive:
                    connections[key] = alive

            self._conns = connections

        if self._conns:
            # Re-schedule via a weakref handle so the timer does not keep
            # the connector alive.
            self._cleanup_handle = helpers.weakref_handle(
                self,
                "_cleanup",
                timeout,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )
|
| 389 |
+
|
| 390 |
+
def _drop_acquired_per_host(
|
| 391 |
+
self, key: "ConnectionKey", val: ResponseHandler
|
| 392 |
+
) -> None:
|
| 393 |
+
acquired_per_host = self._acquired_per_host
|
| 394 |
+
if key not in acquired_per_host:
|
| 395 |
+
return
|
| 396 |
+
conns = acquired_per_host[key]
|
| 397 |
+
conns.remove(val)
|
| 398 |
+
if not conns:
|
| 399 |
+
del self._acquired_per_host[key]
|
| 400 |
+
|
| 401 |
+
    def _cleanup_closed(self) -> None:
        """Double confirmation for transport close.

        Some broken ssl servers may leave socket open without proper close.
        """
        if self._cleanup_closed_handle:
            self._cleanup_closed_handle.cancel()

        # Force-abort transports that were closed earlier but may still
        # hold their socket open.
        for transport in self._cleanup_closed_transports:
            if transport is not None:
                transport.abort()

        self._cleanup_closed_transports = []

        if not self._cleanup_closed_disabled:
            # Re-arm the periodic sweep via a weakref handle so it does
            # not keep the connector alive.
            self._cleanup_closed_handle = helpers.weakref_handle(
                self,
                "_cleanup_closed",
                self._cleanup_closed_period,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )
|
| 423 |
+
|
| 424 |
+
    def close(self) -> Awaitable[None]:
        """Close all opened transports."""
        self._close()
        # Returns an awaitable for backward compatibility; awaiting it is
        # a no-op but suppresses the deprecation warning.
        return _DeprecationWaiter(noop())
|
| 428 |
+
|
| 429 |
+
    def _close(self) -> None:
        """Synchronously close the connector and release every resource.

        Idempotent: the second and subsequent calls are no-ops.
        """
        if self._closed:
            return

        self._closed = True

        try:
            # With the loop gone there is nothing left to cancel or close.
            if self._loop.is_closed():
                return

            # cancel cleanup task
            if self._cleanup_handle:
                self._cleanup_handle.cancel()

            # cancel cleanup close task
            if self._cleanup_closed_handle:
                self._cleanup_closed_handle.cancel()

            # Close idle pooled connections...
            for data in self._conns.values():
                for proto, t0 in data:
                    proto.close()

            # ...and connections currently handed out to callers.
            for proto in self._acquired:
                proto.close()

            # Abort transports queued for the second close pass.
            for transport in self._cleanup_closed_transports:
                if transport is not None:
                    transport.abort()

        finally:
            # Bookkeeping is cleared even if closing a protocol raised.
            self._conns.clear()
            self._acquired.clear()
            self._waiters.clear()
            self._cleanup_handle = None
            self._cleanup_closed_transports.clear()
            self._cleanup_closed_handle = None
|
| 465 |
+
|
| 466 |
+
@property
|
| 467 |
+
def closed(self) -> bool:
|
| 468 |
+
"""Is connector closed.
|
| 469 |
+
|
| 470 |
+
A readonly property.
|
| 471 |
+
"""
|
| 472 |
+
return self._closed
|
| 473 |
+
|
| 474 |
+
def _available_connections(self, key: "ConnectionKey") -> int:
|
| 475 |
+
"""
|
| 476 |
+
Return number of available connections.
|
| 477 |
+
|
| 478 |
+
The limit, limit_per_host and the connection key are taken into account.
|
| 479 |
+
|
| 480 |
+
If it returns less than 1 means that there are no connections
|
| 481 |
+
available.
|
| 482 |
+
"""
|
| 483 |
+
if self._limit:
|
| 484 |
+
# total calc available connections
|
| 485 |
+
available = self._limit - len(self._acquired)
|
| 486 |
+
|
| 487 |
+
# check limit per host
|
| 488 |
+
if (
|
| 489 |
+
self._limit_per_host
|
| 490 |
+
and available > 0
|
| 491 |
+
and key in self._acquired_per_host
|
| 492 |
+
):
|
| 493 |
+
acquired = self._acquired_per_host.get(key)
|
| 494 |
+
assert acquired is not None
|
| 495 |
+
available = self._limit_per_host - len(acquired)
|
| 496 |
+
|
| 497 |
+
elif self._limit_per_host and key in self._acquired_per_host:
|
| 498 |
+
# check limit per host
|
| 499 |
+
acquired = self._acquired_per_host.get(key)
|
| 500 |
+
assert acquired is not None
|
| 501 |
+
available = self._limit_per_host - len(acquired)
|
| 502 |
+
else:
|
| 503 |
+
available = 1
|
| 504 |
+
|
| 505 |
+
return available
|
| 506 |
+
|
| 507 |
+
    async def connect(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Connection:
        """Get from pool or create new connection.

        If no slot is available the coroutine parks itself on a per-key
        waiter future until ``_release_waiter`` wakes it up.
        """
        key = req.connection_key
        available = self._available_connections(key)

        # Wait if there are no available connections or if there are/were
        # waiters (i.e. don't steal connection from a waiter about to wake up)
        if available <= 0 or key in self._waiters:
            fut = self._loop.create_future()

            # This connection will now count towards the limit.
            self._waiters[key].append(fut)

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_start()

            try:
                await fut
            except BaseException as e:
                if key in self._waiters:
                    # remove a waiter even if it was cancelled, normally it's
                    # removed when it's notified
                    try:
                        self._waiters[key].remove(fut)
                    except ValueError:  # fut may no longer be in list
                        pass

                raise e
            finally:
                # Drop the key once its waiter queue is empty, so
                # ``key in self._waiters`` stays meaningful.
                if key in self._waiters and not self._waiters[key]:
                    del self._waiters[key]

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_end()

        proto = self._get(key)
        if proto is None:
            # No reusable connection: reserve a slot with a placeholder while
            # the real connection is being established, so concurrent callers
            # see the limit as consumed.
            placeholder = cast(ResponseHandler, _TransportPlaceholder())
            self._acquired.add(placeholder)
            self._acquired_per_host[key].add(placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_start()

            try:
                proto = await self._create_connection(req, traces, timeout)
                if self._closed:
                    proto.close()
                    raise ClientConnectionError("Connector is closed.")
            except BaseException:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)
                    # The reserved slot is free again: wake one waiter.
                    self._release_waiter()
                raise
            else:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_end()
        else:
            if traces:
                # Acquire the connection to prevent race conditions with limits
                placeholder = cast(ResponseHandler, _TransportPlaceholder())
                self._acquired.add(placeholder)
                self._acquired_per_host[key].add(placeholder)
                for trace in traces:
                    await trace.send_connection_reuseconn()
                self._acquired.remove(placeholder)
                self._drop_acquired_per_host(key, placeholder)

        # Hand the real protocol out as acquired.
        self._acquired.add(proto)
        self._acquired_per_host[key].add(proto)
        return Connection(self, key, proto, self._loop)
|
| 589 |
+
|
| 590 |
+
    def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
        """Pop a healthy pooled connection for *key*, or None.

        Stale (keepalive-expired) and disconnected protocols encountered
        along the way are closed and, for SSL, queued for a forced abort.
        """
        try:
            conns = self._conns[key]
        except KeyError:
            return None

        t1 = self._loop.time()
        while conns:
            proto, t0 = conns.pop()
            if proto.is_connected():
                if t1 - t0 > self._keepalive_timeout:
                    # Idle for too long: discard instead of reusing.
                    transport = proto.transport
                    proto.close()
                    # only for SSL transports
                    if key.is_ssl and not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transport)
                else:
                    if not conns:
                        # The very last connection was reclaimed: drop the key
                        del self._conns[key]
                    return proto
            else:
                # Peer already disconnected; dispose of the protocol.
                transport = proto.transport
                proto.close()
                if key.is_ssl and not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transport)

        # No more connections: drop the key
        del self._conns[key]
        return None
|
| 620 |
+
|
| 621 |
+
def _release_waiter(self) -> None:
|
| 622 |
+
"""
|
| 623 |
+
Iterates over all waiters until one to be released is found.
|
| 624 |
+
|
| 625 |
+
The one to be released is not finished and
|
| 626 |
+
belongs to a host that has available connections.
|
| 627 |
+
"""
|
| 628 |
+
if not self._waiters:
|
| 629 |
+
return
|
| 630 |
+
|
| 631 |
+
# Having the dict keys ordered this avoids to iterate
|
| 632 |
+
# at the same order at each call.
|
| 633 |
+
queues = list(self._waiters.keys())
|
| 634 |
+
random.shuffle(queues)
|
| 635 |
+
|
| 636 |
+
for key in queues:
|
| 637 |
+
if self._available_connections(key) < 1:
|
| 638 |
+
continue
|
| 639 |
+
|
| 640 |
+
waiters = self._waiters[key]
|
| 641 |
+
while waiters:
|
| 642 |
+
waiter = waiters.popleft()
|
| 643 |
+
if not waiter.done():
|
| 644 |
+
waiter.set_result(None)
|
| 645 |
+
return
|
| 646 |
+
|
| 647 |
+
    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
        """Remove *proto* from the acquired bookkeeping and wake a waiter."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        try:
            self._acquired.remove(proto)
            self._drop_acquired_per_host(key, proto)
        except KeyError:  # pragma: no cover
            # this may be the result of a non-deterministic order of object
            # finalization due to garbage collection.
            pass
        else:
            # A slot was freed, so one pending waiter may proceed.
            self._release_waiter()
|
| 661 |
+
|
| 662 |
+
    def _release(
        self,
        key: "ConnectionKey",
        protocol: ResponseHandler,
        *,
        should_close: bool = False,
    ) -> None:
        """Return *protocol* to the pool, or close it.

        The connection goes back into ``self._conns`` for reuse unless
        closing is requested (explicitly, by ``force_close``, or by the
        protocol itself).
        """
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._release_acquired(key, protocol)

        if self._force_close:
            should_close = True

        if should_close or protocol.should_close:
            transport = protocol.transport
            protocol.close()

            # SSL transports get a second, forced abort pass later.
            if key.is_ssl and not self._cleanup_closed_disabled:
                self._cleanup_closed_transports.append(transport)
        else:
            # Pool the connection together with its release timestamp so
            # _cleanup can expire it after the keepalive timeout.
            conns = self._conns.get(key)
            if conns is None:
                conns = self._conns[key] = []
            conns.append((protocol, self._loop.time()))

            if self._cleanup_handle is None:
                self._cleanup_handle = helpers.weakref_handle(
                    self,
                    "_cleanup",
                    self._keepalive_timeout,
                    self._loop,
                    timeout_ceil_threshold=self._timeout_ceil_threshold,
                )
|
| 698 |
+
|
| 699 |
+
    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Establish a new connection; concrete connectors must override."""
        raise NotImplementedError()
|
| 703 |
+
|
| 704 |
+
|
| 705 |
+
class _DNSCacheTable:
    """Round-robin cache of DNS lookup results with optional TTL expiry."""

    def __init__(self, ttl: Optional[float] = None) -> None:
        # key -> (endless cycle over the addresses, number of addresses)
        self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {}
        # key -> time the entry was stored; only maintained when a TTL is set
        self._timestamps: Dict[Tuple[str, int], float] = {}
        self._ttl = ttl

    def __contains__(self, host: object) -> bool:
        return host in self._addrs_rr

    def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None:
        """Store (or replace) the resolved addresses for *key*."""
        self._addrs_rr[key] = (cycle(addrs), len(addrs))
        if self._ttl is not None:
            self._timestamps[key] = monotonic()

    def remove(self, key: Tuple[str, int]) -> None:
        """Forget *key*; silently ignores unknown keys."""
        self._addrs_rr.pop(key, None)
        if self._ttl is not None:
            self._timestamps.pop(key, None)

    def clear(self) -> None:
        """Drop every cached entry."""
        self._addrs_rr.clear()
        self._timestamps.clear()

    def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]:
        """Return all addresses for *key*, rotated by one on each call."""
        rr, count = self._addrs_rr[key]
        batch = list(islice(rr, count))
        # Consume one more element to shift internal state of `cycle`
        next(rr)
        return batch

    def expired(self, key: Tuple[str, int]) -> bool:
        """True when the entry for *key* is older than the configured TTL."""
        if self._ttl is None:
            return False
        return self._timestamps[key] + self._ttl < monotonic()
|
| 742 |
+
|
| 743 |
+
|
| 744 |
+
def _make_ssl_context(verified: bool) -> SSLContext:
|
| 745 |
+
"""Create SSL context.
|
| 746 |
+
|
| 747 |
+
This method is not async-friendly and should be called from a thread
|
| 748 |
+
because it will load certificates from disk and do other blocking I/O.
|
| 749 |
+
"""
|
| 750 |
+
if ssl is None:
|
| 751 |
+
# No ssl support
|
| 752 |
+
return None
|
| 753 |
+
if verified:
|
| 754 |
+
return ssl.create_default_context()
|
| 755 |
+
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
| 756 |
+
sslcontext.options |= ssl.OP_NO_SSLv2
|
| 757 |
+
sslcontext.options |= ssl.OP_NO_SSLv3
|
| 758 |
+
sslcontext.check_hostname = False
|
| 759 |
+
sslcontext.verify_mode = ssl.CERT_NONE
|
| 760 |
+
sslcontext.options |= ssl.OP_NO_COMPRESSION
|
| 761 |
+
sslcontext.set_default_verify_paths()
|
| 762 |
+
return sslcontext
|
| 763 |
+
|
| 764 |
+
|
| 765 |
+
# The default SSLContext objects are created at import time
# since they do blocking I/O to load certificates from disk,
# and imports should always be done before the event loop starts
# or in a thread.
# NOTE: both may be None when the ssl module is unavailable
# (see _make_ssl_context).
_SSL_CONTEXT_VERIFIED = _make_ssl_context(True)
_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False)
|
| 771 |
+
|
| 772 |
+
|
| 773 |
+
class TCPConnector(BaseConnector):
|
| 774 |
+
"""TCP connector.
|
| 775 |
+
|
| 776 |
+
verify_ssl - Set to True to check ssl certifications.
|
| 777 |
+
fingerprint - Pass the binary sha256
|
| 778 |
+
digest of the expected certificate in DER format to verify
|
| 779 |
+
that the certificate the server presents matches. See also
|
| 780 |
+
https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
|
| 781 |
+
resolver - Enable DNS lookups and use this
|
| 782 |
+
resolver
|
| 783 |
+
use_dns_cache - Use memory cache for DNS lookups.
|
| 784 |
+
ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
|
| 785 |
+
family - socket address family
|
| 786 |
+
local_addr - local tuple of (host, port) to bind socket to
|
| 787 |
+
|
| 788 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
| 789 |
+
force_close - Set to True to force close and do reconnect
|
| 790 |
+
after each request (and between redirects).
|
| 791 |
+
limit - The total number of simultaneous connections.
|
| 792 |
+
limit_per_host - Number of simultaneous connections to one host.
|
| 793 |
+
enable_cleanup_closed - Enables clean-up closed ssl transports.
|
| 794 |
+
Disabled by default.
|
| 795 |
+
happy_eyeballs_delay - This is the “Connection Attempt Delay”
|
| 796 |
+
as defined in RFC 8305. To disable
|
| 797 |
+
the happy eyeballs algorithm, set to None.
|
| 798 |
+
interleave - “First Address Family Count” as defined in RFC 8305
|
| 799 |
+
loop - Optional event loop.
|
| 800 |
+
"""
|
| 801 |
+
|
| 802 |
+
allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"})
|
| 803 |
+
|
| 804 |
+
    def __init__(
        self,
        *,
        verify_ssl: bool = True,
        fingerprint: Optional[bytes] = None,
        use_dns_cache: bool = True,
        ttl_dns_cache: Optional[int] = 10,
        family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[bool, Fingerprint, SSLContext] = True,
        local_addr: Optional[Tuple[str, int]] = None,
        resolver: Optional[AbstractResolver] = None,
        keepalive_timeout: Union[None, float, object] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
        happy_eyeballs_delay: Optional[float] = 0.25,
        interleave: Optional[int] = None,
    ):
        # See the class docstring for the meaning of each parameter.
        super().__init__(
            keepalive_timeout=keepalive_timeout,
            force_close=force_close,
            limit=limit,
            limit_per_host=limit_per_host,
            enable_cleanup_closed=enable_cleanup_closed,
            loop=loop,
            timeout_ceil_threshold=timeout_ceil_threshold,
        )

        # Collapse legacy ssl-related kwargs into a single setting.
        self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        if resolver is None:
            resolver = DefaultResolver(loop=self._loop)
        self._resolver = resolver

        # DNS caching / throttling state.
        self._use_dns_cache = use_dns_cache
        self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
        self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
        self._family = family
        self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr)
        # Happy-eyeballs (RFC 8305) tuning.
        self._happy_eyeballs_delay = happy_eyeballs_delay
        self._interleave = interleave
        # Background host-resolution tasks (cancelled by close()).
        self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set()
|
| 849 |
+
|
| 850 |
+
def close(self) -> Awaitable[None]:
|
| 851 |
+
"""Close all ongoing DNS calls."""
|
| 852 |
+
for ev in self._throttle_dns_events.values():
|
| 853 |
+
ev.cancel()
|
| 854 |
+
|
| 855 |
+
for t in self._resolve_host_tasks:
|
| 856 |
+
t.cancel()
|
| 857 |
+
|
| 858 |
+
return super().close()
|
| 859 |
+
|
| 860 |
+
@property
|
| 861 |
+
def family(self) -> int:
|
| 862 |
+
"""Socket family like AF_INET."""
|
| 863 |
+
return self._family
|
| 864 |
+
|
| 865 |
+
@property
|
| 866 |
+
def use_dns_cache(self) -> bool:
|
| 867 |
+
"""True if local DNS caching is enabled."""
|
| 868 |
+
return self._use_dns_cache
|
| 869 |
+
|
| 870 |
+
def clear_dns_cache(
|
| 871 |
+
self, host: Optional[str] = None, port: Optional[int] = None
|
| 872 |
+
) -> None:
|
| 873 |
+
"""Remove specified host/port or clear all dns local cache."""
|
| 874 |
+
if host is not None and port is not None:
|
| 875 |
+
self._cached_hosts.remove((host, port))
|
| 876 |
+
elif host is not None or port is not None:
|
| 877 |
+
raise ValueError("either both host and port " "or none of them are allowed")
|
| 878 |
+
else:
|
| 879 |
+
self._cached_hosts.clear()
|
| 880 |
+
|
| 881 |
+
    async def _resolve_host(
        self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None
    ) -> List[ResolveResult]:
        """Resolve host and return list of addresses."""
        if is_ip_address(host):
            # Already a literal IP: no lookup needed.
            return [
                {
                    "hostname": host,
                    "host": host,
                    "port": port,
                    "family": self._family,
                    "proto": 0,
                    "flags": 0,
                }
            ]

        if not self._use_dns_cache:

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            res = await self._resolver.resolve(host, port, family=self._family)

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            return res

        key = (host, port)
        if key in self._cached_hosts and not self._cached_hosts.expired(key):
            # get result early, before any await (#4014)
            result = self._cached_hosts.next_addrs(key)

            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            return result

        #
        # If multiple connectors are resolving the same host, we wait
        # for the first one to resolve and then use the result for all of them.
        # We use a throttle event to ensure that we only resolve the host once
        # and then use the result for all the waiters.
        #
        # In this case we need to create a task to ensure that we can shield
        # the task from cancellation as cancelling this lookup should not cancel
        # the underlying lookup or else the cancel event will get broadcast to
        # all the waiters across all connections.
        #
        resolved_host_task = asyncio.create_task(
            self._resolve_host_with_throttle(key, host, port, traces)
        )
        self._resolve_host_tasks.add(resolved_host_task)
        resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)
        try:
            return await asyncio.shield(resolved_host_task)
        except asyncio.CancelledError:

            def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None:
                # Prevent "exception was never retrieved" warnings for the
                # shielded task once the outer await is cancelled.
                with suppress(Exception, asyncio.CancelledError):
                    fut.result()

            resolved_host_task.add_done_callback(drop_exception)
            raise
|
| 947 |
+
|
| 948 |
+
    async def _resolve_host_with_throttle(
        self,
        key: Tuple[str, int],
        host: str,
        port: int,
        traces: Optional[Sequence["Trace"]],
    ) -> List[ResolveResult]:
        """Resolve host with a dns events throttle.

        Concurrent lookups for the same *key* wait on a shared event; only
        the first caller performs the actual resolution.
        """
        if key in self._throttle_dns_events:
            # get event early, before any await (#4014)
            event = self._throttle_dns_events[key]
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            # Wait for the in-flight lookup; re-raises its exception if any.
            await event.wait()
        else:
            # update dict early, before any await (#4014)
            self._throttle_dns_events[key] = EventResultOrError(self._loop)
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_miss(host)
            try:

                if traces:
                    for trace in traces:
                        await trace.send_dns_resolvehost_start(host)

                addrs = await self._resolver.resolve(host, port, family=self._family)
                if traces:
                    for trace in traces:
                        await trace.send_dns_resolvehost_end(host)

                # Publish the result, then wake every waiter.
                self._cached_hosts.add(key, addrs)
                self._throttle_dns_events[key].set()
            except BaseException as e:
                # any DNS exception, independently of the implementation
                # is set for the waiters to raise the same exception.
                self._throttle_dns_events[key].set(exc=e)
                raise
            finally:
                # The throttle entry only lives for the duration of the lookup.
                self._throttle_dns_events.pop(key)

        return self._cached_hosts.next_addrs(key)
|
| 991 |
+
|
| 992 |
+
async def _create_connection(
|
| 993 |
+
self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
|
| 994 |
+
) -> ResponseHandler:
|
| 995 |
+
"""Create connection.
|
| 996 |
+
|
| 997 |
+
Has same keyword arguments as BaseEventLoop.create_connection.
|
| 998 |
+
"""
|
| 999 |
+
if req.proxy:
|
| 1000 |
+
_, proto = await self._create_proxy_connection(req, traces, timeout)
|
| 1001 |
+
else:
|
| 1002 |
+
_, proto = await self._create_direct_connection(req, traces, timeout)
|
| 1003 |
+
|
| 1004 |
+
return proto
|
| 1005 |
+
|
| 1006 |
+
    def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
        """Logic to get the correct SSL context

        0. if req.ssl is false, return None

        1. if ssl_context is specified in req, use it
        2. if _ssl_context is specified in self, use it
        3. otherwise:
            1. if verify_ssl is not specified in req, use self.ssl_context
               (will generate a default context according to self.verify_ssl)
            2. if verify_ssl is True in req, generate a default SSL context
            3. if verify_ssl is False in req, generate a SSL context that
               won't verify
        """
        if not req.is_ssl():
            return None

        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")
        # Per-request setting wins over the connector-wide one.
        sslcontext = req.ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return _SSL_CONTEXT_UNVERIFIED
        # Fall back to the connector-wide setting.
        sslcontext = self._ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return _SSL_CONTEXT_UNVERIFIED
        return _SSL_CONTEXT_VERIFIED
|
| 1038 |
+
|
| 1039 |
+
def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
|
| 1040 |
+
ret = req.ssl
|
| 1041 |
+
if isinstance(ret, Fingerprint):
|
| 1042 |
+
return ret
|
| 1043 |
+
ret = self._ssl
|
| 1044 |
+
if isinstance(ret, Fingerprint):
|
| 1045 |
+
return ret
|
| 1046 |
+
return None
|
| 1047 |
+
|
| 1048 |
+
    async def _wrap_create_connection(
        self,
        *args: Any,
        addr_infos: List[aiohappyeyeballs.AddrInfoType],
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
        **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Open a TCP connection via happy-eyeballs, mapping low-level errors.

        Certificate and SSL failures become the corresponding client
        exceptions; other OS errors become *client_error*.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                # RFC 8305 connection racing over the resolved addresses.
                sock = await aiohappyeyeballs.start_connection(
                    addr_infos=addr_infos,
                    local_addr_infos=self._local_addr_infos,
                    happy_eyeballs_delay=self._happy_eyeballs_delay,
                    interleave=self._interleave,
                    loop=self._loop,
                )
                return await self._loop.create_connection(*args, **kwargs, sock=sock)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # asyncio.TimeoutError subclasses OSError here; let the timeout
            # propagate unchanged instead of wrapping it.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
|
| 1077 |
+
|
| 1078 |
+
    async def _wrap_existing_connection(
        self,
        *args: Any,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Like ``_wrap_create_connection`` but without opening a new socket.

        Delegates straight to ``loop.create_connection`` with the given
        arguments, applying the same timeout and error mapping.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                return await self._loop.create_connection(*args, **kwargs)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # Preserve genuine timeouts; wrap everything else.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
|
| 1099 |
+
|
| 1100 |
+
def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
|
| 1101 |
+
"""Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
|
| 1102 |
+
|
| 1103 |
+
It is necessary for TLS-in-TLS so that it is possible to
|
| 1104 |
+
send HTTPS queries through HTTPS proxies.
|
| 1105 |
+
|
| 1106 |
+
This doesn't affect regular HTTP requests, though.
|
| 1107 |
+
"""
|
| 1108 |
+
if not req.is_ssl():
|
| 1109 |
+
return
|
| 1110 |
+
|
| 1111 |
+
proxy_url = req.proxy
|
| 1112 |
+
assert proxy_url is not None
|
| 1113 |
+
if proxy_url.scheme != "https":
|
| 1114 |
+
return
|
| 1115 |
+
|
| 1116 |
+
self._check_loop_for_start_tls()
|
| 1117 |
+
|
| 1118 |
+
    def _check_loop_for_start_tls(self) -> None:
        """Raise RuntimeError when the event loop lacks ``start_tls()``."""
        try:
            # Attribute probe only; the method is not called here.
            self._loop.start_tls
        except AttributeError as attr_exc:
            raise RuntimeError(
                "An HTTPS request is being sent through an HTTPS proxy. "
                "This needs support for TLS in TLS but it is not implemented "
                "in your runtime for the stdlib asyncio.\n\n"
                "Please upgrade to Python 3.11 or higher. For more details, "
                "please see:\n"
                "* https://bugs.python.org/issue37179\n"
                "* https://github.com/python/cpython/pull/28073\n"
                "* https://docs.aiohttp.org/en/stable/"
                "client_advanced.html#proxy-support\n"
                "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            ) from attr_exc
|
| 1134 |
+
|
| 1135 |
+
def _loop_supports_start_tls(self) -> bool:
|
| 1136 |
+
try:
|
| 1137 |
+
self._check_loop_for_start_tls()
|
| 1138 |
+
except RuntimeError:
|
| 1139 |
+
return False
|
| 1140 |
+
else:
|
| 1141 |
+
return True
|
| 1142 |
+
|
| 1143 |
+
    def _warn_about_tls_in_tls(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
    ) -> None:
        """Issue a warning if the requested URL has HTTPS scheme."""
        if req.request_info.url.scheme != "https":
            return

        # Transports that can be wrapped by start_tls() advertise it via
        # this private attribute; absence implies TLS-in-TLS will fail.
        asyncio_supports_tls_in_tls = getattr(
            underlying_transport,
            "_start_tls_compatible",
            False,
        )

        if asyncio_supports_tls_in_tls:
            return

        warnings.warn(
            "An HTTPS request is being sent through an HTTPS proxy. "
            "This support for TLS in TLS is known to be disabled "
            "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
            "an error in the log below.\n\n"
            "It is possible to enable it via monkeypatching. "
            "For more details, see:\n"
            "* https://bugs.python.org/issue37179\n"
            "* https://github.com/python/cpython/pull/28073\n\n"
            "You can temporarily patch this as follows:\n"
            "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
            "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            RuntimeWarning,
            source=self,
            # At least 3 of the calls in the stack originate
            # from the methods in this class.
            stacklevel=3,
        )
|
| 1179 |
+
|
| 1180 |
+
    async def _start_tls_connection(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Wrap the raw TCP transport with TLS."""
        tls_proto = self._factory()  # Create a brand new proto for TLS
        sslcontext = self._get_ssl_context(req)
        if TYPE_CHECKING:
            # _start_tls_connection is unreachable in the current code path
            # if sslcontext is None.
            assert sslcontext is not None

        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                try:
                    tls_transport = await self._loop.start_tls(
                        underlying_transport,
                        tls_proto,
                        sslcontext,
                        server_hostname=req.server_hostname or req.host,
                        ssl_handshake_timeout=timeout.total,
                    )
                except BaseException:
                    # We need to close the underlying transport since
                    # `start_tls()` probably failed before it had a
                    # chance to do this:
                    underlying_transport.close()
                    raise
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # Preserve genuine timeouts; wrap everything else.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
        except TypeError as type_err:
            # Example cause looks like this:
            # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
            # object at 0x7f760615e460> is not supported by start_tls()

            raise ClientConnectionError(
                "Cannot initialize a TLS-in-TLS connection to host "
                f"{req.host!s}:{req.port:d} through an underlying connection "
                f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
                f"[{type_err!s}]"
            ) from type_err
        else:
            if tls_transport is None:
                msg = "Failed to start TLS (possibly caused by closing transport)"
                raise client_error(req.connection_key, OSError(msg))
            tls_proto.connection_made(
                tls_transport
            )  # Kick the state machine of the new TLS protocol

        return tls_transport, tls_proto
|
| 1241 |
+
|
| 1242 |
+
def _convert_hosts_to_addr_infos(
    self, hosts: List[ResolveResult]
) -> List[aiohappyeyeballs.AddrInfoType]:
    """Build `socket.getaddrinfo()`-style tuples from DNS lookup results.

    Each entry of *hosts* came from a DNS lookup; the returned tuples
    mirror what `socket.getaddrinfo()` would produce for a TCP stream.
    Hosts whose address family does not match the connector's configured
    ``self._family`` (when one is set) are dropped.
    """
    converted: List[aiohappyeyeballs.AddrInfoType] = []
    for entry in hosts:
        address = entry["host"]
        # A colon can only appear in an IPv6 literal.
        if ":" in address:
            family = socket.AF_INET6
            sockaddr: Union[Tuple[str, int], Tuple[str, int, int, int]] = (
                address,
                entry["port"],
                0,
                0,
            )
        else:
            family = socket.AF_INET
            sockaddr = (address, entry["port"])
        if self._family and family != self._family:
            continue
        converted.append(
            (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", sockaddr)
        )
    return converted
|
| 1262 |
+
|
| 1263 |
+
async def _create_direct_connection(
    self,
    req: ClientRequest,
    traces: List["Trace"],
    timeout: "ClientTimeout",
    *,
    client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.Transport, ResponseHandler]:
    """Open a TCP connection straight to *req*'s host (no proxy hop).

    Resolves the host, then walks the candidate addresses until one
    connects; failed candidates are pruned from the list so the next
    attempt tries a different peer. When the request pins a TLS
    certificate fingerprint, it is verified after connecting and a
    mismatching peer is discarded and retried.

    Raises ClientConnectorError on DNS failure, re-raises
    asyncio.TimeoutError untranslated, and raises the last per-candidate
    exception when every address fails.
    """
    sslcontext = self._get_ssl_context(req)
    fingerprint = self._get_fingerprint(req)

    host = req.url.raw_host
    assert host is not None
    # Replace multiple trailing dots with a single one.
    # A trailing dot is only present for fully-qualified domain names.
    # See https://github.com/aio-libs/aiohttp/pull/7364.
    if host.endswith(".."):
        host = host.rstrip(".") + "."
    port = req.port
    assert port is not None
    try:
        # Cancelling this lookup should not cancel the underlying lookup
        # or else the cancel event will get broadcast to all the waiters
        # across all connections.
        hosts = await self._resolve_host(host, port, traces=traces)
    except OSError as exc:
        # asyncio.TimeoutError subclasses OSError on some Python
        # versions; an errno of None marks it as a timeout, not an
        # OS-level failure — propagate it untranslated.
        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
            raise
        # in case of proxy it is not ClientProxyConnectionError
        # it is problem of resolving proxy ip itself
        raise ClientConnectorError(req.connection_key, exc) from exc

    last_exc: Optional[Exception] = None
    addr_infos = self._convert_hosts_to_addr_infos(hosts)
    while addr_infos:
        # Strip trailing dots, certificates contain FQDN without dots.
        # See https://github.com/aio-libs/aiohttp/issues/3636
        server_hostname = (
            (req.server_hostname or host).rstrip(".") if sslcontext else None
        )

        try:
            transp, proto = await self._wrap_create_connection(
                self._factory,
                timeout=timeout,
                ssl=sslcontext,
                addr_infos=addr_infos,
                server_hostname=server_hostname,
                req=req,
                client_error=client_error,
            )
        except ClientConnectorError as exc:
            # Drop the candidates just attempted and retry with the rest.
            last_exc = exc
            aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
            continue

        if req.is_ssl() and fingerprint:
            try:
                fingerprint.check(transp)
            except ServerFingerprintMismatch as exc:
                transp.close()
                if not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transp)
                last_exc = exc
                # Remove the bad peer from the list of addr_infos
                sock: socket.socket = transp.get_extra_info("socket")
                bad_peer = sock.getpeername()
                aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
                continue

        return transp, proto
    else:
        # Loop exhausted without returning: every candidate failed, so a
        # failure exception must have been recorded.
        assert last_exc is not None
        raise last_exc
|
| 1337 |
+
|
| 1338 |
+
async def _create_proxy_connection(
    self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
    """Connect to *req*'s target through the HTTP proxy configured on it.

    For plain HTTP requests the connection to the proxy itself is
    returned. For HTTPS requests a CONNECT tunnel is established first
    and then TLS is started over it — via ``loop.start_tls()`` when the
    running loop supports it, otherwise by duplicating the raw socket
    and wrapping it in a new TLS connection.

    Raises ClientHttpProxyError when the proxy rejects the CONNECT
    request (non-200 response).
    """
    self._fail_on_no_start_tls(req)
    runtime_has_start_tls = self._loop_supports_start_tls()

    headers: Dict[str, str] = {}
    if req.proxy_headers is not None:
        headers = req.proxy_headers  # type: ignore[assignment]
    headers[hdrs.HOST] = req.headers[hdrs.HOST]

    url = req.proxy
    assert url is not None
    proxy_req = ClientRequest(
        hdrs.METH_GET,
        url,
        headers=headers,
        auth=req.proxy_auth,
        loop=self._loop,
        ssl=req.ssl,
    )

    # create connection to proxy server
    transport, proto = await self._create_direct_connection(
        proxy_req, [], timeout, client_error=ClientProxyConnectionError
    )

    # Many HTTP proxies has buggy keepalive support. Let's not
    # reuse connection but close it after processing every
    # response.
    proto.force_close()

    # Move proxy credentials to the header the proxy actually reads:
    # Proxy-Authorization on the plain request, or kept on the CONNECT
    # request itself when tunneling.
    auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
    if auth is not None:
        if not req.is_ssl():
            req.headers[hdrs.PROXY_AUTHORIZATION] = auth
        else:
            proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

    if req.is_ssl():
        if runtime_has_start_tls:
            self._warn_about_tls_in_tls(transport, req)

        # For HTTPS requests over HTTP proxy
        # we must notify proxy to tunnel connection
        # so we send CONNECT command:
        # CONNECT www.python.org:443 HTTP/1.1
        # Host: www.python.org
        #
        # next we must do TLS handshake and so on
        # to do this we must wrap raw socket into secure one
        # asyncio handles this perfectly
        proxy_req.method = hdrs.METH_CONNECT
        proxy_req.url = req.url
        # Key without proxy fields so the tunneled connection is pooled
        # under the target host, not the proxy.
        key = attr.evolve(
            req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
        )
        conn = Connection(self, key, proto, self._loop)
        proxy_resp = await proxy_req.send(conn)
        try:
            protocol = conn._protocol
            assert protocol is not None

            # read_until_eof=True will ensure the connection isn't closed
            # once the response is received and processed allowing
            # START_TLS to work on the connection below.
            protocol.set_response_params(
                read_until_eof=runtime_has_start_tls,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )
            resp = await proxy_resp.start(conn)
        except BaseException:
            proxy_resp.close()
            conn.close()
            raise
        else:
            # Detach protocol/transport from the pooled Connection: from
            # here on the transport is managed by this method directly.
            conn._protocol = None
            conn._transport = None
            try:
                if resp.status != 200:
                    message = resp.reason
                    if message is None:
                        message = HTTPStatus(resp.status).phrase
                    raise ClientHttpProxyError(
                        proxy_resp.request_info,
                        resp.history,
                        status=resp.status,
                        message=message,
                        headers=resp.headers,
                    )
                if not runtime_has_start_tls:
                    rawsock = transport.get_extra_info("socket", default=None)
                    if rawsock is None:
                        raise RuntimeError(
                            "Transport does not expose socket instance"
                        )
                    # Duplicate the socket, so now we can close proxy transport
                    rawsock = rawsock.dup()
            except BaseException:
                # It shouldn't be closed in `finally` because it's fed to
                # `loop.start_tls()` and the docs say not to touch it after
                # passing there.
                transport.close()
                raise
            finally:
                if not runtime_has_start_tls:
                    transport.close()

            if not runtime_has_start_tls:
                # HTTP proxy with support for upgrade to HTTPS
                sslcontext = self._get_ssl_context(req)
                return await self._wrap_existing_connection(
                    self._factory,
                    timeout=timeout,
                    ssl=sslcontext,
                    sock=rawsock,
                    server_hostname=req.host,
                    req=req,
                )

            return await self._start_tls_connection(
                # Access the old transport for the last time before it's
                # closed and forgotten forever:
                transport,
                req=req,
                timeout=timeout,
            )
        finally:
            proxy_resp.close()

    return transport, proto
|
| 1469 |
+
|
| 1470 |
+
|
| 1471 |
+
class UnixConnector(BaseConnector):
    """Connector that speaks HTTP over a Unix domain socket.

    path - Unix socket path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"})

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        self._path = path

    @property
    def path(self) -> str:
        """The Unix socket path this connector dials."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Dial the configured Unix socket and return its protocol."""
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_unix_connection(
                    self._factory, self._path
                )
        except OSError as os_err:
            # A TimeoutError carrying no errno is the ceil_timeout firing,
            # not an OS-level failure — propagate it untranslated.
            if os_err.errno is None and isinstance(os_err, asyncio.TimeoutError):
                raise
            raise UnixClientConnectorError(
                self.path, req.connection_key, os_err
            ) from os_err
        return proto
|
| 1524 |
+
|
| 1525 |
+
|
| 1526 |
+
class NamedPipeConnector(BaseConnector):
    """Connector that talks HTTP over a Windows named pipe.

    Only supported by the proactor event loop.
    See also: https://docs.python.org/3/library/asyncio-eventloop.html

    path - Windows named pipe path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # create_pipe_connection only exists on the proactor loop, so
        # reject any other loop implementation up front.
        if not isinstance(
            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        self._path = path

    @property
    def path(self) -> str:
        """The named pipe path this connector dials."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Open the named pipe and return its response handler."""
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]
                    self._factory, self._path
                )
                # Yield to the loop once so connection_made() runs and the
                # protocol's transport is populated before client.py's
                # _request method hits `assert conn.transport is not None`.
                # (The alternative would be assigning the transport onto
                # the protocol by hand.)
                await asyncio.sleep(0)
        except OSError as os_err:
            # A TimeoutError carrying no errno is the ceil_timeout firing,
            # not an OS-level failure — propagate it untranslated.
            if os_err.errno is None and isinstance(os_err, asyncio.TimeoutError):
                raise
            raise ClientConnectorError(req.connection_key, os_err) from os_err
        return cast(ResponseHandler, proto)
|