ZTWHHH commited on
Commit
6eea538
·
verified ·
1 Parent(s): 9f486ee

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash +1 -0
  2. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash +1 -0
  3. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash +1 -0
  4. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash +1 -0
  5. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash +1 -0
  6. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc +0 -0
  7. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc +0 -0
  8. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc +0 -0
  9. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc +0 -0
  10. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc +0 -0
  11. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc +0 -0
  12. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc +0 -0
  13. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc +0 -0
  14. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc +0 -0
  15. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc +0 -0
  16. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc +0 -0
  17. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc +0 -0
  18. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc +0 -0
  19. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc +0 -0
  20. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc +0 -0
  21. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc +0 -0
  22. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc +0 -0
  23. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc +0 -0
  24. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc +0 -0
  25. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc +0 -0
  26. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc +0 -0
  27. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc +0 -0
  28. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc +0 -0
  29. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc +0 -0
  30. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_cparser.pxd +158 -0
  31. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_http_parser.pyx +837 -0
  32. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_http_writer.pyx +162 -0
  33. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash +1 -0
  34. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash +1 -0
  35. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash +1 -0
  36. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/__pycache__/models.cpython-310.pyc +0 -0
  37. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-310.pyc +0 -0
  38. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/helpers.py +147 -0
  39. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/mask.pxd +3 -0
  40. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/mask.pyx +48 -0
  41. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/reader.py +31 -0
  42. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/reader_c.pxd +102 -0
  43. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/reader_py.py +468 -0
  44. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/writer.py +177 -0
  45. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client_exceptions.py +421 -0
  46. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client_reqrep.py +1315 -0
  47. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client_ws.py +426 -0
  48. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/connector.py +1652 -0
  49. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/hdrs.py +121 -0
  50. evalkit_cambrian/lib/python3.10/site-packages/aiohttp/http_websocket.py +36 -0
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_cparser.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_find_header.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_http_parser.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ c107400e3e4b8b3c02ffb9c51abf2722593a1a9a1a41e434df9f47d0730a1ae3 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/_http_writer.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 7e209c93f1158118935fb56d028576025763b9eb093053debf84d677d171f23a /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ dab8f933203eeb245d60f856e542a45b888d5a110094620e4811f90f816628d1 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/abc.cpython-310.pyc ADDED
Binary file (9.87 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc ADDED
Binary file (34.6 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/client_ws.cpython-310.pyc ADDED
Binary file (12.4 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc ADDED
Binary file (5.82 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc ADDED
Binary file (40.5 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc ADDED
Binary file (4.89 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc ADDED
Binary file (5.47 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc ADDED
Binary file (27.4 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/http.cpython-310.pyc ADDED
Binary file (1.46 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc ADDED
Binary file (4.86 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/http_writer.cpython-310.pyc ADDED
Binary file (6.56 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc ADDED
Binary file (16 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc ADDED
Binary file (11.5 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc ADDED
Binary file (4.45 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc ADDED
Binary file (19.4 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc ADDED
Binary file (22.7 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc ADDED
Binary file (11 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc ADDED
Binary file (11.6 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc ADDED
Binary file (7.54 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc ADDED
Binary file (3.85 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc ADDED
Binary file (24.8 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc ADDED
Binary file (7.65 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-310.pyc ADDED
Binary file (43.6 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc ADDED
Binary file (17.4 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_cparser.pxd ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
2
+
3
+
4
+ cdef extern from "../vendor/llhttp/build/llhttp.h":
5
+
6
+ struct llhttp__internal_s:
7
+ int32_t _index
8
+ void* _span_pos0
9
+ void* _span_cb0
10
+ int32_t error
11
+ const char* reason
12
+ const char* error_pos
13
+ void* data
14
+ void* _current
15
+ uint64_t content_length
16
+ uint8_t type
17
+ uint8_t method
18
+ uint8_t http_major
19
+ uint8_t http_minor
20
+ uint8_t header_state
21
+ uint8_t lenient_flags
22
+ uint8_t upgrade
23
+ uint8_t finish
24
+ uint16_t flags
25
+ uint16_t status_code
26
+ void* settings
27
+
28
+ ctypedef llhttp__internal_s llhttp__internal_t
29
+ ctypedef llhttp__internal_t llhttp_t
30
+
31
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
32
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
33
+
34
+ struct llhttp_settings_s:
35
+ llhttp_cb on_message_begin
36
+ llhttp_data_cb on_url
37
+ llhttp_data_cb on_status
38
+ llhttp_data_cb on_header_field
39
+ llhttp_data_cb on_header_value
40
+ llhttp_cb on_headers_complete
41
+ llhttp_data_cb on_body
42
+ llhttp_cb on_message_complete
43
+ llhttp_cb on_chunk_header
44
+ llhttp_cb on_chunk_complete
45
+
46
+ llhttp_cb on_url_complete
47
+ llhttp_cb on_status_complete
48
+ llhttp_cb on_header_field_complete
49
+ llhttp_cb on_header_value_complete
50
+
51
+ ctypedef llhttp_settings_s llhttp_settings_t
52
+
53
+ enum llhttp_errno:
54
+ HPE_OK,
55
+ HPE_INTERNAL,
56
+ HPE_STRICT,
57
+ HPE_LF_EXPECTED,
58
+ HPE_UNEXPECTED_CONTENT_LENGTH,
59
+ HPE_CLOSED_CONNECTION,
60
+ HPE_INVALID_METHOD,
61
+ HPE_INVALID_URL,
62
+ HPE_INVALID_CONSTANT,
63
+ HPE_INVALID_VERSION,
64
+ HPE_INVALID_HEADER_TOKEN,
65
+ HPE_INVALID_CONTENT_LENGTH,
66
+ HPE_INVALID_CHUNK_SIZE,
67
+ HPE_INVALID_STATUS,
68
+ HPE_INVALID_EOF_STATE,
69
+ HPE_INVALID_TRANSFER_ENCODING,
70
+ HPE_CB_MESSAGE_BEGIN,
71
+ HPE_CB_HEADERS_COMPLETE,
72
+ HPE_CB_MESSAGE_COMPLETE,
73
+ HPE_CB_CHUNK_HEADER,
74
+ HPE_CB_CHUNK_COMPLETE,
75
+ HPE_PAUSED,
76
+ HPE_PAUSED_UPGRADE,
77
+ HPE_USER
78
+
79
+ ctypedef llhttp_errno llhttp_errno_t
80
+
81
+ enum llhttp_flags:
82
+ F_CHUNKED,
83
+ F_CONTENT_LENGTH
84
+
85
+ enum llhttp_type:
86
+ HTTP_REQUEST,
87
+ HTTP_RESPONSE,
88
+ HTTP_BOTH
89
+
90
+ enum llhttp_method:
91
+ HTTP_DELETE,
92
+ HTTP_GET,
93
+ HTTP_HEAD,
94
+ HTTP_POST,
95
+ HTTP_PUT,
96
+ HTTP_CONNECT,
97
+ HTTP_OPTIONS,
98
+ HTTP_TRACE,
99
+ HTTP_COPY,
100
+ HTTP_LOCK,
101
+ HTTP_MKCOL,
102
+ HTTP_MOVE,
103
+ HTTP_PROPFIND,
104
+ HTTP_PROPPATCH,
105
+ HTTP_SEARCH,
106
+ HTTP_UNLOCK,
107
+ HTTP_BIND,
108
+ HTTP_REBIND,
109
+ HTTP_UNBIND,
110
+ HTTP_ACL,
111
+ HTTP_REPORT,
112
+ HTTP_MKACTIVITY,
113
+ HTTP_CHECKOUT,
114
+ HTTP_MERGE,
115
+ HTTP_MSEARCH,
116
+ HTTP_NOTIFY,
117
+ HTTP_SUBSCRIBE,
118
+ HTTP_UNSUBSCRIBE,
119
+ HTTP_PATCH,
120
+ HTTP_PURGE,
121
+ HTTP_MKCALENDAR,
122
+ HTTP_LINK,
123
+ HTTP_UNLINK,
124
+ HTTP_SOURCE,
125
+ HTTP_PRI,
126
+ HTTP_DESCRIBE,
127
+ HTTP_ANNOUNCE,
128
+ HTTP_SETUP,
129
+ HTTP_PLAY,
130
+ HTTP_PAUSE,
131
+ HTTP_TEARDOWN,
132
+ HTTP_GET_PARAMETER,
133
+ HTTP_SET_PARAMETER,
134
+ HTTP_REDIRECT,
135
+ HTTP_RECORD,
136
+ HTTP_FLUSH
137
+
138
+ ctypedef llhttp_method llhttp_method_t;
139
+
140
+ void llhttp_settings_init(llhttp_settings_t* settings)
141
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
142
+ const llhttp_settings_t* settings)
143
+
144
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
145
+
146
+ int llhttp_should_keep_alive(const llhttp_t* parser)
147
+
148
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
149
+
150
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
151
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
152
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
153
+
154
+ const char* llhttp_method_name(llhttp_method_t method)
155
+
156
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
157
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
158
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_http_parser.pyx ADDED
@@ -0,0 +1,837 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #cython: language_level=3
2
+ #
3
+ # Based on https://github.com/MagicStack/httptools
4
+ #
5
+
6
+ from cpython cimport (
7
+ Py_buffer,
8
+ PyBUF_SIMPLE,
9
+ PyBuffer_Release,
10
+ PyBytes_AsString,
11
+ PyBytes_AsStringAndSize,
12
+ PyObject_GetBuffer,
13
+ )
14
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc
15
+ from libc.limits cimport ULLONG_MAX
16
+ from libc.string cimport memcpy
17
+
18
+ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
19
+ from yarl import URL as _URL
20
+
21
+ from aiohttp import hdrs
22
+ from aiohttp.helpers import DEBUG, set_exception
23
+
24
+ from .http_exceptions import (
25
+ BadHttpMessage,
26
+ BadHttpMethod,
27
+ BadStatusLine,
28
+ ContentLengthError,
29
+ InvalidHeader,
30
+ InvalidURLError,
31
+ LineTooLong,
32
+ PayloadEncodingError,
33
+ TransferEncodingError,
34
+ )
35
+ from .http_parser import DeflateBuffer as _DeflateBuffer
36
+ from .http_writer import (
37
+ HttpVersion as _HttpVersion,
38
+ HttpVersion10 as _HttpVersion10,
39
+ HttpVersion11 as _HttpVersion11,
40
+ )
41
+ from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
42
+
43
+ cimport cython
44
+
45
+ from aiohttp cimport _cparser as cparser
46
+
47
+ include "_headers.pxi"
48
+
49
+ from aiohttp cimport _find_header
50
+
51
+ ALLOWED_UPGRADES = frozenset({"websocket"})
52
+ DEF DEFAULT_FREELIST_SIZE = 250
53
+
54
+ cdef extern from "Python.h":
55
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
56
+ Py_ssize_t PyByteArray_Size(object) except -1
57
+ char* PyByteArray_AsString(object)
58
+
59
+ __all__ = ('HttpRequestParser', 'HttpResponseParser',
60
+ 'RawRequestMessage', 'RawResponseMessage')
61
+
62
+ cdef object URL = _URL
63
+ cdef object URL_build = URL.build
64
+ cdef object CIMultiDict = _CIMultiDict
65
+ cdef object CIMultiDictProxy = _CIMultiDictProxy
66
+ cdef object HttpVersion = _HttpVersion
67
+ cdef object HttpVersion10 = _HttpVersion10
68
+ cdef object HttpVersion11 = _HttpVersion11
69
+ cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
70
+ cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
71
+ cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
72
+ cdef object StreamReader = _StreamReader
73
+ cdef object DeflateBuffer = _DeflateBuffer
74
+ cdef bytes EMPTY_BYTES = b""
75
+
76
+ cdef inline object extend(object buf, const char* at, size_t length):
77
+ cdef Py_ssize_t s
78
+ cdef char* ptr
79
+ s = PyByteArray_Size(buf)
80
+ PyByteArray_Resize(buf, s + length)
81
+ ptr = PyByteArray_AsString(buf)
82
+ memcpy(ptr + s, at, length)
83
+
84
+
85
+ DEF METHODS_COUNT = 46;
86
+
87
+ cdef list _http_method = []
88
+
89
+ for i in range(METHODS_COUNT):
90
+ _http_method.append(
91
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
92
+
93
+
94
+ cdef inline str http_method_str(int i):
95
+ if i < METHODS_COUNT:
96
+ return <str>_http_method[i]
97
+ else:
98
+ return "<unknown>"
99
+
100
+ cdef inline object find_header(bytes raw_header):
101
+ cdef Py_ssize_t size
102
+ cdef char *buf
103
+ cdef int idx
104
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
105
+ idx = _find_header.find_header(buf, size)
106
+ if idx == -1:
107
+ return raw_header.decode('utf-8', 'surrogateescape')
108
+ return headers[idx]
109
+
110
+
111
+ @cython.freelist(DEFAULT_FREELIST_SIZE)
112
+ cdef class RawRequestMessage:
113
+ cdef readonly str method
114
+ cdef readonly str path
115
+ cdef readonly object version # HttpVersion
116
+ cdef readonly object headers # CIMultiDict
117
+ cdef readonly object raw_headers # tuple
118
+ cdef readonly object should_close
119
+ cdef readonly object compression
120
+ cdef readonly object upgrade
121
+ cdef readonly object chunked
122
+ cdef readonly object url # yarl.URL
123
+
124
+ def __init__(self, method, path, version, headers, raw_headers,
125
+ should_close, compression, upgrade, chunked, url):
126
+ self.method = method
127
+ self.path = path
128
+ self.version = version
129
+ self.headers = headers
130
+ self.raw_headers = raw_headers
131
+ self.should_close = should_close
132
+ self.compression = compression
133
+ self.upgrade = upgrade
134
+ self.chunked = chunked
135
+ self.url = url
136
+
137
+ def __repr__(self):
138
+ info = []
139
+ info.append(("method", self.method))
140
+ info.append(("path", self.path))
141
+ info.append(("version", self.version))
142
+ info.append(("headers", self.headers))
143
+ info.append(("raw_headers", self.raw_headers))
144
+ info.append(("should_close", self.should_close))
145
+ info.append(("compression", self.compression))
146
+ info.append(("upgrade", self.upgrade))
147
+ info.append(("chunked", self.chunked))
148
+ info.append(("url", self.url))
149
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
150
+ return '<RawRequestMessage(' + sinfo + ')>'
151
+
152
+ def _replace(self, **dct):
153
+ cdef RawRequestMessage ret
154
+ ret = _new_request_message(self.method,
155
+ self.path,
156
+ self.version,
157
+ self.headers,
158
+ self.raw_headers,
159
+ self.should_close,
160
+ self.compression,
161
+ self.upgrade,
162
+ self.chunked,
163
+ self.url)
164
+ if "method" in dct:
165
+ ret.method = dct["method"]
166
+ if "path" in dct:
167
+ ret.path = dct["path"]
168
+ if "version" in dct:
169
+ ret.version = dct["version"]
170
+ if "headers" in dct:
171
+ ret.headers = dct["headers"]
172
+ if "raw_headers" in dct:
173
+ ret.raw_headers = dct["raw_headers"]
174
+ if "should_close" in dct:
175
+ ret.should_close = dct["should_close"]
176
+ if "compression" in dct:
177
+ ret.compression = dct["compression"]
178
+ if "upgrade" in dct:
179
+ ret.upgrade = dct["upgrade"]
180
+ if "chunked" in dct:
181
+ ret.chunked = dct["chunked"]
182
+ if "url" in dct:
183
+ ret.url = dct["url"]
184
+ return ret
185
+
186
+ cdef _new_request_message(str method,
187
+ str path,
188
+ object version,
189
+ object headers,
190
+ object raw_headers,
191
+ bint should_close,
192
+ object compression,
193
+ bint upgrade,
194
+ bint chunked,
195
+ object url):
196
+ cdef RawRequestMessage ret
197
+ ret = RawRequestMessage.__new__(RawRequestMessage)
198
+ ret.method = method
199
+ ret.path = path
200
+ ret.version = version
201
+ ret.headers = headers
202
+ ret.raw_headers = raw_headers
203
+ ret.should_close = should_close
204
+ ret.compression = compression
205
+ ret.upgrade = upgrade
206
+ ret.chunked = chunked
207
+ ret.url = url
208
+ return ret
209
+
210
+
211
+ @cython.freelist(DEFAULT_FREELIST_SIZE)
212
+ cdef class RawResponseMessage:
213
+ cdef readonly object version # HttpVersion
214
+ cdef readonly int code
215
+ cdef readonly str reason
216
+ cdef readonly object headers # CIMultiDict
217
+ cdef readonly object raw_headers # tuple
218
+ cdef readonly object should_close
219
+ cdef readonly object compression
220
+ cdef readonly object upgrade
221
+ cdef readonly object chunked
222
+
223
+ def __init__(self, version, code, reason, headers, raw_headers,
224
+ should_close, compression, upgrade, chunked):
225
+ self.version = version
226
+ self.code = code
227
+ self.reason = reason
228
+ self.headers = headers
229
+ self.raw_headers = raw_headers
230
+ self.should_close = should_close
231
+ self.compression = compression
232
+ self.upgrade = upgrade
233
+ self.chunked = chunked
234
+
235
+ def __repr__(self):
236
+ info = []
237
+ info.append(("version", self.version))
238
+ info.append(("code", self.code))
239
+ info.append(("reason", self.reason))
240
+ info.append(("headers", self.headers))
241
+ info.append(("raw_headers", self.raw_headers))
242
+ info.append(("should_close", self.should_close))
243
+ info.append(("compression", self.compression))
244
+ info.append(("upgrade", self.upgrade))
245
+ info.append(("chunked", self.chunked))
246
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
247
+ return '<RawResponseMessage(' + sinfo + ')>'
248
+
249
+
250
+ cdef _new_response_message(object version,
251
+ int code,
252
+ str reason,
253
+ object headers,
254
+ object raw_headers,
255
+ bint should_close,
256
+ object compression,
257
+ bint upgrade,
258
+ bint chunked):
259
+ cdef RawResponseMessage ret
260
+ ret = RawResponseMessage.__new__(RawResponseMessage)
261
+ ret.version = version
262
+ ret.code = code
263
+ ret.reason = reason
264
+ ret.headers = headers
265
+ ret.raw_headers = raw_headers
266
+ ret.should_close = should_close
267
+ ret.compression = compression
268
+ ret.upgrade = upgrade
269
+ ret.chunked = chunked
270
+ return ret
271
+
272
+
273
+ @cython.internal
274
+ cdef class HttpParser:
275
+
276
+ cdef:
277
+ cparser.llhttp_t* _cparser
278
+ cparser.llhttp_settings_t* _csettings
279
+
280
+ bytes _raw_name
281
+ object _name
282
+ bytes _raw_value
283
+ bint _has_value
284
+
285
+ object _protocol
286
+ object _loop
287
+ object _timer
288
+
289
+ size_t _max_line_size
290
+ size_t _max_field_size
291
+ size_t _max_headers
292
+ bint _response_with_body
293
+ bint _read_until_eof
294
+
295
+ bint _started
296
+ object _url
297
+ bytearray _buf
298
+ str _path
299
+ str _reason
300
+ list _headers
301
+ list _raw_headers
302
+ bint _upgraded
303
+ list _messages
304
+ object _payload
305
+ bint _payload_error
306
+ object _payload_exception
307
+ object _last_error
308
+ bint _auto_decompress
309
+ int _limit
310
+
311
+ str _content_encoding
312
+
313
+ Py_buffer py_buf
314
+
315
+ def __cinit__(self):
316
+ self._cparser = <cparser.llhttp_t*> \
317
+ PyMem_Malloc(sizeof(cparser.llhttp_t))
318
+ if self._cparser is NULL:
319
+ raise MemoryError()
320
+
321
+ self._csettings = <cparser.llhttp_settings_t*> \
322
+ PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
323
+ if self._csettings is NULL:
324
+ raise MemoryError()
325
+
326
+ def __dealloc__(self):
327
+ PyMem_Free(self._cparser)
328
+ PyMem_Free(self._csettings)
329
+
330
+ cdef _init(
331
+ self, cparser.llhttp_type mode,
332
+ object protocol, object loop, int limit,
333
+ object timer=None,
334
+ size_t max_line_size=8190, size_t max_headers=32768,
335
+ size_t max_field_size=8190, payload_exception=None,
336
+ bint response_with_body=True, bint read_until_eof=False,
337
+ bint auto_decompress=True,
338
+ ):
339
+ cparser.llhttp_settings_init(self._csettings)
340
+ cparser.llhttp_init(self._cparser, mode, self._csettings)
341
+ self._cparser.data = <void*>self
342
+ self._cparser.content_length = 0
343
+
344
+ self._protocol = protocol
345
+ self._loop = loop
346
+ self._timer = timer
347
+
348
+ self._buf = bytearray()
349
+ self._payload = None
350
+ self._payload_error = 0
351
+ self._payload_exception = payload_exception
352
+ self._messages = []
353
+
354
+ self._raw_name = EMPTY_BYTES
355
+ self._raw_value = EMPTY_BYTES
356
+ self._has_value = False
357
+
358
+ self._max_line_size = max_line_size
359
+ self._max_headers = max_headers
360
+ self._max_field_size = max_field_size
361
+ self._response_with_body = response_with_body
362
+ self._read_until_eof = read_until_eof
363
+ self._upgraded = False
364
+ self._auto_decompress = auto_decompress
365
+ self._content_encoding = None
366
+
367
+ self._csettings.on_url = cb_on_url
368
+ self._csettings.on_status = cb_on_status
369
+ self._csettings.on_header_field = cb_on_header_field
370
+ self._csettings.on_header_value = cb_on_header_value
371
+ self._csettings.on_headers_complete = cb_on_headers_complete
372
+ self._csettings.on_body = cb_on_body
373
+ self._csettings.on_message_begin = cb_on_message_begin
374
+ self._csettings.on_message_complete = cb_on_message_complete
375
+ self._csettings.on_chunk_header = cb_on_chunk_header
376
+ self._csettings.on_chunk_complete = cb_on_chunk_complete
377
+
378
+ self._last_error = None
379
+ self._limit = limit
380
+
381
+ cdef _process_header(self):
382
+ cdef str value
383
+ if self._raw_name is not EMPTY_BYTES:
384
+ name = find_header(self._raw_name)
385
+ value = self._raw_value.decode('utf-8', 'surrogateescape')
386
+
387
+ self._headers.append((name, value))
388
+
389
+ if name is CONTENT_ENCODING:
390
+ self._content_encoding = value
391
+
392
+ self._has_value = False
393
+ self._raw_headers.append((self._raw_name, self._raw_value))
394
+ self._raw_name = EMPTY_BYTES
395
+ self._raw_value = EMPTY_BYTES
396
+
397
+ cdef _on_header_field(self, char* at, size_t length):
398
+ if self._has_value:
399
+ self._process_header()
400
+
401
+ if self._raw_name is EMPTY_BYTES:
402
+ self._raw_name = at[:length]
403
+ else:
404
+ self._raw_name += at[:length]
405
+
406
+ cdef _on_header_value(self, char* at, size_t length):
407
+ if self._raw_value is EMPTY_BYTES:
408
+ self._raw_value = at[:length]
409
+ else:
410
+ self._raw_value += at[:length]
411
+ self._has_value = True
412
+
413
+ cdef _on_headers_complete(self):
414
+ self._process_header()
415
+
416
+ should_close = not cparser.llhttp_should_keep_alive(self._cparser)
417
+ upgrade = self._cparser.upgrade
418
+ chunked = self._cparser.flags & cparser.F_CHUNKED
419
+
420
+ raw_headers = tuple(self._raw_headers)
421
+ headers = CIMultiDictProxy(CIMultiDict(self._headers))
422
+
423
+ if self._cparser.type == cparser.HTTP_REQUEST:
424
+ allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES
425
+ if allowed or self._cparser.method == cparser.HTTP_CONNECT:
426
+ self._upgraded = True
427
+ else:
428
+ if upgrade and self._cparser.status_code == 101:
429
+ self._upgraded = True
430
+
431
+ # do not support old websocket spec
432
+ if SEC_WEBSOCKET_KEY1 in headers:
433
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
434
+
435
+ encoding = None
436
+ enc = self._content_encoding
437
+ if enc is not None:
438
+ self._content_encoding = None
439
+ enc = enc.lower()
440
+ if enc in ('gzip', 'deflate', 'br'):
441
+ encoding = enc
442
+
443
+ if self._cparser.type == cparser.HTTP_REQUEST:
444
+ method = http_method_str(self._cparser.method)
445
+ msg = _new_request_message(
446
+ method, self._path,
447
+ self.http_version(), headers, raw_headers,
448
+ should_close, encoding, upgrade, chunked, self._url)
449
+ else:
450
+ msg = _new_response_message(
451
+ self.http_version(), self._cparser.status_code, self._reason,
452
+ headers, raw_headers, should_close, encoding,
453
+ upgrade, chunked)
454
+
455
+ if (
456
+ ULLONG_MAX > self._cparser.content_length > 0 or chunked or
457
+ self._cparser.method == cparser.HTTP_CONNECT or
458
+ (self._cparser.status_code >= 199 and
459
+ self._cparser.content_length == 0 and
460
+ self._read_until_eof)
461
+ ):
462
+ payload = StreamReader(
463
+ self._protocol, timer=self._timer, loop=self._loop,
464
+ limit=self._limit)
465
+ else:
466
+ payload = EMPTY_PAYLOAD
467
+
468
+ self._payload = payload
469
+ if encoding is not None and self._auto_decompress:
470
+ self._payload = DeflateBuffer(payload, encoding)
471
+
472
+ if not self._response_with_body:
473
+ payload = EMPTY_PAYLOAD
474
+
475
+ self._messages.append((msg, payload))
476
+
477
+ cdef _on_message_complete(self):
478
+ self._payload.feed_eof()
479
+ self._payload = None
480
+
481
+ cdef _on_chunk_header(self):
482
+ self._payload.begin_http_chunk_receiving()
483
+
484
+ cdef _on_chunk_complete(self):
485
+ self._payload.end_http_chunk_receiving()
486
+
487
+ cdef object _on_status_complete(self):
488
+ pass
489
+
490
+ cdef inline http_version(self):
491
+ cdef cparser.llhttp_t* parser = self._cparser
492
+
493
+ if parser.http_major == 1:
494
+ if parser.http_minor == 0:
495
+ return HttpVersion10
496
+ elif parser.http_minor == 1:
497
+ return HttpVersion11
498
+
499
+ return HttpVersion(parser.http_major, parser.http_minor)
500
+
501
+ ### Public API ###
502
+
503
+ def feed_eof(self):
504
+ cdef bytes desc
505
+
506
+ if self._payload is not None:
507
+ if self._cparser.flags & cparser.F_CHUNKED:
508
+ raise TransferEncodingError(
509
+ "Not enough data for satisfy transfer length header.")
510
+ elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
511
+ raise ContentLengthError(
512
+ "Not enough data for satisfy content length header.")
513
+ elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
514
+ desc = cparser.llhttp_get_error_reason(self._cparser)
515
+ raise PayloadEncodingError(desc.decode('latin-1'))
516
+ else:
517
+ self._payload.feed_eof()
518
+ elif self._started:
519
+ self._on_headers_complete()
520
+ if self._messages:
521
+ return self._messages[-1][0]
522
+
523
+ def feed_data(self, data):
524
+ cdef:
525
+ size_t data_len
526
+ size_t nb
527
+ cdef cparser.llhttp_errno_t errno
528
+
529
+ PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
530
+ data_len = <size_t>self.py_buf.len
531
+
532
+ errno = cparser.llhttp_execute(
533
+ self._cparser,
534
+ <char*>self.py_buf.buf,
535
+ data_len)
536
+
537
+ if errno is cparser.HPE_PAUSED_UPGRADE:
538
+ cparser.llhttp_resume_after_upgrade(self._cparser)
539
+
540
+ nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
541
+
542
+ PyBuffer_Release(&self.py_buf)
543
+
544
+ if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
545
+ if self._payload_error == 0:
546
+ if self._last_error is not None:
547
+ ex = self._last_error
548
+ self._last_error = None
549
+ else:
550
+ after = cparser.llhttp_get_error_pos(self._cparser)
551
+ before = data[:after - <char*>self.py_buf.buf]
552
+ after_b = after.split(b"\r\n", 1)[0]
553
+ before = before.rsplit(b"\r\n", 1)[-1]
554
+ data = before + after_b
555
+ pointer = " " * (len(repr(before))-1) + "^"
556
+ ex = parser_error_from_errno(self._cparser, data, pointer)
557
+ self._payload = None
558
+ raise ex
559
+
560
+ if self._messages:
561
+ messages = self._messages
562
+ self._messages = []
563
+ else:
564
+ messages = ()
565
+
566
+ if self._upgraded:
567
+ return messages, True, data[nb:]
568
+ else:
569
+ return messages, False, b""
570
+
571
+ def set_upgraded(self, val):
572
+ self._upgraded = val
573
+
574
+
575
+ cdef class HttpRequestParser(HttpParser):
576
+
577
+ def __init__(
578
+ self, protocol, loop, int limit, timer=None,
579
+ size_t max_line_size=8190, size_t max_headers=32768,
580
+ size_t max_field_size=8190, payload_exception=None,
581
+ bint response_with_body=True, bint read_until_eof=False,
582
+ bint auto_decompress=True,
583
+ ):
584
+ self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
585
+ max_line_size, max_headers, max_field_size,
586
+ payload_exception, response_with_body, read_until_eof,
587
+ auto_decompress)
588
+
589
+ cdef object _on_status_complete(self):
590
+ cdef int idx1, idx2
591
+ if not self._buf:
592
+ return
593
+ self._path = self._buf.decode('utf-8', 'surrogateescape')
594
+ try:
595
+ idx3 = len(self._path)
596
+ if self._cparser.method == cparser.HTTP_CONNECT:
597
+ # authority-form,
598
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
599
+ self._url = URL.build(authority=self._path, encoded=True)
600
+ elif idx3 > 1 and self._path[0] == '/':
601
+ # origin-form,
602
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
603
+ idx1 = self._path.find("?")
604
+ if idx1 == -1:
605
+ query = ""
606
+ idx2 = self._path.find("#")
607
+ if idx2 == -1:
608
+ path = self._path
609
+ fragment = ""
610
+ else:
611
+ path = self._path[0: idx2]
612
+ fragment = self._path[idx2+1:]
613
+
614
+ else:
615
+ path = self._path[0:idx1]
616
+ idx1 += 1
617
+ idx2 = self._path.find("#", idx1+1)
618
+ if idx2 == -1:
619
+ query = self._path[idx1:]
620
+ fragment = ""
621
+ else:
622
+ query = self._path[idx1: idx2]
623
+ fragment = self._path[idx2+1:]
624
+
625
+ self._url = URL.build(
626
+ path=path,
627
+ query_string=query,
628
+ fragment=fragment,
629
+ encoded=True,
630
+ )
631
+ else:
632
+ # absolute-form for proxy maybe,
633
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
634
+ self._url = URL(self._path, encoded=True)
635
+ finally:
636
+ PyByteArray_Resize(self._buf, 0)
637
+
638
+
639
+ cdef class HttpResponseParser(HttpParser):
640
+
641
+ def __init__(
642
+ self, protocol, loop, int limit, timer=None,
643
+ size_t max_line_size=8190, size_t max_headers=32768,
644
+ size_t max_field_size=8190, payload_exception=None,
645
+ bint response_with_body=True, bint read_until_eof=False,
646
+ bint auto_decompress=True
647
+ ):
648
+ self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
649
+ max_line_size, max_headers, max_field_size,
650
+ payload_exception, response_with_body, read_until_eof,
651
+ auto_decompress)
652
+ # Use strict parsing on dev mode, so users are warned about broken servers.
653
+ if not DEBUG:
654
+ cparser.llhttp_set_lenient_headers(self._cparser, 1)
655
+ cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
656
+ cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
657
+
658
+ cdef object _on_status_complete(self):
659
+ if self._buf:
660
+ self._reason = self._buf.decode('utf-8', 'surrogateescape')
661
+ PyByteArray_Resize(self._buf, 0)
662
+ else:
663
+ self._reason = self._reason or ''
664
+
665
+ cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
666
+ cdef HttpParser pyparser = <HttpParser>parser.data
667
+
668
+ pyparser._started = True
669
+ pyparser._headers = []
670
+ pyparser._raw_headers = []
671
+ PyByteArray_Resize(pyparser._buf, 0)
672
+ pyparser._path = None
673
+ pyparser._reason = None
674
+ return 0
675
+
676
+
677
+ cdef int cb_on_url(cparser.llhttp_t* parser,
678
+ const char *at, size_t length) except -1:
679
+ cdef HttpParser pyparser = <HttpParser>parser.data
680
+ try:
681
+ if length > pyparser._max_line_size:
682
+ raise LineTooLong(
683
+ 'Status line is too long', pyparser._max_line_size, length)
684
+ extend(pyparser._buf, at, length)
685
+ except BaseException as ex:
686
+ pyparser._last_error = ex
687
+ return -1
688
+ else:
689
+ return 0
690
+
691
+
692
+ cdef int cb_on_status(cparser.llhttp_t* parser,
693
+ const char *at, size_t length) except -1:
694
+ cdef HttpParser pyparser = <HttpParser>parser.data
695
+ cdef str reason
696
+ try:
697
+ if length > pyparser._max_line_size:
698
+ raise LineTooLong(
699
+ 'Status line is too long', pyparser._max_line_size, length)
700
+ extend(pyparser._buf, at, length)
701
+ except BaseException as ex:
702
+ pyparser._last_error = ex
703
+ return -1
704
+ else:
705
+ return 0
706
+
707
+
708
+ cdef int cb_on_header_field(cparser.llhttp_t* parser,
709
+ const char *at, size_t length) except -1:
710
+ cdef HttpParser pyparser = <HttpParser>parser.data
711
+ cdef Py_ssize_t size
712
+ try:
713
+ pyparser._on_status_complete()
714
+ size = len(pyparser._raw_name) + length
715
+ if size > pyparser._max_field_size:
716
+ raise LineTooLong(
717
+ 'Header name is too long', pyparser._max_field_size, size)
718
+ pyparser._on_header_field(at, length)
719
+ except BaseException as ex:
720
+ pyparser._last_error = ex
721
+ return -1
722
+ else:
723
+ return 0
724
+
725
+
726
+ cdef int cb_on_header_value(cparser.llhttp_t* parser,
727
+ const char *at, size_t length) except -1:
728
+ cdef HttpParser pyparser = <HttpParser>parser.data
729
+ cdef Py_ssize_t size
730
+ try:
731
+ size = len(pyparser._raw_value) + length
732
+ if size > pyparser._max_field_size:
733
+ raise LineTooLong(
734
+ 'Header value is too long', pyparser._max_field_size, size)
735
+ pyparser._on_header_value(at, length)
736
+ except BaseException as ex:
737
+ pyparser._last_error = ex
738
+ return -1
739
+ else:
740
+ return 0
741
+
742
+
743
+ cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
744
+ cdef HttpParser pyparser = <HttpParser>parser.data
745
+ try:
746
+ pyparser._on_status_complete()
747
+ pyparser._on_headers_complete()
748
+ except BaseException as exc:
749
+ pyparser._last_error = exc
750
+ return -1
751
+ else:
752
+ if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:
753
+ return 2
754
+ else:
755
+ return 0
756
+
757
+
758
+ cdef int cb_on_body(cparser.llhttp_t* parser,
759
+ const char *at, size_t length) except -1:
760
+ cdef HttpParser pyparser = <HttpParser>parser.data
761
+ cdef bytes body = at[:length]
762
+ try:
763
+ pyparser._payload.feed_data(body, length)
764
+ except BaseException as underlying_exc:
765
+ reraised_exc = underlying_exc
766
+ if pyparser._payload_exception is not None:
767
+ reraised_exc = pyparser._payload_exception(str(underlying_exc))
768
+
769
+ set_exception(pyparser._payload, reraised_exc, underlying_exc)
770
+
771
+ pyparser._payload_error = 1
772
+ return -1
773
+ else:
774
+ return 0
775
+
776
+
777
+ cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
778
+ cdef HttpParser pyparser = <HttpParser>parser.data
779
+ try:
780
+ pyparser._started = False
781
+ pyparser._on_message_complete()
782
+ except BaseException as exc:
783
+ pyparser._last_error = exc
784
+ return -1
785
+ else:
786
+ return 0
787
+
788
+
789
+ cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
790
+ cdef HttpParser pyparser = <HttpParser>parser.data
791
+ try:
792
+ pyparser._on_chunk_header()
793
+ except BaseException as exc:
794
+ pyparser._last_error = exc
795
+ return -1
796
+ else:
797
+ return 0
798
+
799
+
800
+ cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
801
+ cdef HttpParser pyparser = <HttpParser>parser.data
802
+ try:
803
+ pyparser._on_chunk_complete()
804
+ except BaseException as exc:
805
+ pyparser._last_error = exc
806
+ return -1
807
+ else:
808
+ return 0
809
+
810
+
811
+ cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
812
+ cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
813
+ cdef bytes desc = cparser.llhttp_get_error_reason(parser)
814
+
815
+ err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
816
+
817
+ if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
818
+ cparser.HPE_CB_HEADERS_COMPLETE,
819
+ cparser.HPE_CB_MESSAGE_COMPLETE,
820
+ cparser.HPE_CB_CHUNK_HEADER,
821
+ cparser.HPE_CB_CHUNK_COMPLETE,
822
+ cparser.HPE_INVALID_CONSTANT,
823
+ cparser.HPE_INVALID_HEADER_TOKEN,
824
+ cparser.HPE_INVALID_CONTENT_LENGTH,
825
+ cparser.HPE_INVALID_CHUNK_SIZE,
826
+ cparser.HPE_INVALID_EOF_STATE,
827
+ cparser.HPE_INVALID_TRANSFER_ENCODING}:
828
+ return BadHttpMessage(err_msg)
829
+ elif errno == cparser.HPE_INVALID_METHOD:
830
+ return BadHttpMethod(error=err_msg)
831
+ elif errno in {cparser.HPE_INVALID_STATUS,
832
+ cparser.HPE_INVALID_VERSION}:
833
+ return BadStatusLine(error=err_msg)
834
+ elif errno == cparser.HPE_INVALID_URL:
835
+ return InvalidURLError(err_msg)
836
+
837
+ return BadHttpMessage(err_msg)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_http_writer.pyx ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython.bytes cimport PyBytes_FromStringAndSize
2
+ from cpython.exc cimport PyErr_NoMemory
3
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
4
+ from cpython.object cimport PyObject_Str
5
+ from libc.stdint cimport uint8_t, uint64_t
6
+ from libc.string cimport memcpy
7
+
8
+ from multidict import istr
9
+
10
+ DEF BUF_SIZE = 16 * 1024 # 16KiB
11
+ cdef char BUFFER[BUF_SIZE]
12
+
13
+ cdef object _istr = istr
14
+
15
+
16
+ # ----------------- writer ---------------------------
17
+
18
+ cdef struct Writer:
19
+ char *buf
20
+ Py_ssize_t size
21
+ Py_ssize_t pos
22
+
23
+
24
+ cdef inline void _init_writer(Writer* writer):
25
+ writer.buf = &BUFFER[0]
26
+ writer.size = BUF_SIZE
27
+ writer.pos = 0
28
+
29
+
30
+ cdef inline void _release_writer(Writer* writer):
31
+ if writer.buf != BUFFER:
32
+ PyMem_Free(writer.buf)
33
+
34
+
35
+ cdef inline int _write_byte(Writer* writer, uint8_t ch):
36
+ cdef char * buf
37
+ cdef Py_ssize_t size
38
+
39
+ if writer.pos == writer.size:
40
+ # reallocate
41
+ size = writer.size + BUF_SIZE
42
+ if writer.buf == BUFFER:
43
+ buf = <char*>PyMem_Malloc(size)
44
+ if buf == NULL:
45
+ PyErr_NoMemory()
46
+ return -1
47
+ memcpy(buf, writer.buf, writer.size)
48
+ else:
49
+ buf = <char*>PyMem_Realloc(writer.buf, size)
50
+ if buf == NULL:
51
+ PyErr_NoMemory()
52
+ return -1
53
+ writer.buf = buf
54
+ writer.size = size
55
+ writer.buf[writer.pos] = <char>ch
56
+ writer.pos += 1
57
+ return 0
58
+
59
+
60
+ cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
61
+ cdef uint64_t utf = <uint64_t> symbol
62
+
63
+ if utf < 0x80:
64
+ return _write_byte(writer, <uint8_t>utf)
65
+ elif utf < 0x800:
66
+ if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
67
+ return -1
68
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
69
+ elif 0xD800 <= utf <= 0xDFFF:
70
+ # surogate pair, ignored
71
+ return 0
72
+ elif utf < 0x10000:
73
+ if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
74
+ return -1
75
+ if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
76
+ return -1
77
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
78
+ elif utf > 0x10FFFF:
79
+ # symbol is too large
80
+ return 0
81
+ else:
82
+ if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
83
+ return -1
84
+ if _write_byte(writer,
85
+ <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
86
+ return -1
87
+ if _write_byte(writer,
88
+ <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
89
+ return -1
90
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
91
+
92
+
93
+ cdef inline int _write_str(Writer* writer, str s):
94
+ cdef Py_UCS4 ch
95
+ for ch in s:
96
+ if _write_utf8(writer, ch) < 0:
97
+ return -1
98
+
99
+
100
+ # --------------- _serialize_headers ----------------------
101
+
102
+ cdef str to_str(object s):
103
+ if type(s) is str:
104
+ return <str>s
105
+ elif type(s) is _istr:
106
+ return PyObject_Str(s)
107
+ elif not isinstance(s, str):
108
+ raise TypeError("Cannot serialize non-str key {!r}".format(s))
109
+ else:
110
+ return str(s)
111
+
112
+
113
+
114
+ def _serialize_headers(str status_line, headers):
115
+ cdef Writer writer
116
+ cdef object key
117
+ cdef object val
118
+ cdef bytes ret
119
+ cdef str key_str
120
+ cdef str val_str
121
+
122
+ _init_writer(&writer)
123
+
124
+ try:
125
+ if _write_str(&writer, status_line) < 0:
126
+ raise
127
+ if _write_byte(&writer, b'\r') < 0:
128
+ raise
129
+ if _write_byte(&writer, b'\n') < 0:
130
+ raise
131
+
132
+ for key, val in headers.items():
133
+ key_str = to_str(key)
134
+ val_str = to_str(val)
135
+
136
+ if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str:
137
+ raise ValueError(
138
+ "Newline or carriage return character detected in HTTP status message or "
139
+ "header. This is a potential security issue."
140
+ )
141
+
142
+ if _write_str(&writer, key_str) < 0:
143
+ raise
144
+ if _write_byte(&writer, b':') < 0:
145
+ raise
146
+ if _write_byte(&writer, b' ') < 0:
147
+ raise
148
+ if _write_str(&writer, val_str) < 0:
149
+ raise
150
+ if _write_byte(&writer, b'\r') < 0:
151
+ raise
152
+ if _write_byte(&writer, b'\n') < 0:
153
+ raise
154
+
155
+ if _write_byte(&writer, b'\r') < 0:
156
+ raise
157
+ if _write_byte(&writer, b'\n') < 0:
158
+ raise
159
+
160
+ return PyBytes_FromStringAndSize(writer.buf, writer.pos)
161
+ finally:
162
+ _release_writer(&writer)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ b01999d409b29bd916e067bc963d5f2d9ee63cfc9ae0bccb769910131417bf93 /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/mask.pxd
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 0478ceb55d0ed30ef1a7da742cd003449bc69a07cf9fdb06789bd2b347cbfffe /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/mask.pyx
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ f6b3160a9002d639e0eff82da8b8d196a42ff6aed490e9faded2107eada4f067 /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/reader_c.pxd
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/__pycache__/models.cpython-310.pyc ADDED
Binary file (2.81 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-310.pyc ADDED
Binary file (9.45 kB). View file
 
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/helpers.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Helpers for WebSocket protocol versions 13 and 8."""
2
+
3
+ import functools
4
+ import re
5
+ from struct import Struct
6
+ from typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple
7
+
8
+ from ..helpers import NO_EXTENSIONS
9
+ from .models import WSHandshakeError
10
+
11
+ UNPACK_LEN3 = Struct("!Q").unpack_from
12
+ UNPACK_CLOSE_CODE = Struct("!H").unpack
13
+ PACK_LEN1 = Struct("!BB").pack
14
+ PACK_LEN2 = Struct("!BBH").pack
15
+ PACK_LEN3 = Struct("!BBQ").pack
16
+ PACK_CLOSE_CODE = Struct("!H").pack
17
+ PACK_RANDBITS = Struct("!L").pack
18
+ MSG_SIZE: Final[int] = 2**14
19
+ MASK_LEN: Final[int] = 4
20
+
21
+ WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
22
+
23
+
24
+ # Used by _websocket_mask_python
25
+ @functools.lru_cache
26
+ def _xor_table() -> List[bytes]:
27
+ return [bytes(a ^ b for a in range(256)) for b in range(256)]
28
+
29
+
30
+ def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
31
+ """Websocket masking function.
32
+
33
+ `mask` is a `bytes` object of length 4; `data` is a `bytearray`
34
+ object of any length. The contents of `data` are masked with `mask`,
35
+ as specified in section 5.3 of RFC 6455.
36
+
37
+ Note that this function mutates the `data` argument.
38
+
39
+ This pure-python implementation may be replaced by an optimized
40
+ version when available.
41
+
42
+ """
43
+ assert isinstance(data, bytearray), data
44
+ assert len(mask) == 4, mask
45
+
46
+ if data:
47
+ _XOR_TABLE = _xor_table()
48
+ a, b, c, d = (_XOR_TABLE[n] for n in mask)
49
+ data[::4] = data[::4].translate(a)
50
+ data[1::4] = data[1::4].translate(b)
51
+ data[2::4] = data[2::4].translate(c)
52
+ data[3::4] = data[3::4].translate(d)
53
+
54
+
55
+ if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover
56
+ websocket_mask = _websocket_mask_python
57
+ else:
58
+ try:
59
+ from .mask import _websocket_mask_cython # type: ignore[import-not-found]
60
+
61
+ websocket_mask = _websocket_mask_cython
62
+ except ImportError: # pragma: no cover
63
+ websocket_mask = _websocket_mask_python
64
+
65
+
66
+ _WS_EXT_RE: Final[Pattern[str]] = re.compile(
67
+ r"^(?:;\s*(?:"
68
+ r"(server_no_context_takeover)|"
69
+ r"(client_no_context_takeover)|"
70
+ r"(server_max_window_bits(?:=(\d+))?)|"
71
+ r"(client_max_window_bits(?:=(\d+))?)))*$"
72
+ )
73
+
74
+ _WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
75
+
76
+
77
+ def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
78
+ if not extstr:
79
+ return 0, False
80
+
81
+ compress = 0
82
+ notakeover = False
83
+ for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
84
+ defext = ext.group(1)
85
+ # Return compress = 15 when get `permessage-deflate`
86
+ if not defext:
87
+ compress = 15
88
+ break
89
+ match = _WS_EXT_RE.match(defext)
90
+ if match:
91
+ compress = 15
92
+ if isserver:
93
+ # Server never fail to detect compress handshake.
94
+ # Server does not need to send max wbit to client
95
+ if match.group(4):
96
+ compress = int(match.group(4))
97
+ # Group3 must match if group4 matches
98
+ # Compress wbit 8 does not support in zlib
99
+ # If compress level not support,
100
+ # CONTINUE to next extension
101
+ if compress > 15 or compress < 9:
102
+ compress = 0
103
+ continue
104
+ if match.group(1):
105
+ notakeover = True
106
+ # Ignore regex group 5 & 6 for client_max_window_bits
107
+ break
108
+ else:
109
+ if match.group(6):
110
+ compress = int(match.group(6))
111
+ # Group5 must match if group6 matches
112
+ # Compress wbit 8 does not support in zlib
113
+ # If compress level not support,
114
+ # FAIL the parse progress
115
+ if compress > 15 or compress < 9:
116
+ raise WSHandshakeError("Invalid window size")
117
+ if match.group(2):
118
+ notakeover = True
119
+ # Ignore regex group 5 & 6 for client_max_window_bits
120
+ break
121
+ # Return Fail if client side and not match
122
+ elif not isserver:
123
+ raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
124
+
125
+ return compress, notakeover
126
+
127
+
128
+ def ws_ext_gen(
129
+ compress: int = 15, isserver: bool = False, server_notakeover: bool = False
130
+ ) -> str:
131
+ # client_notakeover=False not used for server
132
+ # compress wbit 8 does not support in zlib
133
+ if compress < 9 or compress > 15:
134
+ raise ValueError(
135
+ "Compress wbits must between 9 and 15, zlib does not support wbits=8"
136
+ )
137
+ enabledext = ["permessage-deflate"]
138
+ if not isserver:
139
+ enabledext.append("client_max_window_bits")
140
+
141
+ if compress < 15:
142
+ enabledext.append("server_max_window_bits=" + str(compress))
143
+ if server_notakeover:
144
+ enabledext.append("server_no_context_takeover")
145
+ # if client_notakeover:
146
+ # enabledext.append('client_no_context_takeover')
147
+ return "; ".join(enabledext)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/mask.pxd ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ """Cython declarations for websocket masking."""
2
+
3
+ cpdef void _websocket_mask_cython(bytes mask, bytearray data)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/mask.pyx ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython cimport PyBytes_AsString
2
+
3
+
4
+ #from cpython cimport PyByteArray_AsString # cython still not exports that
5
+ cdef extern from "Python.h":
6
+ char* PyByteArray_AsString(bytearray ba) except NULL
7
+
8
+ from libc.stdint cimport uint32_t, uint64_t, uintmax_t
9
+
10
+
11
+ cpdef void _websocket_mask_cython(bytes mask, bytearray data):
12
+ """Note, this function mutates its `data` argument
13
+ """
14
+ cdef:
15
+ Py_ssize_t data_len, i
16
+ # bit operations on signed integers are implementation-specific
17
+ unsigned char * in_buf
18
+ const unsigned char * mask_buf
19
+ uint32_t uint32_msk
20
+ uint64_t uint64_msk
21
+
22
+ assert len(mask) == 4
23
+
24
+ data_len = len(data)
25
+ in_buf = <unsigned char*>PyByteArray_AsString(data)
26
+ mask_buf = <const unsigned char*>PyBytes_AsString(mask)
27
+ uint32_msk = (<uint32_t*>mask_buf)[0]
28
+
29
+ # TODO: align in_data ptr to achieve even faster speeds
30
+ # does it need in python ?! malloc() always aligns to sizeof(long) bytes
31
+
32
+ if sizeof(size_t) >= 8:
33
+ uint64_msk = uint32_msk
34
+ uint64_msk = (uint64_msk << 32) | uint32_msk
35
+
36
+ while data_len >= 8:
37
+ (<uint64_t*>in_buf)[0] ^= uint64_msk
38
+ in_buf += 8
39
+ data_len -= 8
40
+
41
+
42
+ while data_len >= 4:
43
+ (<uint32_t*>in_buf)[0] ^= uint32_msk
44
+ in_buf += 4
45
+ data_len -= 4
46
+
47
+ for i in range(0, data_len):
48
+ in_buf[i] ^= mask_buf[i]
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/reader.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Reader for WebSocket protocol versions 13 and 8."""
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ from ..helpers import NO_EXTENSIONS
6
+
7
+ if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover
8
+ from .reader_py import (
9
+ WebSocketDataQueue as WebSocketDataQueuePython,
10
+ WebSocketReader as WebSocketReaderPython,
11
+ )
12
+
13
+ WebSocketReader = WebSocketReaderPython
14
+ WebSocketDataQueue = WebSocketDataQueuePython
15
+ else:
16
+ try:
17
+ from .reader_c import ( # type: ignore[import-not-found]
18
+ WebSocketDataQueue as WebSocketDataQueueCython,
19
+ WebSocketReader as WebSocketReaderCython,
20
+ )
21
+
22
+ WebSocketReader = WebSocketReaderCython
23
+ WebSocketDataQueue = WebSocketDataQueueCython
24
+ except ImportError: # pragma: no cover
25
+ from .reader_py import (
26
+ WebSocketDataQueue as WebSocketDataQueuePython,
27
+ WebSocketReader as WebSocketReaderPython,
28
+ )
29
+
30
+ WebSocketReader = WebSocketReaderPython
31
+ WebSocketDataQueue = WebSocketDataQueuePython
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/reader_c.pxd ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import cython
2
+
3
+ from .mask cimport _websocket_mask_cython as websocket_mask
4
+
5
+
6
+ cdef unsigned int READ_HEADER
7
+ cdef unsigned int READ_PAYLOAD_LENGTH
8
+ cdef unsigned int READ_PAYLOAD_MASK
9
+ cdef unsigned int READ_PAYLOAD
10
+
11
+ cdef unsigned int OP_CODE_CONTINUATION
12
+ cdef unsigned int OP_CODE_TEXT
13
+ cdef unsigned int OP_CODE_BINARY
14
+ cdef unsigned int OP_CODE_CLOSE
15
+ cdef unsigned int OP_CODE_PING
16
+ cdef unsigned int OP_CODE_PONG
17
+
18
+ cdef object UNPACK_LEN3
19
+ cdef object UNPACK_CLOSE_CODE
20
+ cdef object TUPLE_NEW
21
+
22
+ cdef object WSMsgType
23
+ cdef object WSMessage
24
+
25
+ cdef object WS_MSG_TYPE_TEXT
26
+ cdef object WS_MSG_TYPE_BINARY
27
+
28
+ cdef set ALLOWED_CLOSE_CODES
29
+ cdef set MESSAGE_TYPES_WITH_CONTENT
30
+
31
+ cdef tuple EMPTY_FRAME
32
+ cdef tuple EMPTY_FRAME_ERROR
33
+
34
+ cdef class WebSocketDataQueue:
35
+
36
+ cdef unsigned int _size
37
+ cdef public object _protocol
38
+ cdef unsigned int _limit
39
+ cdef object _loop
40
+ cdef bint _eof
41
+ cdef object _waiter
42
+ cdef object _exception
43
+ cdef public object _buffer
44
+ cdef object _get_buffer
45
+ cdef object _put_buffer
46
+
47
+ cdef void _release_waiter(self)
48
+
49
+ cpdef void feed_data(self, object data, unsigned int size)
50
+
51
+ @cython.locals(size="unsigned int")
52
+ cdef _read_from_buffer(self)
53
+
54
+ cdef class WebSocketReader:
55
+
56
+ cdef WebSocketDataQueue queue
57
+ cdef unsigned int _max_msg_size
58
+
59
+ cdef Exception _exc
60
+ cdef bytearray _partial
61
+ cdef unsigned int _state
62
+
63
+ cdef object _opcode
64
+ cdef object _frame_fin
65
+ cdef object _frame_opcode
66
+ cdef object _frame_payload
67
+ cdef unsigned long long _frame_payload_len
68
+
69
+ cdef bytes _tail
70
+ cdef bint _has_mask
71
+ cdef bytes _frame_mask
72
+ cdef unsigned long long _payload_length
73
+ cdef unsigned int _payload_length_flag
74
+ cdef object _compressed
75
+ cdef object _decompressobj
76
+ cdef bint _compress
77
+
78
+ cpdef tuple feed_data(self, object data)
79
+
80
+ @cython.locals(
81
+ is_continuation=bint,
82
+ fin=bint,
83
+ has_partial=bint,
84
+ payload_merged=bytes,
85
+ opcode="unsigned int",
86
+ )
87
+ cpdef void _feed_data(self, bytes data)
88
+
89
+ @cython.locals(
90
+ start_pos="unsigned int",
91
+ buf_len="unsigned int",
92
+ length="unsigned int",
93
+ chunk_size="unsigned int",
94
+ chunk_len="unsigned int",
95
+ buf_length="unsigned int",
96
+ first_byte="unsigned char",
97
+ second_byte="unsigned char",
98
+ end_pos="unsigned int",
99
+ has_mask=bint,
100
+ fin=bint,
101
+ )
102
+ cpdef list parse_frame(self, bytes buf)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/reader_py.py ADDED
@@ -0,0 +1,468 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Reader for WebSocket protocol versions 13 and 8."""
2
+
3
+ import asyncio
4
+ import builtins
5
+ from collections import deque
6
+ from typing import Deque, Final, List, Optional, Set, Tuple, Union
7
+
8
+ from ..base_protocol import BaseProtocol
9
+ from ..compression_utils import ZLibDecompressor
10
+ from ..helpers import _EXC_SENTINEL, set_exception
11
+ from ..streams import EofStream
12
+ from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask
13
+ from .models import (
14
+ WS_DEFLATE_TRAILING,
15
+ WebSocketError,
16
+ WSCloseCode,
17
+ WSMessage,
18
+ WSMsgType,
19
+ )
20
+
21
+ ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
22
+
23
+ # States for the reader, used to parse the WebSocket frame
24
+ # integer values are used so they can be cythonized
25
+ READ_HEADER = 1
26
+ READ_PAYLOAD_LENGTH = 2
27
+ READ_PAYLOAD_MASK = 3
28
+ READ_PAYLOAD = 4
29
+
30
+ WS_MSG_TYPE_BINARY = WSMsgType.BINARY
31
+ WS_MSG_TYPE_TEXT = WSMsgType.TEXT
32
+
33
+ # WSMsgType values unpacked so they can by cythonized to ints
34
+ OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value
35
+ OP_CODE_TEXT = WSMsgType.TEXT.value
36
+ OP_CODE_BINARY = WSMsgType.BINARY.value
37
+ OP_CODE_CLOSE = WSMsgType.CLOSE.value
38
+ OP_CODE_PING = WSMsgType.PING.value
39
+ OP_CODE_PONG = WSMsgType.PONG.value
40
+
41
+ EMPTY_FRAME_ERROR = (True, b"")
42
+ EMPTY_FRAME = (False, b"")
43
+
44
+ TUPLE_NEW = tuple.__new__
45
+
46
+ int_ = int # Prevent Cython from converting to PyInt
47
+
48
+
49
+ class WebSocketDataQueue:
50
+ """WebSocketDataQueue resumes and pauses an underlying stream.
51
+
52
+ It is a destination for WebSocket data.
53
+ """
54
+
55
+ def __init__(
56
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
57
+ ) -> None:
58
+ self._size = 0
59
+ self._protocol = protocol
60
+ self._limit = limit * 2
61
+ self._loop = loop
62
+ self._eof = False
63
+ self._waiter: Optional[asyncio.Future[None]] = None
64
+ self._exception: Union[BaseException, None] = None
65
+ self._buffer: Deque[Tuple[WSMessage, int]] = deque()
66
+ self._get_buffer = self._buffer.popleft
67
+ self._put_buffer = self._buffer.append
68
+
69
+ def is_eof(self) -> bool:
70
+ return self._eof
71
+
72
+ def exception(self) -> Optional[BaseException]:
73
+ return self._exception
74
+
75
+ def set_exception(
76
+ self,
77
+ exc: "BaseException",
78
+ exc_cause: builtins.BaseException = _EXC_SENTINEL,
79
+ ) -> None:
80
+ self._eof = True
81
+ self._exception = exc
82
+ if (waiter := self._waiter) is not None:
83
+ self._waiter = None
84
+ set_exception(waiter, exc, exc_cause)
85
+
86
+ def _release_waiter(self) -> None:
87
+ if (waiter := self._waiter) is None:
88
+ return
89
+ self._waiter = None
90
+ if not waiter.done():
91
+ waiter.set_result(None)
92
+
93
+ def feed_eof(self) -> None:
94
+ self._eof = True
95
+ self._release_waiter()
96
+
97
+ def feed_data(self, data: "WSMessage", size: "int_") -> None:
98
+ self._size += size
99
+ self._put_buffer((data, size))
100
+ self._release_waiter()
101
+ if self._size > self._limit and not self._protocol._reading_paused:
102
+ self._protocol.pause_reading()
103
+
104
+ async def read(self) -> WSMessage:
105
+ if not self._buffer and not self._eof:
106
+ assert not self._waiter
107
+ self._waiter = self._loop.create_future()
108
+ try:
109
+ await self._waiter
110
+ except (asyncio.CancelledError, asyncio.TimeoutError):
111
+ self._waiter = None
112
+ raise
113
+ return self._read_from_buffer()
114
+
115
+ def _read_from_buffer(self) -> WSMessage:
116
+ if self._buffer:
117
+ data, size = self._get_buffer()
118
+ self._size -= size
119
+ if self._size < self._limit and self._protocol._reading_paused:
120
+ self._protocol.resume_reading()
121
+ return data
122
+ if self._exception is not None:
123
+ raise self._exception
124
+ raise EofStream
125
+
126
+
127
class WebSocketReader:
    """Incremental parser for incoming WebSocket frames (RFC 6455).

    Bytes fed through feed_data() are split into frames by a resumable
    state machine (parse_frame), reassembled into messages, optionally
    decompressed (permessage-deflate, RFC 7692), and pushed into the
    supplied queue as WSMessage tuples.
    """

    def __init__(
        self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True
    ) -> None:
        self.queue = queue
        self._max_msg_size = max_msg_size  # 0 disables the size checks below

        self._exc: Optional[Exception] = None
        self._partial = bytearray()  # payload accumulated across non-fin frames
        self._state = READ_HEADER  # current position in the frame state machine

        # Opcode of the fragmented message currently being reassembled.
        self._opcode: Optional[int] = None
        self._frame_fin = False
        self._frame_opcode: Optional[int] = None
        self._frame_payload: Union[bytes, bytearray] = b""
        self._frame_payload_len = 0

        self._tail: bytes = b""  # unconsumed bytes carried over to the next feed
        self._has_mask = False
        self._frame_mask: Optional[bytes] = None
        self._payload_length = 0
        self._payload_length_flag = 0
        self._compressed: Optional[bool] = None
        self._decompressobj: Optional[ZLibDecompressor] = None
        self._compress = compress  # whether permessage-deflate was negotiated

    def feed_eof(self) -> None:
        """Propagate end-of-stream to the message queue."""
        self.queue.feed_eof()

    # data can be bytearray on Windows because proactor event loop uses bytearray
    # and asyncio types this to Union[bytes, bytearray, memoryview] so we need
    # coerce data to bytes if it is not
    def feed_data(
        self, data: Union[bytes, bytearray, memoryview]
    ) -> Tuple[bool, bytes]:
        """Parse *data*; return (error_flag, unconsumed_bytes).

        After the first parse error the reader is poisoned: the error is
        stored, forwarded to the queue, and later feeds return the data
        untouched with the error flag set.
        """
        if type(data) is not bytes:
            data = bytes(data)

        if self._exc is not None:
            return True, data

        try:
            self._feed_data(data)
        except Exception as exc:
            self._exc = exc
            set_exception(self.queue, exc)
            return EMPTY_FRAME_ERROR

        return EMPTY_FRAME

    def _feed_data(self, data: bytes) -> None:
        """Turn parsed frames into messages and feed them to the queue.

        Raises WebSocketError on any protocol violation (bad opcode,
        oversized message, invalid close code, invalid UTF-8).
        """
        msg: WSMessage
        for frame in self.parse_frame(data):
            fin = frame[0]
            opcode = frame[1]
            payload = frame[2]
            compressed = frame[3]

            is_continuation = opcode == OP_CODE_CONTINUATION
            if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if not is_continuation:
                        self._opcode = opcode
                    self._partial += payload
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                    continue

                has_partial = bool(self._partial)
                if is_continuation:
                    if self._opcode is None:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Continuation frame for non started message",
                        )
                    opcode = self._opcode
                    self._opcode = None
                # previous frame was non finished
                # we should get continuation opcode
                elif has_partial:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "The opcode in non-fin frame is expected "
                        "to be zero, got {!r}".format(opcode),
                    )

                assembled_payload: Union[bytes, bytearray]
                if has_partial:
                    assembled_payload = self._partial + payload
                    self._partial.clear()
                else:
                    assembled_payload = payload

                if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
                    raise WebSocketError(
                        WSCloseCode.MESSAGE_TOO_BIG,
                        "Message size {} exceeds limit {}".format(
                            len(assembled_payload), self._max_msg_size
                        ),
                    )

                # Decompress process must to be done after all packets
                # received.
                if compressed:
                    if not self._decompressobj:
                        self._decompressobj = ZLibDecompressor(
                            suppress_deflate_header=True
                        )
                    payload_merged = self._decompressobj.decompress_sync(
                        assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size
                    )
                    if self._decompressobj.unconsumed_tail:
                        left = len(self._decompressobj.unconsumed_tail)
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Decompressed message size {} exceeds limit {}".format(
                                self._max_msg_size + left, self._max_msg_size
                            ),
                        )
                elif type(assembled_payload) is bytes:
                    payload_merged = assembled_payload
                else:
                    payload_merged = bytes(assembled_payload)

                if opcode == OP_CODE_TEXT:
                    try:
                        text = payload_merged.decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc

                    # XXX: The Text and Binary messages here can be a performance
                    # bottleneck, so we use tuple.__new__ to improve performance.
                    # This is not type safe, but many tests should fail in
                    # test_client_ws_functional.py if this is wrong.
                    self.queue.feed_data(
                        TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),
                        len(payload_merged),
                    )
                else:
                    self.queue.feed_data(
                        TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),
                        len(payload_merged),
                    )
            elif opcode == OP_CODE_CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = TUPLE_NEW(
                        WSMessage, (WSMsgType.CLOSE, close_code, close_message)
                    )
                elif payload:
                    # A 1-byte close payload is malformed: the code needs 2 bytes.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))

                self.queue.feed_data(msg, 0)
            elif opcode == OP_CODE_PING:
                msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))
                self.queue.feed_data(msg, len(payload))

            elif opcode == OP_CODE_PONG:
                msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))
                self.queue.feed_data(msg, len(payload))

            else:
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]:
        """Return the next frame from the socket."""
        frames: List[
            Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]
        ] = []
        # Prepend any bytes left over from the previous feed.
        if self._tail:
            buf, self._tail = self._tail + buf, b""

        start_pos: int = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == READ_HEADER:
                if buf_length - start_pos < 2:
                    break
                first_byte = buf[start_pos]
                second_byte = buf[start_pos + 1]
                start_pos += 2

                fin = (first_byte >> 7) & 1
                rsv1 = (first_byte >> 6) & 1
                rsv2 = (first_byte >> 5) & 1
                rsv3 = (first_byte >> 4) & 1
                opcode = first_byte & 0xF

                # frame-fin = %x0 ; more frames of this message follow
                # / %x1 ; final frame of this message
                # frame-rsv1 = %x0 ;
                # 1 bit, MUST be 0 unless negotiated otherwise
                # frame-rsv2 = %x0 ;
                # 1 bit, MUST be 0 unless negotiated otherwise
                # frame-rsv3 = %x0 ;
                # 1 bit, MUST be 0 unless negotiated otherwise
                #
                # Remove rsv1 from this test for deflate development
                if rsv2 or rsv3 or (rsv1 and not self._compress):
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received frame with non-zero reserved bits",
                    )

                # Control frames (opcode > 0x7) must not be fragmented.
                if opcode > 0x7 and fin == 0:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received fragmented control frame",
                    )

                has_mask = (second_byte >> 7) & 1
                length = second_byte & 0x7F

                # Control frames MUST have a payload
                # length of 125 bytes or less
                if opcode > 0x7 and length > 125:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Control frame payload cannot be larger than 125 bytes",
                    )

                # Set compress status if last package is FIN
                # OR set compress status if this is first fragment
                # Raise error if not first fragment with rsv1 = 0x1
                if self._frame_fin or self._compressed is None:
                    self._compressed = True if rsv1 else False
                elif rsv1:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received frame with non-zero reserved bits",
                    )

                self._frame_fin = bool(fin)
                self._frame_opcode = opcode
                self._has_mask = bool(has_mask)
                self._payload_length_flag = length
                self._state = READ_PAYLOAD_LENGTH

            # read payload length
            if self._state == READ_PAYLOAD_LENGTH:
                length_flag = self._payload_length_flag
                if length_flag == 126:
                    # 16-bit extended payload length
                    if buf_length - start_pos < 2:
                        break
                    first_byte = buf[start_pos]
                    second_byte = buf[start_pos + 1]
                    start_pos += 2
                    self._payload_length = first_byte << 8 | second_byte
                elif length_flag > 126:
                    # 64-bit extended payload length
                    if buf_length - start_pos < 8:
                        break
                    data = buf[start_pos : start_pos + 8]
                    start_pos += 8
                    self._payload_length = UNPACK_LEN3(data)[0]
                else:
                    self._payload_length = length_flag

                self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD

            # read payload mask
            if self._state == READ_PAYLOAD_MASK:
                if buf_length - start_pos < 4:
                    break
                self._frame_mask = buf[start_pos : start_pos + 4]
                start_pos += 4
                self._state = READ_PAYLOAD

            if self._state == READ_PAYLOAD:
                chunk_len = buf_length - start_pos
                if self._payload_length >= chunk_len:
                    # Frame continues past this feed; take everything available.
                    end_pos = buf_length
                    self._payload_length -= chunk_len
                else:
                    end_pos = start_pos + self._payload_length
                    self._payload_length = 0

                if self._frame_payload_len:
                    if type(self._frame_payload) is not bytearray:
                        self._frame_payload = bytearray(self._frame_payload)
                    self._frame_payload += buf[start_pos:end_pos]
                else:
                    # Fast path for the first frame
                    self._frame_payload = buf[start_pos:end_pos]

                self._frame_payload_len += end_pos - start_pos
                start_pos = end_pos

                if self._payload_length != 0:
                    break

                if self._has_mask:
                    assert self._frame_mask is not None
                    if type(self._frame_payload) is not bytearray:
                        self._frame_payload = bytearray(self._frame_payload)
                    websocket_mask(self._frame_mask, self._frame_payload)

                frames.append(
                    (
                        self._frame_fin,
                        self._frame_opcode,
                        self._frame_payload,
                        self._compressed,
                    )
                )
                self._frame_payload = b""
                self._frame_payload_len = 0
                self._state = READ_HEADER

        # Remember any partial frame bytes for the next feed_data() call.
        self._tail = buf[start_pos:] if start_pos < buf_length else b""

        return frames
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/_websocket/writer.py ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket protocol versions 13 and 8."""
2
+
3
+ import asyncio
4
+ import random
5
+ import zlib
6
+ from functools import partial
7
+ from typing import Any, Final, Optional, Union
8
+
9
+ from ..base_protocol import BaseProtocol
10
+ from ..client_exceptions import ClientConnectionResetError
11
+ from ..compression_utils import ZLibCompressor
12
+ from .helpers import (
13
+ MASK_LEN,
14
+ MSG_SIZE,
15
+ PACK_CLOSE_CODE,
16
+ PACK_LEN1,
17
+ PACK_LEN2,
18
+ PACK_LEN3,
19
+ PACK_RANDBITS,
20
+ websocket_mask,
21
+ )
22
+ from .models import WS_DEFLATE_TRAILING, WSMsgType
23
+
24
+ DEFAULT_LIMIT: Final[int] = 2**16
25
+
26
+ # For websockets, keeping latency low is extremely important as implementations
27
+ # generally expect to be able to send and receive messages quickly. We use a
28
+ # larger chunk size than the default to reduce the number of executor calls
29
+ # since the executor is a significant source of latency and overhead when
30
+ # the chunks are small. A size of 5KiB was chosen because it is also the
31
+ # same value python-zlib-ng choose to use as the threshold to release the GIL.
32
+
33
+ WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
34
+
35
+
36
class WebSocketWriter:
    """WebSocket writer.

    The writer is responsible for sending messages to the client. It is
    created by the protocol when a connection is established. The writer
    should avoid implementing any application logic and should only be
    concerned with the low-level details of the WebSocket protocol.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: random.Random = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        """Initialize a WebSocket writer.

        NOTE(review): the default ``random.Random()`` instance is created
        once at import time and shared by every writer that does not pass
        its own — confirm this sharing is intended.
        """
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask  # clients mask frames; servers do not
        self.get_random_bits = partial(random.getrandbits, 32)
        self.compress = compress  # deflate window bits; 0 disables compression
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit  # output bytes before a drain is attempted
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        # NOTE(review): this is a bitwise AND with CLOSE (0x8), so PING (0x9)
        # and PONG (0xA) also pass through while closing — confirm intended.
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ClientConnectionResetError("Cannot write to closing transport")

        # RSV are the reserved bits in the frame header. They are used to
        # indicate that the frame is using an extension.
        # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
        rsv = 0
        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            # RSV1 (rsv = 0x40) is set for compressed frames
            # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1
            rsv = 0x40

            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = self._make_compress_obj(compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = self._make_compress_obj(self.compress)
                compressobj = self._compressobj

            message = (
                await compressobj.compress(message)
                + compressobj.flush(
                    zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
                )
            ).removesuffix(WS_DEFLATE_TRAILING)
            # Its critical that we do not return control to the event
            # loop until we have finished sending all the compressed
            # data. Otherwise we could end up mixing compressed frames
            # if there are multiple coroutines compressing data.

        msg_length = len(message)

        use_mask = self.use_mask
        mask_bit = 0x80 if use_mask else 0

        # Depending on the message length, the header is assembled differently.
        # The first byte is reserved for the opcode and the RSV bits.
        first_byte = 0x80 | rsv | opcode
        if msg_length < 126:
            header = PACK_LEN1(first_byte, msg_length | mask_bit)
            header_len = 2
        elif msg_length < 65536:
            header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length)
            header_len = 4
        else:
            header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length)
            header_len = 10

        if self.transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")

        # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3
        # If we are using a mask, we need to generate it randomly
        # and apply it to the message before sending it. A mask is
        # a 32-bit value that is applied to the message using a
        # bitwise XOR operation. It is used to prevent certain types
        # of attacks on the websocket protocol. The mask is only used
        # when aiohttp is acting as a client. Servers do not use a mask.
        if use_mask:
            mask = PACK_RANDBITS(self.get_random_bits())
            message = bytearray(message)
            websocket_mask(mask, message)
            self.transport.write(header + mask + message)
            self._output_size += MASK_LEN
        elif msg_length > MSG_SIZE:
            # Avoid copying a large payload just to prepend the small header.
            self.transport.write(header)
            self.transport.write(message)
        else:
            self.transport.write(header + message)

        self._output_size += header_len + msg_length

        # It is safe to return control to the event loop when using compression
        # after this point as we have already sent or buffered all the data.

        # Once we have written output_size up to the limit, we call the
        # drain helper which waits for the transport to be ready to accept
        # more data. This is a flow control mechanism to prevent the buffer
        # from growing too large. The drain helper will return right away
        # if the writer is not paused.
        if self._output_size > self._limit:
            self._output_size = 0
            if self.protocol._paused:
                await self.protocol._drain_helper()

    def _make_compress_obj(self, compress: int) -> ZLibCompressor:
        """Build a raw-deflate compressor with *compress* window bits."""
        return ZLibCompressor(
            level=zlib.Z_BEST_SPEED,
            wbits=-compress,
            max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
        )

    async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self.send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if the close frame failed to send.
            self._closing = True
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client_exceptions.py ADDED
@@ -0,0 +1,421 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP related errors."""
2
+
3
+ import asyncio
4
+ import warnings
5
+ from typing import TYPE_CHECKING, Optional, Tuple, Union
6
+
7
+ from multidict import MultiMapping
8
+
9
+ from .typedefs import StrOrURL
10
+
11
+ if TYPE_CHECKING:
12
+ import ssl
13
+
14
+ SSLContext = ssl.SSLContext
15
+ else:
16
+ try:
17
+ import ssl
18
+
19
+ SSLContext = ssl.SSLContext
20
+ except ImportError: # pragma: no cover
21
+ ssl = SSLContext = None # type: ignore[assignment]
22
+
23
+ if TYPE_CHECKING:
24
+ from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
25
+ from .http_parser import RawResponseMessage
26
+ else:
27
+ RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None
28
+
29
+ __all__ = (
30
+ "ClientError",
31
+ "ClientConnectionError",
32
+ "ClientConnectionResetError",
33
+ "ClientOSError",
34
+ "ClientConnectorError",
35
+ "ClientProxyConnectionError",
36
+ "ClientSSLError",
37
+ "ClientConnectorDNSError",
38
+ "ClientConnectorSSLError",
39
+ "ClientConnectorCertificateError",
40
+ "ConnectionTimeoutError",
41
+ "SocketTimeoutError",
42
+ "ServerConnectionError",
43
+ "ServerTimeoutError",
44
+ "ServerDisconnectedError",
45
+ "ServerFingerprintMismatch",
46
+ "ClientResponseError",
47
+ "ClientHttpProxyError",
48
+ "WSServerHandshakeError",
49
+ "ContentTypeError",
50
+ "ClientPayloadError",
51
+ "InvalidURL",
52
+ "InvalidUrlClientError",
53
+ "RedirectClientError",
54
+ "NonHttpUrlClientError",
55
+ "InvalidUrlRedirectClientError",
56
+ "NonHttpUrlRedirectClientError",
57
+ "WSMessageTypeError",
58
+ )
59
+
60
+
61
class ClientError(Exception):
    """Root of the aiohttp client exception hierarchy."""
63
+
64
+
65
class ClientResponseError(ClientError):
    """Error raised after a response has been received.

    Attributes:
        request_info: The RequestInfo of the failed request.
        history: Responses seen while following redirects.
        status: HTTP status code (0 when unknown).
        message: Error message.
        headers: Response headers, if any.
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[MultiMapping[str]] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        # Prefer the modern ``status`` keyword, then the deprecated
        # ``code``, and finally 0 when neither was supplied.
        if status is not None:
            self.status = status
        elif code is not None:
            self.status = code
        else:
            self.status = 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return (
            f"{self.status}, message={self.message!r}, "
            f"url={str(self.request_info.real_url)!r}"
        )

    def __repr__(self) -> str:
        parts = [f"{self.request_info!r}, {self.history!r}"]
        if self.status != 0:
            parts.append(f"status={self.status!r}")
        if self.message != "":
            parts.append(f"message={self.message!r}")
        if self.headers is not None:
            parts.append(f"headers={self.headers!r}")
        return f"{type(self).__name__}({', '.join(parts)})"

    @property
    def code(self) -> int:
        """Deprecated alias for :attr:`status`."""
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
142
+
143
+
144
class ContentTypeError(ClientResponseError):
    """Raised when the response Content-Type is not the expected one."""
146
+
147
+
148
class WSServerHandshakeError(ClientResponseError):
    """Raised when the WebSocket server handshake fails."""
150
+
151
+
152
class ClientHttpProxyError(ClientResponseError):
    """Proxy replied to the ``CONNECT`` request with a status other than ``200 OK``.

    Raised in :class:`aiohttp.connector.TCPConnector`.
    """
159
+
160
+
161
class TooManyRedirects(ClientResponseError):
    """Raised when the redirect limit has been exceeded."""
163
+
164
+
165
+ class ClientConnectionError(ClientError):
166
+ """Base class for client socket errors."""
167
+
168
+
169
class ClientConnectionResetError(ClientConnectionError, ConnectionResetError):
    """Raised when the transport is reset while writing."""
171
+
172
+
173
class ClientOSError(ClientConnectionError, OSError):
    """Client connection error mirroring an underlying OSError."""
175
+
176
+
177
class ClientConnectorError(ClientOSError):
    """Raised when a connection to the host cannot be established.

    Raised in :class:`aiohttp.connector.TCPConnector`.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        """The original OSError that caused this failure."""
        return self._os_error

    @property
    def host(self) -> str:
        """Host part of the failed connection key."""
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        """Port part of the failed connection key."""
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
        """SSL configuration of the failed connection key."""
        return self._conn_key.ssl

    def __str__(self) -> str:
        ssl_info = "default" if self.ssl is True else self.ssl
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, ssl_info, self.strerror
        )

    # OSError.__reduce__ does too much black magick
    __reduce__ = BaseException.__reduce__
213
+
214
+
215
class ClientConnectorDNSError(ClientConnectorError):
    """DNS resolution failed while connecting.

    Raised in :class:`aiohttp.connector.TCPConnector`.
    """
222
+
223
class ClientProxyConnectionError(ClientConnectorError):
    """Connection to the proxy could not be established.

    Raised in :class:`aiohttp.connector.TCPConnector`.
    """
230
+
231
class UnixClientConnectorError(ClientConnectorError):
    """Connection to a unix socket could not be established.

    Raised in :py:class:`aiohttp.connector.UnixConnector`.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        """Filesystem path of the unix socket."""
        return self._path

    def __str__(self) -> str:
        ssl_info = "default" if self.ssl is True else self.ssl
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, ssl_info, self.strerror
        )
252
+
253
+
254
class ServerConnectionError(ClientConnectionError):
    """Base class for server-side connection problems."""
256
+
257
+
258
class ServerDisconnectedError(ServerConnectionError):
    """Raised when the server closes the connection unexpectedly."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        message = "Server disconnected" if message is None else message
        self.args = (message,)
        self.message = message
267
+
268
+
269
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Raised when a server operation times out."""
271
+
272
+
273
class ConnectionTimeoutError(ServerTimeoutError):
    """Raised when establishing a connection times out."""
275
+
276
+
277
class SocketTimeoutError(ServerTimeoutError):
    """Raised when a socket read or write times out."""
279
+
280
+
281
class ServerFingerprintMismatch(ServerConnectionError):
    """Raised when the peer certificate digest differs from the pinned one."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        cls_name = self.__class__.__name__
        return (
            f"<{cls_name} expected={self.expected!r} got={self.got!r} "
            f"host={self.host!r} port={self.port!r}>"
        )
295
+
296
+
297
+ class ClientPayloadError(ClientError):
298
+ """Response payload error."""
299
+
300
+
301
class InvalidURL(ClientError, ValueError):
    """Raised when the URL used for fetching is malformed.

    e.g. it doesn't contains host part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        self._url = url
        self._description = description
        if description:
            super().__init__(url, description)
        else:
            super().__init__(url)

    @property
    def url(self) -> StrOrURL:
        """The offending URL as passed by the caller."""
        return self._url

    @property
    def description(self) -> "str | None":
        """Optional human-readable reason, or None."""
        return self._description

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self}>"

    def __str__(self) -> str:
        if not self._description:
            return str(self._url)
        return f"{self._url} - {self._description}"
336
+
337
+
338
class InvalidUrlClientError(InvalidURL):
    """Invalid URL supplied by the caller."""
340
+
341
+
342
+ class RedirectClientError(ClientError):
343
+ """Client redirect error."""
344
+
345
+
346
+ class NonHttpUrlClientError(ClientError):
347
+ """Non http URL client error."""
348
+
349
+
350
class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError):
    """Raised when a redirect target URL is invalid."""
352
+
353
+
354
class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError):
    """Raised when a redirect target is not an http(s) URL."""
356
+
357
+
358
class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors raised while connecting."""
360
+
361
+
362
# Choose base classes for the concrete SSL exception types below depending on
# whether the ssl module could be imported (see the guarded import above).
# Without ssl, ValueError substitutes for ssl.CertificateError so the classes
# remain defined and catchable.
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
380
+
381
+
382
class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Raised on SSL errors while establishing a connection."""
384
+
385
+
386
class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Raised when the server certificate fails validation."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        """The underlying certificate validation error."""
        return self._certificate_error

    @property
    def host(self) -> str:
        """Host part of the failed connection key."""
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        """Port part of the failed connection key."""
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        """Whether the failed connection used SSL."""
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
418
+
419
+
420
class WSMessageTypeError(TypeError):
    """Raised when a WebSocket message has an unexpected type."""
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client_reqrep.py ADDED
@@ -0,0 +1,1315 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import codecs
3
+ import contextlib
4
+ import functools
5
+ import io
6
+ import re
7
+ import sys
8
+ import traceback
9
+ import warnings
10
+ from hashlib import md5, sha1, sha256
11
+ from http.cookies import CookieError, Morsel, SimpleCookie
12
+ from types import MappingProxyType, TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Callable,
17
+ Dict,
18
+ Iterable,
19
+ List,
20
+ Mapping,
21
+ NamedTuple,
22
+ Optional,
23
+ Tuple,
24
+ Type,
25
+ Union,
26
+ )
27
+
28
+ import attr
29
+ from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
30
+ from yarl import URL
31
+
32
+ from . import hdrs, helpers, http, multipart, payload
33
+ from .abc import AbstractStreamWriter
34
+ from .client_exceptions import (
35
+ ClientConnectionError,
36
+ ClientOSError,
37
+ ClientResponseError,
38
+ ContentTypeError,
39
+ InvalidURL,
40
+ ServerFingerprintMismatch,
41
+ )
42
+ from .compression_utils import HAS_BROTLI
43
+ from .formdata import FormData
44
+ from .helpers import (
45
+ _SENTINEL,
46
+ BaseTimerContext,
47
+ BasicAuth,
48
+ HeadersMixin,
49
+ TimerNoop,
50
+ basicauth_from_netrc,
51
+ netrc_from_env,
52
+ noop,
53
+ reify,
54
+ set_exception,
55
+ set_result,
56
+ )
57
+ from .http import (
58
+ SERVER_SOFTWARE,
59
+ HttpVersion,
60
+ HttpVersion10,
61
+ HttpVersion11,
62
+ StreamWriter,
63
+ )
64
+ from .log import client_logger
65
+ from .streams import StreamReader
66
+ from .typedefs import (
67
+ DEFAULT_JSON_DECODER,
68
+ JSONDecoder,
69
+ LooseCookies,
70
+ LooseHeaders,
71
+ Query,
72
+ RawHeaders,
73
+ )
74
+
75
+ if TYPE_CHECKING:
76
+ import ssl
77
+ from ssl import SSLContext
78
+ else:
79
+ try:
80
+ import ssl
81
+ from ssl import SSLContext
82
+ except ImportError: # pragma: no cover
83
+ ssl = None # type: ignore[assignment]
84
+ SSLContext = object # type: ignore[misc,assignment]
85
+
86
+
87
+ __all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
88
+
89
+
90
+ if TYPE_CHECKING:
91
+ from .client import ClientSession
92
+ from .connector import Connection
93
+ from .tracing import Trace
94
+
95
+
96
# Matches any character that is NOT a valid HTTP method token character
# (RFC 9110 "tchar"); used to reject malformed method strings.
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
# Matches "application/json" and structured-suffix variants such as
# "application/hal+json".
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
98
+
99
+
100
def _gen_default_accept_encoding() -> str:
    """Return the default ``Accept-Encoding`` value for outgoing requests.

    Advertises brotli only when the optional brotli decoder is installed.
    """
    if HAS_BROTLI:
        return "gzip, deflate, br"
    return "gzip, deflate"
102
+
103
+
104
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    # Parsed Content-Disposition header of a response:
    # the disposition type (e.g. "attachment"), its parameters, and the
    # filename extracted from those parameters (if any).
    type: Optional[str]
    parameters: "MappingProxyType[str, str]"
    filename: Optional[str]
109
+
110
+
111
class _RequestInfo(NamedTuple):
    # Plain NamedTuple backing RequestInfo. One instance is created per
    # request, so the cheap tuple machinery matters for performance.
    url: URL
    method: str
    headers: "CIMultiDictProxy[str]"
    real_url: URL
116
+
117
+
118
class RequestInfo(_RequestInfo):

    def __new__(
        cls,
        url: URL,
        method: str,
        headers: "CIMultiDictProxy[str]",
        real_url: URL = _SENTINEL,  # type: ignore[assignment]
    ) -> "RequestInfo":
        """Create a new RequestInfo instance.

        For backwards compatibility, the real_url parameter is optional.
        """
        resolved = url if real_url is _SENTINEL else real_url
        return tuple.__new__(cls, (url, method, headers, resolved))
134
+
135
+
136
class Fingerprint:
    """Pin a server certificate to a known hash digest.

    Only SHA-256 digests are accepted; MD5 and SHA-1 digests are rejected
    as cryptographically insecure.
    """

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if hashfunc is None:
            raise ValueError("fingerprint has invalid length")
        if hashfunc is md5 or hashfunc is sha1:
            raise ValueError("md5 and sha1 are insecure and not supported. Use sha256.")
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        """The pinned digest bytes."""
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Verify the transport's peer certificate against the pinned digest.

        No-op for non-SSL transports; raises ServerFingerprintMismatch on
        a digest mismatch.
        """
        if not transport.get_extra_info("sslcontext"):
            return
        sslobj = transport.get_extra_info("ssl_object")
        peer_cert = sslobj.getpeercert(binary_form=True)
        digest = self._hashfunc(peer_cert).digest()
        if digest == self._fingerprint:
            return
        host, port, *_ = transport.get_extra_info("peername")
        raise ServerFingerprintMismatch(self._fingerprint, digest, host, port)
166
+
167
+
168
if ssl is not None:
    # SSLContext may only be supplied when the ssl module is importable.
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = (bool, type(None))
172
+
173
+
174
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint]:
    """Fold the deprecated verify_ssl/ssl_context/fingerprint parameters
    into the single ``ssl`` parameter, emitting DeprecationWarnings.

    Raises ValueError if more than one of the mutually exclusive parameters
    was supplied, and TypeError if the merged value has a disallowed type.
    """
    if ssl is None:
        ssl = True  # Double check for backwards compatibility
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        # ssl still being True means no other ssl-ish parameter was set yet.
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl
227
+
228
+
229
# URL schemes that require a TLS connection.
_SSL_SCHEMES = frozenset(("https", "wss"))


# ConnectionKey is a NamedTuple because it is used as a key in a dict
# and a set in the connector. Since a NamedTuple is a tuple it uses
# the fast native tuple __hash__ and __eq__ implementation in CPython.
class ConnectionKey(NamedTuple):
    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
245
+
246
+
247
def _is_expected_content_type(
    response_content_type: str, expected_content_type: str
) -> bool:
    """Return True if the response content type satisfies the expected one.

    ``application/json`` also accepts structured-suffix variants such as
    ``application/hal+json``; other expectations use substring matching.
    """
    if expected_content_type != "application/json":
        return expected_content_type in response_content_type
    return json_re.match(response_content_type) is not None
253
+
254
+
255
class ClientRequest:
    """A single outgoing HTTP request.

    Builds the request target, headers, cookies, auth, proxy settings and
    body from the constructor arguments, and serializes itself onto a
    connection via :meth:`send`, returning the :class:`ClientResponse`.
    """

    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
    }

    # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
    body: Any = b""
    auth = None
    response = None

    __writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    _skip_auto_headers: Optional["CIMultiDict[None]"] = None

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        params: Query = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        data: Any = None,
        cookies: Optional[LooseCookies] = None,
        auth: Optional[BasicAuth] = None,
        version: http.HttpVersion = http.HttpVersion11,
        compress: Union[str, bool, None] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        response_class: Optional[Type["ClientResponse"]] = None,
        proxy: Optional[URL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timer: Optional[BaseTimerContext] = None,
        session: Optional["ClientSession"] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        proxy_headers: Optional[LooseHeaders] = None,
        traces: Optional[List["Trace"]] = None,
        trust_env: bool = False,
        server_hostname: Optional[str] = None,
    ):
        if loop is None:
            loop = asyncio.get_event_loop()
        # Reject methods containing non-token characters early.
        if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
            raise ValueError(
                f"Method cannot contain non-token characters {method!r} "
                f"(found at least {match.group()!r})"
            )
        # URL forbids subclasses, so a simple type check is enough.
        assert type(url) is URL, url
        if proxy is not None:
            assert type(proxy) is URL, proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        if TYPE_CHECKING:
            assert session is not None
        self._session = session
        if params:
            url = url.extend_query(params)
        self.original_url = url
        self.url = url.with_fragment(None) if url.raw_fragment else url
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class: Type[ClientResponse] = real_response_class
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl if ssl is not None else True
        self.server_hostname = server_hostname

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Order matters: headers must exist before cookies/auth/body updates.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth, trust_env)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data is not None or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        self._traces = [] if traces is None else traces

    def __reset_writer(self, _: object = None) -> None:
        # Done-callback: drop the task reference once the writer finishes.
        self.__writer = None

    @property
    def skip_auto_headers(self) -> CIMultiDict[None]:
        # Headers the caller asked us not to auto-generate.
        return self._skip_auto_headers or CIMultiDict()

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        return self.__writer

    @_writer.setter
    def _writer(self, writer: "asyncio.Task[None]") -> None:
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        writer.add_done_callback(self.__reset_writer)

    def is_ssl(self) -> bool:
        return self.url.scheme in _SSL_SCHEMES

    @property
    def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
        return self._ssl

    @property
    def connection_key(self) -> ConnectionKey:
        """Key used by the connector to pool/reuse connections safely."""
        if proxy_headers := self.proxy_headers:
            h: Optional[int] = hash(tuple(proxy_headers.items()))
        else:
            h = None
        url = self.url
        # tuple.__new__ bypasses NamedTuple.__new__ for speed.
        return tuple.__new__(
            ConnectionKey,
            (
                url.raw_host or "",
                url.port,
                url.scheme in _SSL_SCHEMES,
                self._ssl,
                self.proxy,
                self.proxy_auth,
                h,
            ),
        )

    @property
    def host(self) -> str:
        ret = self.url.raw_host
        assert ret is not None
        return ret

    @property
    def port(self) -> Optional[int]:
        return self.url.port

    @property
    def request_info(self) -> RequestInfo:
        headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
        # These are created on every request, so we use a NamedTuple
        # for performance reasons. We don't use the RequestInfo.__new__
        # method because it has a different signature which is provided
        # for backwards compatibility only.
        return tuple.__new__(
            RequestInfo, (self.url, self.method, headers, self.original_url)
        )

    def update_host(self, url: URL) -> None:
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info
        if url.raw_user or url.raw_password:
            self.auth = helpers.BasicAuth(url.user or "", url.password or "")

    def update_version(self, version: Union[http.HttpVersion, str]) -> None:
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [part.strip() for part in version.split(".", 1)]
            try:
                version = http.HttpVersion(int(v[0]), int(v[1]))
            except ValueError:
                raise ValueError(
                    f"Can not parse http version number: {version}"
                ) from None
        self.version = version

    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers."""
        self.headers: CIMultiDict[str] = CIMultiDict()

        # Build the host header
        host = self.url.host_port_subcomponent

        # host_port_subcomponent is None when the URL is a relative URL.
        # but we know we do not have a relative URL here.
        assert host is not None
        self.headers[hdrs.HOST] = host

        if not headers:
            return

        if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
            headers = headers.items()

        for key, value in headers:  # type: ignore[misc]
            # A special case for Host header
            if key in hdrs.HOST_ALL:
                self.headers[key] = value
            else:
                self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
        """Add default headers (Accept, User-Agent, ...) unless skipped."""
        if skip_auto_headers is not None:
            self._skip_auto_headers = CIMultiDict(
                (hdr, None) for hdr in sorted(skip_auto_headers)
            )
            used_headers = self.headers.copy()
            used_headers.extend(self._skip_auto_headers)  # type: ignore[arg-type]
        else:
            # Fast path when there are no headers to skip
            # which is the most common case.
            used_headers = self.headers

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers[hdr] = val

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE

    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        if hdrs.COOKIE in self.headers:
            # Merge with cookies already present in the headers.
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if not data:
            # Don't compress an empty body.
            self.compress = None
            return

        if self.headers.get(hdrs.CONTENT_ENCODING):
            if self.compress:
                raise ValueError(
                    "compress can not be set if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None and trust_env and self.url.host is not None:
            # Fall back to ~/.netrc credentials when allowed.
            netrc_obj = netrc_from_env()
            with contextlib.suppress(LookupError):
                auth = basicauth_from_netrc(netrc_obj, self.url.host)
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, body: Any) -> None:
        """Wrap *body* in a Payload and derive length/chunking headers."""
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed
        if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
            if (size := body.size) is not None:
                self.headers[hdrs.CONTENT_LENGTH] = str(size)
            else:
                self.chunked = True

        # copy payload headers
        assert body.headers
        headers = self.headers
        skip_headers = self._skip_auto_headers
        for key, value in body.headers.items():
            if key in headers or (skip_headers is not None and key in skip_headers):
                continue
            headers[key] = value

    def update_expect_continue(self, expect: bool = False) -> None:
        """Set up the 100-continue waiter future when requested/implied."""
        if expect:
            self.headers[hdrs.EXPECT] = "100-continue"
        elif (
            hdrs.EXPECT in self.headers
            and self.headers[hdrs.EXPECT].lower() == "100-continue"
        ):
            expect = True

        if expect:
            self._continue = self.loop.create_future()

    def update_proxy(
        self,
        proxy: Optional[URL],
        proxy_auth: Optional[BasicAuth],
        proxy_headers: Optional[LooseHeaders],
    ) -> None:
        """Store proxy settings, validating auth and normalizing headers."""
        self.proxy = proxy
        if proxy is None:
            self.proxy_auth = None
            self.proxy_headers = None
            return

        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy_auth = proxy_auth

        if proxy_headers is not None and not isinstance(
            proxy_headers, (MultiDict, MultiDictProxy)
        ):
            proxy_headers = CIMultiDict(proxy_headers)
        self.proxy_headers = proxy_headers

    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            await writer.drain()
            await self._continue

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)

                for chunk in self.body:
                    await writer.write(chunk)
        except OSError as underlying_exc:
            reraised_exc = underlying_exc

            exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
                underlying_exc, asyncio.TimeoutError
            )
            if exc_is_not_timeout:
                reraised_exc = ClientOSError(
                    underlying_exc.errno,
                    f"Can not write request body for {self.url !s}",
                )

            set_exception(protocol, reraised_exc, underlying_exc)
        except asyncio.CancelledError:
            # Body hasn't been fully sent, so connection can't be reused.
            conn.close()
            raise
        except Exception as underlying_exc:
            set_exception(
                protocol,
                ClientConnectionError(
                    f"Failed to send bytes into the underlying connection {conn !s}",
                ),
                underlying_exc,
            )
        else:
            await writer.write_eof()
            protocol.start_timeout()

    async def send(self, conn: "Connection") -> "ClientResponse":
        """Serialize the request onto *conn* and return the response object."""
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.host_subcomponent
            assert connect_host is not None
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path_qs

        protocol = conn.protocol
        assert protocol is not None
        writer = StreamWriter(
            protocol,
            self.loop,
            on_chunk_sent=(
                functools.partial(self._on_chunk_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
            on_headers_sent=(
                functools.partial(self._on_headers_request_sent, self.method, self.url)
                if self._traces
                else None
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)  # type: ignore[arg-type]

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and (
                self._skip_auto_headers is None
                or hdrs.CONTENT_TYPE not in self._skip_auto_headers
            )
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        v = self.version
        if hdrs.CONNECTION not in self.headers:
            if conn._connector.force_close:
                if v == HttpVersion11:
                    self.headers[hdrs.CONNECTION] = "close"
            elif v == HttpVersion10:
                self.headers[hdrs.CONNECTION] = "keep-alive"

        # status + headers
        status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
        await writer.write_headers(status_line, self.headers)
        task: Optional["asyncio.Task[None]"]
        if self.body or self._continue is not None or protocol.writing_paused:
            coro = self.write_bytes(writer, conn)
            if sys.version_info >= (3, 12):
                # Optimization for Python 3.12, try to write
                # bytes immediately to avoid having to schedule
                # the task on the event loop.
                task = asyncio.Task(coro, loop=self.loop, eager_start=True)
            else:
                task = self.loop.create_task(coro)
            if task.done():
                task = None
            else:
                self._writer = task
        else:
            # We have nothing to write because
            # - there is no body
            # - the protocol does not have writing paused
            # - we are not waiting for a 100-continue response
            protocol.start_timeout()
            writer.set_eof()
            task = None
        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=task,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response

    async def close(self) -> None:
        """Wait for the body writer task to finish, if one is running."""
        if self.__writer is not None:
            try:
                await self.__writer
            except asyncio.CancelledError:
                # Re-raise only when our own task is being cancelled.
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise

    def terminate(self) -> None:
        """Cancel the body writer task immediately."""
        if self.__writer is not None:
            if not self.loop.is_closed():
                self.__writer.cancel()
            self.__writer.remove_done_callback(self.__reset_writer)
            self.__writer = None

    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        # Tracing hook: notify registered traces about each sent body chunk.
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)

    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        # Tracing hook: notify registered traces once headers are sent.
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)
818
+
819
+
820
# Shared singleton raised/propagated when the connection drops; avoids
# allocating a new exception object on every disconnect.
_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
821
+
822
+
823
+ class ClientResponse(HeadersMixin):
824
+
825
+ # Some of these attributes are None when created,
826
+ # but will be set by the start() method.
827
+ # As the end user will likely never see the None values, we cheat the types below.
828
+ # from the Status-Line of the response
829
+ version: Optional[HttpVersion] = None # HTTP-Version
830
+ status: int = None # type: ignore[assignment] # Status-Code
831
+ reason: Optional[str] = None # Reason-Phrase
832
+
833
+ content: StreamReader = None # type: ignore[assignment] # Payload stream
834
+ _body: Optional[bytes] = None
835
+ _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
836
+ _history: Tuple["ClientResponse", ...] = ()
837
+ _raw_headers: RawHeaders = None # type: ignore[assignment]
838
+
839
+ _connection: Optional["Connection"] = None # current connection
840
+ _cookies: Optional[SimpleCookie] = None
841
+ _continue: Optional["asyncio.Future[bool]"] = None
842
+ _source_traceback: Optional[traceback.StackSummary] = None
843
+ _session: Optional["ClientSession"] = None
844
+ # set up by ClientRequest after ClientResponse object creation
845
+ # post-init stage allows to not change ctor signature
846
+ _closed = True # to allow __del__ for non-initialized properly response
847
+ _released = False
848
+ _in_context = False
849
+
850
+ _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"
851
+
852
+ __writer: Optional["asyncio.Task[None]"] = None
853
+
854
+ def __init__(
855
+ self,
856
+ method: str,
857
+ url: URL,
858
+ *,
859
+ writer: "Optional[asyncio.Task[None]]",
860
+ continue100: Optional["asyncio.Future[bool]"],
861
+ timer: BaseTimerContext,
862
+ request_info: RequestInfo,
863
+ traces: List["Trace"],
864
+ loop: asyncio.AbstractEventLoop,
865
+ session: "ClientSession",
866
+ ) -> None:
867
+ # URL forbids subclasses, so a simple type check is enough.
868
+ assert type(url) is URL
869
+
870
+ self.method = method
871
+
872
+ self._real_url = url
873
+ self._url = url.with_fragment(None) if url.raw_fragment else url
874
+ if writer is not None:
875
+ self._writer = writer
876
+ if continue100 is not None:
877
+ self._continue = continue100
878
+ self._request_info = request_info
879
+ self._timer = timer if timer is not None else TimerNoop()
880
+ self._cache: Dict[str, Any] = {}
881
+ self._traces = traces
882
+ self._loop = loop
883
+ # Save reference to _resolve_charset, so that get_encoding() will still
884
+ # work after the response has finished reading the body.
885
+ # TODO: Fix session=None in tests (see ClientRequest.__init__).
886
+ if session is not None:
887
+ # store a reference to session #1985
888
+ self._session = session
889
+ self._resolve_charset = session._resolve_charset
890
+ if loop.get_debug():
891
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
892
+
893
+ def __reset_writer(self, _: object = None) -> None:
894
+ self.__writer = None
895
+
896
+ @property
897
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
898
+ """The writer task for streaming data.
899
+
900
+ _writer is only provided for backwards compatibility
901
+ for subclasses that may need to access it.
902
+ """
903
+ return self.__writer
904
+
905
+ @_writer.setter
906
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
907
+ """Set the writer task for streaming data."""
908
+ if self.__writer is not None:
909
+ self.__writer.remove_done_callback(self.__reset_writer)
910
+ self.__writer = writer
911
+ if writer is None:
912
+ return
913
+ if writer.done():
914
+ # The writer is already done, so we can clear it immediately.
915
+ self.__writer = None
916
+ else:
917
+ writer.add_done_callback(self.__reset_writer)
918
+
919
+ @property
920
+ def cookies(self) -> SimpleCookie:
921
+ if self._cookies is None:
922
+ self._cookies = SimpleCookie()
923
+ return self._cookies
924
+
925
+ @cookies.setter
926
+ def cookies(self, cookies: SimpleCookie) -> None:
927
+ self._cookies = cookies
928
+
929
+ @reify
930
+ def url(self) -> URL:
931
+ return self._url
932
+
933
+ @reify
934
+ def url_obj(self) -> URL:
935
+ warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
936
+ return self._url
937
+
938
+ @reify
939
+ def real_url(self) -> URL:
940
+ return self._real_url
941
+
942
+ @reify
943
+ def host(self) -> str:
944
+ assert self._url.host is not None
945
+ return self._url.host
946
+
947
+ @reify
948
+ def headers(self) -> "CIMultiDictProxy[str]":
949
+ return self._headers
950
+
951
+ @reify
952
+ def raw_headers(self) -> RawHeaders:
953
+ return self._raw_headers
954
+
955
+ @reify
956
+ def request_info(self) -> RequestInfo:
957
+ return self._request_info
958
+
959
+ @reify
960
+ def content_disposition(self) -> Optional[ContentDisposition]:
961
+ raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
962
+ if raw is None:
963
+ return None
964
+ disposition_type, params_dct = multipart.parse_content_disposition(raw)
965
+ params = MappingProxyType(params_dct)
966
+ filename = multipart.content_disposition_filename(params)
967
+ return ContentDisposition(disposition_type, params, filename)
968
+
969
+ def __del__(self, _warnings: Any = warnings) -> None:
970
+ if self._closed:
971
+ return
972
+
973
+ if self._connection is not None:
974
+ self._connection.release()
975
+ self._cleanup_writer()
976
+
977
+ if self._loop.get_debug():
978
+ kwargs = {"source": self}
979
+ _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
980
+ context = {"client_response": self, "message": "Unclosed response"}
981
+ if self._source_traceback:
982
+ context["source_traceback"] = self._source_traceback
983
+ self._loop.call_exception_handler(context)
984
+
985
+ def __repr__(self) -> str:
986
+ out = io.StringIO()
987
+ ascii_encodable_url = str(self.url)
988
+ if self.reason:
989
+ ascii_encodable_reason = self.reason.encode(
990
+ "ascii", "backslashreplace"
991
+ ).decode("ascii")
992
+ else:
993
+ ascii_encodable_reason = "None"
994
+ print(
995
+ "<ClientResponse({}) [{} {}]>".format(
996
+ ascii_encodable_url, self.status, ascii_encodable_reason
997
+ ),
998
+ file=out,
999
+ )
1000
+ print(self.headers, file=out)
1001
+ return out.getvalue()
1002
+
1003
+ @property
1004
+ def connection(self) -> Optional["Connection"]:
1005
+ return self._connection
1006
+
1007
+ @reify
1008
+ def history(self) -> Tuple["ClientResponse", ...]:
1009
+ """A sequence of of responses, if redirects occurred."""
1010
+ return self._history
1011
+
1012
+ @reify
1013
+ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
1014
+ links_str = ", ".join(self.headers.getall("link", []))
1015
+
1016
+ if not links_str:
1017
+ return MultiDictProxy(MultiDict())
1018
+
1019
+ links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
1020
+
1021
+ for val in re.split(r",(?=\s*<)", links_str):
1022
+ match = re.match(r"\s*<(.*)>(.*)", val)
1023
+ if match is None: # pragma: no cover
1024
+ # the check exists to suppress mypy error
1025
+ continue
1026
+ url, params_str = match.groups()
1027
+ params = params_str.split(";")[1:]
1028
+
1029
+ link: MultiDict[Union[str, URL]] = MultiDict()
1030
+
1031
+ for param in params:
1032
+ match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
1033
+ if match is None: # pragma: no cover
1034
+ # the check exists to suppress mypy error
1035
+ continue
1036
+ key, _, value, _ = match.groups()
1037
+
1038
+ link.add(key, value)
1039
+
1040
+ key = link.get("rel", url)
1041
+
1042
+ link.add("url", self.url.join(URL(url)))
1043
+
1044
+ links.add(str(key), MultiDictProxy(link))
1045
+
1046
+ return MultiDictProxy(links)
1047
+
1048
+ async def start(self, connection: "Connection") -> "ClientResponse":
1049
+ """Start response processing."""
1050
+ self._closed = False
1051
+ self._protocol = connection.protocol
1052
+ self._connection = connection
1053
+
1054
+ with self._timer:
1055
+ while True:
1056
+ # read response
1057
+ try:
1058
+ protocol = self._protocol
1059
+ message, payload = await protocol.read() # type: ignore[union-attr]
1060
+ except http.HttpProcessingError as exc:
1061
+ raise ClientResponseError(
1062
+ self.request_info,
1063
+ self.history,
1064
+ status=exc.code,
1065
+ message=exc.message,
1066
+ headers=exc.headers,
1067
+ ) from exc
1068
+
1069
+ if message.code < 100 or message.code > 199 or message.code == 101:
1070
+ break
1071
+
1072
+ if self._continue is not None:
1073
+ set_result(self._continue, True)
1074
+ self._continue = None
1075
+
1076
+ # payload eof handler
1077
+ payload.on_eof(self._response_eof)
1078
+
1079
+ # response status
1080
+ self.version = message.version
1081
+ self.status = message.code
1082
+ self.reason = message.reason
1083
+
1084
+ # headers
1085
+ self._headers = message.headers # type is CIMultiDictProxy
1086
+ self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
1087
+
1088
+ # payload
1089
+ self.content = payload
1090
+
1091
+ # cookies
1092
+ if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
1093
+ cookies = SimpleCookie()
1094
+ for hdr in cookie_hdrs:
1095
+ try:
1096
+ cookies.load(hdr)
1097
+ except CookieError as exc:
1098
+ client_logger.warning("Can not load response cookies: %s", exc)
1099
+ self._cookies = cookies
1100
+ return self
1101
+
1102
+ def _response_eof(self) -> None:
1103
+ if self._closed:
1104
+ return
1105
+
1106
+ # protocol could be None because connection could be detached
1107
+ protocol = self._connection and self._connection.protocol
1108
+ if protocol is not None and protocol.upgraded:
1109
+ return
1110
+
1111
+ self._closed = True
1112
+ self._cleanup_writer()
1113
+ self._release_connection()
1114
+
1115
+ @property
1116
+ def closed(self) -> bool:
1117
+ return self._closed
1118
+
1119
+ def close(self) -> None:
1120
+ if not self._released:
1121
+ self._notify_content()
1122
+
1123
+ self._closed = True
1124
+ if self._loop is None or self._loop.is_closed():
1125
+ return
1126
+
1127
+ self._cleanup_writer()
1128
+ if self._connection is not None:
1129
+ self._connection.close()
1130
+ self._connection = None
1131
+
1132
+ def release(self) -> Any:
1133
+ if not self._released:
1134
+ self._notify_content()
1135
+
1136
+ self._closed = True
1137
+
1138
+ self._cleanup_writer()
1139
+ self._release_connection()
1140
+ return noop()
1141
+
1142
+ @property
1143
+ def ok(self) -> bool:
1144
+ """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
1145
+
1146
+ This is **not** a check for ``200 OK`` but a check that the response
1147
+ status is under 400.
1148
+ """
1149
+ return 400 > self.status
1150
+
1151
+ def raise_for_status(self) -> None:
1152
+ if not self.ok:
1153
+ # reason should always be not None for a started response
1154
+ assert self.reason is not None
1155
+
1156
+ # If we're in a context we can rely on __aexit__() to release as the
1157
+ # exception propagates.
1158
+ if not self._in_context:
1159
+ self.release()
1160
+
1161
+ raise ClientResponseError(
1162
+ self.request_info,
1163
+ self.history,
1164
+ status=self.status,
1165
+ message=self.reason,
1166
+ headers=self.headers,
1167
+ )
1168
+
1169
+ def _release_connection(self) -> None:
1170
+ if self._connection is not None:
1171
+ if self.__writer is None:
1172
+ self._connection.release()
1173
+ self._connection = None
1174
+ else:
1175
+ self.__writer.add_done_callback(lambda f: self._release_connection())
1176
+
1177
+ async def _wait_released(self) -> None:
1178
+ if self.__writer is not None:
1179
+ try:
1180
+ await self.__writer
1181
+ except asyncio.CancelledError:
1182
+ if (
1183
+ sys.version_info >= (3, 11)
1184
+ and (task := asyncio.current_task())
1185
+ and task.cancelling()
1186
+ ):
1187
+ raise
1188
+ self._release_connection()
1189
+
1190
+ def _cleanup_writer(self) -> None:
1191
+ if self.__writer is not None:
1192
+ self.__writer.cancel()
1193
+ self._session = None
1194
+
1195
+ def _notify_content(self) -> None:
1196
+ content = self.content
1197
+ if content and content.exception() is None:
1198
+ set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
1199
+ self._released = True
1200
+
1201
+ async def wait_for_close(self) -> None:
1202
+ if self.__writer is not None:
1203
+ try:
1204
+ await self.__writer
1205
+ except asyncio.CancelledError:
1206
+ if (
1207
+ sys.version_info >= (3, 11)
1208
+ and (task := asyncio.current_task())
1209
+ and task.cancelling()
1210
+ ):
1211
+ raise
1212
+ self.release()
1213
+
1214
+ async def read(self) -> bytes:
1215
+ """Read response payload."""
1216
+ if self._body is None:
1217
+ try:
1218
+ self._body = await self.content.read()
1219
+ for trace in self._traces:
1220
+ await trace.send_response_chunk_received(
1221
+ self.method, self.url, self._body
1222
+ )
1223
+ except BaseException:
1224
+ self.close()
1225
+ raise
1226
+ elif self._released: # Response explicitly released
1227
+ raise ClientConnectionError("Connection closed")
1228
+
1229
+ protocol = self._connection and self._connection.protocol
1230
+ if protocol is None or not protocol.upgraded:
1231
+ await self._wait_released() # Underlying connection released
1232
+ return self._body
1233
+
1234
+ def get_encoding(self) -> str:
1235
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
1236
+ mimetype = helpers.parse_mimetype(ctype)
1237
+
1238
+ encoding = mimetype.parameters.get("charset")
1239
+ if encoding:
1240
+ with contextlib.suppress(LookupError, ValueError):
1241
+ return codecs.lookup(encoding).name
1242
+
1243
+ if mimetype.type == "application" and (
1244
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
1245
+ ):
1246
+ # RFC 7159 states that the default encoding is UTF-8.
1247
+ # RFC 7483 defines application/rdap+json
1248
+ return "utf-8"
1249
+
1250
+ if self._body is None:
1251
+ raise RuntimeError(
1252
+ "Cannot compute fallback encoding of a not yet read body"
1253
+ )
1254
+
1255
+ return self._resolve_charset(self, self._body)
1256
+
1257
+ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
1258
+ """Read response payload and decode."""
1259
+ if self._body is None:
1260
+ await self.read()
1261
+
1262
+ if encoding is None:
1263
+ encoding = self.get_encoding()
1264
+
1265
+ return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]
1266
+
1267
+ async def json(
1268
+ self,
1269
+ *,
1270
+ encoding: Optional[str] = None,
1271
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
1272
+ content_type: Optional[str] = "application/json",
1273
+ ) -> Any:
1274
+ """Read and decodes JSON response."""
1275
+ if self._body is None:
1276
+ await self.read()
1277
+
1278
+ if content_type:
1279
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
1280
+ if not _is_expected_content_type(ctype, content_type):
1281
+ raise ContentTypeError(
1282
+ self.request_info,
1283
+ self.history,
1284
+ status=self.status,
1285
+ message=(
1286
+ "Attempt to decode JSON with unexpected mimetype: %s" % ctype
1287
+ ),
1288
+ headers=self.headers,
1289
+ )
1290
+
1291
+ stripped = self._body.strip() # type: ignore[union-attr]
1292
+ if not stripped:
1293
+ return None
1294
+
1295
+ if encoding is None:
1296
+ encoding = self.get_encoding()
1297
+
1298
+ return loads(stripped.decode(encoding))
1299
+
1300
+ async def __aenter__(self) -> "ClientResponse":
1301
+ self._in_context = True
1302
+ return self
1303
+
1304
+ async def __aexit__(
1305
+ self,
1306
+ exc_type: Optional[Type[BaseException]],
1307
+ exc_val: Optional[BaseException],
1308
+ exc_tb: Optional[TracebackType],
1309
+ ) -> None:
1310
+ self._in_context = False
1311
+ # similar to _RequestContextManager, we do not need to check
1312
+ # for exceptions, response object can close connection
1313
+ # if state is broken
1314
+ self.release()
1315
+ await self.wait_for_close()
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/client_ws.py ADDED
@@ -0,0 +1,426 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket client for asyncio."""
2
+
3
+ import asyncio
4
+ import sys
5
+ from types import TracebackType
6
+ from typing import Any, Optional, Type, cast
7
+
8
+ import attr
9
+
10
+ from ._websocket.reader import WebSocketDataQueue
11
+ from .client_exceptions import ClientError, ServerTimeoutError, WSMessageTypeError
12
+ from .client_reqrep import ClientResponse
13
+ from .helpers import calculate_timeout_when, set_result
14
+ from .http import (
15
+ WS_CLOSED_MESSAGE,
16
+ WS_CLOSING_MESSAGE,
17
+ WebSocketError,
18
+ WSCloseCode,
19
+ WSMessage,
20
+ WSMsgType,
21
+ )
22
+ from .http_websocket import _INTERNAL_RECEIVE_TYPES, WebSocketWriter
23
+ from .streams import EofStream
24
+ from .typedefs import (
25
+ DEFAULT_JSON_DECODER,
26
+ DEFAULT_JSON_ENCODER,
27
+ JSONDecoder,
28
+ JSONEncoder,
29
+ )
30
+
31
+ if sys.version_info >= (3, 11):
32
+ import asyncio as async_timeout
33
+ else:
34
+ import async_timeout
35
+
36
+
37
+ @attr.s(frozen=True, slots=True)
38
+ class ClientWSTimeout:
39
+ ws_receive = attr.ib(type=Optional[float], default=None)
40
+ ws_close = attr.ib(type=Optional[float], default=None)
41
+
42
+
43
+ DEFAULT_WS_CLIENT_TIMEOUT = ClientWSTimeout(ws_receive=None, ws_close=10.0)
44
+
45
+
46
+ class ClientWebSocketResponse:
47
+ def __init__(
48
+ self,
49
+ reader: WebSocketDataQueue,
50
+ writer: WebSocketWriter,
51
+ protocol: Optional[str],
52
+ response: ClientResponse,
53
+ timeout: ClientWSTimeout,
54
+ autoclose: bool,
55
+ autoping: bool,
56
+ loop: asyncio.AbstractEventLoop,
57
+ *,
58
+ heartbeat: Optional[float] = None,
59
+ compress: int = 0,
60
+ client_notakeover: bool = False,
61
+ ) -> None:
62
+ self._response = response
63
+ self._conn = response.connection
64
+
65
+ self._writer = writer
66
+ self._reader = reader
67
+ self._protocol = protocol
68
+ self._closed = False
69
+ self._closing = False
70
+ self._close_code: Optional[int] = None
71
+ self._timeout = timeout
72
+ self._autoclose = autoclose
73
+ self._autoping = autoping
74
+ self._heartbeat = heartbeat
75
+ self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
76
+ self._heartbeat_when: float = 0.0
77
+ if heartbeat is not None:
78
+ self._pong_heartbeat = heartbeat / 2.0
79
+ self._pong_response_cb: Optional[asyncio.TimerHandle] = None
80
+ self._loop = loop
81
+ self._waiting: bool = False
82
+ self._close_wait: Optional[asyncio.Future[None]] = None
83
+ self._exception: Optional[BaseException] = None
84
+ self._compress = compress
85
+ self._client_notakeover = client_notakeover
86
+ self._ping_task: Optional[asyncio.Task[None]] = None
87
+
88
+ self._reset_heartbeat()
89
+
90
+ def _cancel_heartbeat(self) -> None:
91
+ self._cancel_pong_response_cb()
92
+ if self._heartbeat_cb is not None:
93
+ self._heartbeat_cb.cancel()
94
+ self._heartbeat_cb = None
95
+ if self._ping_task is not None:
96
+ self._ping_task.cancel()
97
+ self._ping_task = None
98
+
99
+ def _cancel_pong_response_cb(self) -> None:
100
+ if self._pong_response_cb is not None:
101
+ self._pong_response_cb.cancel()
102
+ self._pong_response_cb = None
103
+
104
+ def _reset_heartbeat(self) -> None:
105
+ if self._heartbeat is None:
106
+ return
107
+ self._cancel_pong_response_cb()
108
+ loop = self._loop
109
+ assert loop is not None
110
+ conn = self._conn
111
+ timeout_ceil_threshold = (
112
+ conn._connector._timeout_ceil_threshold if conn is not None else 5
113
+ )
114
+ now = loop.time()
115
+ when = calculate_timeout_when(now, self._heartbeat, timeout_ceil_threshold)
116
+ self._heartbeat_when = when
117
+ if self._heartbeat_cb is None:
118
+ # We do not cancel the previous heartbeat_cb here because
119
+ # it generates a significant amount of TimerHandle churn
120
+ # which causes asyncio to rebuild the heap frequently.
121
+ # Instead _send_heartbeat() will reschedule the next
122
+ # heartbeat if it fires too early.
123
+ self._heartbeat_cb = loop.call_at(when, self._send_heartbeat)
124
+
125
+ def _send_heartbeat(self) -> None:
126
+ self._heartbeat_cb = None
127
+ loop = self._loop
128
+ now = loop.time()
129
+ if now < self._heartbeat_when:
130
+ # Heartbeat fired too early, reschedule
131
+ self._heartbeat_cb = loop.call_at(
132
+ self._heartbeat_when, self._send_heartbeat
133
+ )
134
+ return
135
+
136
+ conn = self._conn
137
+ timeout_ceil_threshold = (
138
+ conn._connector._timeout_ceil_threshold if conn is not None else 5
139
+ )
140
+ when = calculate_timeout_when(now, self._pong_heartbeat, timeout_ceil_threshold)
141
+ self._cancel_pong_response_cb()
142
+ self._pong_response_cb = loop.call_at(when, self._pong_not_received)
143
+
144
+ coro = self._writer.send_frame(b"", WSMsgType.PING)
145
+ if sys.version_info >= (3, 12):
146
+ # Optimization for Python 3.12, try to send the ping
147
+ # immediately to avoid having to schedule
148
+ # the task on the event loop.
149
+ ping_task = asyncio.Task(coro, loop=loop, eager_start=True)
150
+ else:
151
+ ping_task = loop.create_task(coro)
152
+
153
+ if not ping_task.done():
154
+ self._ping_task = ping_task
155
+ ping_task.add_done_callback(self._ping_task_done)
156
+ else:
157
+ self._ping_task_done(ping_task)
158
+
159
+ def _ping_task_done(self, task: "asyncio.Task[None]") -> None:
160
+ """Callback for when the ping task completes."""
161
+ if not task.cancelled() and (exc := task.exception()):
162
+ self._handle_ping_pong_exception(exc)
163
+ self._ping_task = None
164
+
165
+ def _pong_not_received(self) -> None:
166
+ self._handle_ping_pong_exception(ServerTimeoutError())
167
+
168
+ def _handle_ping_pong_exception(self, exc: BaseException) -> None:
169
+ """Handle exceptions raised during ping/pong processing."""
170
+ if self._closed:
171
+ return
172
+ self._set_closed()
173
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
174
+ self._exception = exc
175
+ self._response.close()
176
+ if self._waiting and not self._closing:
177
+ self._reader.feed_data(WSMessage(WSMsgType.ERROR, exc, None), 0)
178
+
179
+ def _set_closed(self) -> None:
180
+ """Set the connection to closed.
181
+
182
+ Cancel any heartbeat timers and set the closed flag.
183
+ """
184
+ self._closed = True
185
+ self._cancel_heartbeat()
186
+
187
+ def _set_closing(self) -> None:
188
+ """Set the connection to closing.
189
+
190
+ Cancel any heartbeat timers and set the closing flag.
191
+ """
192
+ self._closing = True
193
+ self._cancel_heartbeat()
194
+
195
+ @property
196
+ def closed(self) -> bool:
197
+ return self._closed
198
+
199
+ @property
200
+ def close_code(self) -> Optional[int]:
201
+ return self._close_code
202
+
203
+ @property
204
+ def protocol(self) -> Optional[str]:
205
+ return self._protocol
206
+
207
+ @property
208
+ def compress(self) -> int:
209
+ return self._compress
210
+
211
+ @property
212
+ def client_notakeover(self) -> bool:
213
+ return self._client_notakeover
214
+
215
+ def get_extra_info(self, name: str, default: Any = None) -> Any:
216
+ """extra info from connection transport"""
217
+ conn = self._response.connection
218
+ if conn is None:
219
+ return default
220
+ transport = conn.transport
221
+ if transport is None:
222
+ return default
223
+ return transport.get_extra_info(name, default)
224
+
225
+ def exception(self) -> Optional[BaseException]:
226
+ return self._exception
227
+
228
+ async def ping(self, message: bytes = b"") -> None:
229
+ await self._writer.send_frame(message, WSMsgType.PING)
230
+
231
+ async def pong(self, message: bytes = b"") -> None:
232
+ await self._writer.send_frame(message, WSMsgType.PONG)
233
+
234
+ async def send_frame(
235
+ self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None
236
+ ) -> None:
237
+ """Send a frame over the websocket."""
238
+ await self._writer.send_frame(message, opcode, compress)
239
+
240
+ async def send_str(self, data: str, compress: Optional[int] = None) -> None:
241
+ if not isinstance(data, str):
242
+ raise TypeError("data argument must be str (%r)" % type(data))
243
+ await self._writer.send_frame(
244
+ data.encode("utf-8"), WSMsgType.TEXT, compress=compress
245
+ )
246
+
247
+ async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
248
+ if not isinstance(data, (bytes, bytearray, memoryview)):
249
+ raise TypeError("data argument must be byte-ish (%r)" % type(data))
250
+ await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress)
251
+
252
+ async def send_json(
253
+ self,
254
+ data: Any,
255
+ compress: Optional[int] = None,
256
+ *,
257
+ dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
258
+ ) -> None:
259
+ await self.send_str(dumps(data), compress=compress)
260
+
261
+ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
262
+ # we need to break `receive()` cycle first,
263
+ # `close()` may be called from different task
264
+ if self._waiting and not self._closing:
265
+ assert self._loop is not None
266
+ self._close_wait = self._loop.create_future()
267
+ self._set_closing()
268
+ self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
269
+ await self._close_wait
270
+
271
+ if self._closed:
272
+ return False
273
+
274
+ self._set_closed()
275
+ try:
276
+ await self._writer.close(code, message)
277
+ except asyncio.CancelledError:
278
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
279
+ self._response.close()
280
+ raise
281
+ except Exception as exc:
282
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
283
+ self._exception = exc
284
+ self._response.close()
285
+ return True
286
+
287
+ if self._close_code:
288
+ self._response.close()
289
+ return True
290
+
291
+ while True:
292
+ try:
293
+ async with async_timeout.timeout(self._timeout.ws_close):
294
+ msg = await self._reader.read()
295
+ except asyncio.CancelledError:
296
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
297
+ self._response.close()
298
+ raise
299
+ except Exception as exc:
300
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
301
+ self._exception = exc
302
+ self._response.close()
303
+ return True
304
+
305
+ if msg.type is WSMsgType.CLOSE:
306
+ self._close_code = msg.data
307
+ self._response.close()
308
+ return True
309
+
310
+ async def receive(self, timeout: Optional[float] = None) -> WSMessage:
311
+ receive_timeout = timeout or self._timeout.ws_receive
312
+
313
+ while True:
314
+ if self._waiting:
315
+ raise RuntimeError("Concurrent call to receive() is not allowed")
316
+
317
+ if self._closed:
318
+ return WS_CLOSED_MESSAGE
319
+ elif self._closing:
320
+ await self.close()
321
+ return WS_CLOSED_MESSAGE
322
+
323
+ try:
324
+ self._waiting = True
325
+ try:
326
+ if receive_timeout:
327
+ # Entering the context manager and creating
328
+ # Timeout() object can take almost 50% of the
329
+ # run time in this loop so we avoid it if
330
+ # there is no read timeout.
331
+ async with async_timeout.timeout(receive_timeout):
332
+ msg = await self._reader.read()
333
+ else:
334
+ msg = await self._reader.read()
335
+ self._reset_heartbeat()
336
+ finally:
337
+ self._waiting = False
338
+ if self._close_wait:
339
+ set_result(self._close_wait, None)
340
+ except (asyncio.CancelledError, asyncio.TimeoutError):
341
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
342
+ raise
343
+ except EofStream:
344
+ self._close_code = WSCloseCode.OK
345
+ await self.close()
346
+ return WSMessage(WSMsgType.CLOSED, None, None)
347
+ except ClientError:
348
+ # Likely ServerDisconnectedError when connection is lost
349
+ self._set_closed()
350
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
351
+ return WS_CLOSED_MESSAGE
352
+ except WebSocketError as exc:
353
+ self._close_code = exc.code
354
+ await self.close(code=exc.code)
355
+ return WSMessage(WSMsgType.ERROR, exc, None)
356
+ except Exception as exc:
357
+ self._exception = exc
358
+ self._set_closing()
359
+ self._close_code = WSCloseCode.ABNORMAL_CLOSURE
360
+ await self.close()
361
+ return WSMessage(WSMsgType.ERROR, exc, None)
362
+
363
+ if msg.type not in _INTERNAL_RECEIVE_TYPES:
364
+ # If its not a close/closing/ping/pong message
365
+ # we can return it immediately
366
+ return msg
367
+
368
+ if msg.type is WSMsgType.CLOSE:
369
+ self._set_closing()
370
+ self._close_code = msg.data
371
+ if not self._closed and self._autoclose:
372
+ await self.close()
373
+ elif msg.type is WSMsgType.CLOSING:
374
+ self._set_closing()
375
+ elif msg.type is WSMsgType.PING and self._autoping:
376
+ await self.pong(msg.data)
377
+ continue
378
+ elif msg.type is WSMsgType.PONG and self._autoping:
379
+ continue
380
+
381
+ return msg
382
+
383
+ async def receive_str(self, *, timeout: Optional[float] = None) -> str:
384
+ msg = await self.receive(timeout)
385
+ if msg.type is not WSMsgType.TEXT:
386
+ raise WSMessageTypeError(
387
+ f"Received message {msg.type}:{msg.data!r} is not WSMsgType.TEXT"
388
+ )
389
+ return cast(str, msg.data)
390
+
391
+ async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
392
+ msg = await self.receive(timeout)
393
+ if msg.type is not WSMsgType.BINARY:
394
+ raise WSMessageTypeError(
395
+ f"Received message {msg.type}:{msg.data!r} is not WSMsgType.BINARY"
396
+ )
397
+ return cast(bytes, msg.data)
398
+
399
+ async def receive_json(
400
+ self,
401
+ *,
402
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
403
+ timeout: Optional[float] = None,
404
+ ) -> Any:
405
+ data = await self.receive_str(timeout=timeout)
406
+ return loads(data)
407
+
408
+ def __aiter__(self) -> "ClientWebSocketResponse":
409
+ return self
410
+
411
+ async def __anext__(self) -> WSMessage:
412
+ msg = await self.receive()
413
+ if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
414
+ raise StopAsyncIteration
415
+ return msg
416
+
417
+ async def __aenter__(self) -> "ClientWebSocketResponse":
418
+ return self
419
+
420
+ async def __aexit__(
421
+ self,
422
+ exc_type: Optional[Type[BaseException]],
423
+ exc_val: Optional[BaseException],
424
+ exc_tb: Optional[TracebackType],
425
+ ) -> None:
426
+ await self.close()
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/connector.py ADDED
@@ -0,0 +1,1652 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ import functools
3
+ import random
4
+ import socket
5
+ import sys
6
+ import traceback
7
+ import warnings
8
+ from collections import OrderedDict, defaultdict, deque
9
+ from contextlib import suppress
10
+ from http import HTTPStatus
11
+ from itertools import chain, cycle, islice
12
+ from time import monotonic
13
+ from types import TracebackType
14
+ from typing import (
15
+ TYPE_CHECKING,
16
+ Any,
17
+ Awaitable,
18
+ Callable,
19
+ DefaultDict,
20
+ Deque,
21
+ Dict,
22
+ Iterator,
23
+ List,
24
+ Literal,
25
+ Optional,
26
+ Sequence,
27
+ Set,
28
+ Tuple,
29
+ Type,
30
+ Union,
31
+ cast,
32
+ )
33
+
34
+ import aiohappyeyeballs
35
+
36
+ from . import hdrs, helpers
37
+ from .abc import AbstractResolver, ResolveResult
38
+ from .client_exceptions import (
39
+ ClientConnectionError,
40
+ ClientConnectorCertificateError,
41
+ ClientConnectorDNSError,
42
+ ClientConnectorError,
43
+ ClientConnectorSSLError,
44
+ ClientHttpProxyError,
45
+ ClientProxyConnectionError,
46
+ ServerFingerprintMismatch,
47
+ UnixClientConnectorError,
48
+ cert_errors,
49
+ ssl_errors,
50
+ )
51
+ from .client_proto import ResponseHandler
52
+ from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
53
+ from .helpers import (
54
+ ceil_timeout,
55
+ is_ip_address,
56
+ noop,
57
+ sentinel,
58
+ set_exception,
59
+ set_result,
60
+ )
61
+ from .resolver import DefaultResolver
62
+
63
# ssl is an optional dependency of the interpreter build: when it is missing
# we degrade to ssl=None / SSLContext=object so that the rest of this module
# can still be imported (plain-HTTP use keeps working).
if TYPE_CHECKING:
    import ssl

    SSLContext = ssl.SSLContext
else:
    try:
        import ssl

        SSLContext = ssl.SSLContext
    except ImportError:  # pragma: no cover
        ssl = None  # type: ignore[assignment]
        SSLContext = object  # type: ignore[misc,assignment]
75
+
76
# URL scheme groups used to validate what a given connector may handle.
EMPTY_SCHEMA_SET = frozenset({""})
HTTP_SCHEMA_SET = frozenset({"http", "https"})
WS_SCHEMA_SET = frozenset({"ws", "wss"})

HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET
HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET

# Whether the "cleanup closed" workaround is required on this interpreter.
NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < (
    3,
    13,
    1,
) or sys.version_info < (3, 12, 7)
# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960
# which first appeared in Python 3.12.7 and 3.13.1


__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
93
+
94
+
95
+ if TYPE_CHECKING:
96
+ from .client import ClientTimeout
97
+ from .client_reqrep import ConnectionKey
98
+ from .tracing import Trace
99
+
100
+
101
+ class _DeprecationWaiter:
102
+ __slots__ = ("_awaitable", "_awaited")
103
+
104
+ def __init__(self, awaitable: Awaitable[Any]) -> None:
105
+ self._awaitable = awaitable
106
+ self._awaited = False
107
+
108
+ def __await__(self) -> Any:
109
+ self._awaited = True
110
+ return self._awaitable.__await__()
111
+
112
+ def __del__(self) -> None:
113
+ if not self._awaited:
114
+ warnings.warn(
115
+ "Connector.close() is a coroutine, "
116
+ "please use await connector.close()",
117
+ DeprecationWarning,
118
+ )
119
+
120
+
121
class Connection:
    """A single pooled connection handed out by a :class:`BaseConnector`.

    Wraps a ``ResponseHandler`` protocol together with the pool key it was
    acquired under; ``release()`` returns the protocol to the owning
    connector for reuse, ``close()`` returns it for closing.
    """

    # Filled with a stack trace when the loop runs in debug mode; surfaced
    # in the "Unclosed connection" exception-handler context in __del__.
    _source_traceback = None

    def __init__(
        self,
        connector: "BaseConnector",
        key: "ConnectionKey",
        protocol: ResponseHandler,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._key = key
        self._connector = connector
        self._loop = loop
        self._protocol: Optional[ResponseHandler] = protocol
        self._callbacks: List[Callable[[], None]] = []

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __repr__(self) -> str:
        return f"Connection<{self._key}>"

    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer: a connection garbage-collected while still holding a
        # protocol was never released/closed by its user — warn and force
        # the protocol closed (it cannot safely be reused).
        if self._protocol is not None:
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
            if self._loop.is_closed():
                return

            self._connector._release(self._key, self._protocol, should_close=True)

            context = {"client_connection": self, "message": "Unclosed connection"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    def __bool__(self) -> Literal[True]:
        """Force subclasses to not be falsy, to make checks simpler."""
        return True

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        # Deprecated accessor kept only for backwards compatibility.
        warnings.warn(
            "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Transport of the underlying protocol, or None once released."""
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def protocol(self) -> Optional[ResponseHandler]:
        """The wrapped protocol, or None once released/closed."""
        return self._protocol

    def add_callback(self, callback: Callable[[], None]) -> None:
        """Register a callback to run when the connection is released/closed."""
        if callback is not None:
            self._callbacks.append(callback)

    def _notify_release(self) -> None:
        # Run each registered callback exactly once; callback errors are
        # deliberately suppressed so one bad callback cannot block release.
        callbacks, self._callbacks = self._callbacks[:], []

        for cb in callbacks:
            with suppress(Exception):
                cb()

    def close(self) -> None:
        """Return the connection to the connector and close it."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(self._key, self._protocol, should_close=True)
            self._protocol = None

    def release(self) -> None:
        """Return the connection to the connector for possible reuse."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(self._key, self._protocol)
            self._protocol = None

    @property
    def closed(self) -> bool:
        """True once released or after the transport dropped the link."""
        return self._protocol is None or not self._protocol.is_connected()
207
+
208
+
209
+ class _TransportPlaceholder:
210
+ """placeholder for BaseConnector.connect function"""
211
+
212
+ __slots__ = ()
213
+
214
+ def close(self) -> None:
215
+ """Close the placeholder transport."""
216
+
217
+
218
class BaseConnector:
    """Base connector class.

    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    enable_cleanup_closed - Enables clean-up closed ssl transports.
                            Disabled by default.
    timeout_ceil_threshold - Trigger ceiling of timeout values when
                             it's above timeout_ceil_threshold.
    loop - Optional event loop.
    """

    _closed = True  # prevent AttributeError in __del__ if ctor was failed
    _source_traceback = None

    # abort transport after 2 seconds (cleanup broken connections)
    _cleanup_closed_period = 2.0

    # URL schemes this connector accepts; subclasses may extend the set.
    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET

    def __init__(
        self,
        *,
        keepalive_timeout: Union[object, None, float] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
    ) -> None:
        """Initialize the connector; see the class docstring for arguments."""
        # force_close and an explicit keepalive_timeout are mutually
        # exclusive; without force_close the keep-alive default is 15s.
        if force_close:
            if keepalive_timeout is not None and keepalive_timeout is not sentinel:
                raise ValueError(
                    "keepalive_timeout cannot be set if force_close is True"
                )
        else:
            if keepalive_timeout is sentinel:
                keepalive_timeout = 15.0

        loop = loop or asyncio.get_running_loop()
        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._closed = False
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Connection pool of reusable connections.
        # We use a deque to store connections because it has O(1) popleft()
        # and O(1) append() operations to implement a FIFO queue.
        self._conns: DefaultDict[
            ConnectionKey, Deque[Tuple[ResponseHandler, float]]
        ] = defaultdict(deque)
        self._limit = limit
        self._limit_per_host = limit_per_host
        # Protocols currently handed out (or placeholders for in-flight
        # connection attempts), globally and per host key.
        self._acquired: Set[ResponseHandler] = set()
        self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = (
            defaultdict(set)
        )
        self._keepalive_timeout = cast(float, keepalive_timeout)
        self._force_close = force_close

        # {host_key: FIFO list of waiters}
        # The FIFO is implemented with an OrderedDict with None keys because
        # python does not have an ordered set.
        self._waiters: DefaultDict[
            ConnectionKey, OrderedDict[asyncio.Future[None], None]
        ] = defaultdict(OrderedDict)

        self._loop = loop
        self._factory = functools.partial(ResponseHandler, loop=loop)

        # start keep-alive connection cleanup task
        self._cleanup_handle: Optional[asyncio.TimerHandle] = None

        # start cleanup closed transports task
        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None

        if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED:
            warnings.warn(
                "enable_cleanup_closed ignored because "
                "https://github.com/python/cpython/pull/118960 is fixed "
                f"in Python version {sys.version_info}",
                DeprecationWarning,
                stacklevel=2,
            )
            enable_cleanup_closed = False

        self._cleanup_closed_disabled = not enable_cleanup_closed
        self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
        self._cleanup_closed()

    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer: warn when a connector that still holds pooled
        # connections is garbage-collected without being closed.
        if self._closed:
            return
        if not self._conns:
            return

        conns = [repr(c) for c in self._conns.values()]

        self._close()

        kwargs = {"source": self}
        _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
        context = {
            "connector": self,
            "connections": conns,
            "message": "Unclosed connector",
        }
        if self._source_traceback is not None:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __enter__(self) -> "BaseConnector":
        """Deprecated sync context manager entry; use ``async with``."""
        warnings.warn(
            '"with Connector():" is deprecated, '
            'use "async with Connector():" instead',
            DeprecationWarning,
        )
        return self

    def __exit__(self, *exc: Any) -> None:
        self._close()

    async def __aenter__(self) -> "BaseConnector":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        exc_traceback: Optional[TracebackType] = None,
    ) -> None:
        await self.close()

    @property
    def force_close(self) -> bool:
        """Ultimately close connection on releasing if True."""
        return self._force_close

    @property
    def limit(self) -> int:
        """The total number for simultaneous connections.

        If limit is 0 the connector has no limit.
        The default limit size is 100.
        """
        return self._limit

    @property
    def limit_per_host(self) -> int:
        """The limit for simultaneous connections to the same endpoint.

        Endpoints are the same if they are have equal
        (host, port, is_ssl) triple.
        """
        return self._limit_per_host

    def _cleanup(self) -> None:
        """Cleanup unused transports."""
        if self._cleanup_handle:
            self._cleanup_handle.cancel()
            # _cleanup_handle should be unset, otherwise _release() will not
            # recreate it ever!
            self._cleanup_handle = None

        now = monotonic()
        timeout = self._keepalive_timeout

        if self._conns:
            # Rebuild the pool keeping only connections that are still
            # connected and within the keep-alive window.
            connections = defaultdict(deque)
            deadline = now - timeout
            for key, conns in self._conns.items():
                alive: Deque[Tuple[ResponseHandler, float]] = deque()
                for proto, use_time in conns:
                    if proto.is_connected() and use_time - deadline >= 0:
                        alive.append((proto, use_time))
                        continue
                    transport = proto.transport
                    proto.close()
                    if not self._cleanup_closed_disabled and key.is_ssl:
                        self._cleanup_closed_transports.append(transport)

                if alive:
                    connections[key] = alive

            self._conns = connections

        if self._conns:
            # Re-arm the timer while idle connections remain.
            self._cleanup_handle = helpers.weakref_handle(
                self,
                "_cleanup",
                timeout,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )

    def _cleanup_closed(self) -> None:
        """Double confirmation for transport close.

        Some broken ssl servers may leave socket open without proper close.
        """
        if self._cleanup_closed_handle:
            self._cleanup_closed_handle.cancel()

        for transport in self._cleanup_closed_transports:
            if transport is not None:
                transport.abort()

        self._cleanup_closed_transports = []

        if not self._cleanup_closed_disabled:
            self._cleanup_closed_handle = helpers.weakref_handle(
                self,
                "_cleanup_closed",
                self._cleanup_closed_period,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )

    def close(self) -> Awaitable[None]:
        """Close all opened transports."""
        self._close()
        # The returned waiter warns if the caller forgets to ``await`` it.
        return _DeprecationWaiter(noop())

    def _close(self) -> None:
        # Synchronous teardown; idempotent.
        if self._closed:
            return

        self._closed = True

        try:
            if self._loop.is_closed():
                return

            # cancel cleanup task
            if self._cleanup_handle:
                self._cleanup_handle.cancel()

            # cancel cleanup close task
            if self._cleanup_closed_handle:
                self._cleanup_closed_handle.cancel()

            for data in self._conns.values():
                for proto, t0 in data:
                    proto.close()

            for proto in self._acquired:
                proto.close()

            for transport in self._cleanup_closed_transports:
                if transport is not None:
                    transport.abort()

        finally:
            # Always drop bookkeeping state and wake any queued waiters,
            # even if closing a protocol above raised.
            self._conns.clear()
            self._acquired.clear()
            for keyed_waiters in self._waiters.values():
                for keyed_waiter in keyed_waiters:
                    keyed_waiter.cancel()
            self._waiters.clear()
            self._cleanup_handle = None
            self._cleanup_closed_transports.clear()
            self._cleanup_closed_handle = None

    @property
    def closed(self) -> bool:
        """Is connector closed.

        A readonly property.
        """
        return self._closed

    def _available_connections(self, key: "ConnectionKey") -> int:
        """
        Return number of available connections.

        The limit, limit_per_host and the connection key are taken into account.

        If it returns less than 1 means that there are no connections
        available.
        """
        # check total available connections
        # If there are no limits, this will always return 1
        total_remain = 1

        if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0:
            return total_remain

        # check limit per host
        if host_remain := self._limit_per_host:
            if acquired := self._acquired_per_host.get(key):
                host_remain -= len(acquired)
            if total_remain > host_remain:
                return host_remain

        return total_remain

    async def connect(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Connection:
        """Get from pool or create new connection."""
        key = req.connection_key
        if (conn := await self._get(key, traces)) is not None:
            # If we do not have to wait and we can get a connection from the pool
            # we can avoid the timeout ceil logic and directly return the connection
            return conn

        async with ceil_timeout(timeout.connect, timeout.ceil_threshold):
            if self._available_connections(key) <= 0:
                await self._wait_for_available_connection(key, traces)
                if (conn := await self._get(key, traces)) is not None:
                    return conn

            # Reserve the slot with a placeholder so limit accounting stays
            # correct while the real connection is being established.
            placeholder = cast(ResponseHandler, _TransportPlaceholder())
            self._acquired.add(placeholder)
            if self._limit_per_host:
                self._acquired_per_host[key].add(placeholder)

            try:
                # Traces are done inside the try block to ensure that the
                # that the placeholder is still cleaned up if an exception
                # is raised.
                if traces:
                    for trace in traces:
                        await trace.send_connection_create_start()
                proto = await self._create_connection(req, traces, timeout)
                if traces:
                    for trace in traces:
                        await trace.send_connection_create_end()
            except BaseException:
                self._release_acquired(key, placeholder)
                raise
            else:
                if self._closed:
                    proto.close()
                    raise ClientConnectionError("Connector is closed.")

            # The connection was successfully created, drop the placeholder
            # and add the real connection to the acquired set. There should
            # be no awaits after the proto is added to the acquired set
            # to ensure that the connection is not left in the acquired set
            # on cancellation.
            self._acquired.remove(placeholder)
            self._acquired.add(proto)
            if self._limit_per_host:
                acquired_per_host = self._acquired_per_host[key]
                acquired_per_host.remove(placeholder)
                acquired_per_host.add(proto)
            return Connection(self, key, proto, self._loop)

    async def _wait_for_available_connection(
        self, key: "ConnectionKey", traces: List["Trace"]
    ) -> None:
        """Wait for an available connection slot."""
        # We loop here because there is a race between
        # the connection limit check and the connection
        # being acquired. If the connection is acquired
        # between the check and the await statement, we
        # need to loop again to check if the connection
        # slot is still available.
        attempts = 0
        while True:
            fut: asyncio.Future[None] = self._loop.create_future()
            keyed_waiters = self._waiters[key]
            keyed_waiters[fut] = None
            if attempts:
                # If we have waited before, we need to move the waiter
                # to the front of the queue as otherwise we might get
                # starved and hit the timeout.
                keyed_waiters.move_to_end(fut, last=False)

            try:
                # Traces happen in the try block to ensure that the
                # the waiter is still cleaned up if an exception is raised.
                if traces:
                    for trace in traces:
                        await trace.send_connection_queued_start()
                await fut
                if traces:
                    for trace in traces:
                        await trace.send_connection_queued_end()
            finally:
                # pop the waiter from the queue if its still
                # there and not already removed by _release_waiter
                keyed_waiters.pop(fut, None)
                if not self._waiters.get(key, True):
                    del self._waiters[key]

            if self._available_connections(key) > 0:
                break
            attempts += 1

    async def _get(
        self, key: "ConnectionKey", traces: List["Trace"]
    ) -> Optional[Connection]:
        """Get next reusable connection for the key or None.

        The connection will be marked as acquired.
        """
        if (conns := self._conns.get(key)) is None:
            return None

        t1 = monotonic()
        while conns:
            proto, t0 = conns.popleft()
            # We will we reuse the connection if its connected and
            # the keepalive timeout has not been exceeded
            if proto.is_connected() and t1 - t0 <= self._keepalive_timeout:
                if not conns:
                    # The very last connection was reclaimed: drop the key
                    del self._conns[key]
                self._acquired.add(proto)
                if self._limit_per_host:
                    self._acquired_per_host[key].add(proto)
                if traces:
                    for trace in traces:
                        try:
                            await trace.send_connection_reuseconn()
                        except BaseException:
                            self._release_acquired(key, proto)
                            raise
                return Connection(self, key, proto, self._loop)

            # Connection cannot be reused, close it
            transport = proto.transport
            proto.close()
            # only for SSL transports
            if not self._cleanup_closed_disabled and key.is_ssl:
                self._cleanup_closed_transports.append(transport)

        # No more connections: drop the key
        del self._conns[key]
        return None

    def _release_waiter(self) -> None:
        """
        Iterates over all waiters until one to be released is found.

        The one to be released is not finished and
        belongs to a host that has available connections.
        """
        if not self._waiters:
            return

        # Having the dict keys ordered this avoids to iterate
        # at the same order at each call.
        queues = list(self._waiters)
        random.shuffle(queues)

        for key in queues:
            if self._available_connections(key) < 1:
                continue

            waiters = self._waiters[key]
            while waiters:
                waiter, _ = waiters.popitem(last=False)
                if not waiter.done():
                    waiter.set_result(None)
                    return

    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
        """Release acquired connection."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._acquired.discard(proto)
        if self._limit_per_host and (conns := self._acquired_per_host.get(key)):
            conns.discard(proto)
            if not conns:
                del self._acquired_per_host[key]
        # A slot just freed up: wake one queued waiter, if any.
        self._release_waiter()

    def _release(
        self,
        key: "ConnectionKey",
        protocol: ResponseHandler,
        *,
        should_close: bool = False,
    ) -> None:
        """Return *protocol* to the pool for reuse, or close it."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._release_acquired(key, protocol)

        if self._force_close or should_close or protocol.should_close:
            transport = protocol.transport
            protocol.close()

            if key.is_ssl and not self._cleanup_closed_disabled:
                self._cleanup_closed_transports.append(transport)
            return

        self._conns[key].append((protocol, monotonic()))

        if self._cleanup_handle is None:
            # Schedule the keep-alive sweep now that the pool is non-empty.
            self._cleanup_handle = helpers.weakref_handle(
                self,
                "_cleanup",
                self._keepalive_timeout,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Establish a new protocol for *req*; implemented by subclasses."""
        raise NotImplementedError()
732
+
733
+
734
class _DNSCacheTable:
    """Round-robin cache of DNS answers with an optional time-to-live."""

    def __init__(self, ttl: Optional[float] = None) -> None:
        # key -> (endless round-robin iterator over the answers, answer count)
        self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {}
        # key -> monotonic() insertion time; only maintained when a TTL is set.
        self._timestamps: Dict[Tuple[str, int], float] = {}
        self._ttl = ttl

    def __contains__(self, host: object) -> bool:
        return host in self._addrs_rr

    def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None:
        """Store (or replace) the resolved addresses for *key*."""
        self._addrs_rr[key] = (cycle(addrs), len(addrs))
        if self._ttl is not None:
            self._timestamps[key] = monotonic()

    def remove(self, key: Tuple[str, int]) -> None:
        """Forget *key*; unknown keys are ignored."""
        self._addrs_rr.pop(key, None)
        if self._ttl is not None:
            self._timestamps.pop(key, None)

    def clear(self) -> None:
        """Drop every cached entry."""
        self._addrs_rr.clear()
        self._timestamps.clear()

    def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]:
        """Return all addresses for *key*, rotated by one on every call."""
        rr, count = self._addrs_rr[key]
        addrs = [next(rr) for _ in range(count)]
        # Advance the iterator one extra step so the next call starts with
        # a different address (simple round-robin load spreading).
        next(rr)
        return addrs

    def expired(self, key: Tuple[str, int]) -> bool:
        """True when *key* was added longer than the TTL ago."""
        if self._ttl is None:
            return False
        return self._timestamps[key] + self._ttl < monotonic()
771
+
772
+
773
+ def _make_ssl_context(verified: bool) -> SSLContext:
774
+ """Create SSL context.
775
+
776
+ This method is not async-friendly and should be called from a thread
777
+ because it will load certificates from disk and do other blocking I/O.
778
+ """
779
+ if ssl is None:
780
+ # No ssl support
781
+ return None
782
+ if verified:
783
+ sslcontext = ssl.create_default_context()
784
+ else:
785
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
786
+ sslcontext.options |= ssl.OP_NO_SSLv2
787
+ sslcontext.options |= ssl.OP_NO_SSLv3
788
+ sslcontext.check_hostname = False
789
+ sslcontext.verify_mode = ssl.CERT_NONE
790
+ sslcontext.options |= ssl.OP_NO_COMPRESSION
791
+ sslcontext.set_default_verify_paths()
792
+ sslcontext.set_alpn_protocols(("http/1.1",))
793
+ return sslcontext
794
+
795
+
796
# The default SSLContext objects are created at import time
# since they do blocking I/O to load certificates from disk,
# and imports should always be done before the event loop starts
# or in a thread.
# _SSL_CONTEXT_VERIFIED performs full certificate/hostname validation;
# _SSL_CONTEXT_UNVERIFIED skips validation entirely.
_SSL_CONTEXT_VERIFIED = _make_ssl_context(True)
_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False)
802
+
803
+
804
+ class TCPConnector(BaseConnector):
805
+ """TCP connector.
806
+
807
+ verify_ssl - Set to True to check ssl certifications.
808
+ fingerprint - Pass the binary sha256
809
+ digest of the expected certificate in DER format to verify
810
+ that the certificate the server presents matches. See also
811
+ https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning
812
+ resolver - Enable DNS lookups and use this
813
+ resolver
814
+ use_dns_cache - Use memory cache for DNS lookups.
815
+ ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
816
+ family - socket address family
817
+ local_addr - local tuple of (host, port) to bind socket to
818
+
819
+ keepalive_timeout - (optional) Keep-alive timeout.
820
+ force_close - Set to True to force close and do reconnect
821
+ after each request (and between redirects).
822
+ limit - The total number of simultaneous connections.
823
+ limit_per_host - Number of simultaneous connections to one host.
824
+ enable_cleanup_closed - Enables clean-up closed ssl transports.
825
+ Disabled by default.
826
+ happy_eyeballs_delay - This is the “Connection Attempt Delay”
827
+ as defined in RFC 8305. To disable
828
+ the happy eyeballs algorithm, set to None.
829
+ interleave - “First Address Family Count” as defined in RFC 8305
830
+ loop - Optional event loop.
831
+ """
832
+
833
+ allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"})
834
+
835
    def __init__(
        self,
        *,
        verify_ssl: bool = True,
        fingerprint: Optional[bytes] = None,
        use_dns_cache: bool = True,
        ttl_dns_cache: Optional[int] = 10,
        family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC,
        ssl_context: Optional[SSLContext] = None,
        ssl: Union[bool, Fingerprint, SSLContext] = True,
        local_addr: Optional[Tuple[str, int]] = None,
        resolver: Optional[AbstractResolver] = None,
        keepalive_timeout: Union[None, float, object] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
        happy_eyeballs_delay: Optional[float] = 0.25,
        interleave: Optional[int] = None,
    ):
        """Initialize the TCP connector; see the class docstring for arguments."""
        super().__init__(
            keepalive_timeout=keepalive_timeout,
            force_close=force_close,
            limit=limit,
            limit_per_host=limit_per_host,
            enable_cleanup_closed=enable_cleanup_closed,
            loop=loop,
            timeout_ceil_threshold=timeout_ceil_threshold,
        )

        # Collapse the legacy ssl/verify_ssl/ssl_context/fingerprint
        # arguments into a single ssl value.
        self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
        if resolver is None:
            resolver = DefaultResolver(loop=self._loop)
        self._resolver = resolver

        self._use_dns_cache = use_dns_cache
        self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
        # In-flight DNS lookups: (host, port) -> waiter futures of other
        # callers piggy-backing on the same lookup (see _resolve_host).
        self._throttle_dns_futures: Dict[
            Tuple[str, int], Set["asyncio.Future[None]"]
        ] = {}
        self._family = family
        self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr)
        self._happy_eyeballs_delay = happy_eyeballs_delay
        self._interleave = interleave
        # Background DNS resolution tasks, shielded from caller cancellation.
        self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set()
882
+
883
+ def close(self) -> Awaitable[None]:
884
+ """Close all ongoing DNS calls."""
885
+ for fut in chain.from_iterable(self._throttle_dns_futures.values()):
886
+ fut.cancel()
887
+
888
+ for t in self._resolve_host_tasks:
889
+ t.cancel()
890
+
891
+ return super().close()
892
+
893
    @property
    def family(self) -> int:
        """Socket family like AF_INET."""
        return self._family

    @property
    def use_dns_cache(self) -> bool:
        """True if local DNS caching is enabled."""
        return self._use_dns_cache
902
+
903
+ def clear_dns_cache(
904
+ self, host: Optional[str] = None, port: Optional[int] = None
905
+ ) -> None:
906
+ """Remove specified host/port or clear all dns local cache."""
907
+ if host is not None and port is not None:
908
+ self._cached_hosts.remove((host, port))
909
+ elif host is not None or port is not None:
910
+ raise ValueError("either both host and port or none of them are allowed")
911
+ else:
912
+ self._cached_hosts.clear()
913
+
914
    async def _resolve_host(
        self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None
    ) -> List[ResolveResult]:
        """Resolve host and return list of addresses."""
        if is_ip_address(host):
            # Literal IP address: no DNS lookup needed, answer immediately.
            return [
                {
                    "hostname": host,
                    "host": host,
                    "port": port,
                    "family": self._family,
                    "proto": 0,
                    "flags": 0,
                }
            ]

        if not self._use_dns_cache:
            # Caching disabled: do a plain resolver round-trip every time.
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            res = await self._resolver.resolve(host, port, family=self._family)

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            return res

        key = (host, port)
        if key in self._cached_hosts and not self._cached_hosts.expired(key):
            # get result early, before any await (#4014)
            result = self._cached_hosts.next_addrs(key)

            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            return result

        futures: Set["asyncio.Future[None]"]
        #
        # If multiple connectors are resolving the same host, we wait
        # for the first one to resolve and then use the result for all of them.
        # We use a throttle to ensure that we only resolve the host once
        # and then use the result for all the waiters.
        #
        if key in self._throttle_dns_futures:
            # get futures early, before any await (#4014)
            futures = self._throttle_dns_futures[key]
            future: asyncio.Future[None] = self._loop.create_future()
            futures.add(future)
            if traces:
                for trace in traces:
                    await trace.send_dns_cache_hit(host)
            try:
                await future
            finally:
                futures.discard(future)
            return self._cached_hosts.next_addrs(key)

        # update dict early, before any await (#4014)
        self._throttle_dns_futures[key] = futures = set()
        # In this case we need to create a task to ensure that we can shield
        # the task from cancellation as cancelling this lookup should not cancel
        # the underlying lookup or else the cancel event will get broadcast to
        # all the waiters across all connections.
        #
        coro = self._resolve_host_with_throttle(key, host, port, futures, traces)
        loop = asyncio.get_running_loop()
        if sys.version_info >= (3, 12):
            # Optimization for Python 3.12, try to send immediately
            resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True)
        else:
            resolved_host_task = loop.create_task(coro)

        if not resolved_host_task.done():
            self._resolve_host_tasks.add(resolved_host_task)
            resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)

        try:
            return await asyncio.shield(resolved_host_task)
        except asyncio.CancelledError:
            # The shielded task keeps running; consume its eventual failure
            # so it does not surface as "exception was never retrieved".
            def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None:
                with suppress(Exception, asyncio.CancelledError):
                    fut.result()

            resolved_host_task.add_done_callback(drop_exception)
            raise
1004
+
1005
    async def _resolve_host_with_throttle(
        self,
        key: Tuple[str, int],
        host: str,
        port: int,
        futures: Set["asyncio.Future[None]"],
        traces: Optional[Sequence["Trace"]],
    ) -> List[ResolveResult]:
        """Resolve host and set result for all waiters.

        This method must be run in a task and shielded from cancellation
        to avoid cancelling the underlying lookup.
        """
        if traces:
            for trace in traces:
                await trace.send_dns_cache_miss(host)
        try:
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            addrs = await self._resolver.resolve(host, port, family=self._family)
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            # Publish the result to the cache, then wake every waiter that
            # piggy-backed on this lookup.
            self._cached_hosts.add(key, addrs)
            for fut in futures:
                set_result(fut, None)
        except BaseException as e:
            # any DNS exception is set for the waiters to raise the same exception.
            # This coro is always run in task that is shielded from cancellation so
            # we should never be propagating cancellation here.
            for fut in futures:
                set_exception(fut, e)
            raise
        finally:
            # Lookup finished (either way): drop the throttle entry so the
            # next caller starts a fresh resolution.
            self._throttle_dns_futures.pop(key)

        return self._cached_hosts.next_addrs(key)
1045
+
1046
+ async def _create_connection(
1047
+ self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
1048
+ ) -> ResponseHandler:
1049
+ """Create connection.
1050
+
1051
+ Has same keyword arguments as BaseEventLoop.create_connection.
1052
+ """
1053
+ if req.proxy:
1054
+ _, proto = await self._create_proxy_connection(req, traces, timeout)
1055
+ else:
1056
+ _, proto = await self._create_direct_connection(req, traces, timeout)
1057
+
1058
+ return proto
1059
+
1060
+ def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
1061
+ """Logic to get the correct SSL context
1062
+
1063
+ 0. if req.ssl is false, return None
1064
+
1065
+ 1. if ssl_context is specified in req, use it
1066
+ 2. if _ssl_context is specified in self, use it
1067
+ 3. otherwise:
1068
+ 1. if verify_ssl is not specified in req, use self.ssl_context
1069
+ (will generate a default context according to self.verify_ssl)
1070
+ 2. if verify_ssl is True in req, generate a default SSL context
1071
+ 3. if verify_ssl is False in req, generate a SSL context that
1072
+ won't verify
1073
+ """
1074
+ if not req.is_ssl():
1075
+ return None
1076
+
1077
+ if ssl is None: # pragma: no cover
1078
+ raise RuntimeError("SSL is not supported.")
1079
+ sslcontext = req.ssl
1080
+ if isinstance(sslcontext, ssl.SSLContext):
1081
+ return sslcontext
1082
+ if sslcontext is not True:
1083
+ # not verified or fingerprinted
1084
+ return _SSL_CONTEXT_UNVERIFIED
1085
+ sslcontext = self._ssl
1086
+ if isinstance(sslcontext, ssl.SSLContext):
1087
+ return sslcontext
1088
+ if sslcontext is not True:
1089
+ # not verified or fingerprinted
1090
+ return _SSL_CONTEXT_UNVERIFIED
1091
+ return _SSL_CONTEXT_VERIFIED
1092
+
1093
+ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
1094
+ ret = req.ssl
1095
+ if isinstance(ret, Fingerprint):
1096
+ return ret
1097
+ ret = self._ssl
1098
+ if isinstance(ret, Fingerprint):
1099
+ return ret
1100
+ return None
1101
+
1102
    async def _wrap_create_connection(
        self,
        *args: Any,
        addr_infos: List[aiohappyeyeballs.AddrInfoType],
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
        **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Connect via Happy Eyeballs and translate low-level errors.

        ``*args``/``**kwargs`` are forwarded to ``loop.create_connection()``.
        Certificate failures become ``ClientConnectorCertificateError``,
        SSL failures become ``ClientConnectorSSLError``, and other
        ``OSError``s become *client_error*.
        """
        try:
            # Bound the whole connect attempt by the sock_connect timeout.
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                # Happy Eyeballs: race the candidate addresses and keep the
                # first socket that connects successfully.
                sock = await aiohappyeyeballs.start_connection(
                    addr_infos=addr_infos,
                    local_addr_infos=self._local_addr_infos,
                    happy_eyeballs_delay=self._happy_eyeballs_delay,
                    interleave=self._interleave,
                    loop=self._loop,
                )
                return await self._loop.create_connection(*args, **kwargs, sock=sock)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # A timeout may surface here as an errno-less OSError subclass;
            # re-raise it unchanged so callers can tell it apart.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
1131
+
1132
    async def _wrap_existing_connection(
        self,
        *args: Any,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
        **kwargs: Any,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Wrap an already-established socket via ``loop.create_connection()``.

        Same error translation as ``_wrap_create_connection``, but without the
        Happy Eyeballs dialing step — the socket is expected in ``kwargs``.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                return await self._loop.create_connection(*args, **kwargs)
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # A timeout may surface here as an errno-less OSError subclass;
            # re-raise it unchanged so callers can tell it apart.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
1153
+
1154
+ def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
1155
+ """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
1156
+
1157
+ It is necessary for TLS-in-TLS so that it is possible to
1158
+ send HTTPS queries through HTTPS proxies.
1159
+
1160
+ This doesn't affect regular HTTP requests, though.
1161
+ """
1162
+ if not req.is_ssl():
1163
+ return
1164
+
1165
+ proxy_url = req.proxy
1166
+ assert proxy_url is not None
1167
+ if proxy_url.scheme != "https":
1168
+ return
1169
+
1170
+ self._check_loop_for_start_tls()
1171
+
1172
    def _check_loop_for_start_tls(self) -> None:
        """Raise ``RuntimeError`` if the event loop lacks ``start_tls()``.

        ``loop.start_tls()`` is required to upgrade an existing transport to
        TLS (TLS-in-TLS through HTTPS proxies).
        """
        try:
            # Attribute probe only — we never call it here.
            self._loop.start_tls
        except AttributeError as attr_exc:
            raise RuntimeError(
                "An HTTPS request is being sent through an HTTPS proxy. "
                "This needs support for TLS in TLS but it is not implemented "
                "in your runtime for the stdlib asyncio.\n\n"
                "Please upgrade to Python 3.11 or higher. For more details, "
                "please see:\n"
                "* https://bugs.python.org/issue37179\n"
                "* https://github.com/python/cpython/pull/28073\n"
                "* https://docs.aiohttp.org/en/stable/"
                "client_advanced.html#proxy-support\n"
                "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            ) from attr_exc
1188
+
1189
+ def _loop_supports_start_tls(self) -> bool:
1190
+ try:
1191
+ self._check_loop_for_start_tls()
1192
+ except RuntimeError:
1193
+ return False
1194
+ else:
1195
+ return True
1196
+
1197
    def _warn_about_tls_in_tls(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
    ) -> None:
        """Issue a warning if the requested URL has HTTPS scheme."""
        if req.request_info.url.scheme != "https":
            return

        # Transports that can be upgraded in place advertise it via this
        # private flag; absence means start_tls() would likely fail.
        asyncio_supports_tls_in_tls = getattr(
            underlying_transport,
            "_start_tls_compatible",
            False,
        )

        if asyncio_supports_tls_in_tls:
            return

        warnings.warn(
            "An HTTPS request is being sent through an HTTPS proxy. "
            "This support for TLS in TLS is known to be disabled "
            "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
            "an error in the log below.\n\n"
            "It is possible to enable it via monkeypatching. "
            "For more details, see:\n"
            "* https://bugs.python.org/issue37179\n"
            "* https://github.com/python/cpython/pull/28073\n\n"
            "You can temporarily patch this as follows:\n"
            "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
            "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
            RuntimeWarning,
            source=self,
            # stacklevel=3: at least 3 of the calls in the stack originate
            # from the methods in this class, so point the warning at the
            # caller outside of them.
            stacklevel=3,
        )
1233
+
1234
    async def _start_tls_connection(
        self,
        underlying_transport: asyncio.Transport,
        req: ClientRequest,
        timeout: "ClientTimeout",
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Wrap the raw TCP transport with TLS.

        Used for TLS-in-TLS: *underlying_transport* is the already-connected
        proxy transport, which is upgraded in place via ``loop.start_tls()``.
        Returns the new TLS transport and a freshly created protocol.
        """
        tls_proto = self._factory()  # Create a brand new proto for TLS
        sslcontext = self._get_ssl_context(req)
        if TYPE_CHECKING:
            # _start_tls_connection is unreachable in the current code path
            # if sslcontext is None.
            assert sslcontext is not None

        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                try:
                    tls_transport = await self._loop.start_tls(
                        underlying_transport,
                        tls_proto,
                        sslcontext,
                        server_hostname=req.server_hostname or req.host,
                        ssl_handshake_timeout=timeout.total,
                    )
                except BaseException:
                    # We need to close the underlying transport since
                    # `start_tls()` probably failed before it had a
                    # chance to do this:
                    underlying_transport.close()
                    raise
                if isinstance(tls_transport, asyncio.Transport):
                    # Enforce the certificate fingerprint pin, if configured.
                    fingerprint = self._get_fingerprint(req)
                    if fingerprint:
                        try:
                            fingerprint.check(tls_transport)
                        except ServerFingerprintMismatch:
                            tls_transport.close()
                            if not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(tls_transport)
                            raise
        except cert_errors as exc:
            raise ClientConnectorCertificateError(req.connection_key, exc) from exc
        except ssl_errors as exc:
            raise ClientConnectorSSLError(req.connection_key, exc) from exc
        except OSError as exc:
            # Re-raise errno-less timeouts unchanged (see _wrap_create_connection).
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise client_error(req.connection_key, exc) from exc
        except TypeError as type_err:
            # Example cause looks like this:
            # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
            # object at 0x7f760615e460> is not supported by start_tls()

            raise ClientConnectionError(
                "Cannot initialize a TLS-in-TLS connection to host "
                f"{req.host!s}:{req.port:d} through an underlying connection "
                f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
                f"[{type_err!s}]"
            ) from type_err
        else:
            if tls_transport is None:
                # start_tls() returns None when the transport was closed
                # during the handshake.
                msg = "Failed to start TLS (possibly caused by closing transport)"
                raise client_error(req.connection_key, OSError(msg))
            tls_proto.connection_made(
                tls_transport
            )  # Kick the state machine of the new TLS protocol

        return tls_transport, tls_proto
1305
+
1306
+ def _convert_hosts_to_addr_infos(
1307
+ self, hosts: List[ResolveResult]
1308
+ ) -> List[aiohappyeyeballs.AddrInfoType]:
1309
+ """Converts the list of hosts to a list of addr_infos.
1310
+
1311
+ The list of hosts is the result of a DNS lookup. The list of
1312
+ addr_infos is the result of a call to `socket.getaddrinfo()`.
1313
+ """
1314
+ addr_infos: List[aiohappyeyeballs.AddrInfoType] = []
1315
+ for hinfo in hosts:
1316
+ host = hinfo["host"]
1317
+ is_ipv6 = ":" in host
1318
+ family = socket.AF_INET6 if is_ipv6 else socket.AF_INET
1319
+ if self._family and self._family != family:
1320
+ continue
1321
+ addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"])
1322
+ addr_infos.append(
1323
+ (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)
1324
+ )
1325
+ return addr_infos
1326
+
1327
    async def _create_direct_connection(
        self,
        req: ClientRequest,
        traces: List["Trace"],
        timeout: "ClientTimeout",
        *,
        client_error: Type[Exception] = ClientConnectorError,
    ) -> Tuple[asyncio.Transport, ResponseHandler]:
        """Resolve the target host and connect, retrying over the addresses.

        Each failed address (connect error, timeout, or fingerprint mismatch)
        is dropped from the candidate list; the last failure is re-raised when
        no candidate is left.
        """
        sslcontext = self._get_ssl_context(req)
        fingerprint = self._get_fingerprint(req)

        host = req.url.raw_host
        assert host is not None
        # Replace multiple trailing dots with a single one.
        # A trailing dot is only present for fully-qualified domain names.
        # See https://github.com/aio-libs/aiohttp/pull/7364.
        if host.endswith(".."):
            host = host.rstrip(".") + "."
        port = req.port
        assert port is not None
        try:
            # Cancelling this lookup should not cancel the underlying lookup
            # or else the cancel event will get broadcast to all the waiters
            # across all connections.
            hosts = await self._resolve_host(host, port, traces=traces)
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            # in case of proxy it is not ClientProxyConnectionError
            # it is problem of resolving proxy ip itself
            raise ClientConnectorDNSError(req.connection_key, exc) from exc

        last_exc: Optional[Exception] = None
        addr_infos = self._convert_hosts_to_addr_infos(hosts)
        while addr_infos:
            # Strip trailing dots, certificates contain FQDN without dots.
            # See https://github.com/aio-libs/aiohttp/issues/3636
            server_hostname = (
                (req.server_hostname or host).rstrip(".") if sslcontext else None
            )

            try:
                transp, proto = await self._wrap_create_connection(
                    self._factory,
                    timeout=timeout,
                    ssl=sslcontext,
                    addr_infos=addr_infos,
                    server_hostname=server_hostname,
                    req=req,
                    client_error=client_error,
                )
            except (ClientConnectorError, asyncio.TimeoutError) as exc:
                last_exc = exc
                # Drop the just-tried address(es) and retry with the rest.
                aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
                continue

            if req.is_ssl() and fingerprint:
                try:
                    fingerprint.check(transp)
                except ServerFingerprintMismatch as exc:
                    transp.close()
                    if not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transp)
                    last_exc = exc
                    # Remove the bad peer from the list of addr_infos
                    sock: socket.socket = transp.get_extra_info("socket")
                    bad_peer = sock.getpeername()
                    aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
                    continue

            return transp, proto
        else:
            # Candidate list exhausted — every address failed.
            assert last_exc is not None
            raise last_exc
1401
+
1402
    async def _create_proxy_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
        """Connect to the target through the configured proxy.

        For plain HTTP requests the proxy connection itself is returned.
        For HTTPS requests a CONNECT tunnel is established first and the
        connection is then upgraded to TLS (via ``start_tls()`` when the
        loop supports it, otherwise by duplicating the raw socket).
        """
        self._fail_on_no_start_tls(req)
        runtime_has_start_tls = self._loop_supports_start_tls()

        headers: Dict[str, str] = {}
        if req.proxy_headers is not None:
            headers = req.proxy_headers  # type: ignore[assignment]
        headers[hdrs.HOST] = req.headers[hdrs.HOST]

        url = req.proxy
        assert url is not None
        proxy_req = ClientRequest(
            hdrs.METH_GET,
            url,
            headers=headers,
            auth=req.proxy_auth,
            loop=self._loop,
            ssl=req.ssl,
        )

        # create connection to proxy server
        transport, proto = await self._create_direct_connection(
            proxy_req, [], timeout, client_error=ClientProxyConnectionError
        )

        # Move the proxy credentials onto the right request/header.
        auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
        if auth is not None:
            if not req.is_ssl():
                req.headers[hdrs.PROXY_AUTHORIZATION] = auth
            else:
                proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

        if req.is_ssl():
            if runtime_has_start_tls:
                self._warn_about_tls_in_tls(transport, req)

            # For HTTPS requests over HTTP proxy
            # we must notify proxy to tunnel connection
            # so we send CONNECT command:
            # CONNECT www.python.org:443 HTTP/1.1
            # Host: www.python.org
            #
            # next we must do TLS handshake and so on
            # to do this we must wrap raw socket into secure one
            # asyncio handles this perfectly
            proxy_req.method = hdrs.METH_CONNECT
            proxy_req.url = req.url
            key = req.connection_key._replace(
                proxy=None, proxy_auth=None, proxy_headers_hash=None
            )
            conn = Connection(self, key, proto, self._loop)
            proxy_resp = await proxy_req.send(conn)
            try:
                protocol = conn._protocol
                assert protocol is not None

                # read_until_eof=True will ensure the connection isn't closed
                # once the response is received and processed allowing
                # START_TLS to work on the connection below.
                protocol.set_response_params(
                    read_until_eof=runtime_has_start_tls,
                    timeout_ceil_threshold=self._timeout_ceil_threshold,
                )
                resp = await proxy_resp.start(conn)
            except BaseException:
                proxy_resp.close()
                conn.close()
                raise
            else:
                # Detach the protocol so closing `conn` won't tear it down.
                conn._protocol = None
                try:
                    if resp.status != 200:
                        message = resp.reason
                        if message is None:
                            message = HTTPStatus(resp.status).phrase
                        raise ClientHttpProxyError(
                            proxy_resp.request_info,
                            resp.history,
                            status=resp.status,
                            message=message,
                            headers=resp.headers,
                        )
                    if not runtime_has_start_tls:
                        rawsock = transport.get_extra_info("socket", default=None)
                        if rawsock is None:
                            raise RuntimeError(
                                "Transport does not expose socket instance"
                            )
                        # Duplicate the socket, so now we can close proxy transport
                        rawsock = rawsock.dup()
                except BaseException:
                    # It shouldn't be closed in `finally` because it's fed to
                    # `loop.start_tls()` and the docs say not to touch it after
                    # passing there.
                    transport.close()
                    raise
                finally:
                    if not runtime_has_start_tls:
                        transport.close()

                if not runtime_has_start_tls:
                    # HTTP proxy with support for upgrade to HTTPS
                    sslcontext = self._get_ssl_context(req)
                    return await self._wrap_existing_connection(
                        self._factory,
                        timeout=timeout,
                        ssl=sslcontext,
                        sock=rawsock,
                        server_hostname=req.host,
                        req=req,
                    )

                return await self._start_tls_connection(
                    # Access the old transport for the last time before it's
                    # closed and forgotten forever:
                    transport,
                    req=req,
                    timeout=timeout,
                )
            finally:
                proxy_resp.close()

        return transport, proto
1527
+
1528
+
1529
class UnixConnector(BaseConnector):
    """Unix socket connector.

    path - Unix socket path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    # "unix" URLs are acceptable in addition to the regular HTTP(S) schemes.
    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"})

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # Filesystem path of the Unix domain socket to connect to.
        self._path = path

    @property
    def path(self) -> str:
        """Path to unix socket."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Open a connection over the Unix socket and return its protocol.

        Raises ``UnixClientConnectorError`` for OS-level connect failures.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_unix_connection(
                    self._factory, self._path
                )
        except OSError as exc:
            # Let errno-less timeouts propagate unchanged.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc

        return proto
1582
+
1583
+
1584
class NamedPipeConnector(BaseConnector):
    """Named pipe connector.

    Only supported by the proactor event loop.
    See also: https://docs.python.org/3/library/asyncio-eventloop.html

    path - Windows named pipe path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    # "npipe" URLs are acceptable in addition to the regular HTTP(S) schemes.
    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # create_pipe_connection() only exists on the Windows proactor loop.
        if not isinstance(
            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        # Windows named-pipe path (e.g. ``\\\\.\\pipe\\name``).
        self._path = path

    @property
    def path(self) -> str:
        """Path to the named pipe."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Open a connection over the named pipe and return its protocol.

        Raises ``ClientConnectorError`` for OS-level connect failures.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]
                    self._factory, self._path
                )
                # the drain is required so that the connection_made is called
                # and transport is set otherwise it is not set before the
                # `assert conn.transport is not None`
                # in client.py's _request method
                await asyncio.sleep(0)
                # other option is to manually set transport like
                # `proto.transport = trans`
        except OSError as exc:
            # Let errno-less timeouts propagate unchanged.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise ClientConnectorError(req.connection_key, exc) from exc

        return cast(ResponseHandler, proto)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/hdrs.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""HTTP Headers constants."""

# After changing the file content call ./tools/gen.py
# to regenerate the headers parser
import itertools
from typing import Final, Set

from multidict import istr

# HTTP request method constants.
METH_ANY: Final[str] = "*"
METH_CONNECT: Final[str] = "CONNECT"
METH_HEAD: Final[str] = "HEAD"
METH_GET: Final[str] = "GET"
METH_DELETE: Final[str] = "DELETE"
METH_OPTIONS: Final[str] = "OPTIONS"
METH_PATCH: Final[str] = "PATCH"
METH_POST: Final[str] = "POST"
METH_PUT: Final[str] = "PUT"
METH_TRACE: Final[str] = "TRACE"

# All concrete methods (excludes the METH_ANY wildcard).
METH_ALL: Final[Set[str]] = {
    METH_CONNECT,
    METH_HEAD,
    METH_GET,
    METH_DELETE,
    METH_OPTIONS,
    METH_PATCH,
    METH_POST,
    METH_PUT,
    METH_TRACE,
}

# Header name constants as case-insensitive strings (multidict.istr).
ACCEPT: Final[istr] = istr("Accept")
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
AGE: Final[istr] = istr("Age")
ALLOW: Final[istr] = istr("Allow")
AUTHORIZATION: Final[istr] = istr("Authorization")
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
CONNECTION: Final[istr] = istr("Connection")
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
CONTENT_MD5: Final[istr] = istr("Content-MD5")
CONTENT_RANGE: Final[istr] = istr("Content-Range")
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
CONTENT_TYPE: Final[istr] = istr("Content-Type")
COOKIE: Final[istr] = istr("Cookie")
DATE: Final[istr] = istr("Date")
DESTINATION: Final[istr] = istr("Destination")
DIGEST: Final[istr] = istr("Digest")
ETAG: Final[istr] = istr("Etag")
EXPECT: Final[istr] = istr("Expect")
EXPIRES: Final[istr] = istr("Expires")
FORWARDED: Final[istr] = istr("Forwarded")
FROM: Final[istr] = istr("From")
HOST: Final[istr] = istr("Host")
IF_MATCH: Final[istr] = istr("If-Match")
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
IF_RANGE: Final[istr] = istr("If-Range")
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
LINK: Final[istr] = istr("Link")
LOCATION: Final[istr] = istr("Location")
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
ORIGIN: Final[istr] = istr("Origin")
PRAGMA: Final[istr] = istr("Pragma")
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
RANGE: Final[istr] = istr("Range")
REFERER: Final[istr] = istr("Referer")
RETRY_AFTER: Final[istr] = istr("Retry-After")
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
SERVER: Final[istr] = istr("Server")
SET_COOKIE: Final[istr] = istr("Set-Cookie")
TE: Final[istr] = istr("TE")
TRAILER: Final[istr] = istr("Trailer")
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
UPGRADE: Final[istr] = istr("Upgrade")
URI: Final[istr] = istr("URI")
USER_AGENT: Final[istr] = istr("User-Agent")
VARY: Final[istr] = istr("Vary")
VIA: Final[istr] = istr("Via")
WANT_DIGEST: Final[istr] = istr("Want-Digest")
WARNING: Final[istr] = istr("Warning")
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")

# These are the upper/lower case variants of the headers/methods
# Example: {'hOst', 'host', 'HoST', 'HOSt', 'hOsT', 'HosT', 'hoSt', ...}
METH_HEAD_ALL: Final = frozenset(
    map("".join, itertools.product(*zip(METH_HEAD.upper(), METH_HEAD.lower())))
)
METH_CONNECT_ALL: Final = frozenset(
    map("".join, itertools.product(*zip(METH_CONNECT.upper(), METH_CONNECT.lower())))
)
HOST_ALL: Final = frozenset(
    map("".join, itertools.product(*zip(HOST.upper(), HOST.lower())))
)
evalkit_cambrian/lib/python3.10/site-packages/aiohttp/http_websocket.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""WebSocket protocol versions 13 and 8."""

# This module is a re-export shim: the implementation lives in the
# ``_websocket`` subpackage; the public names are collected here.
from ._websocket.helpers import WS_KEY, ws_ext_gen, ws_ext_parse
from ._websocket.models import (
    WS_CLOSED_MESSAGE,
    WS_CLOSING_MESSAGE,
    WebSocketError,
    WSCloseCode,
    WSHandshakeError,
    WSMessage,
    WSMsgType,
)
from ._websocket.reader import WebSocketReader
from ._websocket.writer import WebSocketWriter

# Messages that the WebSocketResponse.receive needs to handle internally
_INTERNAL_RECEIVE_TYPES = frozenset(
    (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG)
)


__all__ = (
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSHandshakeError",
    "WSMessage",
)
+ )