koichi12 commited on
Commit
fbed997
·
verified ·
1 Parent(s): 1b313ad

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. .gitattributes +3 -0
  2. .venv/lib/python3.11/site-packages/aiohttp/.hash/_cparser.pxd.hash +1 -0
  3. .venv/lib/python3.11/site-packages/aiohttp/.hash/_find_header.pxd.hash +1 -0
  4. .venv/lib/python3.11/site-packages/aiohttp/.hash/_http_parser.pyx.hash +1 -0
  5. .venv/lib/python3.11/site-packages/aiohttp/.hash/_http_writer.pyx.hash +1 -0
  6. .venv/lib/python3.11/site-packages/aiohttp/.hash/hdrs.py.hash +1 -0
  7. .venv/lib/python3.11/site-packages/aiohttp/__init__.py +264 -0
  8. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc +0 -0
  9. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc +0 -0
  10. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc +0 -0
  11. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc +0 -0
  12. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc +0 -0
  13. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc +0 -0
  14. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc +0 -0
  15. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc +0 -0
  16. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc +0 -0
  17. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_app.cpython-311.pyc +0 -0
  18. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-311.pyc +0 -0
  19. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_log.cpython-311.pyc +0 -0
  20. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_response.cpython-311.pyc +0 -0
  21. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_runner.cpython-311.pyc +0 -0
  22. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-311.pyc +0 -0
  23. .venv/lib/python3.11/site-packages/aiohttp/__pycache__/worker.cpython-311.pyc +0 -0
  24. .venv/lib/python3.11/site-packages/aiohttp/_cparser.pxd +158 -0
  25. .venv/lib/python3.11/site-packages/aiohttp/_http_parser.cpython-311-x86_64-linux-gnu.so +3 -0
  26. .venv/lib/python3.11/site-packages/aiohttp/_http_parser.pyx +837 -0
  27. .venv/lib/python3.11/site-packages/aiohttp/_http_writer.cpython-311-x86_64-linux-gnu.so +3 -0
  28. .venv/lib/python3.11/site-packages/aiohttp/_http_writer.pyx +162 -0
  29. .venv/lib/python3.11/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash +1 -0
  30. .venv/lib/python3.11/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash +1 -0
  31. .venv/lib/python3.11/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash +1 -0
  32. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__init__.py +1 -0
  33. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/__init__.cpython-311.pyc +0 -0
  34. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/helpers.cpython-311.pyc +0 -0
  35. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/models.cpython-311.pyc +0 -0
  36. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/reader.cpython-311.pyc +0 -0
  37. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/reader_c.cpython-311.pyc +0 -0
  38. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-311.pyc +0 -0
  39. .venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/writer.cpython-311.pyc +0 -0
  40. .venv/lib/python3.11/site-packages/aiohttp/_websocket/helpers.py +147 -0
  41. .venv/lib/python3.11/site-packages/aiohttp/_websocket/mask.pxd +3 -0
  42. .venv/lib/python3.11/site-packages/aiohttp/_websocket/mask.pyx +48 -0
  43. .venv/lib/python3.11/site-packages/aiohttp/_websocket/models.py +84 -0
  44. .venv/lib/python3.11/site-packages/aiohttp/_websocket/reader.py +31 -0
  45. .venv/lib/python3.11/site-packages/aiohttp/_websocket/reader_c.pxd +102 -0
  46. .venv/lib/python3.11/site-packages/aiohttp/_websocket/reader_c.py +468 -0
  47. .venv/lib/python3.11/site-packages/aiohttp/_websocket/reader_py.py +468 -0
  48. .venv/lib/python3.11/site-packages/aiohttp/_websocket/writer.py +177 -0
  49. .venv/lib/python3.11/site-packages/aiohttp/base_protocol.py +100 -0
  50. .venv/lib/python3.11/site-packages/aiohttp/client.py +1550 -0
.gitattributes CHANGED
@@ -401,3 +401,6 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cudnn/lib/
401
  .venv/lib/python3.11/site-packages/mistral_common/data/tekken_240718.json filter=lfs diff=lfs merge=lfs -text
402
  .venv/lib/python3.11/site-packages/torchgen/__pycache__/gen.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
403
  .venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
 
 
 
 
401
  .venv/lib/python3.11/site-packages/mistral_common/data/tekken_240718.json filter=lfs diff=lfs merge=lfs -text
402
  .venv/lib/python3.11/site-packages/torchgen/__pycache__/gen.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
403
  .venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
404
+ .venv/lib/python3.11/site-packages/aiohttp/_http_writer.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
405
+ .venv/lib/python3.11/site-packages/aiohttp/_http_parser.cpython-311-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
406
+ .venv/lib/python3.11/site-packages/mpmath/tests/__pycache__/test_functions2.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
.venv/lib/python3.11/site-packages/aiohttp/.hash/_cparser.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ f2318883e549f69de597009a914603b0f1b10381e265ef5d98af499ad973fb98 /home/runner/work/aiohttp/aiohttp/aiohttp/_cparser.pxd
.venv/lib/python3.11/site-packages/aiohttp/.hash/_find_header.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ d067f01423cddb3c442933b5fcc039b18ab651fcec1bc91c577693aafc25cf78 /home/runner/work/aiohttp/aiohttp/aiohttp/_find_header.pxd
.venv/lib/python3.11/site-packages/aiohttp/.hash/_http_parser.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ c107400e3e4b8b3c02ffb9c51abf2722593a1a9a1a41e434df9f47d0730a1ae3 /home/runner/work/aiohttp/aiohttp/aiohttp/_http_parser.pyx
.venv/lib/python3.11/site-packages/aiohttp/.hash/_http_writer.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 7e209c93f1158118935fb56d028576025763b9eb093053debf84d677d171f23a /home/runner/work/aiohttp/aiohttp/aiohttp/_http_writer.pyx
.venv/lib/python3.11/site-packages/aiohttp/.hash/hdrs.py.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ dab8f933203eeb245d60f856e542a45b888d5a110094620e4811f90f816628d1 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py
.venv/lib/python3.11/site-packages/aiohttp/__init__.py ADDED
@@ -0,0 +1,264 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ __version__ = "3.11.12"
2
+
3
+ from typing import TYPE_CHECKING, Tuple
4
+
5
+ from . import hdrs as hdrs
6
+ from .client import (
7
+ BaseConnector,
8
+ ClientConnectionError,
9
+ ClientConnectionResetError,
10
+ ClientConnectorCertificateError,
11
+ ClientConnectorDNSError,
12
+ ClientConnectorError,
13
+ ClientConnectorSSLError,
14
+ ClientError,
15
+ ClientHttpProxyError,
16
+ ClientOSError,
17
+ ClientPayloadError,
18
+ ClientProxyConnectionError,
19
+ ClientRequest,
20
+ ClientResponse,
21
+ ClientResponseError,
22
+ ClientSession,
23
+ ClientSSLError,
24
+ ClientTimeout,
25
+ ClientWebSocketResponse,
26
+ ClientWSTimeout,
27
+ ConnectionTimeoutError,
28
+ ContentTypeError,
29
+ Fingerprint,
30
+ InvalidURL,
31
+ InvalidUrlClientError,
32
+ InvalidUrlRedirectClientError,
33
+ NamedPipeConnector,
34
+ NonHttpUrlClientError,
35
+ NonHttpUrlRedirectClientError,
36
+ RedirectClientError,
37
+ RequestInfo,
38
+ ServerConnectionError,
39
+ ServerDisconnectedError,
40
+ ServerFingerprintMismatch,
41
+ ServerTimeoutError,
42
+ SocketTimeoutError,
43
+ TCPConnector,
44
+ TooManyRedirects,
45
+ UnixConnector,
46
+ WSMessageTypeError,
47
+ WSServerHandshakeError,
48
+ request,
49
+ )
50
+ from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
51
+ from .formdata import FormData as FormData
52
+ from .helpers import BasicAuth, ChainMapProxy, ETag
53
+ from .http import (
54
+ HttpVersion as HttpVersion,
55
+ HttpVersion10 as HttpVersion10,
56
+ HttpVersion11 as HttpVersion11,
57
+ WebSocketError as WebSocketError,
58
+ WSCloseCode as WSCloseCode,
59
+ WSMessage as WSMessage,
60
+ WSMsgType as WSMsgType,
61
+ )
62
+ from .multipart import (
63
+ BadContentDispositionHeader as BadContentDispositionHeader,
64
+ BadContentDispositionParam as BadContentDispositionParam,
65
+ BodyPartReader as BodyPartReader,
66
+ MultipartReader as MultipartReader,
67
+ MultipartWriter as MultipartWriter,
68
+ content_disposition_filename as content_disposition_filename,
69
+ parse_content_disposition as parse_content_disposition,
70
+ )
71
+ from .payload import (
72
+ PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
73
+ AsyncIterablePayload as AsyncIterablePayload,
74
+ BufferedReaderPayload as BufferedReaderPayload,
75
+ BytesIOPayload as BytesIOPayload,
76
+ BytesPayload as BytesPayload,
77
+ IOBasePayload as IOBasePayload,
78
+ JsonPayload as JsonPayload,
79
+ Payload as Payload,
80
+ StringIOPayload as StringIOPayload,
81
+ StringPayload as StringPayload,
82
+ TextIOPayload as TextIOPayload,
83
+ get_payload as get_payload,
84
+ payload_type as payload_type,
85
+ )
86
+ from .payload_streamer import streamer as streamer
87
+ from .resolver import (
88
+ AsyncResolver as AsyncResolver,
89
+ DefaultResolver as DefaultResolver,
90
+ ThreadedResolver as ThreadedResolver,
91
+ )
92
+ from .streams import (
93
+ EMPTY_PAYLOAD as EMPTY_PAYLOAD,
94
+ DataQueue as DataQueue,
95
+ EofStream as EofStream,
96
+ FlowControlDataQueue as FlowControlDataQueue,
97
+ StreamReader as StreamReader,
98
+ )
99
+ from .tracing import (
100
+ TraceConfig as TraceConfig,
101
+ TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
102
+ TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
103
+ TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
104
+ TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
105
+ TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
106
+ TraceDnsCacheHitParams as TraceDnsCacheHitParams,
107
+ TraceDnsCacheMissParams as TraceDnsCacheMissParams,
108
+ TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
109
+ TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
110
+ TraceRequestChunkSentParams as TraceRequestChunkSentParams,
111
+ TraceRequestEndParams as TraceRequestEndParams,
112
+ TraceRequestExceptionParams as TraceRequestExceptionParams,
113
+ TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
114
+ TraceRequestRedirectParams as TraceRequestRedirectParams,
115
+ TraceRequestStartParams as TraceRequestStartParams,
116
+ TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
117
+ )
118
+
119
+ if TYPE_CHECKING:
120
+ # At runtime these are lazy-loaded at the bottom of the file.
121
+ from .worker import (
122
+ GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
123
+ GunicornWebWorker as GunicornWebWorker,
124
+ )
125
+
126
+ __all__: Tuple[str, ...] = (
127
+ "hdrs",
128
+ # client
129
+ "BaseConnector",
130
+ "ClientConnectionError",
131
+ "ClientConnectionResetError",
132
+ "ClientConnectorCertificateError",
133
+ "ClientConnectorDNSError",
134
+ "ClientConnectorError",
135
+ "ClientConnectorSSLError",
136
+ "ClientError",
137
+ "ClientHttpProxyError",
138
+ "ClientOSError",
139
+ "ClientPayloadError",
140
+ "ClientProxyConnectionError",
141
+ "ClientResponse",
142
+ "ClientRequest",
143
+ "ClientResponseError",
144
+ "ClientSSLError",
145
+ "ClientSession",
146
+ "ClientTimeout",
147
+ "ClientWebSocketResponse",
148
+ "ClientWSTimeout",
149
+ "ConnectionTimeoutError",
150
+ "ContentTypeError",
151
+ "Fingerprint",
152
+ "FlowControlDataQueue",
153
+ "InvalidURL",
154
+ "InvalidUrlClientError",
155
+ "InvalidUrlRedirectClientError",
156
+ "NonHttpUrlClientError",
157
+ "NonHttpUrlRedirectClientError",
158
+ "RedirectClientError",
159
+ "RequestInfo",
160
+ "ServerConnectionError",
161
+ "ServerDisconnectedError",
162
+ "ServerFingerprintMismatch",
163
+ "ServerTimeoutError",
164
+ "SocketTimeoutError",
165
+ "TCPConnector",
166
+ "TooManyRedirects",
167
+ "UnixConnector",
168
+ "NamedPipeConnector",
169
+ "WSServerHandshakeError",
170
+ "request",
171
+ # cookiejar
172
+ "CookieJar",
173
+ "DummyCookieJar",
174
+ # formdata
175
+ "FormData",
176
+ # helpers
177
+ "BasicAuth",
178
+ "ChainMapProxy",
179
+ "ETag",
180
+ # http
181
+ "HttpVersion",
182
+ "HttpVersion10",
183
+ "HttpVersion11",
184
+ "WSMsgType",
185
+ "WSCloseCode",
186
+ "WSMessage",
187
+ "WebSocketError",
188
+ # multipart
189
+ "BadContentDispositionHeader",
190
+ "BadContentDispositionParam",
191
+ "BodyPartReader",
192
+ "MultipartReader",
193
+ "MultipartWriter",
194
+ "content_disposition_filename",
195
+ "parse_content_disposition",
196
+ # payload
197
+ "AsyncIterablePayload",
198
+ "BufferedReaderPayload",
199
+ "BytesIOPayload",
200
+ "BytesPayload",
201
+ "IOBasePayload",
202
+ "JsonPayload",
203
+ "PAYLOAD_REGISTRY",
204
+ "Payload",
205
+ "StringIOPayload",
206
+ "StringPayload",
207
+ "TextIOPayload",
208
+ "get_payload",
209
+ "payload_type",
210
+ # payload_streamer
211
+ "streamer",
212
+ # resolver
213
+ "AsyncResolver",
214
+ "DefaultResolver",
215
+ "ThreadedResolver",
216
+ # streams
217
+ "DataQueue",
218
+ "EMPTY_PAYLOAD",
219
+ "EofStream",
220
+ "StreamReader",
221
+ # tracing
222
+ "TraceConfig",
223
+ "TraceConnectionCreateEndParams",
224
+ "TraceConnectionCreateStartParams",
225
+ "TraceConnectionQueuedEndParams",
226
+ "TraceConnectionQueuedStartParams",
227
+ "TraceConnectionReuseconnParams",
228
+ "TraceDnsCacheHitParams",
229
+ "TraceDnsCacheMissParams",
230
+ "TraceDnsResolveHostEndParams",
231
+ "TraceDnsResolveHostStartParams",
232
+ "TraceRequestChunkSentParams",
233
+ "TraceRequestEndParams",
234
+ "TraceRequestExceptionParams",
235
+ "TraceRequestHeadersSentParams",
236
+ "TraceRequestRedirectParams",
237
+ "TraceRequestStartParams",
238
+ "TraceResponseChunkReceivedParams",
239
+ # workers (imported lazily with __getattr__)
240
+ "GunicornUVLoopWebWorker",
241
+ "GunicornWebWorker",
242
+ "WSMessageTypeError",
243
+ )
244
+
245
+
246
+ def __dir__() -> Tuple[str, ...]:
247
+ return __all__ + ("__doc__",)
248
+
249
+
250
+ def __getattr__(name: str) -> object:
251
+ global GunicornUVLoopWebWorker, GunicornWebWorker
252
+
253
+ # Importing gunicorn takes a long time (>100ms), so only import if actually needed.
254
+ if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
255
+ try:
256
+ from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
257
+ except ImportError:
258
+ return None
259
+
260
+ GunicornUVLoopWebWorker = guv # type: ignore[misc]
261
+ GunicornWebWorker = gw # type: ignore[misc]
262
+ return guv if name == "GunicornUVLoopWebWorker" else gw
263
+
264
+ raise AttributeError(f"module {__name__} has no attribute {name}")
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_exceptions.cpython-311.pyc ADDED
Binary file (20 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_proto.cpython-311.pyc ADDED
Binary file (12.9 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_reqrep.cpython-311.pyc ADDED
Binary file (59.6 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/client_ws.cpython-311.pyc ADDED
Binary file (23.4 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/connector.cpython-311.pyc ADDED
Binary file (73.4 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/http_websocket.cpython-311.pyc ADDED
Binary file (1.06 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/multipart.cpython-311.pyc ADDED
Binary file (54.1 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/payload_streamer.cpython-311.pyc ADDED
Binary file (5.01 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/tcp_helpers.cpython-311.pyc ADDED
Binary file (2 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_app.cpython-311.pyc ADDED
Binary file (30.3 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_fileresponse.cpython-311.pyc ADDED
Binary file (18.6 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_log.cpython-311.pyc ADDED
Binary file (11.6 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_response.cpython-311.pyc ADDED
Binary file (40.6 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_runner.cpython-311.pyc ADDED
Binary file (20.8 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/web_urldispatcher.cpython-311.pyc ADDED
Binary file (75.7 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/__pycache__/worker.cpython-311.pyc ADDED
Binary file (12.2 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_cparser.pxd ADDED
@@ -0,0 +1,158 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
2
+
3
+
4
+ cdef extern from "../vendor/llhttp/build/llhttp.h":
5
+
6
+ struct llhttp__internal_s:
7
+ int32_t _index
8
+ void* _span_pos0
9
+ void* _span_cb0
10
+ int32_t error
11
+ const char* reason
12
+ const char* error_pos
13
+ void* data
14
+ void* _current
15
+ uint64_t content_length
16
+ uint8_t type
17
+ uint8_t method
18
+ uint8_t http_major
19
+ uint8_t http_minor
20
+ uint8_t header_state
21
+ uint8_t lenient_flags
22
+ uint8_t upgrade
23
+ uint8_t finish
24
+ uint16_t flags
25
+ uint16_t status_code
26
+ void* settings
27
+
28
+ ctypedef llhttp__internal_s llhttp__internal_t
29
+ ctypedef llhttp__internal_t llhttp_t
30
+
31
+ ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
32
+ ctypedef int (*llhttp_cb)(llhttp_t*) except -1
33
+
34
+ struct llhttp_settings_s:
35
+ llhttp_cb on_message_begin
36
+ llhttp_data_cb on_url
37
+ llhttp_data_cb on_status
38
+ llhttp_data_cb on_header_field
39
+ llhttp_data_cb on_header_value
40
+ llhttp_cb on_headers_complete
41
+ llhttp_data_cb on_body
42
+ llhttp_cb on_message_complete
43
+ llhttp_cb on_chunk_header
44
+ llhttp_cb on_chunk_complete
45
+
46
+ llhttp_cb on_url_complete
47
+ llhttp_cb on_status_complete
48
+ llhttp_cb on_header_field_complete
49
+ llhttp_cb on_header_value_complete
50
+
51
+ ctypedef llhttp_settings_s llhttp_settings_t
52
+
53
+ enum llhttp_errno:
54
+ HPE_OK,
55
+ HPE_INTERNAL,
56
+ HPE_STRICT,
57
+ HPE_LF_EXPECTED,
58
+ HPE_UNEXPECTED_CONTENT_LENGTH,
59
+ HPE_CLOSED_CONNECTION,
60
+ HPE_INVALID_METHOD,
61
+ HPE_INVALID_URL,
62
+ HPE_INVALID_CONSTANT,
63
+ HPE_INVALID_VERSION,
64
+ HPE_INVALID_HEADER_TOKEN,
65
+ HPE_INVALID_CONTENT_LENGTH,
66
+ HPE_INVALID_CHUNK_SIZE,
67
+ HPE_INVALID_STATUS,
68
+ HPE_INVALID_EOF_STATE,
69
+ HPE_INVALID_TRANSFER_ENCODING,
70
+ HPE_CB_MESSAGE_BEGIN,
71
+ HPE_CB_HEADERS_COMPLETE,
72
+ HPE_CB_MESSAGE_COMPLETE,
73
+ HPE_CB_CHUNK_HEADER,
74
+ HPE_CB_CHUNK_COMPLETE,
75
+ HPE_PAUSED,
76
+ HPE_PAUSED_UPGRADE,
77
+ HPE_USER
78
+
79
+ ctypedef llhttp_errno llhttp_errno_t
80
+
81
+ enum llhttp_flags:
82
+ F_CHUNKED,
83
+ F_CONTENT_LENGTH
84
+
85
+ enum llhttp_type:
86
+ HTTP_REQUEST,
87
+ HTTP_RESPONSE,
88
+ HTTP_BOTH
89
+
90
+ enum llhttp_method:
91
+ HTTP_DELETE,
92
+ HTTP_GET,
93
+ HTTP_HEAD,
94
+ HTTP_POST,
95
+ HTTP_PUT,
96
+ HTTP_CONNECT,
97
+ HTTP_OPTIONS,
98
+ HTTP_TRACE,
99
+ HTTP_COPY,
100
+ HTTP_LOCK,
101
+ HTTP_MKCOL,
102
+ HTTP_MOVE,
103
+ HTTP_PROPFIND,
104
+ HTTP_PROPPATCH,
105
+ HTTP_SEARCH,
106
+ HTTP_UNLOCK,
107
+ HTTP_BIND,
108
+ HTTP_REBIND,
109
+ HTTP_UNBIND,
110
+ HTTP_ACL,
111
+ HTTP_REPORT,
112
+ HTTP_MKACTIVITY,
113
+ HTTP_CHECKOUT,
114
+ HTTP_MERGE,
115
+ HTTP_MSEARCH,
116
+ HTTP_NOTIFY,
117
+ HTTP_SUBSCRIBE,
118
+ HTTP_UNSUBSCRIBE,
119
+ HTTP_PATCH,
120
+ HTTP_PURGE,
121
+ HTTP_MKCALENDAR,
122
+ HTTP_LINK,
123
+ HTTP_UNLINK,
124
+ HTTP_SOURCE,
125
+ HTTP_PRI,
126
+ HTTP_DESCRIBE,
127
+ HTTP_ANNOUNCE,
128
+ HTTP_SETUP,
129
+ HTTP_PLAY,
130
+ HTTP_PAUSE,
131
+ HTTP_TEARDOWN,
132
+ HTTP_GET_PARAMETER,
133
+ HTTP_SET_PARAMETER,
134
+ HTTP_REDIRECT,
135
+ HTTP_RECORD,
136
+ HTTP_FLUSH
137
+
138
+ ctypedef llhttp_method llhttp_method_t;
139
+
140
+ void llhttp_settings_init(llhttp_settings_t* settings)
141
+ void llhttp_init(llhttp_t* parser, llhttp_type type,
142
+ const llhttp_settings_t* settings)
143
+
144
+ llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
145
+
146
+ int llhttp_should_keep_alive(const llhttp_t* parser)
147
+
148
+ void llhttp_resume_after_upgrade(llhttp_t* parser)
149
+
150
+ llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
151
+ const char* llhttp_get_error_reason(const llhttp_t* parser)
152
+ const char* llhttp_get_error_pos(const llhttp_t* parser)
153
+
154
+ const char* llhttp_method_name(llhttp_method_t method)
155
+
156
+ void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
157
+ void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
158
+ void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
.venv/lib/python3.11/site-packages/aiohttp/_http_parser.cpython-311-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aee964b05bf93d9ec038fa10b91d8fd531c57e57ee90cba6c6411f45f7459346
3
+ size 2826344
.venv/lib/python3.11/site-packages/aiohttp/_http_parser.pyx ADDED
@@ -0,0 +1,837 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #cython: language_level=3
2
+ #
3
+ # Based on https://github.com/MagicStack/httptools
4
+ #
5
+
6
+ from cpython cimport (
7
+ Py_buffer,
8
+ PyBUF_SIMPLE,
9
+ PyBuffer_Release,
10
+ PyBytes_AsString,
11
+ PyBytes_AsStringAndSize,
12
+ PyObject_GetBuffer,
13
+ )
14
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc
15
+ from libc.limits cimport ULLONG_MAX
16
+ from libc.string cimport memcpy
17
+
18
+ from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
19
+ from yarl import URL as _URL
20
+
21
+ from aiohttp import hdrs
22
+ from aiohttp.helpers import DEBUG, set_exception
23
+
24
+ from .http_exceptions import (
25
+ BadHttpMessage,
26
+ BadHttpMethod,
27
+ BadStatusLine,
28
+ ContentLengthError,
29
+ InvalidHeader,
30
+ InvalidURLError,
31
+ LineTooLong,
32
+ PayloadEncodingError,
33
+ TransferEncodingError,
34
+ )
35
+ from .http_parser import DeflateBuffer as _DeflateBuffer
36
+ from .http_writer import (
37
+ HttpVersion as _HttpVersion,
38
+ HttpVersion10 as _HttpVersion10,
39
+ HttpVersion11 as _HttpVersion11,
40
+ )
41
+ from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
42
+
43
+ cimport cython
44
+
45
+ from aiohttp cimport _cparser as cparser
46
+
47
+ include "_headers.pxi"
48
+
49
+ from aiohttp cimport _find_header
50
+
51
+ ALLOWED_UPGRADES = frozenset({"websocket"})
52
+ DEF DEFAULT_FREELIST_SIZE = 250
53
+
54
+ cdef extern from "Python.h":
55
+ int PyByteArray_Resize(object, Py_ssize_t) except -1
56
+ Py_ssize_t PyByteArray_Size(object) except -1
57
+ char* PyByteArray_AsString(object)
58
+
59
+ __all__ = ('HttpRequestParser', 'HttpResponseParser',
60
+ 'RawRequestMessage', 'RawResponseMessage')
61
+
62
+ cdef object URL = _URL
63
+ cdef object URL_build = URL.build
64
+ cdef object CIMultiDict = _CIMultiDict
65
+ cdef object CIMultiDictProxy = _CIMultiDictProxy
66
+ cdef object HttpVersion = _HttpVersion
67
+ cdef object HttpVersion10 = _HttpVersion10
68
+ cdef object HttpVersion11 = _HttpVersion11
69
+ cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
70
+ cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
71
+ cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
72
+ cdef object StreamReader = _StreamReader
73
+ cdef object DeflateBuffer = _DeflateBuffer
74
+ cdef bytes EMPTY_BYTES = b""
75
+
76
+ cdef inline object extend(object buf, const char* at, size_t length):
77
+ cdef Py_ssize_t s
78
+ cdef char* ptr
79
+ s = PyByteArray_Size(buf)
80
+ PyByteArray_Resize(buf, s + length)
81
+ ptr = PyByteArray_AsString(buf)
82
+ memcpy(ptr + s, at, length)
83
+
84
+
85
+ DEF METHODS_COUNT = 46;
86
+
87
+ cdef list _http_method = []
88
+
89
+ for i in range(METHODS_COUNT):
90
+ _http_method.append(
91
+ cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
92
+
93
+
94
+ cdef inline str http_method_str(int i):
95
+ if i < METHODS_COUNT:
96
+ return <str>_http_method[i]
97
+ else:
98
+ return "<unknown>"
99
+
100
+ cdef inline object find_header(bytes raw_header):
101
+ cdef Py_ssize_t size
102
+ cdef char *buf
103
+ cdef int idx
104
+ PyBytes_AsStringAndSize(raw_header, &buf, &size)
105
+ idx = _find_header.find_header(buf, size)
106
+ if idx == -1:
107
+ return raw_header.decode('utf-8', 'surrogateescape')
108
+ return headers[idx]
109
+
110
+
111
+ @cython.freelist(DEFAULT_FREELIST_SIZE)
112
+ cdef class RawRequestMessage:
113
+ cdef readonly str method
114
+ cdef readonly str path
115
+ cdef readonly object version # HttpVersion
116
+ cdef readonly object headers # CIMultiDict
117
+ cdef readonly object raw_headers # tuple
118
+ cdef readonly object should_close
119
+ cdef readonly object compression
120
+ cdef readonly object upgrade
121
+ cdef readonly object chunked
122
+ cdef readonly object url # yarl.URL
123
+
124
+ def __init__(self, method, path, version, headers, raw_headers,
125
+ should_close, compression, upgrade, chunked, url):
126
+ self.method = method
127
+ self.path = path
128
+ self.version = version
129
+ self.headers = headers
130
+ self.raw_headers = raw_headers
131
+ self.should_close = should_close
132
+ self.compression = compression
133
+ self.upgrade = upgrade
134
+ self.chunked = chunked
135
+ self.url = url
136
+
137
+ def __repr__(self):
138
+ info = []
139
+ info.append(("method", self.method))
140
+ info.append(("path", self.path))
141
+ info.append(("version", self.version))
142
+ info.append(("headers", self.headers))
143
+ info.append(("raw_headers", self.raw_headers))
144
+ info.append(("should_close", self.should_close))
145
+ info.append(("compression", self.compression))
146
+ info.append(("upgrade", self.upgrade))
147
+ info.append(("chunked", self.chunked))
148
+ info.append(("url", self.url))
149
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
150
+ return '<RawRequestMessage(' + sinfo + ')>'
151
+
152
+ def _replace(self, **dct):
153
+ cdef RawRequestMessage ret
154
+ ret = _new_request_message(self.method,
155
+ self.path,
156
+ self.version,
157
+ self.headers,
158
+ self.raw_headers,
159
+ self.should_close,
160
+ self.compression,
161
+ self.upgrade,
162
+ self.chunked,
163
+ self.url)
164
+ if "method" in dct:
165
+ ret.method = dct["method"]
166
+ if "path" in dct:
167
+ ret.path = dct["path"]
168
+ if "version" in dct:
169
+ ret.version = dct["version"]
170
+ if "headers" in dct:
171
+ ret.headers = dct["headers"]
172
+ if "raw_headers" in dct:
173
+ ret.raw_headers = dct["raw_headers"]
174
+ if "should_close" in dct:
175
+ ret.should_close = dct["should_close"]
176
+ if "compression" in dct:
177
+ ret.compression = dct["compression"]
178
+ if "upgrade" in dct:
179
+ ret.upgrade = dct["upgrade"]
180
+ if "chunked" in dct:
181
+ ret.chunked = dct["chunked"]
182
+ if "url" in dct:
183
+ ret.url = dct["url"]
184
+ return ret
185
+
186
+ cdef _new_request_message(str method,
187
+ str path,
188
+ object version,
189
+ object headers,
190
+ object raw_headers,
191
+ bint should_close,
192
+ object compression,
193
+ bint upgrade,
194
+ bint chunked,
195
+ object url):
196
+ cdef RawRequestMessage ret
197
+ ret = RawRequestMessage.__new__(RawRequestMessage)
198
+ ret.method = method
199
+ ret.path = path
200
+ ret.version = version
201
+ ret.headers = headers
202
+ ret.raw_headers = raw_headers
203
+ ret.should_close = should_close
204
+ ret.compression = compression
205
+ ret.upgrade = upgrade
206
+ ret.chunked = chunked
207
+ ret.url = url
208
+ return ret
209
+
210
+
211
+ @cython.freelist(DEFAULT_FREELIST_SIZE)
212
+ cdef class RawResponseMessage:
213
+ cdef readonly object version # HttpVersion
214
+ cdef readonly int code
215
+ cdef readonly str reason
216
+ cdef readonly object headers # CIMultiDict
217
+ cdef readonly object raw_headers # tuple
218
+ cdef readonly object should_close
219
+ cdef readonly object compression
220
+ cdef readonly object upgrade
221
+ cdef readonly object chunked
222
+
223
+ def __init__(self, version, code, reason, headers, raw_headers,
224
+ should_close, compression, upgrade, chunked):
225
+ self.version = version
226
+ self.code = code
227
+ self.reason = reason
228
+ self.headers = headers
229
+ self.raw_headers = raw_headers
230
+ self.should_close = should_close
231
+ self.compression = compression
232
+ self.upgrade = upgrade
233
+ self.chunked = chunked
234
+
235
+ def __repr__(self):
236
+ info = []
237
+ info.append(("version", self.version))
238
+ info.append(("code", self.code))
239
+ info.append(("reason", self.reason))
240
+ info.append(("headers", self.headers))
241
+ info.append(("raw_headers", self.raw_headers))
242
+ info.append(("should_close", self.should_close))
243
+ info.append(("compression", self.compression))
244
+ info.append(("upgrade", self.upgrade))
245
+ info.append(("chunked", self.chunked))
246
+ sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
247
+ return '<RawResponseMessage(' + sinfo + ')>'
248
+
249
+
250
+ cdef _new_response_message(object version,
251
+ int code,
252
+ str reason,
253
+ object headers,
254
+ object raw_headers,
255
+ bint should_close,
256
+ object compression,
257
+ bint upgrade,
258
+ bint chunked):
259
+ cdef RawResponseMessage ret
260
+ ret = RawResponseMessage.__new__(RawResponseMessage)
261
+ ret.version = version
262
+ ret.code = code
263
+ ret.reason = reason
264
+ ret.headers = headers
265
+ ret.raw_headers = raw_headers
266
+ ret.should_close = should_close
267
+ ret.compression = compression
268
+ ret.upgrade = upgrade
269
+ ret.chunked = chunked
270
+ return ret
271
+
272
+
273
+ @cython.internal
274
+ cdef class HttpParser:
275
+
276
+ cdef:
277
+ cparser.llhttp_t* _cparser
278
+ cparser.llhttp_settings_t* _csettings
279
+
280
+ bytes _raw_name
281
+ object _name
282
+ bytes _raw_value
283
+ bint _has_value
284
+
285
+ object _protocol
286
+ object _loop
287
+ object _timer
288
+
289
+ size_t _max_line_size
290
+ size_t _max_field_size
291
+ size_t _max_headers
292
+ bint _response_with_body
293
+ bint _read_until_eof
294
+
295
+ bint _started
296
+ object _url
297
+ bytearray _buf
298
+ str _path
299
+ str _reason
300
+ list _headers
301
+ list _raw_headers
302
+ bint _upgraded
303
+ list _messages
304
+ object _payload
305
+ bint _payload_error
306
+ object _payload_exception
307
+ object _last_error
308
+ bint _auto_decompress
309
+ int _limit
310
+
311
+ str _content_encoding
312
+
313
+ Py_buffer py_buf
314
+
315
+ def __cinit__(self):
316
+ self._cparser = <cparser.llhttp_t*> \
317
+ PyMem_Malloc(sizeof(cparser.llhttp_t))
318
+ if self._cparser is NULL:
319
+ raise MemoryError()
320
+
321
+ self._csettings = <cparser.llhttp_settings_t*> \
322
+ PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
323
+ if self._csettings is NULL:
324
+ raise MemoryError()
325
+
326
+ def __dealloc__(self):
327
+ PyMem_Free(self._cparser)
328
+ PyMem_Free(self._csettings)
329
+
330
+ cdef _init(
331
+ self, cparser.llhttp_type mode,
332
+ object protocol, object loop, int limit,
333
+ object timer=None,
334
+ size_t max_line_size=8190, size_t max_headers=32768,
335
+ size_t max_field_size=8190, payload_exception=None,
336
+ bint response_with_body=True, bint read_until_eof=False,
337
+ bint auto_decompress=True,
338
+ ):
339
+ cparser.llhttp_settings_init(self._csettings)
340
+ cparser.llhttp_init(self._cparser, mode, self._csettings)
341
+ self._cparser.data = <void*>self
342
+ self._cparser.content_length = 0
343
+
344
+ self._protocol = protocol
345
+ self._loop = loop
346
+ self._timer = timer
347
+
348
+ self._buf = bytearray()
349
+ self._payload = None
350
+ self._payload_error = 0
351
+ self._payload_exception = payload_exception
352
+ self._messages = []
353
+
354
+ self._raw_name = EMPTY_BYTES
355
+ self._raw_value = EMPTY_BYTES
356
+ self._has_value = False
357
+
358
+ self._max_line_size = max_line_size
359
+ self._max_headers = max_headers
360
+ self._max_field_size = max_field_size
361
+ self._response_with_body = response_with_body
362
+ self._read_until_eof = read_until_eof
363
+ self._upgraded = False
364
+ self._auto_decompress = auto_decompress
365
+ self._content_encoding = None
366
+
367
+ self._csettings.on_url = cb_on_url
368
+ self._csettings.on_status = cb_on_status
369
+ self._csettings.on_header_field = cb_on_header_field
370
+ self._csettings.on_header_value = cb_on_header_value
371
+ self._csettings.on_headers_complete = cb_on_headers_complete
372
+ self._csettings.on_body = cb_on_body
373
+ self._csettings.on_message_begin = cb_on_message_begin
374
+ self._csettings.on_message_complete = cb_on_message_complete
375
+ self._csettings.on_chunk_header = cb_on_chunk_header
376
+ self._csettings.on_chunk_complete = cb_on_chunk_complete
377
+
378
+ self._last_error = None
379
+ self._limit = limit
380
+
381
+ cdef _process_header(self):
382
+ cdef str value
383
+ if self._raw_name is not EMPTY_BYTES:
384
+ name = find_header(self._raw_name)
385
+ value = self._raw_value.decode('utf-8', 'surrogateescape')
386
+
387
+ self._headers.append((name, value))
388
+
389
+ if name is CONTENT_ENCODING:
390
+ self._content_encoding = value
391
+
392
+ self._has_value = False
393
+ self._raw_headers.append((self._raw_name, self._raw_value))
394
+ self._raw_name = EMPTY_BYTES
395
+ self._raw_value = EMPTY_BYTES
396
+
397
+ cdef _on_header_field(self, char* at, size_t length):
398
+ if self._has_value:
399
+ self._process_header()
400
+
401
+ if self._raw_name is EMPTY_BYTES:
402
+ self._raw_name = at[:length]
403
+ else:
404
+ self._raw_name += at[:length]
405
+
406
+ cdef _on_header_value(self, char* at, size_t length):
407
+ if self._raw_value is EMPTY_BYTES:
408
+ self._raw_value = at[:length]
409
+ else:
410
+ self._raw_value += at[:length]
411
+ self._has_value = True
412
+
413
+ cdef _on_headers_complete(self):
414
+ self._process_header()
415
+
416
+ should_close = not cparser.llhttp_should_keep_alive(self._cparser)
417
+ upgrade = self._cparser.upgrade
418
+ chunked = self._cparser.flags & cparser.F_CHUNKED
419
+
420
+ raw_headers = tuple(self._raw_headers)
421
+ headers = CIMultiDictProxy(CIMultiDict(self._headers))
422
+
423
+ if self._cparser.type == cparser.HTTP_REQUEST:
424
+ allowed = upgrade and headers.get("upgrade", "").lower() in ALLOWED_UPGRADES
425
+ if allowed or self._cparser.method == cparser.HTTP_CONNECT:
426
+ self._upgraded = True
427
+ else:
428
+ if upgrade and self._cparser.status_code == 101:
429
+ self._upgraded = True
430
+
431
+ # do not support old websocket spec
432
+ if SEC_WEBSOCKET_KEY1 in headers:
433
+ raise InvalidHeader(SEC_WEBSOCKET_KEY1)
434
+
435
+ encoding = None
436
+ enc = self._content_encoding
437
+ if enc is not None:
438
+ self._content_encoding = None
439
+ enc = enc.lower()
440
+ if enc in ('gzip', 'deflate', 'br'):
441
+ encoding = enc
442
+
443
+ if self._cparser.type == cparser.HTTP_REQUEST:
444
+ method = http_method_str(self._cparser.method)
445
+ msg = _new_request_message(
446
+ method, self._path,
447
+ self.http_version(), headers, raw_headers,
448
+ should_close, encoding, upgrade, chunked, self._url)
449
+ else:
450
+ msg = _new_response_message(
451
+ self.http_version(), self._cparser.status_code, self._reason,
452
+ headers, raw_headers, should_close, encoding,
453
+ upgrade, chunked)
454
+
455
+ if (
456
+ ULLONG_MAX > self._cparser.content_length > 0 or chunked or
457
+ self._cparser.method == cparser.HTTP_CONNECT or
458
+ (self._cparser.status_code >= 199 and
459
+ self._cparser.content_length == 0 and
460
+ self._read_until_eof)
461
+ ):
462
+ payload = StreamReader(
463
+ self._protocol, timer=self._timer, loop=self._loop,
464
+ limit=self._limit)
465
+ else:
466
+ payload = EMPTY_PAYLOAD
467
+
468
+ self._payload = payload
469
+ if encoding is not None and self._auto_decompress:
470
+ self._payload = DeflateBuffer(payload, encoding)
471
+
472
+ if not self._response_with_body:
473
+ payload = EMPTY_PAYLOAD
474
+
475
+ self._messages.append((msg, payload))
476
+
477
+ cdef _on_message_complete(self):
478
+ self._payload.feed_eof()
479
+ self._payload = None
480
+
481
+ cdef _on_chunk_header(self):
482
+ self._payload.begin_http_chunk_receiving()
483
+
484
+ cdef _on_chunk_complete(self):
485
+ self._payload.end_http_chunk_receiving()
486
+
487
+ cdef object _on_status_complete(self):
488
+ pass
489
+
490
+ cdef inline http_version(self):
491
+ cdef cparser.llhttp_t* parser = self._cparser
492
+
493
+ if parser.http_major == 1:
494
+ if parser.http_minor == 0:
495
+ return HttpVersion10
496
+ elif parser.http_minor == 1:
497
+ return HttpVersion11
498
+
499
+ return HttpVersion(parser.http_major, parser.http_minor)
500
+
501
+ ### Public API ###
502
+
503
+ def feed_eof(self):
504
+ cdef bytes desc
505
+
506
+ if self._payload is not None:
507
+ if self._cparser.flags & cparser.F_CHUNKED:
508
+ raise TransferEncodingError(
509
+ "Not enough data for satisfy transfer length header.")
510
+ elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
511
+ raise ContentLengthError(
512
+ "Not enough data for satisfy content length header.")
513
+ elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
514
+ desc = cparser.llhttp_get_error_reason(self._cparser)
515
+ raise PayloadEncodingError(desc.decode('latin-1'))
516
+ else:
517
+ self._payload.feed_eof()
518
+ elif self._started:
519
+ self._on_headers_complete()
520
+ if self._messages:
521
+ return self._messages[-1][0]
522
+
523
+ def feed_data(self, data):
524
+ cdef:
525
+ size_t data_len
526
+ size_t nb
527
+ cdef cparser.llhttp_errno_t errno
528
+
529
+ PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
530
+ data_len = <size_t>self.py_buf.len
531
+
532
+ errno = cparser.llhttp_execute(
533
+ self._cparser,
534
+ <char*>self.py_buf.buf,
535
+ data_len)
536
+
537
+ if errno is cparser.HPE_PAUSED_UPGRADE:
538
+ cparser.llhttp_resume_after_upgrade(self._cparser)
539
+
540
+ nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
541
+
542
+ PyBuffer_Release(&self.py_buf)
543
+
544
+ if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
545
+ if self._payload_error == 0:
546
+ if self._last_error is not None:
547
+ ex = self._last_error
548
+ self._last_error = None
549
+ else:
550
+ after = cparser.llhttp_get_error_pos(self._cparser)
551
+ before = data[:after - <char*>self.py_buf.buf]
552
+ after_b = after.split(b"\r\n", 1)[0]
553
+ before = before.rsplit(b"\r\n", 1)[-1]
554
+ data = before + after_b
555
+ pointer = " " * (len(repr(before))-1) + "^"
556
+ ex = parser_error_from_errno(self._cparser, data, pointer)
557
+ self._payload = None
558
+ raise ex
559
+
560
+ if self._messages:
561
+ messages = self._messages
562
+ self._messages = []
563
+ else:
564
+ messages = ()
565
+
566
+ if self._upgraded:
567
+ return messages, True, data[nb:]
568
+ else:
569
+ return messages, False, b""
570
+
571
+ def set_upgraded(self, val):
572
+ self._upgraded = val
573
+
574
+
575
+ cdef class HttpRequestParser(HttpParser):
576
+
577
+ def __init__(
578
+ self, protocol, loop, int limit, timer=None,
579
+ size_t max_line_size=8190, size_t max_headers=32768,
580
+ size_t max_field_size=8190, payload_exception=None,
581
+ bint response_with_body=True, bint read_until_eof=False,
582
+ bint auto_decompress=True,
583
+ ):
584
+ self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
585
+ max_line_size, max_headers, max_field_size,
586
+ payload_exception, response_with_body, read_until_eof,
587
+ auto_decompress)
588
+
589
+ cdef object _on_status_complete(self):
590
+ cdef int idx1, idx2
591
+ if not self._buf:
592
+ return
593
+ self._path = self._buf.decode('utf-8', 'surrogateescape')
594
+ try:
595
+ idx3 = len(self._path)
596
+ if self._cparser.method == cparser.HTTP_CONNECT:
597
+ # authority-form,
598
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
599
+ self._url = URL.build(authority=self._path, encoded=True)
600
+ elif idx3 > 1 and self._path[0] == '/':
601
+ # origin-form,
602
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
603
+ idx1 = self._path.find("?")
604
+ if idx1 == -1:
605
+ query = ""
606
+ idx2 = self._path.find("#")
607
+ if idx2 == -1:
608
+ path = self._path
609
+ fragment = ""
610
+ else:
611
+ path = self._path[0: idx2]
612
+ fragment = self._path[idx2+1:]
613
+
614
+ else:
615
+ path = self._path[0:idx1]
616
+ idx1 += 1
617
+ idx2 = self._path.find("#", idx1+1)
618
+ if idx2 == -1:
619
+ query = self._path[idx1:]
620
+ fragment = ""
621
+ else:
622
+ query = self._path[idx1: idx2]
623
+ fragment = self._path[idx2+1:]
624
+
625
+ self._url = URL.build(
626
+ path=path,
627
+ query_string=query,
628
+ fragment=fragment,
629
+ encoded=True,
630
+ )
631
+ else:
632
+ # absolute-form for proxy maybe,
633
+ # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
634
+ self._url = URL(self._path, encoded=True)
635
+ finally:
636
+ PyByteArray_Resize(self._buf, 0)
637
+
638
+
639
+ cdef class HttpResponseParser(HttpParser):
640
+
641
+ def __init__(
642
+ self, protocol, loop, int limit, timer=None,
643
+ size_t max_line_size=8190, size_t max_headers=32768,
644
+ size_t max_field_size=8190, payload_exception=None,
645
+ bint response_with_body=True, bint read_until_eof=False,
646
+ bint auto_decompress=True
647
+ ):
648
+ self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
649
+ max_line_size, max_headers, max_field_size,
650
+ payload_exception, response_with_body, read_until_eof,
651
+ auto_decompress)
652
+ # Use strict parsing on dev mode, so users are warned about broken servers.
653
+ if not DEBUG:
654
+ cparser.llhttp_set_lenient_headers(self._cparser, 1)
655
+ cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
656
+ cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)
657
+
658
+ cdef object _on_status_complete(self):
659
+ if self._buf:
660
+ self._reason = self._buf.decode('utf-8', 'surrogateescape')
661
+ PyByteArray_Resize(self._buf, 0)
662
+ else:
663
+ self._reason = self._reason or ''
664
+
665
+ cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
666
+ cdef HttpParser pyparser = <HttpParser>parser.data
667
+
668
+ pyparser._started = True
669
+ pyparser._headers = []
670
+ pyparser._raw_headers = []
671
+ PyByteArray_Resize(pyparser._buf, 0)
672
+ pyparser._path = None
673
+ pyparser._reason = None
674
+ return 0
675
+
676
+
677
+ cdef int cb_on_url(cparser.llhttp_t* parser,
678
+ const char *at, size_t length) except -1:
679
+ cdef HttpParser pyparser = <HttpParser>parser.data
680
+ try:
681
+ if length > pyparser._max_line_size:
682
+ raise LineTooLong(
683
+ 'Status line is too long', pyparser._max_line_size, length)
684
+ extend(pyparser._buf, at, length)
685
+ except BaseException as ex:
686
+ pyparser._last_error = ex
687
+ return -1
688
+ else:
689
+ return 0
690
+
691
+
692
+ cdef int cb_on_status(cparser.llhttp_t* parser,
693
+ const char *at, size_t length) except -1:
694
+ cdef HttpParser pyparser = <HttpParser>parser.data
695
+ cdef str reason
696
+ try:
697
+ if length > pyparser._max_line_size:
698
+ raise LineTooLong(
699
+ 'Status line is too long', pyparser._max_line_size, length)
700
+ extend(pyparser._buf, at, length)
701
+ except BaseException as ex:
702
+ pyparser._last_error = ex
703
+ return -1
704
+ else:
705
+ return 0
706
+
707
+
708
+ cdef int cb_on_header_field(cparser.llhttp_t* parser,
709
+ const char *at, size_t length) except -1:
710
+ cdef HttpParser pyparser = <HttpParser>parser.data
711
+ cdef Py_ssize_t size
712
+ try:
713
+ pyparser._on_status_complete()
714
+ size = len(pyparser._raw_name) + length
715
+ if size > pyparser._max_field_size:
716
+ raise LineTooLong(
717
+ 'Header name is too long', pyparser._max_field_size, size)
718
+ pyparser._on_header_field(at, length)
719
+ except BaseException as ex:
720
+ pyparser._last_error = ex
721
+ return -1
722
+ else:
723
+ return 0
724
+
725
+
726
+ cdef int cb_on_header_value(cparser.llhttp_t* parser,
727
+ const char *at, size_t length) except -1:
728
+ cdef HttpParser pyparser = <HttpParser>parser.data
729
+ cdef Py_ssize_t size
730
+ try:
731
+ size = len(pyparser._raw_value) + length
732
+ if size > pyparser._max_field_size:
733
+ raise LineTooLong(
734
+ 'Header value is too long', pyparser._max_field_size, size)
735
+ pyparser._on_header_value(at, length)
736
+ except BaseException as ex:
737
+ pyparser._last_error = ex
738
+ return -1
739
+ else:
740
+ return 0
741
+
742
+
743
+ cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
744
+ cdef HttpParser pyparser = <HttpParser>parser.data
745
+ try:
746
+ pyparser._on_status_complete()
747
+ pyparser._on_headers_complete()
748
+ except BaseException as exc:
749
+ pyparser._last_error = exc
750
+ return -1
751
+ else:
752
+ if pyparser._upgraded or pyparser._cparser.method == cparser.HTTP_CONNECT:
753
+ return 2
754
+ else:
755
+ return 0
756
+
757
+
758
+ cdef int cb_on_body(cparser.llhttp_t* parser,
759
+ const char *at, size_t length) except -1:
760
+ cdef HttpParser pyparser = <HttpParser>parser.data
761
+ cdef bytes body = at[:length]
762
+ try:
763
+ pyparser._payload.feed_data(body, length)
764
+ except BaseException as underlying_exc:
765
+ reraised_exc = underlying_exc
766
+ if pyparser._payload_exception is not None:
767
+ reraised_exc = pyparser._payload_exception(str(underlying_exc))
768
+
769
+ set_exception(pyparser._payload, reraised_exc, underlying_exc)
770
+
771
+ pyparser._payload_error = 1
772
+ return -1
773
+ else:
774
+ return 0
775
+
776
+
777
+ cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
778
+ cdef HttpParser pyparser = <HttpParser>parser.data
779
+ try:
780
+ pyparser._started = False
781
+ pyparser._on_message_complete()
782
+ except BaseException as exc:
783
+ pyparser._last_error = exc
784
+ return -1
785
+ else:
786
+ return 0
787
+
788
+
789
+ cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
790
+ cdef HttpParser pyparser = <HttpParser>parser.data
791
+ try:
792
+ pyparser._on_chunk_header()
793
+ except BaseException as exc:
794
+ pyparser._last_error = exc
795
+ return -1
796
+ else:
797
+ return 0
798
+
799
+
800
+ cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
801
+ cdef HttpParser pyparser = <HttpParser>parser.data
802
+ try:
803
+ pyparser._on_chunk_complete()
804
+ except BaseException as exc:
805
+ pyparser._last_error = exc
806
+ return -1
807
+ else:
808
+ return 0
809
+
810
+
811
+ cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
812
+ cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
813
+ cdef bytes desc = cparser.llhttp_get_error_reason(parser)
814
+
815
+ err_msg = "{}:\n\n {!r}\n {}".format(desc.decode("latin-1"), data, pointer)
816
+
817
+ if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
818
+ cparser.HPE_CB_HEADERS_COMPLETE,
819
+ cparser.HPE_CB_MESSAGE_COMPLETE,
820
+ cparser.HPE_CB_CHUNK_HEADER,
821
+ cparser.HPE_CB_CHUNK_COMPLETE,
822
+ cparser.HPE_INVALID_CONSTANT,
823
+ cparser.HPE_INVALID_HEADER_TOKEN,
824
+ cparser.HPE_INVALID_CONTENT_LENGTH,
825
+ cparser.HPE_INVALID_CHUNK_SIZE,
826
+ cparser.HPE_INVALID_EOF_STATE,
827
+ cparser.HPE_INVALID_TRANSFER_ENCODING}:
828
+ return BadHttpMessage(err_msg)
829
+ elif errno == cparser.HPE_INVALID_METHOD:
830
+ return BadHttpMethod(error=err_msg)
831
+ elif errno in {cparser.HPE_INVALID_STATUS,
832
+ cparser.HPE_INVALID_VERSION}:
833
+ return BadStatusLine(error=err_msg)
834
+ elif errno == cparser.HPE_INVALID_URL:
835
+ return InvalidURLError(err_msg)
836
+
837
+ return BadHttpMessage(err_msg)
.venv/lib/python3.11/site-packages/aiohttp/_http_writer.cpython-311-x86_64-linux-gnu.so ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ccc602b42628fb18d8269482715c7a51d0691921e6fe93209e933b2549ac5970
3
+ size 463752
.venv/lib/python3.11/site-packages/aiohttp/_http_writer.pyx ADDED
@@ -0,0 +1,162 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython.bytes cimport PyBytes_FromStringAndSize
2
+ from cpython.exc cimport PyErr_NoMemory
3
+ from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
4
+ from cpython.object cimport PyObject_Str
5
+ from libc.stdint cimport uint8_t, uint64_t
6
+ from libc.string cimport memcpy
7
+
8
+ from multidict import istr
9
+
10
+ DEF BUF_SIZE = 16 * 1024 # 16KiB
11
+ cdef char BUFFER[BUF_SIZE]
12
+
13
+ cdef object _istr = istr
14
+
15
+
16
+ # ----------------- writer ---------------------------
17
+
18
+ cdef struct Writer:
19
+ char *buf
20
+ Py_ssize_t size
21
+ Py_ssize_t pos
22
+
23
+
24
+ cdef inline void _init_writer(Writer* writer):
25
+ writer.buf = &BUFFER[0]
26
+ writer.size = BUF_SIZE
27
+ writer.pos = 0
28
+
29
+
30
+ cdef inline void _release_writer(Writer* writer):
31
+ if writer.buf != BUFFER:
32
+ PyMem_Free(writer.buf)
33
+
34
+
35
+ cdef inline int _write_byte(Writer* writer, uint8_t ch):
36
+ cdef char * buf
37
+ cdef Py_ssize_t size
38
+
39
+ if writer.pos == writer.size:
40
+ # reallocate
41
+ size = writer.size + BUF_SIZE
42
+ if writer.buf == BUFFER:
43
+ buf = <char*>PyMem_Malloc(size)
44
+ if buf == NULL:
45
+ PyErr_NoMemory()
46
+ return -1
47
+ memcpy(buf, writer.buf, writer.size)
48
+ else:
49
+ buf = <char*>PyMem_Realloc(writer.buf, size)
50
+ if buf == NULL:
51
+ PyErr_NoMemory()
52
+ return -1
53
+ writer.buf = buf
54
+ writer.size = size
55
+ writer.buf[writer.pos] = <char>ch
56
+ writer.pos += 1
57
+ return 0
58
+
59
+
60
+ cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
61
+ cdef uint64_t utf = <uint64_t> symbol
62
+
63
+ if utf < 0x80:
64
+ return _write_byte(writer, <uint8_t>utf)
65
+ elif utf < 0x800:
66
+ if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
67
+ return -1
68
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
69
+ elif 0xD800 <= utf <= 0xDFFF:
70
+ # surogate pair, ignored
71
+ return 0
72
+ elif utf < 0x10000:
73
+ if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
74
+ return -1
75
+ if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
76
+ return -1
77
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
78
+ elif utf > 0x10FFFF:
79
+ # symbol is too large
80
+ return 0
81
+ else:
82
+ if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
83
+ return -1
84
+ if _write_byte(writer,
85
+ <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
86
+ return -1
87
+ if _write_byte(writer,
88
+ <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
89
+ return -1
90
+ return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
91
+
92
+
93
+ cdef inline int _write_str(Writer* writer, str s):
94
+ cdef Py_UCS4 ch
95
+ for ch in s:
96
+ if _write_utf8(writer, ch) < 0:
97
+ return -1
98
+
99
+
100
+ # --------------- _serialize_headers ----------------------
101
+
102
+ cdef str to_str(object s):
103
+ if type(s) is str:
104
+ return <str>s
105
+ elif type(s) is _istr:
106
+ return PyObject_Str(s)
107
+ elif not isinstance(s, str):
108
+ raise TypeError("Cannot serialize non-str key {!r}".format(s))
109
+ else:
110
+ return str(s)
111
+
112
+
113
+
114
+ def _serialize_headers(str status_line, headers):
115
+ cdef Writer writer
116
+ cdef object key
117
+ cdef object val
118
+ cdef bytes ret
119
+ cdef str key_str
120
+ cdef str val_str
121
+
122
+ _init_writer(&writer)
123
+
124
+ try:
125
+ if _write_str(&writer, status_line) < 0:
126
+ raise
127
+ if _write_byte(&writer, b'\r') < 0:
128
+ raise
129
+ if _write_byte(&writer, b'\n') < 0:
130
+ raise
131
+
132
+ for key, val in headers.items():
133
+ key_str = to_str(key)
134
+ val_str = to_str(val)
135
+
136
+ if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str:
137
+ raise ValueError(
138
+ "Newline or carriage return character detected in HTTP status message or "
139
+ "header. This is a potential security issue."
140
+ )
141
+
142
+ if _write_str(&writer, key_str) < 0:
143
+ raise
144
+ if _write_byte(&writer, b':') < 0:
145
+ raise
146
+ if _write_byte(&writer, b' ') < 0:
147
+ raise
148
+ if _write_str(&writer, val_str) < 0:
149
+ raise
150
+ if _write_byte(&writer, b'\r') < 0:
151
+ raise
152
+ if _write_byte(&writer, b'\n') < 0:
153
+ raise
154
+
155
+ if _write_byte(&writer, b'\r') < 0:
156
+ raise
157
+ if _write_byte(&writer, b'\n') < 0:
158
+ raise
159
+
160
+ return PyBytes_FromStringAndSize(writer.buf, writer.pos)
161
+ finally:
162
+ _release_writer(&writer)
.venv/lib/python3.11/site-packages/aiohttp/_websocket/.hash/mask.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ b01999d409b29bd916e067bc963d5f2d9ee63cfc9ae0bccb769910131417bf93 /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/mask.pxd
.venv/lib/python3.11/site-packages/aiohttp/_websocket/.hash/mask.pyx.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ 0478ceb55d0ed30ef1a7da742cd003449bc69a07cf9fdb06789bd2b347cbfffe /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/mask.pyx
.venv/lib/python3.11/site-packages/aiohttp/_websocket/.hash/reader_c.pxd.hash ADDED
@@ -0,0 +1 @@
 
 
1
+ f6b3160a9002d639e0eff82da8b8d196a42ff6aed490e9faded2107eada4f067 /home/runner/work/aiohttp/aiohttp/aiohttp/_websocket/reader_c.pxd
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ """WebSocket protocol versions 13 and 8."""
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/__init__.cpython-311.pyc ADDED
Binary file (242 Bytes). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/helpers.cpython-311.pyc ADDED
Binary file (6.94 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/models.cpython-311.pyc ADDED
Binary file (3.97 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/reader.cpython-311.pyc ADDED
Binary file (914 Bytes). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/reader_c.cpython-311.pyc ADDED
Binary file (18.2 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/reader_py.cpython-311.pyc ADDED
Binary file (18.3 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/__pycache__/writer.cpython-311.pyc ADDED
Binary file (6.85 kB). View file
 
.venv/lib/python3.11/site-packages/aiohttp/_websocket/helpers.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Helpers for WebSocket protocol versions 13 and 8."""
2
+
3
+ import functools
4
+ import re
5
+ from struct import Struct
6
+ from typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple
7
+
8
+ from ..helpers import NO_EXTENSIONS
9
+ from .models import WSHandshakeError
10
+
11
+ UNPACK_LEN3 = Struct("!Q").unpack_from
12
+ UNPACK_CLOSE_CODE = Struct("!H").unpack
13
+ PACK_LEN1 = Struct("!BB").pack
14
+ PACK_LEN2 = Struct("!BBH").pack
15
+ PACK_LEN3 = Struct("!BBQ").pack
16
+ PACK_CLOSE_CODE = Struct("!H").pack
17
+ PACK_RANDBITS = Struct("!L").pack
18
+ MSG_SIZE: Final[int] = 2**14
19
+ MASK_LEN: Final[int] = 4
20
+
21
+ WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
22
+
23
+
24
+ # Used by _websocket_mask_python
25
+ @functools.lru_cache
26
+ def _xor_table() -> List[bytes]:
27
+ return [bytes(a ^ b for a in range(256)) for b in range(256)]
28
+
29
+
30
+ def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
31
+ """Websocket masking function.
32
+
33
+ `mask` is a `bytes` object of length 4; `data` is a `bytearray`
34
+ object of any length. The contents of `data` are masked with `mask`,
35
+ as specified in section 5.3 of RFC 6455.
36
+
37
+ Note that this function mutates the `data` argument.
38
+
39
+ This pure-python implementation may be replaced by an optimized
40
+ version when available.
41
+
42
+ """
43
+ assert isinstance(data, bytearray), data
44
+ assert len(mask) == 4, mask
45
+
46
+ if data:
47
+ _XOR_TABLE = _xor_table()
48
+ a, b, c, d = (_XOR_TABLE[n] for n in mask)
49
+ data[::4] = data[::4].translate(a)
50
+ data[1::4] = data[1::4].translate(b)
51
+ data[2::4] = data[2::4].translate(c)
52
+ data[3::4] = data[3::4].translate(d)
53
+
54
+
55
+ if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover
56
+ websocket_mask = _websocket_mask_python
57
+ else:
58
+ try:
59
+ from .mask import _websocket_mask_cython # type: ignore[import-not-found]
60
+
61
+ websocket_mask = _websocket_mask_cython
62
+ except ImportError: # pragma: no cover
63
+ websocket_mask = _websocket_mask_python
64
+
65
+
66
+ _WS_EXT_RE: Final[Pattern[str]] = re.compile(
67
+ r"^(?:;\s*(?:"
68
+ r"(server_no_context_takeover)|"
69
+ r"(client_no_context_takeover)|"
70
+ r"(server_max_window_bits(?:=(\d+))?)|"
71
+ r"(client_max_window_bits(?:=(\d+))?)))*$"
72
+ )
73
+
74
+ _WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
75
+
76
+
77
+ def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
78
+ if not extstr:
79
+ return 0, False
80
+
81
+ compress = 0
82
+ notakeover = False
83
+ for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
84
+ defext = ext.group(1)
85
+ # Return compress = 15 when get `permessage-deflate`
86
+ if not defext:
87
+ compress = 15
88
+ break
89
+ match = _WS_EXT_RE.match(defext)
90
+ if match:
91
+ compress = 15
92
+ if isserver:
93
+ # Server never fail to detect compress handshake.
94
+ # Server does not need to send max wbit to client
95
+ if match.group(4):
96
+ compress = int(match.group(4))
97
+ # Group3 must match if group4 matches
98
+ # Compress wbit 8 does not support in zlib
99
+ # If compress level not support,
100
+ # CONTINUE to next extension
101
+ if compress > 15 or compress < 9:
102
+ compress = 0
103
+ continue
104
+ if match.group(1):
105
+ notakeover = True
106
+ # Ignore regex group 5 & 6 for client_max_window_bits
107
+ break
108
+ else:
109
+ if match.group(6):
110
+ compress = int(match.group(6))
111
+ # Group5 must match if group6 matches
112
+ # Compress wbit 8 does not support in zlib
113
+ # If compress level not support,
114
+ # FAIL the parse progress
115
+ if compress > 15 or compress < 9:
116
+ raise WSHandshakeError("Invalid window size")
117
+ if match.group(2):
118
+ notakeover = True
119
+ # Ignore regex group 5 & 6 for client_max_window_bits
120
+ break
121
+ # Return Fail if client side and not match
122
+ elif not isserver:
123
+ raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
124
+
125
+ return compress, notakeover
126
+
127
+
128
+ def ws_ext_gen(
129
+ compress: int = 15, isserver: bool = False, server_notakeover: bool = False
130
+ ) -> str:
131
+ # client_notakeover=False not used for server
132
+ # compress wbit 8 does not support in zlib
133
+ if compress < 9 or compress > 15:
134
+ raise ValueError(
135
+ "Compress wbits must between 9 and 15, zlib does not support wbits=8"
136
+ )
137
+ enabledext = ["permessage-deflate"]
138
+ if not isserver:
139
+ enabledext.append("client_max_window_bits")
140
+
141
+ if compress < 15:
142
+ enabledext.append("server_max_window_bits=" + str(compress))
143
+ if server_notakeover:
144
+ enabledext.append("server_no_context_takeover")
145
+ # if client_notakeover:
146
+ # enabledext.append('client_no_context_takeover')
147
+ return "; ".join(enabledext)
.venv/lib/python3.11/site-packages/aiohttp/_websocket/mask.pxd ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ """Cython declarations for websocket masking."""
2
+
3
+ cpdef void _websocket_mask_cython(bytes mask, bytearray data)
.venv/lib/python3.11/site-packages/aiohttp/_websocket/mask.pyx ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from cpython cimport PyBytes_AsString
2
+
3
+
4
+ #from cpython cimport PyByteArray_AsString # cython still not exports that
5
+ cdef extern from "Python.h":
6
+ char* PyByteArray_AsString(bytearray ba) except NULL
7
+
8
+ from libc.stdint cimport uint32_t, uint64_t, uintmax_t
9
+
10
+
11
+ cpdef void _websocket_mask_cython(bytes mask, bytearray data):
12
+ """Note, this function mutates its `data` argument
13
+ """
14
+ cdef:
15
+ Py_ssize_t data_len, i
16
+ # bit operations on signed integers are implementation-specific
17
+ unsigned char * in_buf
18
+ const unsigned char * mask_buf
19
+ uint32_t uint32_msk
20
+ uint64_t uint64_msk
21
+
22
+ assert len(mask) == 4
23
+
24
+ data_len = len(data)
25
+ in_buf = <unsigned char*>PyByteArray_AsString(data)
26
+ mask_buf = <const unsigned char*>PyBytes_AsString(mask)
27
+ uint32_msk = (<uint32_t*>mask_buf)[0]
28
+
29
+ # TODO: align in_data ptr to achieve even faster speeds
30
+ # does it need in python ?! malloc() always aligns to sizeof(long) bytes
31
+
32
+ if sizeof(size_t) >= 8:
33
+ uint64_msk = uint32_msk
34
+ uint64_msk = (uint64_msk << 32) | uint32_msk
35
+
36
+ while data_len >= 8:
37
+ (<uint64_t*>in_buf)[0] ^= uint64_msk
38
+ in_buf += 8
39
+ data_len -= 8
40
+
41
+
42
+ while data_len >= 4:
43
+ (<uint32_t*>in_buf)[0] ^= uint32_msk
44
+ in_buf += 4
45
+ data_len -= 4
46
+
47
+ for i in range(0, data_len):
48
+ in_buf[i] ^= mask_buf[i]
.venv/lib/python3.11/site-packages/aiohttp/_websocket/models.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Models for WebSocket protocol versions 13 and 8."""
2
+
3
+ import json
4
+ from enum import IntEnum
5
+ from typing import Any, Callable, Final, NamedTuple, Optional, cast
6
+
7
+ WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
8
+
9
+
10
+ class WSCloseCode(IntEnum):
11
+ OK = 1000
12
+ GOING_AWAY = 1001
13
+ PROTOCOL_ERROR = 1002
14
+ UNSUPPORTED_DATA = 1003
15
+ ABNORMAL_CLOSURE = 1006
16
+ INVALID_TEXT = 1007
17
+ POLICY_VIOLATION = 1008
18
+ MESSAGE_TOO_BIG = 1009
19
+ MANDATORY_EXTENSION = 1010
20
+ INTERNAL_ERROR = 1011
21
+ SERVICE_RESTART = 1012
22
+ TRY_AGAIN_LATER = 1013
23
+ BAD_GATEWAY = 1014
24
+
25
+
26
+ class WSMsgType(IntEnum):
27
+ # websocket spec types
28
+ CONTINUATION = 0x0
29
+ TEXT = 0x1
30
+ BINARY = 0x2
31
+ PING = 0x9
32
+ PONG = 0xA
33
+ CLOSE = 0x8
34
+
35
+ # aiohttp specific types
36
+ CLOSING = 0x100
37
+ CLOSED = 0x101
38
+ ERROR = 0x102
39
+
40
+ text = TEXT
41
+ binary = BINARY
42
+ ping = PING
43
+ pong = PONG
44
+ close = CLOSE
45
+ closing = CLOSING
46
+ closed = CLOSED
47
+ error = ERROR
48
+
49
+
50
+ class WSMessage(NamedTuple):
51
+ type: WSMsgType
52
+ # To type correctly, this would need some kind of tagged union for each type.
53
+ data: Any
54
+ extra: Optional[str]
55
+
56
+ def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
57
+ """Return parsed JSON data.
58
+
59
+ .. versionadded:: 0.22
60
+ """
61
+ return loads(self.data)
62
+
63
+
64
+ # Constructing the tuple directly to avoid the overhead of
65
+ # the lambda and arg processing since NamedTuples are constructed
66
+ # with a run time built lambda
67
+ # https://github.com/python/cpython/blob/d83fcf8371f2f33c7797bc8f5423a8bca8c46e5c/Lib/collections/__init__.py#L441
68
+ WS_CLOSED_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSED, None, None))
69
+ WS_CLOSING_MESSAGE = tuple.__new__(WSMessage, (WSMsgType.CLOSING, None, None))
70
+
71
+
72
+ class WebSocketError(Exception):
73
+ """WebSocket protocol parser error."""
74
+
75
+ def __init__(self, code: int, message: str) -> None:
76
+ self.code = code
77
+ super().__init__(code, message)
78
+
79
+ def __str__(self) -> str:
80
+ return cast(str, self.args[1])
81
+
82
+
83
+ class WSHandshakeError(Exception):
84
+ """WebSocket protocol handshake error."""
.venv/lib/python3.11/site-packages/aiohttp/_websocket/reader.py ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Reader for WebSocket protocol versions 13 and 8."""
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ from ..helpers import NO_EXTENSIONS
6
+
7
+ if TYPE_CHECKING or NO_EXTENSIONS: # pragma: no cover
8
+ from .reader_py import (
9
+ WebSocketDataQueue as WebSocketDataQueuePython,
10
+ WebSocketReader as WebSocketReaderPython,
11
+ )
12
+
13
+ WebSocketReader = WebSocketReaderPython
14
+ WebSocketDataQueue = WebSocketDataQueuePython
15
+ else:
16
+ try:
17
+ from .reader_c import ( # type: ignore[import-not-found]
18
+ WebSocketDataQueue as WebSocketDataQueueCython,
19
+ WebSocketReader as WebSocketReaderCython,
20
+ )
21
+
22
+ WebSocketReader = WebSocketReaderCython
23
+ WebSocketDataQueue = WebSocketDataQueueCython
24
+ except ImportError: # pragma: no cover
25
+ from .reader_py import (
26
+ WebSocketDataQueue as WebSocketDataQueuePython,
27
+ WebSocketReader as WebSocketReaderPython,
28
+ )
29
+
30
+ WebSocketReader = WebSocketReaderPython
31
+ WebSocketDataQueue = WebSocketDataQueuePython
.venv/lib/python3.11/site-packages/aiohttp/_websocket/reader_c.pxd ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import cython
2
+
3
+ from .mask cimport _websocket_mask_cython as websocket_mask
4
+
5
+
6
+ cdef unsigned int READ_HEADER
7
+ cdef unsigned int READ_PAYLOAD_LENGTH
8
+ cdef unsigned int READ_PAYLOAD_MASK
9
+ cdef unsigned int READ_PAYLOAD
10
+
11
+ cdef unsigned int OP_CODE_CONTINUATION
12
+ cdef unsigned int OP_CODE_TEXT
13
+ cdef unsigned int OP_CODE_BINARY
14
+ cdef unsigned int OP_CODE_CLOSE
15
+ cdef unsigned int OP_CODE_PING
16
+ cdef unsigned int OP_CODE_PONG
17
+
18
+ cdef object UNPACK_LEN3
19
+ cdef object UNPACK_CLOSE_CODE
20
+ cdef object TUPLE_NEW
21
+
22
+ cdef object WSMsgType
23
+ cdef object WSMessage
24
+
25
+ cdef object WS_MSG_TYPE_TEXT
26
+ cdef object WS_MSG_TYPE_BINARY
27
+
28
+ cdef set ALLOWED_CLOSE_CODES
29
+ cdef set MESSAGE_TYPES_WITH_CONTENT
30
+
31
+ cdef tuple EMPTY_FRAME
32
+ cdef tuple EMPTY_FRAME_ERROR
33
+
34
+ cdef class WebSocketDataQueue:
35
+
36
+ cdef unsigned int _size
37
+ cdef public object _protocol
38
+ cdef unsigned int _limit
39
+ cdef object _loop
40
+ cdef bint _eof
41
+ cdef object _waiter
42
+ cdef object _exception
43
+ cdef public object _buffer
44
+ cdef object _get_buffer
45
+ cdef object _put_buffer
46
+
47
+ cdef void _release_waiter(self)
48
+
49
+ cpdef void feed_data(self, object data, unsigned int size)
50
+
51
+ @cython.locals(size="unsigned int")
52
+ cdef _read_from_buffer(self)
53
+
54
+ cdef class WebSocketReader:
55
+
56
+ cdef WebSocketDataQueue queue
57
+ cdef unsigned int _max_msg_size
58
+
59
+ cdef Exception _exc
60
+ cdef bytearray _partial
61
+ cdef unsigned int _state
62
+
63
+ cdef object _opcode
64
+ cdef object _frame_fin
65
+ cdef object _frame_opcode
66
+ cdef object _frame_payload
67
+ cdef unsigned long long _frame_payload_len
68
+
69
+ cdef bytes _tail
70
+ cdef bint _has_mask
71
+ cdef bytes _frame_mask
72
+ cdef unsigned long long _payload_length
73
+ cdef unsigned int _payload_length_flag
74
+ cdef object _compressed
75
+ cdef object _decompressobj
76
+ cdef bint _compress
77
+
78
+ cpdef tuple feed_data(self, object data)
79
+
80
+ @cython.locals(
81
+ is_continuation=bint,
82
+ fin=bint,
83
+ has_partial=bint,
84
+ payload_merged=bytes,
85
+ opcode="unsigned int",
86
+ )
87
+ cpdef void _feed_data(self, bytes data)
88
+
89
+ @cython.locals(
90
+ start_pos="unsigned int",
91
+ buf_len="unsigned int",
92
+ length="unsigned int",
93
+ chunk_size="unsigned int",
94
+ chunk_len="unsigned int",
95
+ buf_length="unsigned int",
96
+ first_byte="unsigned char",
97
+ second_byte="unsigned char",
98
+ end_pos="unsigned int",
99
+ has_mask=bint,
100
+ fin=bint,
101
+ )
102
+ cpdef list parse_frame(self, bytes buf)
.venv/lib/python3.11/site-packages/aiohttp/_websocket/reader_c.py ADDED
@@ -0,0 +1,468 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Reader for WebSocket protocol versions 13 and 8."""
2
+
3
+ import asyncio
4
+ import builtins
5
+ from collections import deque
6
+ from typing import Deque, Final, List, Optional, Set, Tuple, Union
7
+
8
+ from ..base_protocol import BaseProtocol
9
+ from ..compression_utils import ZLibDecompressor
10
+ from ..helpers import _EXC_SENTINEL, set_exception
11
+ from ..streams import EofStream
12
+ from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask
13
+ from .models import (
14
+ WS_DEFLATE_TRAILING,
15
+ WebSocketError,
16
+ WSCloseCode,
17
+ WSMessage,
18
+ WSMsgType,
19
+ )
20
+
21
+ ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
22
+
23
+ # States for the reader, used to parse the WebSocket frame
24
+ # integer values are used so they can be cythonized
25
+ READ_HEADER = 1
26
+ READ_PAYLOAD_LENGTH = 2
27
+ READ_PAYLOAD_MASK = 3
28
+ READ_PAYLOAD = 4
29
+
30
+ WS_MSG_TYPE_BINARY = WSMsgType.BINARY
31
+ WS_MSG_TYPE_TEXT = WSMsgType.TEXT
32
+
33
+ # WSMsgType values unpacked so they can by cythonized to ints
34
+ OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value
35
+ OP_CODE_TEXT = WSMsgType.TEXT.value
36
+ OP_CODE_BINARY = WSMsgType.BINARY.value
37
+ OP_CODE_CLOSE = WSMsgType.CLOSE.value
38
+ OP_CODE_PING = WSMsgType.PING.value
39
+ OP_CODE_PONG = WSMsgType.PONG.value
40
+
41
+ EMPTY_FRAME_ERROR = (True, b"")
42
+ EMPTY_FRAME = (False, b"")
43
+
44
+ TUPLE_NEW = tuple.__new__
45
+
46
+ int_ = int # Prevent Cython from converting to PyInt
47
+
48
+
49
+ class WebSocketDataQueue:
50
+ """WebSocketDataQueue resumes and pauses an underlying stream.
51
+
52
+ It is a destination for WebSocket data.
53
+ """
54
+
55
+ def __init__(
56
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
57
+ ) -> None:
58
+ self._size = 0
59
+ self._protocol = protocol
60
+ self._limit = limit * 2
61
+ self._loop = loop
62
+ self._eof = False
63
+ self._waiter: Optional[asyncio.Future[None]] = None
64
+ self._exception: Union[BaseException, None] = None
65
+ self._buffer: Deque[Tuple[WSMessage, int]] = deque()
66
+ self._get_buffer = self._buffer.popleft
67
+ self._put_buffer = self._buffer.append
68
+
69
+ def is_eof(self) -> bool:
70
+ return self._eof
71
+
72
+ def exception(self) -> Optional[BaseException]:
73
+ return self._exception
74
+
75
+ def set_exception(
76
+ self,
77
+ exc: "BaseException",
78
+ exc_cause: builtins.BaseException = _EXC_SENTINEL,
79
+ ) -> None:
80
+ self._eof = True
81
+ self._exception = exc
82
+ if (waiter := self._waiter) is not None:
83
+ self._waiter = None
84
+ set_exception(waiter, exc, exc_cause)
85
+
86
+ def _release_waiter(self) -> None:
87
+ if (waiter := self._waiter) is None:
88
+ return
89
+ self._waiter = None
90
+ if not waiter.done():
91
+ waiter.set_result(None)
92
+
93
+ def feed_eof(self) -> None:
94
+ self._eof = True
95
+ self._release_waiter()
96
+
97
+ def feed_data(self, data: "WSMessage", size: "int_") -> None:
98
+ self._size += size
99
+ self._put_buffer((data, size))
100
+ self._release_waiter()
101
+ if self._size > self._limit and not self._protocol._reading_paused:
102
+ self._protocol.pause_reading()
103
+
104
+ async def read(self) -> WSMessage:
105
+ if not self._buffer and not self._eof:
106
+ assert not self._waiter
107
+ self._waiter = self._loop.create_future()
108
+ try:
109
+ await self._waiter
110
+ except (asyncio.CancelledError, asyncio.TimeoutError):
111
+ self._waiter = None
112
+ raise
113
+ return self._read_from_buffer()
114
+
115
+ def _read_from_buffer(self) -> WSMessage:
116
+ if self._buffer:
117
+ data, size = self._get_buffer()
118
+ self._size -= size
119
+ if self._size < self._limit and self._protocol._reading_paused:
120
+ self._protocol.resume_reading()
121
+ return data
122
+ if self._exception is not None:
123
+ raise self._exception
124
+ raise EofStream
125
+
126
+
127
+ class WebSocketReader:
128
+ def __init__(
129
+ self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True
130
+ ) -> None:
131
+ self.queue = queue
132
+ self._max_msg_size = max_msg_size
133
+
134
+ self._exc: Optional[Exception] = None
135
+ self._partial = bytearray()
136
+ self._state = READ_HEADER
137
+
138
+ self._opcode: Optional[int] = None
139
+ self._frame_fin = False
140
+ self._frame_opcode: Optional[int] = None
141
+ self._frame_payload: Union[bytes, bytearray] = b""
142
+ self._frame_payload_len = 0
143
+
144
+ self._tail: bytes = b""
145
+ self._has_mask = False
146
+ self._frame_mask: Optional[bytes] = None
147
+ self._payload_length = 0
148
+ self._payload_length_flag = 0
149
+ self._compressed: Optional[bool] = None
150
+ self._decompressobj: Optional[ZLibDecompressor] = None
151
+ self._compress = compress
152
+
153
+ def feed_eof(self) -> None:
154
+ self.queue.feed_eof()
155
+
156
+ # data can be bytearray on Windows because proactor event loop uses bytearray
157
+ # and asyncio types this to Union[bytes, bytearray, memoryview] so we need
158
+ # coerce data to bytes if it is not
159
+ def feed_data(
160
+ self, data: Union[bytes, bytearray, memoryview]
161
+ ) -> Tuple[bool, bytes]:
162
+ if type(data) is not bytes:
163
+ data = bytes(data)
164
+
165
+ if self._exc is not None:
166
+ return True, data
167
+
168
+ try:
169
+ self._feed_data(data)
170
+ except Exception as exc:
171
+ self._exc = exc
172
+ set_exception(self.queue, exc)
173
+ return EMPTY_FRAME_ERROR
174
+
175
+ return EMPTY_FRAME
176
+
177
+ def _feed_data(self, data: bytes) -> None:
178
+ msg: WSMessage
179
+ for frame in self.parse_frame(data):
180
+ fin = frame[0]
181
+ opcode = frame[1]
182
+ payload = frame[2]
183
+ compressed = frame[3]
184
+
185
+ is_continuation = opcode == OP_CODE_CONTINUATION
186
+ if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation:
187
+ # load text/binary
188
+ if not fin:
189
+ # got partial frame payload
190
+ if not is_continuation:
191
+ self._opcode = opcode
192
+ self._partial += payload
193
+ if self._max_msg_size and len(self._partial) >= self._max_msg_size:
194
+ raise WebSocketError(
195
+ WSCloseCode.MESSAGE_TOO_BIG,
196
+ "Message size {} exceeds limit {}".format(
197
+ len(self._partial), self._max_msg_size
198
+ ),
199
+ )
200
+ continue
201
+
202
+ has_partial = bool(self._partial)
203
+ if is_continuation:
204
+ if self._opcode is None:
205
+ raise WebSocketError(
206
+ WSCloseCode.PROTOCOL_ERROR,
207
+ "Continuation frame for non started message",
208
+ )
209
+ opcode = self._opcode
210
+ self._opcode = None
211
+ # previous frame was non finished
212
+ # we should get continuation opcode
213
+ elif has_partial:
214
+ raise WebSocketError(
215
+ WSCloseCode.PROTOCOL_ERROR,
216
+ "The opcode in non-fin frame is expected "
217
+ "to be zero, got {!r}".format(opcode),
218
+ )
219
+
220
+ assembled_payload: Union[bytes, bytearray]
221
+ if has_partial:
222
+ assembled_payload = self._partial + payload
223
+ self._partial.clear()
224
+ else:
225
+ assembled_payload = payload
226
+
227
+ if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
228
+ raise WebSocketError(
229
+ WSCloseCode.MESSAGE_TOO_BIG,
230
+ "Message size {} exceeds limit {}".format(
231
+ len(assembled_payload), self._max_msg_size
232
+ ),
233
+ )
234
+
235
+ # Decompress process must to be done after all packets
236
+ # received.
237
+ if compressed:
238
+ if not self._decompressobj:
239
+ self._decompressobj = ZLibDecompressor(
240
+ suppress_deflate_header=True
241
+ )
242
+ payload_merged = self._decompressobj.decompress_sync(
243
+ assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size
244
+ )
245
+ if self._decompressobj.unconsumed_tail:
246
+ left = len(self._decompressobj.unconsumed_tail)
247
+ raise WebSocketError(
248
+ WSCloseCode.MESSAGE_TOO_BIG,
249
+ "Decompressed message size {} exceeds limit {}".format(
250
+ self._max_msg_size + left, self._max_msg_size
251
+ ),
252
+ )
253
+ elif type(assembled_payload) is bytes:
254
+ payload_merged = assembled_payload
255
+ else:
256
+ payload_merged = bytes(assembled_payload)
257
+
258
+ if opcode == OP_CODE_TEXT:
259
+ try:
260
+ text = payload_merged.decode("utf-8")
261
+ except UnicodeDecodeError as exc:
262
+ raise WebSocketError(
263
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
264
+ ) from exc
265
+
266
+ # XXX: The Text and Binary messages here can be a performance
267
+ # bottleneck, so we use tuple.__new__ to improve performance.
268
+ # This is not type safe, but many tests should fail in
269
+ # test_client_ws_functional.py if this is wrong.
270
+ self.queue.feed_data(
271
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),
272
+ len(payload_merged),
273
+ )
274
+ else:
275
+ self.queue.feed_data(
276
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),
277
+ len(payload_merged),
278
+ )
279
+ elif opcode == OP_CODE_CLOSE:
280
+ if len(payload) >= 2:
281
+ close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
282
+ if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
283
+ raise WebSocketError(
284
+ WSCloseCode.PROTOCOL_ERROR,
285
+ f"Invalid close code: {close_code}",
286
+ )
287
+ try:
288
+ close_message = payload[2:].decode("utf-8")
289
+ except UnicodeDecodeError as exc:
290
+ raise WebSocketError(
291
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
292
+ ) from exc
293
+ msg = TUPLE_NEW(
294
+ WSMessage, (WSMsgType.CLOSE, close_code, close_message)
295
+ )
296
+ elif payload:
297
+ raise WebSocketError(
298
+ WSCloseCode.PROTOCOL_ERROR,
299
+ f"Invalid close frame: {fin} {opcode} {payload!r}",
300
+ )
301
+ else:
302
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))
303
+
304
+ self.queue.feed_data(msg, 0)
305
+ elif opcode == OP_CODE_PING:
306
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))
307
+ self.queue.feed_data(msg, len(payload))
308
+
309
+ elif opcode == OP_CODE_PONG:
310
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))
311
+ self.queue.feed_data(msg, len(payload))
312
+
313
+ else:
314
+ raise WebSocketError(
315
+ WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
316
+ )
317
+
318
+ def parse_frame(
319
+ self, buf: bytes
320
+ ) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]:
321
+ """Return the next frame from the socket."""
322
+ frames: List[
323
+ Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]
324
+ ] = []
325
+ if self._tail:
326
+ buf, self._tail = self._tail + buf, b""
327
+
328
+ start_pos: int = 0
329
+ buf_length = len(buf)
330
+
331
+ while True:
332
+ # read header
333
+ if self._state == READ_HEADER:
334
+ if buf_length - start_pos < 2:
335
+ break
336
+ first_byte = buf[start_pos]
337
+ second_byte = buf[start_pos + 1]
338
+ start_pos += 2
339
+
340
+ fin = (first_byte >> 7) & 1
341
+ rsv1 = (first_byte >> 6) & 1
342
+ rsv2 = (first_byte >> 5) & 1
343
+ rsv3 = (first_byte >> 4) & 1
344
+ opcode = first_byte & 0xF
345
+
346
+ # frame-fin = %x0 ; more frames of this message follow
347
+ # / %x1 ; final frame of this message
348
+ # frame-rsv1 = %x0 ;
349
+ # 1 bit, MUST be 0 unless negotiated otherwise
350
+ # frame-rsv2 = %x0 ;
351
+ # 1 bit, MUST be 0 unless negotiated otherwise
352
+ # frame-rsv3 = %x0 ;
353
+ # 1 bit, MUST be 0 unless negotiated otherwise
354
+ #
355
+ # Remove rsv1 from this test for deflate development
356
+ if rsv2 or rsv3 or (rsv1 and not self._compress):
357
+ raise WebSocketError(
358
+ WSCloseCode.PROTOCOL_ERROR,
359
+ "Received frame with non-zero reserved bits",
360
+ )
361
+
362
+ if opcode > 0x7 and fin == 0:
363
+ raise WebSocketError(
364
+ WSCloseCode.PROTOCOL_ERROR,
365
+ "Received fragmented control frame",
366
+ )
367
+
368
+ has_mask = (second_byte >> 7) & 1
369
+ length = second_byte & 0x7F
370
+
371
+ # Control frames MUST have a payload
372
+ # length of 125 bytes or less
373
+ if opcode > 0x7 and length > 125:
374
+ raise WebSocketError(
375
+ WSCloseCode.PROTOCOL_ERROR,
376
+ "Control frame payload cannot be larger than 125 bytes",
377
+ )
378
+
379
+ # Set compress status if last package is FIN
380
+ # OR set compress status if this is first fragment
381
+ # Raise error if not first fragment with rsv1 = 0x1
382
+ if self._frame_fin or self._compressed is None:
383
+ self._compressed = True if rsv1 else False
384
+ elif rsv1:
385
+ raise WebSocketError(
386
+ WSCloseCode.PROTOCOL_ERROR,
387
+ "Received frame with non-zero reserved bits",
388
+ )
389
+
390
+ self._frame_fin = bool(fin)
391
+ self._frame_opcode = opcode
392
+ self._has_mask = bool(has_mask)
393
+ self._payload_length_flag = length
394
+ self._state = READ_PAYLOAD_LENGTH
395
+
396
+ # read payload length
397
+ if self._state == READ_PAYLOAD_LENGTH:
398
+ length_flag = self._payload_length_flag
399
+ if length_flag == 126:
400
+ if buf_length - start_pos < 2:
401
+ break
402
+ first_byte = buf[start_pos]
403
+ second_byte = buf[start_pos + 1]
404
+ start_pos += 2
405
+ self._payload_length = first_byte << 8 | second_byte
406
+ elif length_flag > 126:
407
+ if buf_length - start_pos < 8:
408
+ break
409
+ data = buf[start_pos : start_pos + 8]
410
+ start_pos += 8
411
+ self._payload_length = UNPACK_LEN3(data)[0]
412
+ else:
413
+ self._payload_length = length_flag
414
+
415
+ self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD
416
+
417
+ # read payload mask
418
+ if self._state == READ_PAYLOAD_MASK:
419
+ if buf_length - start_pos < 4:
420
+ break
421
+ self._frame_mask = buf[start_pos : start_pos + 4]
422
+ start_pos += 4
423
+ self._state = READ_PAYLOAD
424
+
425
+ if self._state == READ_PAYLOAD:
426
+ chunk_len = buf_length - start_pos
427
+ if self._payload_length >= chunk_len:
428
+ end_pos = buf_length
429
+ self._payload_length -= chunk_len
430
+ else:
431
+ end_pos = start_pos + self._payload_length
432
+ self._payload_length = 0
433
+
434
+ if self._frame_payload_len:
435
+ if type(self._frame_payload) is not bytearray:
436
+ self._frame_payload = bytearray(self._frame_payload)
437
+ self._frame_payload += buf[start_pos:end_pos]
438
+ else:
439
+ # Fast path for the first frame
440
+ self._frame_payload = buf[start_pos:end_pos]
441
+
442
+ self._frame_payload_len += end_pos - start_pos
443
+ start_pos = end_pos
444
+
445
+ if self._payload_length != 0:
446
+ break
447
+
448
+ if self._has_mask:
449
+ assert self._frame_mask is not None
450
+ if type(self._frame_payload) is not bytearray:
451
+ self._frame_payload = bytearray(self._frame_payload)
452
+ websocket_mask(self._frame_mask, self._frame_payload)
453
+
454
+ frames.append(
455
+ (
456
+ self._frame_fin,
457
+ self._frame_opcode,
458
+ self._frame_payload,
459
+ self._compressed,
460
+ )
461
+ )
462
+ self._frame_payload = b""
463
+ self._frame_payload_len = 0
464
+ self._state = READ_HEADER
465
+
466
+ self._tail = buf[start_pos:] if start_pos < buf_length else b""
467
+
468
+ return frames
.venv/lib/python3.11/site-packages/aiohttp/_websocket/reader_py.py ADDED
@@ -0,0 +1,468 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Reader for WebSocket protocol versions 13 and 8."""
2
+
3
+ import asyncio
4
+ import builtins
5
+ from collections import deque
6
+ from typing import Deque, Final, List, Optional, Set, Tuple, Union
7
+
8
+ from ..base_protocol import BaseProtocol
9
+ from ..compression_utils import ZLibDecompressor
10
+ from ..helpers import _EXC_SENTINEL, set_exception
11
+ from ..streams import EofStream
12
+ from .helpers import UNPACK_CLOSE_CODE, UNPACK_LEN3, websocket_mask
13
+ from .models import (
14
+ WS_DEFLATE_TRAILING,
15
+ WebSocketError,
16
+ WSCloseCode,
17
+ WSMessage,
18
+ WSMsgType,
19
+ )
20
+
21
+ ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
22
+
23
+ # States for the reader, used to parse the WebSocket frame
24
+ # integer values are used so they can be cythonized
25
+ READ_HEADER = 1
26
+ READ_PAYLOAD_LENGTH = 2
27
+ READ_PAYLOAD_MASK = 3
28
+ READ_PAYLOAD = 4
29
+
30
+ WS_MSG_TYPE_BINARY = WSMsgType.BINARY
31
+ WS_MSG_TYPE_TEXT = WSMsgType.TEXT
32
+
33
+ # WSMsgType values unpacked so they can by cythonized to ints
34
+ OP_CODE_CONTINUATION = WSMsgType.CONTINUATION.value
35
+ OP_CODE_TEXT = WSMsgType.TEXT.value
36
+ OP_CODE_BINARY = WSMsgType.BINARY.value
37
+ OP_CODE_CLOSE = WSMsgType.CLOSE.value
38
+ OP_CODE_PING = WSMsgType.PING.value
39
+ OP_CODE_PONG = WSMsgType.PONG.value
40
+
41
+ EMPTY_FRAME_ERROR = (True, b"")
42
+ EMPTY_FRAME = (False, b"")
43
+
44
+ TUPLE_NEW = tuple.__new__
45
+
46
+ int_ = int # Prevent Cython from converting to PyInt
47
+
48
+
49
+ class WebSocketDataQueue:
50
+ """WebSocketDataQueue resumes and pauses an underlying stream.
51
+
52
+ It is a destination for WebSocket data.
53
+ """
54
+
55
+ def __init__(
56
+ self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
57
+ ) -> None:
58
+ self._size = 0
59
+ self._protocol = protocol
60
+ self._limit = limit * 2
61
+ self._loop = loop
62
+ self._eof = False
63
+ self._waiter: Optional[asyncio.Future[None]] = None
64
+ self._exception: Union[BaseException, None] = None
65
+ self._buffer: Deque[Tuple[WSMessage, int]] = deque()
66
+ self._get_buffer = self._buffer.popleft
67
+ self._put_buffer = self._buffer.append
68
+
69
+ def is_eof(self) -> bool:
70
+ return self._eof
71
+
72
+ def exception(self) -> Optional[BaseException]:
73
+ return self._exception
74
+
75
+ def set_exception(
76
+ self,
77
+ exc: "BaseException",
78
+ exc_cause: builtins.BaseException = _EXC_SENTINEL,
79
+ ) -> None:
80
+ self._eof = True
81
+ self._exception = exc
82
+ if (waiter := self._waiter) is not None:
83
+ self._waiter = None
84
+ set_exception(waiter, exc, exc_cause)
85
+
86
+ def _release_waiter(self) -> None:
87
+ if (waiter := self._waiter) is None:
88
+ return
89
+ self._waiter = None
90
+ if not waiter.done():
91
+ waiter.set_result(None)
92
+
93
+ def feed_eof(self) -> None:
94
+ self._eof = True
95
+ self._release_waiter()
96
+
97
+ def feed_data(self, data: "WSMessage", size: "int_") -> None:
98
+ self._size += size
99
+ self._put_buffer((data, size))
100
+ self._release_waiter()
101
+ if self._size > self._limit and not self._protocol._reading_paused:
102
+ self._protocol.pause_reading()
103
+
104
+ async def read(self) -> WSMessage:
105
+ if not self._buffer and not self._eof:
106
+ assert not self._waiter
107
+ self._waiter = self._loop.create_future()
108
+ try:
109
+ await self._waiter
110
+ except (asyncio.CancelledError, asyncio.TimeoutError):
111
+ self._waiter = None
112
+ raise
113
+ return self._read_from_buffer()
114
+
115
+ def _read_from_buffer(self) -> WSMessage:
116
+ if self._buffer:
117
+ data, size = self._get_buffer()
118
+ self._size -= size
119
+ if self._size < self._limit and self._protocol._reading_paused:
120
+ self._protocol.resume_reading()
121
+ return data
122
+ if self._exception is not None:
123
+ raise self._exception
124
+ raise EofStream
125
+
126
+
127
+ class WebSocketReader:
128
+ def __init__(
129
+ self, queue: WebSocketDataQueue, max_msg_size: int, compress: bool = True
130
+ ) -> None:
131
+ self.queue = queue
132
+ self._max_msg_size = max_msg_size
133
+
134
+ self._exc: Optional[Exception] = None
135
+ self._partial = bytearray()
136
+ self._state = READ_HEADER
137
+
138
+ self._opcode: Optional[int] = None
139
+ self._frame_fin = False
140
+ self._frame_opcode: Optional[int] = None
141
+ self._frame_payload: Union[bytes, bytearray] = b""
142
+ self._frame_payload_len = 0
143
+
144
+ self._tail: bytes = b""
145
+ self._has_mask = False
146
+ self._frame_mask: Optional[bytes] = None
147
+ self._payload_length = 0
148
+ self._payload_length_flag = 0
149
+ self._compressed: Optional[bool] = None
150
+ self._decompressobj: Optional[ZLibDecompressor] = None
151
+ self._compress = compress
152
+
153
+ def feed_eof(self) -> None:
154
+ self.queue.feed_eof()
155
+
156
+ # data can be bytearray on Windows because proactor event loop uses bytearray
157
+ # and asyncio types this to Union[bytes, bytearray, memoryview] so we need
158
+ # coerce data to bytes if it is not
159
+ def feed_data(
160
+ self, data: Union[bytes, bytearray, memoryview]
161
+ ) -> Tuple[bool, bytes]:
162
+ if type(data) is not bytes:
163
+ data = bytes(data)
164
+
165
+ if self._exc is not None:
166
+ return True, data
167
+
168
+ try:
169
+ self._feed_data(data)
170
+ except Exception as exc:
171
+ self._exc = exc
172
+ set_exception(self.queue, exc)
173
+ return EMPTY_FRAME_ERROR
174
+
175
+ return EMPTY_FRAME
176
+
177
+ def _feed_data(self, data: bytes) -> None:
178
+ msg: WSMessage
179
+ for frame in self.parse_frame(data):
180
+ fin = frame[0]
181
+ opcode = frame[1]
182
+ payload = frame[2]
183
+ compressed = frame[3]
184
+
185
+ is_continuation = opcode == OP_CODE_CONTINUATION
186
+ if opcode == OP_CODE_TEXT or opcode == OP_CODE_BINARY or is_continuation:
187
+ # load text/binary
188
+ if not fin:
189
+ # got partial frame payload
190
+ if not is_continuation:
191
+ self._opcode = opcode
192
+ self._partial += payload
193
+ if self._max_msg_size and len(self._partial) >= self._max_msg_size:
194
+ raise WebSocketError(
195
+ WSCloseCode.MESSAGE_TOO_BIG,
196
+ "Message size {} exceeds limit {}".format(
197
+ len(self._partial), self._max_msg_size
198
+ ),
199
+ )
200
+ continue
201
+
202
+ has_partial = bool(self._partial)
203
+ if is_continuation:
204
+ if self._opcode is None:
205
+ raise WebSocketError(
206
+ WSCloseCode.PROTOCOL_ERROR,
207
+ "Continuation frame for non started message",
208
+ )
209
+ opcode = self._opcode
210
+ self._opcode = None
211
+ # previous frame was non finished
212
+ # we should get continuation opcode
213
+ elif has_partial:
214
+ raise WebSocketError(
215
+ WSCloseCode.PROTOCOL_ERROR,
216
+ "The opcode in non-fin frame is expected "
217
+ "to be zero, got {!r}".format(opcode),
218
+ )
219
+
220
+ assembled_payload: Union[bytes, bytearray]
221
+ if has_partial:
222
+ assembled_payload = self._partial + payload
223
+ self._partial.clear()
224
+ else:
225
+ assembled_payload = payload
226
+
227
+ if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
228
+ raise WebSocketError(
229
+ WSCloseCode.MESSAGE_TOO_BIG,
230
+ "Message size {} exceeds limit {}".format(
231
+ len(assembled_payload), self._max_msg_size
232
+ ),
233
+ )
234
+
235
+ # Decompress process must to be done after all packets
236
+ # received.
237
+ if compressed:
238
+ if not self._decompressobj:
239
+ self._decompressobj = ZLibDecompressor(
240
+ suppress_deflate_header=True
241
+ )
242
+ payload_merged = self._decompressobj.decompress_sync(
243
+ assembled_payload + WS_DEFLATE_TRAILING, self._max_msg_size
244
+ )
245
+ if self._decompressobj.unconsumed_tail:
246
+ left = len(self._decompressobj.unconsumed_tail)
247
+ raise WebSocketError(
248
+ WSCloseCode.MESSAGE_TOO_BIG,
249
+ "Decompressed message size {} exceeds limit {}".format(
250
+ self._max_msg_size + left, self._max_msg_size
251
+ ),
252
+ )
253
+ elif type(assembled_payload) is bytes:
254
+ payload_merged = assembled_payload
255
+ else:
256
+ payload_merged = bytes(assembled_payload)
257
+
258
+ if opcode == OP_CODE_TEXT:
259
+ try:
260
+ text = payload_merged.decode("utf-8")
261
+ except UnicodeDecodeError as exc:
262
+ raise WebSocketError(
263
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
264
+ ) from exc
265
+
266
+ # XXX: The Text and Binary messages here can be a performance
267
+ # bottleneck, so we use tuple.__new__ to improve performance.
268
+ # This is not type safe, but many tests should fail in
269
+ # test_client_ws_functional.py if this is wrong.
270
+ self.queue.feed_data(
271
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_TEXT, text, "")),
272
+ len(payload_merged),
273
+ )
274
+ else:
275
+ self.queue.feed_data(
276
+ TUPLE_NEW(WSMessage, (WS_MSG_TYPE_BINARY, payload_merged, "")),
277
+ len(payload_merged),
278
+ )
279
+ elif opcode == OP_CODE_CLOSE:
280
+ if len(payload) >= 2:
281
+ close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
282
+ if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
283
+ raise WebSocketError(
284
+ WSCloseCode.PROTOCOL_ERROR,
285
+ f"Invalid close code: {close_code}",
286
+ )
287
+ try:
288
+ close_message = payload[2:].decode("utf-8")
289
+ except UnicodeDecodeError as exc:
290
+ raise WebSocketError(
291
+ WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
292
+ ) from exc
293
+ msg = TUPLE_NEW(
294
+ WSMessage, (WSMsgType.CLOSE, close_code, close_message)
295
+ )
296
+ elif payload:
297
+ raise WebSocketError(
298
+ WSCloseCode.PROTOCOL_ERROR,
299
+ f"Invalid close frame: {fin} {opcode} {payload!r}",
300
+ )
301
+ else:
302
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.CLOSE, 0, ""))
303
+
304
+ self.queue.feed_data(msg, 0)
305
+ elif opcode == OP_CODE_PING:
306
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PING, payload, ""))
307
+ self.queue.feed_data(msg, len(payload))
308
+
309
+ elif opcode == OP_CODE_PONG:
310
+ msg = TUPLE_NEW(WSMessage, (WSMsgType.PONG, payload, ""))
311
+ self.queue.feed_data(msg, len(payload))
312
+
313
+ else:
314
+ raise WebSocketError(
315
+ WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
316
+ )
317
+
318
    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]]:
        """Return the list of complete frames parsed from ``buf``.

        Each element is a ``(fin, opcode, payload, compressed)`` tuple.  The
        parser is an incremental state machine: any bytes left over after the
        last complete frame are saved in ``self._tail`` and prepended to the
        buffer on the next call, so partial frames survive across calls.

        Raises WebSocketError (PROTOCOL_ERROR) on RFC 6455 framing violations.
        """
        frames: List[
            Tuple[bool, Optional[int], Union[bytes, bytearray], Optional[bool]]
        ] = []
        if self._tail:
            # Prepend the unconsumed remainder of the previous buffer.
            buf, self._tail = self._tail + buf, b""

        start_pos: int = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == READ_HEADER:
                # Need the two fixed header bytes before anything else.
                if buf_length - start_pos < 2:
                    break
                first_byte = buf[start_pos]
                second_byte = buf[start_pos + 1]
                start_pos += 2

                fin = (first_byte >> 7) & 1
                rsv1 = (first_byte >> 6) & 1
                rsv2 = (first_byte >> 5) & 1
                rsv3 = (first_byte >> 4) & 1
                opcode = first_byte & 0xF

                # frame-fin = %x0 ; more frames of this message follow
                #           / %x1 ; final frame of this message
                # frame-rsv1 = %x0 ;
                #    1 bit, MUST be 0 unless negotiated otherwise
                # frame-rsv2 = %x0 ;
                #    1 bit, MUST be 0 unless negotiated otherwise
                # frame-rsv3 = %x0 ;
                #    1 bit, MUST be 0 unless negotiated otherwise
                #
                # Remove rsv1 from this test for deflate development
                if rsv2 or rsv3 or (rsv1 and not self._compress):
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received frame with non-zero reserved bits",
                    )

                # Control frames (opcode > 0x7) must never be fragmented.
                if opcode > 0x7 and fin == 0:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received fragmented control frame",
                    )

                has_mask = (second_byte >> 7) & 1
                length = second_byte & 0x7F

                # Control frames MUST have a payload
                # length of 125 bytes or less
                if opcode > 0x7 and length > 125:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Control frame payload cannot be larger than 125 bytes",
                    )

                # Set compress status if last package is FIN
                # OR set compress status if this is first fragment
                # Raise error if not first fragment with rsv1 = 0x1
                if self._frame_fin or self._compressed is None:
                    self._compressed = True if rsv1 else False
                elif rsv1:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received frame with non-zero reserved bits",
                    )

                self._frame_fin = bool(fin)
                self._frame_opcode = opcode
                self._has_mask = bool(has_mask)
                self._payload_length_flag = length
                self._state = READ_PAYLOAD_LENGTH

            # read payload length
            if self._state == READ_PAYLOAD_LENGTH:
                length_flag = self._payload_length_flag
                if length_flag == 126:
                    # 16-bit extended payload length (big-endian).
                    if buf_length - start_pos < 2:
                        break
                    first_byte = buf[start_pos]
                    second_byte = buf[start_pos + 1]
                    start_pos += 2
                    self._payload_length = first_byte << 8 | second_byte
                elif length_flag > 126:
                    # 64-bit extended payload length.
                    if buf_length - start_pos < 8:
                        break
                    data = buf[start_pos : start_pos + 8]
                    start_pos += 8
                    self._payload_length = UNPACK_LEN3(data)[0]
                else:
                    # Length fits in the 7-bit field itself.
                    self._payload_length = length_flag

                self._state = READ_PAYLOAD_MASK if self._has_mask else READ_PAYLOAD

            # read payload mask
            if self._state == READ_PAYLOAD_MASK:
                if buf_length - start_pos < 4:
                    break
                self._frame_mask = buf[start_pos : start_pos + 4]
                start_pos += 4
                self._state = READ_PAYLOAD

            if self._state == READ_PAYLOAD:
                chunk_len = buf_length - start_pos
                if self._payload_length >= chunk_len:
                    # Frame continues past this buffer: consume everything
                    # and keep waiting for the rest.
                    end_pos = buf_length
                    self._payload_length -= chunk_len
                else:
                    end_pos = start_pos + self._payload_length
                    self._payload_length = 0

                if self._frame_payload_len:
                    # Accumulating across calls: promote to bytearray once so
                    # further appends are in-place.
                    if type(self._frame_payload) is not bytearray:
                        self._frame_payload = bytearray(self._frame_payload)
                    self._frame_payload += buf[start_pos:end_pos]
                else:
                    # Fast path for the first frame
                    self._frame_payload = buf[start_pos:end_pos]

                self._frame_payload_len += end_pos - start_pos
                start_pos = end_pos

                # Frame still incomplete; wait for more data.
                if self._payload_length != 0:
                    break

                if self._has_mask:
                    assert self._frame_mask is not None
                    # Unmasking mutates the buffer, so a bytearray is required.
                    if type(self._frame_payload) is not bytearray:
                        self._frame_payload = bytearray(self._frame_payload)
                    websocket_mask(self._frame_mask, self._frame_payload)

                frames.append(
                    (
                        self._frame_fin,
                        self._frame_opcode,
                        self._frame_payload,
                        self._compressed,
                    )
                )
                self._frame_payload = b""
                self._frame_payload_len = 0
                self._state = READ_HEADER

        # Stash any unparsed remainder for the next call.
        self._tail = buf[start_pos:] if start_pos < buf_length else b""

        return frames
.venv/lib/python3.11/site-packages/aiohttp/_websocket/writer.py ADDED
@@ -0,0 +1,177 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """WebSocket protocol versions 13 and 8."""
2
+
3
+ import asyncio
4
+ import random
5
+ import zlib
6
+ from functools import partial
7
+ from typing import Any, Final, Optional, Union
8
+
9
+ from ..base_protocol import BaseProtocol
10
+ from ..client_exceptions import ClientConnectionResetError
11
+ from ..compression_utils import ZLibCompressor
12
+ from .helpers import (
13
+ MASK_LEN,
14
+ MSG_SIZE,
15
+ PACK_CLOSE_CODE,
16
+ PACK_LEN1,
17
+ PACK_LEN2,
18
+ PACK_LEN3,
19
+ PACK_RANDBITS,
20
+ websocket_mask,
21
+ )
22
+ from .models import WS_DEFLATE_TRAILING, WSMsgType
23
+
24
# High-water mark (bytes) of buffered output before the writer drains.
DEFAULT_LIMIT: Final[int] = 2**16

# For websockets, keeping latency low is extremely important as implementations
# generally expect to be able to send and receive messages quickly. We use a
# larger chunk size than the default to reduce the number of executor calls
# since the executor is a significant source of latency and overhead when
# the chunks are small. A size of 5KiB was chosen because it is also the
# same value python-zlib-ng choose to use as the threshold to release the GIL.

WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
34
+
35
+
36
class WebSocketWriter:
    """WebSocket writer.

    The writer is responsible for sending messages to the client. It is
    created by the protocol when a connection is established. The writer
    should avoid implementing any application logic and should only be
    concerned with the low-level details of the WebSocket protocol.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: random.Random = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        """Initialize a WebSocket writer.

        :param protocol: owning protocol, used for write flow control (drain).
        :param transport: transport the frames are written to.
        :param use_mask: mask outgoing payloads (clients mask, servers don't).
        :param limit: buffered-output high-water mark before draining.
        :param random: RNG used to generate frame masks.
            NOTE: the default instance is intentionally shared between writers.
        :param compress: permessage-deflate window bits; 0 disables compression.
        :param notakeover: if True, reset the compression context after each
            message (no context takeover).
        """
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        # Pre-bound for speed: one call produces a 32-bit mask value.
        self.get_random_bits = partial(random.getrandbits, 32)
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        # After close() only further CLOSE frames are allowed out.
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ClientConnectionResetError("Cannot write to closing transport")

        # RSV are the reserved bits in the frame header. They are used to
        # indicate that the frame is using an extension.
        # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
        rsv = 0
        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            # RSV1 (rsv = 0x40) is set for compressed frames
            # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1
            rsv = 0x40

            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = self._make_compress_obj(compress)
            else:  # self.compress
                # Lazily create the shared (context-takeover) compressor.
                if not self._compressobj:
                    self._compressobj = self._make_compress_obj(self.compress)
                compressobj = self._compressobj

            # Flush with Z_FULL_FLUSH when no context takeover, then strip the
            # deflate sync trailer as required by permessage-deflate.
            message = (
                await compressobj.compress(message)
                + compressobj.flush(
                    zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
                )
            ).removesuffix(WS_DEFLATE_TRAILING)
            # Its critical that we do not return control to the event
            # loop until we have finished sending all the compressed
            # data. Otherwise we could end up mixing compressed frames
            # if there are multiple coroutines compressing data.

        msg_length = len(message)

        use_mask = self.use_mask
        mask_bit = 0x80 if use_mask else 0

        # Depending on the message length, the header is assembled differently.
        # The first byte is reserved for the opcode and the RSV bits.
        first_byte = 0x80 | rsv | opcode
        if msg_length < 126:
            # 7-bit length fits directly in the second header byte.
            header = PACK_LEN1(first_byte, msg_length | mask_bit)
            header_len = 2
        elif msg_length < 65536:
            # 16-bit extended length.
            header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length)
            header_len = 4
        else:
            # 64-bit extended length.
            header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length)
            header_len = 10

        if self.transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")

        # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3
        # If we are using a mask, we need to generate it randomly
        # and apply it to the message before sending it. A mask is
        # a 32-bit value that is applied to the message using a
        # bitwise XOR operation. It is used to prevent certain types
        # of attacks on the websocket protocol. The mask is only used
        # when aiohttp is acting as a client. Servers do not use a mask.
        if use_mask:
            mask = PACK_RANDBITS(self.get_random_bits())
            message = bytearray(message)  # masking mutates the payload in place
            websocket_mask(mask, message)
            self.transport.write(header + mask + message)
            self._output_size += MASK_LEN
        elif msg_length > MSG_SIZE:
            # Large payload: write header and body separately to avoid
            # copying the body into a new joined buffer.
            self.transport.write(header)
            self.transport.write(message)
        else:
            self.transport.write(header + message)

        self._output_size += header_len + msg_length

        # It is safe to return control to the event loop when using compression
        # after this point as we have already sent or buffered all the data.

        # Once we have written output_size up to the limit, we call the
        # drain helper which waits for the transport to be ready to accept
        # more data. This is a flow control mechanism to prevent the buffer
        # from growing too large. The drain helper will return right away
        # if the writer is not paused.
        if self._output_size > self._limit:
            self._output_size = 0
            if self.protocol._paused:
                await self.protocol._drain_helper()

    def _make_compress_obj(self, compress: int) -> ZLibCompressor:
        """Build a raw-deflate compressor with the given window bits."""
        return ZLibCompressor(
            level=zlib.Z_BEST_SPEED,
            wbits=-compress,  # negative wbits -> raw deflate (no zlib header)
            max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
        )

    async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self.send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if sending the frame failed, so no further
            # data frames are written on this connection.
            self._closing = True
.venv/lib/python3.11/site-packages/aiohttp/base_protocol.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import asyncio
2
+ from typing import Optional, cast
3
+
4
+ from .client_exceptions import ClientConnectionResetError
5
+ from .helpers import set_exception
6
+ from .tcp_helpers import tcp_nodelay
7
+
8
+
9
class BaseProtocol(asyncio.Protocol):
    """Common asyncio protocol base handling write/read flow control.

    Tracks write-pause state coming from the transport's high-water mark
    and exposes :meth:`_drain_helper` so writers can await until the
    transport is ready for more data.
    """

    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self.transport: Optional[asyncio.Transport] = None

        self._loop: asyncio.AbstractEventLoop = loop
        self._paused = False
        self._reading_paused = False
        # Future resolved by resume_writing()/connection_lost() to wake drains.
        self._drain_waiter: Optional[asyncio.Future[None]] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    @property
    def writing_paused(self) -> bool:
        """True while the transport asked us to stop writing."""
        return self._paused

    def pause_writing(self) -> None:
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        assert self._paused
        self._paused = False

        # Release anyone blocked in _drain_helper().
        if (pending := self._drain_waiter) is None:
            return
        self._drain_waiter = None
        if not pending.done():
            pending.set_result(None)

    def pause_reading(self) -> None:
        # No-op when already paused or the connection is gone.
        if self._reading_paused or self.transport is None:
            return
        try:
            self.transport.pause_reading()
        except (AttributeError, NotImplementedError, RuntimeError):
            # Some transports do not support read flow control.
            pass
        self._reading_paused = True

    def resume_reading(self) -> None:
        if not self._reading_paused or self.transport is None:
            return
        try:
            self.transport.resume_reading()
        except (AttributeError, NotImplementedError, RuntimeError):
            pass
        self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tcp_transport = cast(asyncio.Transport, transport)
        tcp_nodelay(tcp_transport, True)
        self.transport = tcp_transport

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        pending = self._drain_waiter
        if pending is None:
            return
        self._drain_waiter = None
        if pending.done():
            return
        if exc is None:
            pending.set_result(None)
            return
        set_exception(
            pending,
            ConnectionError("Connection lost"),
            exc,
        )

    async def _drain_helper(self) -> None:
        if self.transport is None:
            raise ClientConnectionResetError("Connection lost")
        if not self._paused:
            return
        if (pending := self._drain_waiter) is None:
            pending = self._loop.create_future()
            self._drain_waiter = pending
        # Shield so a cancelled caller does not cancel the shared waiter.
        await asyncio.shield(pending)
.venv/lib/python3.11/site-packages/aiohttp/client.py ADDED
@@ -0,0 +1,1550 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP Client for asyncio."""
2
+
3
+ import asyncio
4
+ import base64
5
+ import hashlib
6
+ import json
7
+ import os
8
+ import sys
9
+ import traceback
10
+ import warnings
11
+ from contextlib import suppress
12
+ from types import TracebackType
13
+ from typing import (
14
+ TYPE_CHECKING,
15
+ Any,
16
+ Awaitable,
17
+ Callable,
18
+ Coroutine,
19
+ Final,
20
+ FrozenSet,
21
+ Generator,
22
+ Generic,
23
+ Iterable,
24
+ List,
25
+ Mapping,
26
+ Optional,
27
+ Set,
28
+ Tuple,
29
+ Type,
30
+ TypedDict,
31
+ TypeVar,
32
+ Union,
33
+ )
34
+
35
+ import attr
36
+ from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
37
+ from yarl import URL
38
+
39
+ from . import hdrs, http, payload
40
+ from ._websocket.reader import WebSocketDataQueue
41
+ from .abc import AbstractCookieJar
42
+ from .client_exceptions import (
43
+ ClientConnectionError,
44
+ ClientConnectionResetError,
45
+ ClientConnectorCertificateError,
46
+ ClientConnectorDNSError,
47
+ ClientConnectorError,
48
+ ClientConnectorSSLError,
49
+ ClientError,
50
+ ClientHttpProxyError,
51
+ ClientOSError,
52
+ ClientPayloadError,
53
+ ClientProxyConnectionError,
54
+ ClientResponseError,
55
+ ClientSSLError,
56
+ ConnectionTimeoutError,
57
+ ContentTypeError,
58
+ InvalidURL,
59
+ InvalidUrlClientError,
60
+ InvalidUrlRedirectClientError,
61
+ NonHttpUrlClientError,
62
+ NonHttpUrlRedirectClientError,
63
+ RedirectClientError,
64
+ ServerConnectionError,
65
+ ServerDisconnectedError,
66
+ ServerFingerprintMismatch,
67
+ ServerTimeoutError,
68
+ SocketTimeoutError,
69
+ TooManyRedirects,
70
+ WSMessageTypeError,
71
+ WSServerHandshakeError,
72
+ )
73
+ from .client_reqrep import (
74
+ ClientRequest as ClientRequest,
75
+ ClientResponse as ClientResponse,
76
+ Fingerprint as Fingerprint,
77
+ RequestInfo as RequestInfo,
78
+ _merge_ssl_params,
79
+ )
80
+ from .client_ws import (
81
+ DEFAULT_WS_CLIENT_TIMEOUT,
82
+ ClientWebSocketResponse as ClientWebSocketResponse,
83
+ ClientWSTimeout as ClientWSTimeout,
84
+ )
85
+ from .connector import (
86
+ HTTP_AND_EMPTY_SCHEMA_SET,
87
+ BaseConnector as BaseConnector,
88
+ NamedPipeConnector as NamedPipeConnector,
89
+ TCPConnector as TCPConnector,
90
+ UnixConnector as UnixConnector,
91
+ )
92
+ from .cookiejar import CookieJar
93
+ from .helpers import (
94
+ _SENTINEL,
95
+ DEBUG,
96
+ EMPTY_BODY_METHODS,
97
+ BasicAuth,
98
+ TimeoutHandle,
99
+ get_env_proxy_for_url,
100
+ sentinel,
101
+ strip_auth_from_url,
102
+ )
103
+ from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
104
+ from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse
105
+ from .tracing import Trace, TraceConfig
106
+ from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL
107
+
108
+ __all__ = (
109
+ # client_exceptions
110
+ "ClientConnectionError",
111
+ "ClientConnectionResetError",
112
+ "ClientConnectorCertificateError",
113
+ "ClientConnectorDNSError",
114
+ "ClientConnectorError",
115
+ "ClientConnectorSSLError",
116
+ "ClientError",
117
+ "ClientHttpProxyError",
118
+ "ClientOSError",
119
+ "ClientPayloadError",
120
+ "ClientProxyConnectionError",
121
+ "ClientResponseError",
122
+ "ClientSSLError",
123
+ "ConnectionTimeoutError",
124
+ "ContentTypeError",
125
+ "InvalidURL",
126
+ "InvalidUrlClientError",
127
+ "RedirectClientError",
128
+ "NonHttpUrlClientError",
129
+ "InvalidUrlRedirectClientError",
130
+ "NonHttpUrlRedirectClientError",
131
+ "ServerConnectionError",
132
+ "ServerDisconnectedError",
133
+ "ServerFingerprintMismatch",
134
+ "ServerTimeoutError",
135
+ "SocketTimeoutError",
136
+ "TooManyRedirects",
137
+ "WSServerHandshakeError",
138
+ # client_reqrep
139
+ "ClientRequest",
140
+ "ClientResponse",
141
+ "Fingerprint",
142
+ "RequestInfo",
143
+ # connector
144
+ "BaseConnector",
145
+ "TCPConnector",
146
+ "UnixConnector",
147
+ "NamedPipeConnector",
148
+ # client_ws
149
+ "ClientWebSocketResponse",
150
+ # client
151
+ "ClientSession",
152
+ "ClientTimeout",
153
+ "ClientWSTimeout",
154
+ "request",
155
+ "WSMessageTypeError",
156
+ )
157
+
158
+
159
+ if TYPE_CHECKING:
160
+ from ssl import SSLContext
161
+ else:
162
+ SSLContext = None
163
+
164
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
165
+ from typing import Unpack
166
+
167
+
168
class _RequestOptions(TypedDict, total=False):
    """Keyword arguments accepted by ClientSession request helpers.

    ``total=False`` makes every key optional.  This is a typing-only helper:
    it is consumed via ``Unpack[_RequestOptions]`` in the Python 3.11+
    ``TYPE_CHECKING`` overloads of the session's request methods and has no
    runtime behavior of its own.
    """

    params: Query
    data: Any
    json: Any
    cookies: Union[LooseCookies, None]
    headers: Union[LooseHeaders, None]
    skip_auto_headers: Union[Iterable[str], None]
    auth: Union[BasicAuth, None]
    allow_redirects: bool
    max_redirects: int
    compress: Union[str, bool, None]
    chunked: Union[bool, None]
    expect100: bool
    raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]]
    read_until_eof: bool
    proxy: Union[StrOrURL, None]
    proxy_auth: Union[BasicAuth, None]
    # Sentinel means "use the session default timeout".
    timeout: "Union[ClientTimeout, _SENTINEL, None]"
    ssl: Union[SSLContext, bool, Fingerprint]
    server_hostname: Union[str, None]
    proxy_headers: Union[LooseHeaders, None]
    trace_request_ctx: Union[Mapping[str, Any], None]
    read_bufsize: Union[int, None]
    auto_decompress: Union[bool, None]
    max_line_size: Union[int, None]
    max_field_size: Union[int, None]
194
+
195
+
196
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ClientTimeout:
    """Immutable timeout settings for client requests (seconds).

    ``None`` disables the corresponding timeout.  When a session is created
    without an explicit ``timeout``, ``DEFAULT_TIMEOUT`` is used instead of
    these all-``None`` defaults.

    NOTE(review): precise per-field semantics (total vs. connect vs.
    sock_read/sock_connect, and how ceil_threshold feeds timer ceiling)
    are applied outside this chunk — confirm against the aiohttp client
    timeout documentation before relying on them.
    """

    total: Optional[float] = None
    connect: Optional[float] = None
    sock_read: Optional[float] = None
    sock_connect: Optional[float] = None
    ceil_threshold: float = 5

    # pool_queue_timeout: Optional[float] = None
    # dns_resolution_timeout: Optional[float] = None
    # socket_connect_timeout: Optional[float] = None
    # connection_acquiring_timeout: Optional[float] = None
    # new_connection_timeout: Optional[float] = None
    # http_header_timeout: Optional[float] = None
    # response_body_timeout: Optional[float] = None

    # to create a timeout specific for a single request, either
    # - create a completely new one to overwrite the default
    # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
    # to overwrite the defaults
216
+
217
+
218
# 5 Minute default read timeout
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30)

# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2
IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"})

# Response type a request can yield: plain HTTP response or websocket.
_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse)
# Fallback charset resolver: maps (response, raw body) -> encoding name.
_CharsetResolver = Callable[[ClientResponse, bytes], str]
226
+
227
+
228
+ class ClientSession:
229
+ """First-class interface for making HTTP requests."""
230
+
231
+ ATTRS = frozenset(
232
+ [
233
+ "_base_url",
234
+ "_base_url_origin",
235
+ "_source_traceback",
236
+ "_connector",
237
+ "_loop",
238
+ "_cookie_jar",
239
+ "_connector_owner",
240
+ "_default_auth",
241
+ "_version",
242
+ "_json_serialize",
243
+ "_requote_redirect_url",
244
+ "_timeout",
245
+ "_raise_for_status",
246
+ "_auto_decompress",
247
+ "_trust_env",
248
+ "_default_headers",
249
+ "_skip_auto_headers",
250
+ "_request_class",
251
+ "_response_class",
252
+ "_ws_response_class",
253
+ "_trace_configs",
254
+ "_read_bufsize",
255
+ "_max_line_size",
256
+ "_max_field_size",
257
+ "_resolve_charset",
258
+ "_default_proxy",
259
+ "_default_proxy_auth",
260
+ "_retry_connection",
261
+ "requote_redirect_url",
262
+ ]
263
+ )
264
+
265
+ _source_traceback: Optional[traceback.StackSummary] = None
266
+ _connector: Optional[BaseConnector] = None
267
+
268
+ def __init__(
269
+ self,
270
+ base_url: Optional[StrOrURL] = None,
271
+ *,
272
+ connector: Optional[BaseConnector] = None,
273
+ loop: Optional[asyncio.AbstractEventLoop] = None,
274
+ cookies: Optional[LooseCookies] = None,
275
+ headers: Optional[LooseHeaders] = None,
276
+ proxy: Optional[StrOrURL] = None,
277
+ proxy_auth: Optional[BasicAuth] = None,
278
+ skip_auto_headers: Optional[Iterable[str]] = None,
279
+ auth: Optional[BasicAuth] = None,
280
+ json_serialize: JSONEncoder = json.dumps,
281
+ request_class: Type[ClientRequest] = ClientRequest,
282
+ response_class: Type[ClientResponse] = ClientResponse,
283
+ ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
284
+ version: HttpVersion = http.HttpVersion11,
285
+ cookie_jar: Optional[AbstractCookieJar] = None,
286
+ connector_owner: bool = True,
287
+ raise_for_status: Union[
288
+ bool, Callable[[ClientResponse], Awaitable[None]]
289
+ ] = False,
290
+ read_timeout: Union[float, _SENTINEL] = sentinel,
291
+ conn_timeout: Optional[float] = None,
292
+ timeout: Union[object, ClientTimeout] = sentinel,
293
+ auto_decompress: bool = True,
294
+ trust_env: bool = False,
295
+ requote_redirect_url: bool = True,
296
+ trace_configs: Optional[List[TraceConfig]] = None,
297
+ read_bufsize: int = 2**16,
298
+ max_line_size: int = 8190,
299
+ max_field_size: int = 8190,
300
+ fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
301
+ ) -> None:
302
+ # We initialise _connector to None immediately, as it's referenced in __del__()
303
+ # and could cause issues if an exception occurs during initialisation.
304
+ self._connector: Optional[BaseConnector] = None
305
+
306
+ if loop is None:
307
+ if connector is not None:
308
+ loop = connector._loop
309
+
310
+ loop = loop or asyncio.get_running_loop()
311
+
312
+ if base_url is None or isinstance(base_url, URL):
313
+ self._base_url: Optional[URL] = base_url
314
+ self._base_url_origin = None if base_url is None else base_url.origin()
315
+ else:
316
+ self._base_url = URL(base_url)
317
+ self._base_url_origin = self._base_url.origin()
318
+ assert self._base_url.absolute, "Only absolute URLs are supported"
319
+ if self._base_url is not None and not self._base_url.path.endswith("/"):
320
+ raise ValueError("base_url must have a trailing '/'")
321
+
322
+ if timeout is sentinel or timeout is None:
323
+ self._timeout = DEFAULT_TIMEOUT
324
+ if read_timeout is not sentinel:
325
+ warnings.warn(
326
+ "read_timeout is deprecated, use timeout argument instead",
327
+ DeprecationWarning,
328
+ stacklevel=2,
329
+ )
330
+ self._timeout = attr.evolve(self._timeout, total=read_timeout)
331
+ if conn_timeout is not None:
332
+ self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
333
+ warnings.warn(
334
+ "conn_timeout is deprecated, use timeout argument instead",
335
+ DeprecationWarning,
336
+ stacklevel=2,
337
+ )
338
+ else:
339
+ if not isinstance(timeout, ClientTimeout):
340
+ raise ValueError(
341
+ f"timeout parameter cannot be of {type(timeout)} type, "
342
+ "please use 'timeout=ClientTimeout(...)'",
343
+ )
344
+ self._timeout = timeout
345
+ if read_timeout is not sentinel:
346
+ raise ValueError(
347
+ "read_timeout and timeout parameters "
348
+ "conflict, please setup "
349
+ "timeout.read"
350
+ )
351
+ if conn_timeout is not None:
352
+ raise ValueError(
353
+ "conn_timeout and timeout parameters "
354
+ "conflict, please setup "
355
+ "timeout.connect"
356
+ )
357
+
358
+ if connector is None:
359
+ connector = TCPConnector(loop=loop)
360
+
361
+ if connector._loop is not loop:
362
+ raise RuntimeError("Session and connector has to use same event loop")
363
+
364
+ self._loop = loop
365
+
366
+ if loop.get_debug():
367
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
368
+
369
+ if cookie_jar is None:
370
+ cookie_jar = CookieJar(loop=loop)
371
+ self._cookie_jar = cookie_jar
372
+
373
+ if cookies:
374
+ self._cookie_jar.update_cookies(cookies)
375
+
376
+ self._connector = connector
377
+ self._connector_owner = connector_owner
378
+ self._default_auth = auth
379
+ self._version = version
380
+ self._json_serialize = json_serialize
381
+ self._raise_for_status = raise_for_status
382
+ self._auto_decompress = auto_decompress
383
+ self._trust_env = trust_env
384
+ self._requote_redirect_url = requote_redirect_url
385
+ self._read_bufsize = read_bufsize
386
+ self._max_line_size = max_line_size
387
+ self._max_field_size = max_field_size
388
+
389
+ # Convert to list of tuples
390
+ if headers:
391
+ real_headers: CIMultiDict[str] = CIMultiDict(headers)
392
+ else:
393
+ real_headers = CIMultiDict()
394
+ self._default_headers: CIMultiDict[str] = real_headers
395
+ if skip_auto_headers is not None:
396
+ self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
397
+ else:
398
+ self._skip_auto_headers = frozenset()
399
+
400
+ self._request_class = request_class
401
+ self._response_class = response_class
402
+ self._ws_response_class = ws_response_class
403
+
404
+ self._trace_configs = trace_configs or []
405
+ for trace_config in self._trace_configs:
406
+ trace_config.freeze()
407
+
408
+ self._resolve_charset = fallback_charset_resolver
409
+
410
+ self._default_proxy = proxy
411
+ self._default_proxy_auth = proxy_auth
412
+ self._retry_connection: bool = True
413
+
414
+ def __init_subclass__(cls: Type["ClientSession"]) -> None:
415
+ warnings.warn(
416
+ "Inheritance class {} from ClientSession "
417
+ "is discouraged".format(cls.__name__),
418
+ DeprecationWarning,
419
+ stacklevel=2,
420
+ )
421
+
422
+ if DEBUG:
423
+
424
+ def __setattr__(self, name: str, val: Any) -> None:
425
+ if name not in self.ATTRS:
426
+ warnings.warn(
427
+ "Setting custom ClientSession.{} attribute "
428
+ "is discouraged".format(name),
429
+ DeprecationWarning,
430
+ stacklevel=2,
431
+ )
432
+ super().__setattr__(name, val)
433
+
434
+ def __del__(self, _warnings: Any = warnings) -> None:
435
+ if not self.closed:
436
+ kwargs = {"source": self}
437
+ _warnings.warn(
438
+ f"Unclosed client session {self!r}", ResourceWarning, **kwargs
439
+ )
440
+ context = {"client_session": self, "message": "Unclosed client session"}
441
+ if self._source_traceback is not None:
442
+ context["source_traceback"] = self._source_traceback
443
+ self._loop.call_exception_handler(context)
444
+
445
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
446
+
447
+ def request(
448
+ self,
449
+ method: str,
450
+ url: StrOrURL,
451
+ **kwargs: Unpack[_RequestOptions],
452
+ ) -> "_RequestContextManager": ...
453
+
454
+ else:
455
+
456
+ def request(
457
+ self, method: str, url: StrOrURL, **kwargs: Any
458
+ ) -> "_RequestContextManager":
459
+ """Perform HTTP request."""
460
+ return _RequestContextManager(self._request(method, url, **kwargs))
461
+
462
+ def _build_url(self, str_or_url: StrOrURL) -> URL:
463
+ url = URL(str_or_url)
464
+ if self._base_url is None:
465
+ return url
466
+ else:
467
+ assert not url.absolute
468
+ return self._base_url.join(url)
469
+
470
+ async def _request(
471
+ self,
472
+ method: str,
473
+ str_or_url: StrOrURL,
474
+ *,
475
+ params: Query = None,
476
+ data: Any = None,
477
+ json: Any = None,
478
+ cookies: Optional[LooseCookies] = None,
479
+ headers: Optional[LooseHeaders] = None,
480
+ skip_auto_headers: Optional[Iterable[str]] = None,
481
+ auth: Optional[BasicAuth] = None,
482
+ allow_redirects: bool = True,
483
+ max_redirects: int = 10,
484
+ compress: Union[str, bool, None] = None,
485
+ chunked: Optional[bool] = None,
486
+ expect100: bool = False,
487
+ raise_for_status: Union[
488
+ None, bool, Callable[[ClientResponse], Awaitable[None]]
489
+ ] = None,
490
+ read_until_eof: bool = True,
491
+ proxy: Optional[StrOrURL] = None,
492
+ proxy_auth: Optional[BasicAuth] = None,
493
+ timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
494
+ verify_ssl: Optional[bool] = None,
495
+ fingerprint: Optional[bytes] = None,
496
+ ssl_context: Optional[SSLContext] = None,
497
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
498
+ server_hostname: Optional[str] = None,
499
+ proxy_headers: Optional[LooseHeaders] = None,
500
+ trace_request_ctx: Optional[Mapping[str, Any]] = None,
501
+ read_bufsize: Optional[int] = None,
502
+ auto_decompress: Optional[bool] = None,
503
+ max_line_size: Optional[int] = None,
504
+ max_field_size: Optional[int] = None,
505
+ ) -> ClientResponse:
506
+
507
+ # NOTE: timeout clamps existing connect and read timeouts. We cannot
508
+ # set the default to None because we need to detect if the user wants
509
+ # to use the existing timeouts by setting timeout to None.
510
+
511
+ if self.closed:
512
+ raise RuntimeError("Session is closed")
513
+
514
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
515
+
516
+ if data is not None and json is not None:
517
+ raise ValueError(
518
+ "data and json parameters can not be used at the same time"
519
+ )
520
+ elif json is not None:
521
+ data = payload.JsonPayload(json, dumps=self._json_serialize)
522
+
523
+ if not isinstance(chunked, bool) and chunked is not None:
524
+ warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
525
+
526
+ redirects = 0
527
+ history: List[ClientResponse] = []
528
+ version = self._version
529
+ params = params or {}
530
+
531
+ # Merge with default headers and transform to CIMultiDict
532
+ headers = self._prepare_headers(headers)
533
+
534
+ try:
535
+ url = self._build_url(str_or_url)
536
+ except ValueError as e:
537
+ raise InvalidUrlClientError(str_or_url) from e
538
+
539
+ assert self._connector is not None
540
+ if url.scheme not in self._connector.allowed_protocol_schema_set:
541
+ raise NonHttpUrlClientError(url)
542
+
543
+ skip_headers: Optional[Iterable[istr]]
544
+ if skip_auto_headers is not None:
545
+ skip_headers = {
546
+ istr(i) for i in skip_auto_headers
547
+ } | self._skip_auto_headers
548
+ elif self._skip_auto_headers:
549
+ skip_headers = self._skip_auto_headers
550
+ else:
551
+ skip_headers = None
552
+
553
+ if proxy is None:
554
+ proxy = self._default_proxy
555
+ if proxy_auth is None:
556
+ proxy_auth = self._default_proxy_auth
557
+
558
+ if proxy is None:
559
+ proxy_headers = None
560
+ else:
561
+ proxy_headers = self._prepare_headers(proxy_headers)
562
+ try:
563
+ proxy = URL(proxy)
564
+ except ValueError as e:
565
+ raise InvalidURL(proxy) from e
566
+
567
+ if timeout is sentinel:
568
+ real_timeout: ClientTimeout = self._timeout
569
+ else:
570
+ if not isinstance(timeout, ClientTimeout):
571
+ real_timeout = ClientTimeout(total=timeout)
572
+ else:
573
+ real_timeout = timeout
574
+ # timeout is cumulative for all request operations
575
+ # (request, redirects, responses, data consuming)
576
+ tm = TimeoutHandle(
577
+ self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
578
+ )
579
+ handle = tm.start()
580
+
581
+ if read_bufsize is None:
582
+ read_bufsize = self._read_bufsize
583
+
584
+ if auto_decompress is None:
585
+ auto_decompress = self._auto_decompress
586
+
587
+ if max_line_size is None:
588
+ max_line_size = self._max_line_size
589
+
590
+ if max_field_size is None:
591
+ max_field_size = self._max_field_size
592
+
593
+ traces = [
594
+ Trace(
595
+ self,
596
+ trace_config,
597
+ trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
598
+ )
599
+ for trace_config in self._trace_configs
600
+ ]
601
+
602
+ for trace in traces:
603
+ await trace.send_request_start(method, url.update_query(params), headers)
604
+
605
+ timer = tm.timer()
606
+ try:
607
+ with timer:
608
+ # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests
609
+ retry_persistent_connection = (
610
+ self._retry_connection and method in IDEMPOTENT_METHODS
611
+ )
612
+ while True:
613
+ url, auth_from_url = strip_auth_from_url(url)
614
+ if not url.raw_host:
615
+ # NOTE: Bail early, otherwise, causes `InvalidURL` through
616
+ # NOTE: `self._request_class()` below.
617
+ err_exc_cls = (
618
+ InvalidUrlRedirectClientError
619
+ if redirects
620
+ else InvalidUrlClientError
621
+ )
622
+ raise err_exc_cls(url)
623
+ # If `auth` was passed for an already authenticated URL,
624
+ # disallow only if this is the initial URL; this is to avoid issues
625
+ # with sketchy redirects that are not the caller's responsibility
626
+ if not history and (auth and auth_from_url):
627
+ raise ValueError(
628
+ "Cannot combine AUTH argument with "
629
+ "credentials encoded in URL"
630
+ )
631
+
632
+ # Override the auth with the one from the URL only if we
633
+ # have no auth, or if we got an auth from a redirect URL
634
+ if auth is None or (history and auth_from_url is not None):
635
+ auth = auth_from_url
636
+
637
+ if (
638
+ auth is None
639
+ and self._default_auth
640
+ and (
641
+ not self._base_url or self._base_url_origin == url.origin()
642
+ )
643
+ ):
644
+ auth = self._default_auth
645
+ # It would be confusing if we support explicit
646
+ # Authorization header with auth argument
647
+ if (
648
+ headers is not None
649
+ and auth is not None
650
+ and hdrs.AUTHORIZATION in headers
651
+ ):
652
+ raise ValueError(
653
+ "Cannot combine AUTHORIZATION header "
654
+ "with AUTH argument or credentials "
655
+ "encoded in URL"
656
+ )
657
+
658
+ all_cookies = self._cookie_jar.filter_cookies(url)
659
+
660
+ if cookies is not None:
661
+ tmp_cookie_jar = CookieJar(
662
+ quote_cookie=self._cookie_jar.quote_cookie
663
+ )
664
+ tmp_cookie_jar.update_cookies(cookies)
665
+ req_cookies = tmp_cookie_jar.filter_cookies(url)
666
+ if req_cookies:
667
+ all_cookies.load(req_cookies)
668
+
669
+ if proxy is not None:
670
+ proxy = URL(proxy)
671
+ elif self._trust_env:
672
+ with suppress(LookupError):
673
+ proxy, proxy_auth = get_env_proxy_for_url(url)
674
+
675
+ req = self._request_class(
676
+ method,
677
+ url,
678
+ params=params,
679
+ headers=headers,
680
+ skip_auto_headers=skip_headers,
681
+ data=data,
682
+ cookies=all_cookies,
683
+ auth=auth,
684
+ version=version,
685
+ compress=compress,
686
+ chunked=chunked,
687
+ expect100=expect100,
688
+ loop=self._loop,
689
+ response_class=self._response_class,
690
+ proxy=proxy,
691
+ proxy_auth=proxy_auth,
692
+ timer=timer,
693
+ session=self,
694
+ ssl=ssl if ssl is not None else True,
695
+ server_hostname=server_hostname,
696
+ proxy_headers=proxy_headers,
697
+ traces=traces,
698
+ trust_env=self.trust_env,
699
+ )
700
+
701
+ # connection timeout
702
+ try:
703
+ conn = await self._connector.connect(
704
+ req, traces=traces, timeout=real_timeout
705
+ )
706
+ except asyncio.TimeoutError as exc:
707
+ raise ConnectionTimeoutError(
708
+ f"Connection timeout to host {url}"
709
+ ) from exc
710
+
711
+ assert conn.transport is not None
712
+
713
+ assert conn.protocol is not None
714
+ conn.protocol.set_response_params(
715
+ timer=timer,
716
+ skip_payload=method in EMPTY_BODY_METHODS,
717
+ read_until_eof=read_until_eof,
718
+ auto_decompress=auto_decompress,
719
+ read_timeout=real_timeout.sock_read,
720
+ read_bufsize=read_bufsize,
721
+ timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
722
+ max_line_size=max_line_size,
723
+ max_field_size=max_field_size,
724
+ )
725
+
726
+ try:
727
+ try:
728
+ resp = await req.send(conn)
729
+ try:
730
+ await resp.start(conn)
731
+ except BaseException:
732
+ resp.close()
733
+ raise
734
+ except BaseException:
735
+ conn.close()
736
+ raise
737
+ except (ClientOSError, ServerDisconnectedError):
738
+ if retry_persistent_connection:
739
+ retry_persistent_connection = False
740
+ continue
741
+ raise
742
+ except ClientError:
743
+ raise
744
+ except OSError as exc:
745
+ if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
746
+ raise
747
+ raise ClientOSError(*exc.args) from exc
748
+
749
+ if cookies := resp._cookies:
750
+ self._cookie_jar.update_cookies(cookies, resp.url)
751
+
752
+ # redirects
753
+ if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
754
+
755
+ for trace in traces:
756
+ await trace.send_request_redirect(
757
+ method, url.update_query(params), headers, resp
758
+ )
759
+
760
+ redirects += 1
761
+ history.append(resp)
762
+ if max_redirects and redirects >= max_redirects:
763
+ resp.close()
764
+ raise TooManyRedirects(
765
+ history[0].request_info, tuple(history)
766
+ )
767
+
768
+ # For 301 and 302, mimic IE, now changed in RFC
769
+ # https://github.com/kennethreitz/requests/pull/269
770
+ if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
771
+ resp.status in (301, 302) and resp.method == hdrs.METH_POST
772
+ ):
773
+ method = hdrs.METH_GET
774
+ data = None
775
+ if headers.get(hdrs.CONTENT_LENGTH):
776
+ headers.pop(hdrs.CONTENT_LENGTH)
777
+
778
+ r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
779
+ hdrs.URI
780
+ )
781
+ if r_url is None:
782
+ # see github.com/aio-libs/aiohttp/issues/2022
783
+ break
784
+ else:
785
+ # reading from correct redirection
786
+ # response is forbidden
787
+ resp.release()
788
+
789
+ try:
790
+ parsed_redirect_url = URL(
791
+ r_url, encoded=not self._requote_redirect_url
792
+ )
793
+ except ValueError as e:
794
+ raise InvalidUrlRedirectClientError(
795
+ r_url,
796
+ "Server attempted redirecting to a location that does not look like a URL",
797
+ ) from e
798
+
799
+ scheme = parsed_redirect_url.scheme
800
+ if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
801
+ resp.close()
802
+ raise NonHttpUrlRedirectClientError(r_url)
803
+ elif not scheme:
804
+ parsed_redirect_url = url.join(parsed_redirect_url)
805
+
806
+ try:
807
+ redirect_origin = parsed_redirect_url.origin()
808
+ except ValueError as origin_val_err:
809
+ raise InvalidUrlRedirectClientError(
810
+ parsed_redirect_url,
811
+ "Invalid redirect URL origin",
812
+ ) from origin_val_err
813
+
814
+ if url.origin() != redirect_origin:
815
+ auth = None
816
+ headers.pop(hdrs.AUTHORIZATION, None)
817
+
818
+ url = parsed_redirect_url
819
+ params = {}
820
+ resp.release()
821
+ continue
822
+
823
+ break
824
+
825
+ # check response status
826
+ if raise_for_status is None:
827
+ raise_for_status = self._raise_for_status
828
+
829
+ if raise_for_status is None:
830
+ pass
831
+ elif callable(raise_for_status):
832
+ await raise_for_status(resp)
833
+ elif raise_for_status:
834
+ resp.raise_for_status()
835
+
836
+ # register connection
837
+ if handle is not None:
838
+ if resp.connection is not None:
839
+ resp.connection.add_callback(handle.cancel)
840
+ else:
841
+ handle.cancel()
842
+
843
+ resp._history = tuple(history)
844
+
845
+ for trace in traces:
846
+ await trace.send_request_end(
847
+ method, url.update_query(params), headers, resp
848
+ )
849
+ return resp
850
+
851
+ except BaseException as e:
852
+ # cleanup timer
853
+ tm.close()
854
+ if handle:
855
+ handle.cancel()
856
+ handle = None
857
+
858
+ for trace in traces:
859
+ await trace.send_request_exception(
860
+ method, url.update_query(params), headers, e
861
+ )
862
+ raise
863
+
864
+ def ws_connect(
865
+ self,
866
+ url: StrOrURL,
867
+ *,
868
+ method: str = hdrs.METH_GET,
869
+ protocols: Iterable[str] = (),
870
+ timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
871
+ receive_timeout: Optional[float] = None,
872
+ autoclose: bool = True,
873
+ autoping: bool = True,
874
+ heartbeat: Optional[float] = None,
875
+ auth: Optional[BasicAuth] = None,
876
+ origin: Optional[str] = None,
877
+ params: Query = None,
878
+ headers: Optional[LooseHeaders] = None,
879
+ proxy: Optional[StrOrURL] = None,
880
+ proxy_auth: Optional[BasicAuth] = None,
881
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
882
+ verify_ssl: Optional[bool] = None,
883
+ fingerprint: Optional[bytes] = None,
884
+ ssl_context: Optional[SSLContext] = None,
885
+ server_hostname: Optional[str] = None,
886
+ proxy_headers: Optional[LooseHeaders] = None,
887
+ compress: int = 0,
888
+ max_msg_size: int = 4 * 1024 * 1024,
889
+ ) -> "_WSRequestContextManager":
890
+ """Initiate websocket connection."""
891
+ return _WSRequestContextManager(
892
+ self._ws_connect(
893
+ url,
894
+ method=method,
895
+ protocols=protocols,
896
+ timeout=timeout,
897
+ receive_timeout=receive_timeout,
898
+ autoclose=autoclose,
899
+ autoping=autoping,
900
+ heartbeat=heartbeat,
901
+ auth=auth,
902
+ origin=origin,
903
+ params=params,
904
+ headers=headers,
905
+ proxy=proxy,
906
+ proxy_auth=proxy_auth,
907
+ ssl=ssl,
908
+ verify_ssl=verify_ssl,
909
+ fingerprint=fingerprint,
910
+ ssl_context=ssl_context,
911
+ server_hostname=server_hostname,
912
+ proxy_headers=proxy_headers,
913
+ compress=compress,
914
+ max_msg_size=max_msg_size,
915
+ )
916
+ )
917
+
918
+ async def _ws_connect(
919
+ self,
920
+ url: StrOrURL,
921
+ *,
922
+ method: str = hdrs.METH_GET,
923
+ protocols: Iterable[str] = (),
924
+ timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel,
925
+ receive_timeout: Optional[float] = None,
926
+ autoclose: bool = True,
927
+ autoping: bool = True,
928
+ heartbeat: Optional[float] = None,
929
+ auth: Optional[BasicAuth] = None,
930
+ origin: Optional[str] = None,
931
+ params: Query = None,
932
+ headers: Optional[LooseHeaders] = None,
933
+ proxy: Optional[StrOrURL] = None,
934
+ proxy_auth: Optional[BasicAuth] = None,
935
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
936
+ verify_ssl: Optional[bool] = None,
937
+ fingerprint: Optional[bytes] = None,
938
+ ssl_context: Optional[SSLContext] = None,
939
+ server_hostname: Optional[str] = None,
940
+ proxy_headers: Optional[LooseHeaders] = None,
941
+ compress: int = 0,
942
+ max_msg_size: int = 4 * 1024 * 1024,
943
+ ) -> ClientWebSocketResponse:
944
+ if timeout is not sentinel:
945
+ if isinstance(timeout, ClientWSTimeout):
946
+ ws_timeout = timeout
947
+ else:
948
+ warnings.warn(
949
+ "parameter 'timeout' of type 'float' "
950
+ "is deprecated, please use "
951
+ "'timeout=ClientWSTimeout(ws_close=...)'",
952
+ DeprecationWarning,
953
+ stacklevel=2,
954
+ )
955
+ ws_timeout = ClientWSTimeout(ws_close=timeout)
956
+ else:
957
+ ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT
958
+ if receive_timeout is not None:
959
+ warnings.warn(
960
+ "float parameter 'receive_timeout' "
961
+ "is deprecated, please use parameter "
962
+ "'timeout=ClientWSTimeout(ws_receive=...)'",
963
+ DeprecationWarning,
964
+ stacklevel=2,
965
+ )
966
+ ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout)
967
+
968
+ if headers is None:
969
+ real_headers: CIMultiDict[str] = CIMultiDict()
970
+ else:
971
+ real_headers = CIMultiDict(headers)
972
+
973
+ default_headers = {
974
+ hdrs.UPGRADE: "websocket",
975
+ hdrs.CONNECTION: "Upgrade",
976
+ hdrs.SEC_WEBSOCKET_VERSION: "13",
977
+ }
978
+
979
+ for key, value in default_headers.items():
980
+ real_headers.setdefault(key, value)
981
+
982
+ sec_key = base64.b64encode(os.urandom(16))
983
+ real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
984
+
985
+ if protocols:
986
+ real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
987
+ if origin is not None:
988
+ real_headers[hdrs.ORIGIN] = origin
989
+ if compress:
990
+ extstr = ws_ext_gen(compress=compress)
991
+ real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
992
+
993
+ # For the sake of backward compatibility, if user passes in None, convert it to True
994
+ if ssl is None:
995
+ warnings.warn(
996
+ "ssl=None is deprecated, please use ssl=True",
997
+ DeprecationWarning,
998
+ stacklevel=2,
999
+ )
1000
+ ssl = True
1001
+ ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
1002
+
1003
+ # send request
1004
+ resp = await self.request(
1005
+ method,
1006
+ url,
1007
+ params=params,
1008
+ headers=real_headers,
1009
+ read_until_eof=False,
1010
+ auth=auth,
1011
+ proxy=proxy,
1012
+ proxy_auth=proxy_auth,
1013
+ ssl=ssl,
1014
+ server_hostname=server_hostname,
1015
+ proxy_headers=proxy_headers,
1016
+ )
1017
+
1018
+ try:
1019
+ # check handshake
1020
+ if resp.status != 101:
1021
+ raise WSServerHandshakeError(
1022
+ resp.request_info,
1023
+ resp.history,
1024
+ message="Invalid response status",
1025
+ status=resp.status,
1026
+ headers=resp.headers,
1027
+ )
1028
+
1029
+ if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
1030
+ raise WSServerHandshakeError(
1031
+ resp.request_info,
1032
+ resp.history,
1033
+ message="Invalid upgrade header",
1034
+ status=resp.status,
1035
+ headers=resp.headers,
1036
+ )
1037
+
1038
+ if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
1039
+ raise WSServerHandshakeError(
1040
+ resp.request_info,
1041
+ resp.history,
1042
+ message="Invalid connection header",
1043
+ status=resp.status,
1044
+ headers=resp.headers,
1045
+ )
1046
+
1047
+ # key calculation
1048
+ r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
1049
+ match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
1050
+ if r_key != match:
1051
+ raise WSServerHandshakeError(
1052
+ resp.request_info,
1053
+ resp.history,
1054
+ message="Invalid challenge response",
1055
+ status=resp.status,
1056
+ headers=resp.headers,
1057
+ )
1058
+
1059
+ # websocket protocol
1060
+ protocol = None
1061
+ if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
1062
+ resp_protocols = [
1063
+ proto.strip()
1064
+ for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
1065
+ ]
1066
+
1067
+ for proto in resp_protocols:
1068
+ if proto in protocols:
1069
+ protocol = proto
1070
+ break
1071
+
1072
+ # websocket compress
1073
+ notakeover = False
1074
+ if compress:
1075
+ compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
1076
+ if compress_hdrs:
1077
+ try:
1078
+ compress, notakeover = ws_ext_parse(compress_hdrs)
1079
+ except WSHandshakeError as exc:
1080
+ raise WSServerHandshakeError(
1081
+ resp.request_info,
1082
+ resp.history,
1083
+ message=exc.args[0],
1084
+ status=resp.status,
1085
+ headers=resp.headers,
1086
+ ) from exc
1087
+ else:
1088
+ compress = 0
1089
+ notakeover = False
1090
+
1091
+ conn = resp.connection
1092
+ assert conn is not None
1093
+ conn_proto = conn.protocol
1094
+ assert conn_proto is not None
1095
+
1096
+ # For WS connection the read_timeout must be either receive_timeout or greater
1097
+ # None == no timeout, i.e. infinite timeout, so None is the max timeout possible
1098
+ if ws_timeout.ws_receive is None:
1099
+ # Reset regardless
1100
+ conn_proto.read_timeout = None
1101
+ elif conn_proto.read_timeout is not None:
1102
+ conn_proto.read_timeout = max(
1103
+ ws_timeout.ws_receive, conn_proto.read_timeout
1104
+ )
1105
+
1106
+ transport = conn.transport
1107
+ assert transport is not None
1108
+ reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop)
1109
+ conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
1110
+ writer = WebSocketWriter(
1111
+ conn_proto,
1112
+ transport,
1113
+ use_mask=True,
1114
+ compress=compress,
1115
+ notakeover=notakeover,
1116
+ )
1117
+ except BaseException:
1118
+ resp.close()
1119
+ raise
1120
+ else:
1121
+ return self._ws_response_class(
1122
+ reader,
1123
+ writer,
1124
+ protocol,
1125
+ resp,
1126
+ ws_timeout,
1127
+ autoclose,
1128
+ autoping,
1129
+ self._loop,
1130
+ heartbeat=heartbeat,
1131
+ compress=compress,
1132
+ client_notakeover=notakeover,
1133
+ )
1134
+
1135
+ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
1136
+ """Add default headers and transform it to CIMultiDict"""
1137
+ # Convert headers to MultiDict
1138
+ result = CIMultiDict(self._default_headers)
1139
+ if headers:
1140
+ if not isinstance(headers, (MultiDictProxy, MultiDict)):
1141
+ headers = CIMultiDict(headers)
1142
+ added_names: Set[str] = set()
1143
+ for key, value in headers.items():
1144
+ if key in added_names:
1145
+ result.add(key, value)
1146
+ else:
1147
+ result[key] = value
1148
+ added_names.add(key)
1149
+ return result
1150
+
1151
+ if sys.version_info >= (3, 11) and TYPE_CHECKING:
1152
+
1153
+ def get(
1154
+ self,
1155
+ url: StrOrURL,
1156
+ **kwargs: Unpack[_RequestOptions],
1157
+ ) -> "_RequestContextManager": ...
1158
+
1159
+ def options(
1160
+ self,
1161
+ url: StrOrURL,
1162
+ **kwargs: Unpack[_RequestOptions],
1163
+ ) -> "_RequestContextManager": ...
1164
+
1165
+ def head(
1166
+ self,
1167
+ url: StrOrURL,
1168
+ **kwargs: Unpack[_RequestOptions],
1169
+ ) -> "_RequestContextManager": ...
1170
+
1171
+ def post(
1172
+ self,
1173
+ url: StrOrURL,
1174
+ **kwargs: Unpack[_RequestOptions],
1175
+ ) -> "_RequestContextManager": ...
1176
+
1177
+ def put(
1178
+ self,
1179
+ url: StrOrURL,
1180
+ **kwargs: Unpack[_RequestOptions],
1181
+ ) -> "_RequestContextManager": ...
1182
+
1183
+ def patch(
1184
+ self,
1185
+ url: StrOrURL,
1186
+ **kwargs: Unpack[_RequestOptions],
1187
+ ) -> "_RequestContextManager": ...
1188
+
1189
+ def delete(
1190
+ self,
1191
+ url: StrOrURL,
1192
+ **kwargs: Unpack[_RequestOptions],
1193
+ ) -> "_RequestContextManager": ...
1194
+
1195
+ else:
1196
+
1197
+ def get(
1198
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
1199
+ ) -> "_RequestContextManager":
1200
+ """Perform HTTP GET request."""
1201
+ return _RequestContextManager(
1202
+ self._request(
1203
+ hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs
1204
+ )
1205
+ )
1206
+
1207
+ def options(
1208
+ self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
1209
+ ) -> "_RequestContextManager":
1210
+ """Perform HTTP OPTIONS request."""
1211
+ return _RequestContextManager(
1212
+ self._request(
1213
+ hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
1214
+ )
1215
+ )
1216
+
1217
+ def head(
1218
+ self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
1219
+ ) -> "_RequestContextManager":
1220
+ """Perform HTTP HEAD request."""
1221
+ return _RequestContextManager(
1222
+ self._request(
1223
+ hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
1224
+ )
1225
+ )
1226
+
1227
+ def post(
1228
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1229
+ ) -> "_RequestContextManager":
1230
+ """Perform HTTP POST request."""
1231
+ return _RequestContextManager(
1232
+ self._request(hdrs.METH_POST, url, data=data, **kwargs)
1233
+ )
1234
+
1235
+ def put(
1236
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1237
+ ) -> "_RequestContextManager":
1238
+ """Perform HTTP PUT request."""
1239
+ return _RequestContextManager(
1240
+ self._request(hdrs.METH_PUT, url, data=data, **kwargs)
1241
+ )
1242
+
1243
+ def patch(
1244
+ self, url: StrOrURL, *, data: Any = None, **kwargs: Any
1245
+ ) -> "_RequestContextManager":
1246
+ """Perform HTTP PATCH request."""
1247
+ return _RequestContextManager(
1248
+ self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
1249
+ )
1250
+
1251
+ def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
1252
+ """Perform HTTP DELETE request."""
1253
+ return _RequestContextManager(
1254
+ self._request(hdrs.METH_DELETE, url, **kwargs)
1255
+ )
1256
+
1257
+ async def close(self) -> None:
1258
+ """Close underlying connector.
1259
+
1260
+ Release all acquired resources.
1261
+ """
1262
+ if not self.closed:
1263
+ if self._connector is not None and self._connector_owner:
1264
+ await self._connector.close()
1265
+ self._connector = None
1266
+
1267
+ @property
1268
+ def closed(self) -> bool:
1269
+ """Is client session closed.
1270
+
1271
+ A readonly property.
1272
+ """
1273
+ return self._connector is None or self._connector.closed
1274
+
1275
+ @property
1276
+ def connector(self) -> Optional[BaseConnector]:
1277
+ """Connector instance used for the session."""
1278
+ return self._connector
1279
+
1280
+ @property
1281
+ def cookie_jar(self) -> AbstractCookieJar:
1282
+ """The session cookies."""
1283
+ return self._cookie_jar
1284
+
1285
+ @property
1286
+ def version(self) -> Tuple[int, int]:
1287
+ """The session HTTP protocol version."""
1288
+ return self._version
1289
+
1290
+ @property
1291
+ def requote_redirect_url(self) -> bool:
1292
+ """Do URL requoting on redirection handling."""
1293
+ return self._requote_redirect_url
1294
+
1295
+ @requote_redirect_url.setter
1296
+ def requote_redirect_url(self, val: bool) -> None:
1297
+ """Do URL requoting on redirection handling."""
1298
+ warnings.warn(
1299
+ "session.requote_redirect_url modification is deprecated #2778",
1300
+ DeprecationWarning,
1301
+ stacklevel=2,
1302
+ )
1303
+ self._requote_redirect_url = val
1304
+
1305
+ @property
1306
+ def loop(self) -> asyncio.AbstractEventLoop:
1307
+ """Session's loop."""
1308
+ warnings.warn(
1309
+ "client.loop property is deprecated", DeprecationWarning, stacklevel=2
1310
+ )
1311
+ return self._loop
1312
+
1313
+ @property
1314
+ def timeout(self) -> ClientTimeout:
1315
+ """Timeout for the session."""
1316
+ return self._timeout
1317
+
1318
+ @property
1319
+ def headers(self) -> "CIMultiDict[str]":
1320
+ """The default headers of the client session."""
1321
+ return self._default_headers
1322
+
1323
+ @property
1324
+ def skip_auto_headers(self) -> FrozenSet[istr]:
1325
+ """Headers for which autogeneration should be skipped"""
1326
+ return self._skip_auto_headers
1327
+
1328
+ @property
1329
+ def auth(self) -> Optional[BasicAuth]:
1330
+ """An object that represents HTTP Basic Authorization"""
1331
+ return self._default_auth
1332
+
1333
+ @property
1334
+ def json_serialize(self) -> JSONEncoder:
1335
+ """Json serializer callable"""
1336
+ return self._json_serialize
1337
+
1338
+ @property
1339
+ def connector_owner(self) -> bool:
1340
+ """Should connector be closed on session closing"""
1341
+ return self._connector_owner
1342
+
1343
+ @property
1344
+ def raise_for_status(
1345
+ self,
1346
+ ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
1347
+ """Should `ClientResponse.raise_for_status()` be called for each response."""
1348
+ return self._raise_for_status
1349
+
1350
+ @property
1351
+ def auto_decompress(self) -> bool:
1352
+ """Should the body response be automatically decompressed."""
1353
+ return self._auto_decompress
1354
+
1355
+ @property
1356
+ def trust_env(self) -> bool:
1357
+ """
1358
+ Should proxies information from environment or netrc be trusted.
1359
+
1360
+ Information is from HTTP_PROXY / HTTPS_PROXY environment variables
1361
+ or ~/.netrc file if present.
1362
+ """
1363
+ return self._trust_env
1364
+
1365
+ @property
1366
+ def trace_configs(self) -> List[TraceConfig]:
1367
+ """A list of TraceConfig instances used for client tracing"""
1368
+ return self._trace_configs
1369
+
1370
+ def detach(self) -> None:
1371
+ """Detach connector from session without closing the former.
1372
+
1373
+ Session is switched to closed state anyway.
1374
+ """
1375
+ self._connector = None
1376
+
1377
+ def __enter__(self) -> None:
1378
+ raise TypeError("Use async with instead")
1379
+
1380
+ def __exit__(
1381
+ self,
1382
+ exc_type: Optional[Type[BaseException]],
1383
+ exc_val: Optional[BaseException],
1384
+ exc_tb: Optional[TracebackType],
1385
+ ) -> None:
1386
+ # __exit__ should exist in pair with __enter__ but never executed
1387
+ pass # pragma: no cover
1388
+
1389
+ async def __aenter__(self) -> "ClientSession":
1390
+ return self
1391
+
1392
+ async def __aexit__(
1393
+ self,
1394
+ exc_type: Optional[Type[BaseException]],
1395
+ exc_val: Optional[BaseException],
1396
+ exc_tb: Optional[TracebackType],
1397
+ ) -> None:
1398
+ await self.close()
1399
+
1400
+
1401
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
    """Awaitable wrapper around a request coroutine.

    Supports two usage patterns with identical semantics for the wrapped
    coroutine: ``resp = await cm`` drives the coroutine directly, while
    ``async with cm as resp`` additionally enters and exits the resulting
    response object as an async context manager.
    """

    __slots__ = ("_coro", "_resp")

    def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
        self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro

    def __await__(self) -> Generator[Any, None, _RetType]:
        return self._coro.__await__()

    def __iter__(self) -> Generator[Any, None, _RetType]:
        return self.__await__()

    # Coroutine protocol methods delegate straight to the wrapped coroutine.
    def send(self, arg: None) -> "asyncio.Future[Any]":
        return self._coro.send(arg)

    def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
        return self._coro.throw(*args, **kwargs)

    def close(self) -> None:
        return self._coro.close()

    async def __aenter__(self) -> _RetType:
        self._resp: _RetType = await self._coro
        return await self._resp.__aenter__()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # Exiting releases/closes the underlying response.
        await self._resp.__aexit__(exc_type, exc, tb)
1435
+
1436
+
1437
# Concrete specializations of the generic request context manager:
# one yielding a plain HTTP response, one yielding a websocket response.
_RequestContextManager = _BaseRequestContextManager[ClientResponse]
_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse]
1439
+
1440
+
1441
class _SessionRequestContextManager:
    """Async context manager pairing a request coroutine with a session.

    Used by the module-level ``request()`` helper, which creates a
    throwaway :class:`ClientSession` per call: the session is closed both
    when starting the request fails and when the ``async with`` block
    exits normally.
    """

    __slots__ = ("_coro", "_resp", "_session")

    def __init__(
        self,
        coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
        session: ClientSession,
    ) -> None:
        self._coro = coro
        self._resp: Optional[ClientResponse] = None
        self._session = session

    async def __aenter__(self) -> ClientResponse:
        try:
            self._resp = await self._coro
        except BaseException:
            # No response was produced; still release the session (and its
            # owned connector) before propagating the failure.
            await self._session.close()
            raise
        return self._resp

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        self._resp.close()
        await self._session.close()
1472
+
1473
+
1474
if sys.version_info >= (3, 11) and TYPE_CHECKING:

    def request(
        method: str,
        url: StrOrURL,
        *,
        version: HttpVersion = http.HttpVersion11,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Unpack[_RequestOptions],
    ) -> _SessionRequestContextManager: ...

else:

    def request(
        method: str,
        url: StrOrURL,
        *,
        version: HttpVersion = http.HttpVersion11,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> _SessionRequestContextManager:
        """Constructs and sends a request.

        Returns response object.
        method - HTTP method
        url - request url
        params - (optional) Dictionary or bytes to be sent in the query
          string of the new request
        data - (optional) Dictionary, bytes, or file-like object to
          send in the body of the request
        json - (optional) Any json compatible python object
        headers - (optional) Dictionary of HTTP Headers to send with
          the request
        cookies - (optional) Dict object to send with the request
        auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
        auth - aiohttp.helpers.BasicAuth
        allow_redirects - (optional) If set to False, do not follow
          redirects
        version - Request HTTP version.
        compress - Set to True if request has to be compressed
          with deflate encoding.
        chunked - Set to chunk size for chunked transfer encoding.
        expect100 - Expect 100-continue response from server.
        connector - BaseConnector sub-class instance to support
          connection pooling.
        read_until_eof - Read response until eof if response
          does not have Content-Length header.
        loop - Optional event loop.
        timeout - Optional ClientTimeout settings structure, 5min
          total timeout by default.
        Usage::
          >>> import aiohttp
          >>> async with aiohttp.request('GET', 'http://python.org/') as resp:
          ...    print(resp)
          ...    data = await resp.read()
          <ClientResponse(https://www.python.org/) [200 OK]>
        """
        # If the caller did not supply a connector, create a disposable one
        # and make the temporary session responsible for closing it.
        connector_owner = connector is None
        if connector is None:
            connector = TCPConnector(loop=loop, force_close=True)

        session = ClientSession(
            loop=loop,
            cookies=kwargs.pop("cookies", None),
            version=version,
            timeout=kwargs.pop("timeout", sentinel),
            connector=connector,
            connector_owner=connector_owner,
        )

        # The context manager owns the session and closes it on exit or on
        # request failure (see _SessionRequestContextManager).
        return _SessionRequestContextManager(
            session._request(method, url, **kwargs),
            session,
        )