Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- parrot/lib/python3.10/site-packages/aiohttp/__init__.py +258 -0
- parrot/lib/python3.10/site-packages/aiohttp/_headers.pxi +83 -0
- parrot/lib/python3.10/site-packages/aiohttp/_helpers.pyi +6 -0
- parrot/lib/python3.10/site-packages/aiohttp/_helpers.pyx +35 -0
- parrot/lib/python3.10/site-packages/aiohttp/_http_writer.pyx +163 -0
- parrot/lib/python3.10/site-packages/aiohttp/_websocket.pyx +56 -0
- parrot/lib/python3.10/site-packages/aiohttp/abc.py +234 -0
- parrot/lib/python3.10/site-packages/aiohttp/base_protocol.py +96 -0
- parrot/lib/python3.10/site-packages/aiohttp/client.py +1506 -0
- parrot/lib/python3.10/site-packages/aiohttp/client_exceptions.py +403 -0
- parrot/lib/python3.10/site-packages/aiohttp/client_proto.py +310 -0
- parrot/lib/python3.10/site-packages/aiohttp/cookiejar.py +479 -0
- parrot/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
- parrot/lib/python3.10/site-packages/aiohttp/helpers.py +1010 -0
- parrot/lib/python3.10/site-packages/aiohttp/http.py +72 -0
- parrot/lib/python3.10/site-packages/aiohttp/http_exceptions.py +105 -0
- parrot/lib/python3.10/site-packages/aiohttp/http_parser.py +1038 -0
- parrot/lib/python3.10/site-packages/aiohttp/http_websocket.py +761 -0
- parrot/lib/python3.10/site-packages/aiohttp/locks.py +41 -0
- parrot/lib/python3.10/site-packages/aiohttp/log.py +8 -0
- parrot/lib/python3.10/site-packages/aiohttp/multipart.py +1073 -0
- parrot/lib/python3.10/site-packages/aiohttp/payload.py +498 -0
- parrot/lib/python3.10/site-packages/aiohttp/py.typed +1 -0
- parrot/lib/python3.10/site-packages/aiohttp/pytest_plugin.py +413 -0
- parrot/lib/python3.10/site-packages/aiohttp/typedefs.py +80 -0
- parrot/lib/python3.10/site-packages/aiohttp/web.py +595 -0
- parrot/lib/python3.10/site-packages/aiohttp/web_fileresponse.py +364 -0
- parrot/lib/python3.10/site-packages/aiohttp/web_middlewares.py +121 -0
- parrot/lib/python3.10/site-packages/aiohttp/web_protocol.py +736 -0
- parrot/lib/python3.10/site-packages/aiohttp/web_response.py +820 -0
- parrot/lib/python3.10/site-packages/aiohttp/worker.py +247 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/__pycache__/colon_fence.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/__pycache__/substitution.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/LICENSE +24 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/__init__.py +1 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/__pycache__/index.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/index.py +172 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/port.yaml +4 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/anchors/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/anchors/__pycache__/index.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/anchors/index.py +129 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/attrs/parse.py +265 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/container/README.md +95 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/container/__pycache__/__init__.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/container/__pycache__/index.cpython-310.pyc +0 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/container/index.py +174 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/container/port.yaml +5 -0
- parrot/lib/python3.10/site-packages/mdit_py_plugins/deflist/LICENSE +22 -0
parrot/lib/python3.10/site-packages/aiohttp/__init__.py
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__version__ = "3.10.6"
|
| 2 |
+
|
| 3 |
+
from typing import TYPE_CHECKING, Tuple
|
| 4 |
+
|
| 5 |
+
from . import hdrs as hdrs
|
| 6 |
+
from .client import (
|
| 7 |
+
BaseConnector,
|
| 8 |
+
ClientConnectionError,
|
| 9 |
+
ClientConnectionResetError,
|
| 10 |
+
ClientConnectorCertificateError,
|
| 11 |
+
ClientConnectorError,
|
| 12 |
+
ClientConnectorSSLError,
|
| 13 |
+
ClientError,
|
| 14 |
+
ClientHttpProxyError,
|
| 15 |
+
ClientOSError,
|
| 16 |
+
ClientPayloadError,
|
| 17 |
+
ClientProxyConnectionError,
|
| 18 |
+
ClientRequest,
|
| 19 |
+
ClientResponse,
|
| 20 |
+
ClientResponseError,
|
| 21 |
+
ClientSession,
|
| 22 |
+
ClientSSLError,
|
| 23 |
+
ClientTimeout,
|
| 24 |
+
ClientWebSocketResponse,
|
| 25 |
+
ConnectionTimeoutError,
|
| 26 |
+
ContentTypeError,
|
| 27 |
+
Fingerprint,
|
| 28 |
+
InvalidURL,
|
| 29 |
+
InvalidUrlClientError,
|
| 30 |
+
InvalidUrlRedirectClientError,
|
| 31 |
+
NamedPipeConnector,
|
| 32 |
+
NonHttpUrlClientError,
|
| 33 |
+
NonHttpUrlRedirectClientError,
|
| 34 |
+
RedirectClientError,
|
| 35 |
+
RequestInfo,
|
| 36 |
+
ServerConnectionError,
|
| 37 |
+
ServerDisconnectedError,
|
| 38 |
+
ServerFingerprintMismatch,
|
| 39 |
+
ServerTimeoutError,
|
| 40 |
+
SocketTimeoutError,
|
| 41 |
+
TCPConnector,
|
| 42 |
+
TooManyRedirects,
|
| 43 |
+
UnixConnector,
|
| 44 |
+
WSServerHandshakeError,
|
| 45 |
+
request,
|
| 46 |
+
)
|
| 47 |
+
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
| 48 |
+
from .formdata import FormData as FormData
|
| 49 |
+
from .helpers import BasicAuth, ChainMapProxy, ETag
|
| 50 |
+
from .http import (
|
| 51 |
+
HttpVersion as HttpVersion,
|
| 52 |
+
HttpVersion10 as HttpVersion10,
|
| 53 |
+
HttpVersion11 as HttpVersion11,
|
| 54 |
+
WebSocketError as WebSocketError,
|
| 55 |
+
WSCloseCode as WSCloseCode,
|
| 56 |
+
WSMessage as WSMessage,
|
| 57 |
+
WSMsgType as WSMsgType,
|
| 58 |
+
)
|
| 59 |
+
from .multipart import (
|
| 60 |
+
BadContentDispositionHeader as BadContentDispositionHeader,
|
| 61 |
+
BadContentDispositionParam as BadContentDispositionParam,
|
| 62 |
+
BodyPartReader as BodyPartReader,
|
| 63 |
+
MultipartReader as MultipartReader,
|
| 64 |
+
MultipartWriter as MultipartWriter,
|
| 65 |
+
content_disposition_filename as content_disposition_filename,
|
| 66 |
+
parse_content_disposition as parse_content_disposition,
|
| 67 |
+
)
|
| 68 |
+
from .payload import (
|
| 69 |
+
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
| 70 |
+
AsyncIterablePayload as AsyncIterablePayload,
|
| 71 |
+
BufferedReaderPayload as BufferedReaderPayload,
|
| 72 |
+
BytesIOPayload as BytesIOPayload,
|
| 73 |
+
BytesPayload as BytesPayload,
|
| 74 |
+
IOBasePayload as IOBasePayload,
|
| 75 |
+
JsonPayload as JsonPayload,
|
| 76 |
+
Payload as Payload,
|
| 77 |
+
StringIOPayload as StringIOPayload,
|
| 78 |
+
StringPayload as StringPayload,
|
| 79 |
+
TextIOPayload as TextIOPayload,
|
| 80 |
+
get_payload as get_payload,
|
| 81 |
+
payload_type as payload_type,
|
| 82 |
+
)
|
| 83 |
+
from .payload_streamer import streamer as streamer
|
| 84 |
+
from .resolver import (
|
| 85 |
+
AsyncResolver as AsyncResolver,
|
| 86 |
+
DefaultResolver as DefaultResolver,
|
| 87 |
+
ThreadedResolver as ThreadedResolver,
|
| 88 |
+
)
|
| 89 |
+
from .streams import (
|
| 90 |
+
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
| 91 |
+
DataQueue as DataQueue,
|
| 92 |
+
EofStream as EofStream,
|
| 93 |
+
FlowControlDataQueue as FlowControlDataQueue,
|
| 94 |
+
StreamReader as StreamReader,
|
| 95 |
+
)
|
| 96 |
+
from .tracing import (
|
| 97 |
+
TraceConfig as TraceConfig,
|
| 98 |
+
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
| 99 |
+
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
| 100 |
+
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
| 101 |
+
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
| 102 |
+
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
| 103 |
+
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
| 104 |
+
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
| 105 |
+
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
| 106 |
+
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
| 107 |
+
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
| 108 |
+
TraceRequestEndParams as TraceRequestEndParams,
|
| 109 |
+
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
| 110 |
+
TraceRequestHeadersSentParams as TraceRequestHeadersSentParams,
|
| 111 |
+
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
| 112 |
+
TraceRequestStartParams as TraceRequestStartParams,
|
| 113 |
+
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
| 114 |
+
)
|
| 115 |
+
|
| 116 |
+
if TYPE_CHECKING:
|
| 117 |
+
# At runtime these are lazy-loaded at the bottom of the file.
|
| 118 |
+
from .worker import (
|
| 119 |
+
GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
|
| 120 |
+
GunicornWebWorker as GunicornWebWorker,
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
__all__: Tuple[str, ...] = (
|
| 124 |
+
"hdrs",
|
| 125 |
+
# client
|
| 126 |
+
"BaseConnector",
|
| 127 |
+
"ClientConnectionError",
|
| 128 |
+
"ClientConnectionResetError",
|
| 129 |
+
"ClientConnectorCertificateError",
|
| 130 |
+
"ClientConnectorError",
|
| 131 |
+
"ClientConnectorSSLError",
|
| 132 |
+
"ClientError",
|
| 133 |
+
"ClientHttpProxyError",
|
| 134 |
+
"ClientOSError",
|
| 135 |
+
"ClientPayloadError",
|
| 136 |
+
"ClientProxyConnectionError",
|
| 137 |
+
"ClientResponse",
|
| 138 |
+
"ClientRequest",
|
| 139 |
+
"ClientResponseError",
|
| 140 |
+
"ClientSSLError",
|
| 141 |
+
"ClientSession",
|
| 142 |
+
"ClientTimeout",
|
| 143 |
+
"ClientWebSocketResponse",
|
| 144 |
+
"ConnectionTimeoutError",
|
| 145 |
+
"ContentTypeError",
|
| 146 |
+
"Fingerprint",
|
| 147 |
+
"InvalidURL",
|
| 148 |
+
"InvalidUrlClientError",
|
| 149 |
+
"InvalidUrlRedirectClientError",
|
| 150 |
+
"NonHttpUrlClientError",
|
| 151 |
+
"NonHttpUrlRedirectClientError",
|
| 152 |
+
"RedirectClientError",
|
| 153 |
+
"RequestInfo",
|
| 154 |
+
"ServerConnectionError",
|
| 155 |
+
"ServerDisconnectedError",
|
| 156 |
+
"ServerFingerprintMismatch",
|
| 157 |
+
"ServerTimeoutError",
|
| 158 |
+
"SocketTimeoutError",
|
| 159 |
+
"TCPConnector",
|
| 160 |
+
"TooManyRedirects",
|
| 161 |
+
"UnixConnector",
|
| 162 |
+
"NamedPipeConnector",
|
| 163 |
+
"WSServerHandshakeError",
|
| 164 |
+
"request",
|
| 165 |
+
# cookiejar
|
| 166 |
+
"CookieJar",
|
| 167 |
+
"DummyCookieJar",
|
| 168 |
+
# formdata
|
| 169 |
+
"FormData",
|
| 170 |
+
# helpers
|
| 171 |
+
"BasicAuth",
|
| 172 |
+
"ChainMapProxy",
|
| 173 |
+
"ETag",
|
| 174 |
+
# http
|
| 175 |
+
"HttpVersion",
|
| 176 |
+
"HttpVersion10",
|
| 177 |
+
"HttpVersion11",
|
| 178 |
+
"WSMsgType",
|
| 179 |
+
"WSCloseCode",
|
| 180 |
+
"WSMessage",
|
| 181 |
+
"WebSocketError",
|
| 182 |
+
# multipart
|
| 183 |
+
"BadContentDispositionHeader",
|
| 184 |
+
"BadContentDispositionParam",
|
| 185 |
+
"BodyPartReader",
|
| 186 |
+
"MultipartReader",
|
| 187 |
+
"MultipartWriter",
|
| 188 |
+
"content_disposition_filename",
|
| 189 |
+
"parse_content_disposition",
|
| 190 |
+
# payload
|
| 191 |
+
"AsyncIterablePayload",
|
| 192 |
+
"BufferedReaderPayload",
|
| 193 |
+
"BytesIOPayload",
|
| 194 |
+
"BytesPayload",
|
| 195 |
+
"IOBasePayload",
|
| 196 |
+
"JsonPayload",
|
| 197 |
+
"PAYLOAD_REGISTRY",
|
| 198 |
+
"Payload",
|
| 199 |
+
"StringIOPayload",
|
| 200 |
+
"StringPayload",
|
| 201 |
+
"TextIOPayload",
|
| 202 |
+
"get_payload",
|
| 203 |
+
"payload_type",
|
| 204 |
+
# payload_streamer
|
| 205 |
+
"streamer",
|
| 206 |
+
# resolver
|
| 207 |
+
"AsyncResolver",
|
| 208 |
+
"DefaultResolver",
|
| 209 |
+
"ThreadedResolver",
|
| 210 |
+
# streams
|
| 211 |
+
"DataQueue",
|
| 212 |
+
"EMPTY_PAYLOAD",
|
| 213 |
+
"EofStream",
|
| 214 |
+
"FlowControlDataQueue",
|
| 215 |
+
"StreamReader",
|
| 216 |
+
# tracing
|
| 217 |
+
"TraceConfig",
|
| 218 |
+
"TraceConnectionCreateEndParams",
|
| 219 |
+
"TraceConnectionCreateStartParams",
|
| 220 |
+
"TraceConnectionQueuedEndParams",
|
| 221 |
+
"TraceConnectionQueuedStartParams",
|
| 222 |
+
"TraceConnectionReuseconnParams",
|
| 223 |
+
"TraceDnsCacheHitParams",
|
| 224 |
+
"TraceDnsCacheMissParams",
|
| 225 |
+
"TraceDnsResolveHostEndParams",
|
| 226 |
+
"TraceDnsResolveHostStartParams",
|
| 227 |
+
"TraceRequestChunkSentParams",
|
| 228 |
+
"TraceRequestEndParams",
|
| 229 |
+
"TraceRequestExceptionParams",
|
| 230 |
+
"TraceRequestHeadersSentParams",
|
| 231 |
+
"TraceRequestRedirectParams",
|
| 232 |
+
"TraceRequestStartParams",
|
| 233 |
+
"TraceResponseChunkReceivedParams",
|
| 234 |
+
# workers (imported lazily with __getattr__)
|
| 235 |
+
"GunicornUVLoopWebWorker",
|
| 236 |
+
"GunicornWebWorker",
|
| 237 |
+
)
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
def __dir__() -> Tuple[str, ...]:
|
| 241 |
+
return __all__ + ("__author__", "__doc__")
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
def __getattr__(name: str) -> object:
|
| 245 |
+
global GunicornUVLoopWebWorker, GunicornWebWorker
|
| 246 |
+
|
| 247 |
+
# Importing gunicorn takes a long time (>100ms), so only import if actually needed.
|
| 248 |
+
if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
|
| 249 |
+
try:
|
| 250 |
+
from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
|
| 251 |
+
except ImportError:
|
| 252 |
+
return None
|
| 253 |
+
|
| 254 |
+
GunicornUVLoopWebWorker = guv # type: ignore[misc]
|
| 255 |
+
GunicornWebWorker = gw # type: ignore[misc]
|
| 256 |
+
return guv if name == "GunicornUVLoopWebWorker" else gw
|
| 257 |
+
|
| 258 |
+
raise AttributeError(f"module {__name__} has no attribute {name}")
|
parrot/lib/python3.10/site-packages/aiohttp/_headers.pxi
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The file is autogenerated from aiohttp/hdrs.py
|
| 2 |
+
# Run ./tools/gen.py to update it after the origin changing.
|
| 3 |
+
|
| 4 |
+
from . import hdrs
|
| 5 |
+
cdef tuple headers = (
|
| 6 |
+
hdrs.ACCEPT,
|
| 7 |
+
hdrs.ACCEPT_CHARSET,
|
| 8 |
+
hdrs.ACCEPT_ENCODING,
|
| 9 |
+
hdrs.ACCEPT_LANGUAGE,
|
| 10 |
+
hdrs.ACCEPT_RANGES,
|
| 11 |
+
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
| 12 |
+
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
| 13 |
+
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
| 14 |
+
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
| 15 |
+
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
| 16 |
+
hdrs.ACCESS_CONTROL_MAX_AGE,
|
| 17 |
+
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
| 18 |
+
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
| 19 |
+
hdrs.AGE,
|
| 20 |
+
hdrs.ALLOW,
|
| 21 |
+
hdrs.AUTHORIZATION,
|
| 22 |
+
hdrs.CACHE_CONTROL,
|
| 23 |
+
hdrs.CONNECTION,
|
| 24 |
+
hdrs.CONTENT_DISPOSITION,
|
| 25 |
+
hdrs.CONTENT_ENCODING,
|
| 26 |
+
hdrs.CONTENT_LANGUAGE,
|
| 27 |
+
hdrs.CONTENT_LENGTH,
|
| 28 |
+
hdrs.CONTENT_LOCATION,
|
| 29 |
+
hdrs.CONTENT_MD5,
|
| 30 |
+
hdrs.CONTENT_RANGE,
|
| 31 |
+
hdrs.CONTENT_TRANSFER_ENCODING,
|
| 32 |
+
hdrs.CONTENT_TYPE,
|
| 33 |
+
hdrs.COOKIE,
|
| 34 |
+
hdrs.DATE,
|
| 35 |
+
hdrs.DESTINATION,
|
| 36 |
+
hdrs.DIGEST,
|
| 37 |
+
hdrs.ETAG,
|
| 38 |
+
hdrs.EXPECT,
|
| 39 |
+
hdrs.EXPIRES,
|
| 40 |
+
hdrs.FORWARDED,
|
| 41 |
+
hdrs.FROM,
|
| 42 |
+
hdrs.HOST,
|
| 43 |
+
hdrs.IF_MATCH,
|
| 44 |
+
hdrs.IF_MODIFIED_SINCE,
|
| 45 |
+
hdrs.IF_NONE_MATCH,
|
| 46 |
+
hdrs.IF_RANGE,
|
| 47 |
+
hdrs.IF_UNMODIFIED_SINCE,
|
| 48 |
+
hdrs.KEEP_ALIVE,
|
| 49 |
+
hdrs.LAST_EVENT_ID,
|
| 50 |
+
hdrs.LAST_MODIFIED,
|
| 51 |
+
hdrs.LINK,
|
| 52 |
+
hdrs.LOCATION,
|
| 53 |
+
hdrs.MAX_FORWARDS,
|
| 54 |
+
hdrs.ORIGIN,
|
| 55 |
+
hdrs.PRAGMA,
|
| 56 |
+
hdrs.PROXY_AUTHENTICATE,
|
| 57 |
+
hdrs.PROXY_AUTHORIZATION,
|
| 58 |
+
hdrs.RANGE,
|
| 59 |
+
hdrs.REFERER,
|
| 60 |
+
hdrs.RETRY_AFTER,
|
| 61 |
+
hdrs.SEC_WEBSOCKET_ACCEPT,
|
| 62 |
+
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
| 63 |
+
hdrs.SEC_WEBSOCKET_KEY,
|
| 64 |
+
hdrs.SEC_WEBSOCKET_KEY1,
|
| 65 |
+
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
| 66 |
+
hdrs.SEC_WEBSOCKET_VERSION,
|
| 67 |
+
hdrs.SERVER,
|
| 68 |
+
hdrs.SET_COOKIE,
|
| 69 |
+
hdrs.TE,
|
| 70 |
+
hdrs.TRAILER,
|
| 71 |
+
hdrs.TRANSFER_ENCODING,
|
| 72 |
+
hdrs.URI,
|
| 73 |
+
hdrs.UPGRADE,
|
| 74 |
+
hdrs.USER_AGENT,
|
| 75 |
+
hdrs.VARY,
|
| 76 |
+
hdrs.VIA,
|
| 77 |
+
hdrs.WWW_AUTHENTICATE,
|
| 78 |
+
hdrs.WANT_DIGEST,
|
| 79 |
+
hdrs.WARNING,
|
| 80 |
+
hdrs.X_FORWARDED_FOR,
|
| 81 |
+
hdrs.X_FORWARDED_HOST,
|
| 82 |
+
hdrs.X_FORWARDED_PROTO,
|
| 83 |
+
)
|
parrot/lib/python3.10/site-packages/aiohttp/_helpers.pyi
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
|
| 3 |
+
class reify:
|
| 4 |
+
def __init__(self, wrapped: Any) -> None: ...
|
| 5 |
+
def __get__(self, inst: Any, owner: Any) -> Any: ...
|
| 6 |
+
def __set__(self, inst: Any, value: Any) -> None: ...
|
parrot/lib/python3.10/site-packages/aiohttp/_helpers.pyx
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
cdef _sentinel = object()
|
| 3 |
+
|
| 4 |
+
cdef class reify:
|
| 5 |
+
"""Use as a class method decorator. It operates almost exactly like
|
| 6 |
+
the Python `@property` decorator, but it puts the result of the
|
| 7 |
+
method it decorates into the instance dict after the first call,
|
| 8 |
+
effectively replacing the function it decorates with an instance
|
| 9 |
+
variable. It is, in Python parlance, a data descriptor.
|
| 10 |
+
|
| 11 |
+
"""
|
| 12 |
+
|
| 13 |
+
cdef object wrapped
|
| 14 |
+
cdef object name
|
| 15 |
+
|
| 16 |
+
def __init__(self, wrapped):
|
| 17 |
+
self.wrapped = wrapped
|
| 18 |
+
self.name = wrapped.__name__
|
| 19 |
+
|
| 20 |
+
@property
|
| 21 |
+
def __doc__(self):
|
| 22 |
+
return self.wrapped.__doc__
|
| 23 |
+
|
| 24 |
+
def __get__(self, inst, owner):
|
| 25 |
+
if inst is None:
|
| 26 |
+
return self
|
| 27 |
+
cdef dict cache = inst._cache
|
| 28 |
+
val = cache.get(self.name, _sentinel)
|
| 29 |
+
if val is _sentinel:
|
| 30 |
+
val = self.wrapped(inst)
|
| 31 |
+
cache[self.name] = val
|
| 32 |
+
return val
|
| 33 |
+
|
| 34 |
+
def __set__(self, inst, value):
|
| 35 |
+
raise AttributeError("reified property is read-only")
|
parrot/lib/python3.10/site-packages/aiohttp/_http_writer.pyx
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from cpython.bytes cimport PyBytes_FromStringAndSize
|
| 2 |
+
from cpython.exc cimport PyErr_NoMemory
|
| 3 |
+
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
|
| 4 |
+
from cpython.object cimport PyObject_Str
|
| 5 |
+
from libc.stdint cimport uint8_t, uint64_t
|
| 6 |
+
from libc.string cimport memcpy
|
| 7 |
+
|
| 8 |
+
from multidict import istr
|
| 9 |
+
|
| 10 |
+
DEF BUF_SIZE = 16 * 1024 # 16KiB
|
| 11 |
+
cdef char BUFFER[BUF_SIZE]
|
| 12 |
+
|
| 13 |
+
cdef object _istr = istr
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# ----------------- writer ---------------------------
|
| 17 |
+
|
| 18 |
+
cdef struct Writer:
|
| 19 |
+
char *buf
|
| 20 |
+
Py_ssize_t size
|
| 21 |
+
Py_ssize_t pos
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
cdef inline void _init_writer(Writer* writer):
|
| 25 |
+
writer.buf = &BUFFER[0]
|
| 26 |
+
writer.size = BUF_SIZE
|
| 27 |
+
writer.pos = 0
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
cdef inline void _release_writer(Writer* writer):
|
| 31 |
+
if writer.buf != BUFFER:
|
| 32 |
+
PyMem_Free(writer.buf)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
cdef inline int _write_byte(Writer* writer, uint8_t ch):
|
| 36 |
+
cdef char * buf
|
| 37 |
+
cdef Py_ssize_t size
|
| 38 |
+
|
| 39 |
+
if writer.pos == writer.size:
|
| 40 |
+
# reallocate
|
| 41 |
+
size = writer.size + BUF_SIZE
|
| 42 |
+
if writer.buf == BUFFER:
|
| 43 |
+
buf = <char*>PyMem_Malloc(size)
|
| 44 |
+
if buf == NULL:
|
| 45 |
+
PyErr_NoMemory()
|
| 46 |
+
return -1
|
| 47 |
+
memcpy(buf, writer.buf, writer.size)
|
| 48 |
+
else:
|
| 49 |
+
buf = <char*>PyMem_Realloc(writer.buf, size)
|
| 50 |
+
if buf == NULL:
|
| 51 |
+
PyErr_NoMemory()
|
| 52 |
+
return -1
|
| 53 |
+
writer.buf = buf
|
| 54 |
+
writer.size = size
|
| 55 |
+
writer.buf[writer.pos] = <char>ch
|
| 56 |
+
writer.pos += 1
|
| 57 |
+
return 0
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
|
| 61 |
+
cdef uint64_t utf = <uint64_t> symbol
|
| 62 |
+
|
| 63 |
+
if utf < 0x80:
|
| 64 |
+
return _write_byte(writer, <uint8_t>utf)
|
| 65 |
+
elif utf < 0x800:
|
| 66 |
+
if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
|
| 67 |
+
return -1
|
| 68 |
+
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
| 69 |
+
elif 0xD800 <= utf <= 0xDFFF:
|
| 70 |
+
# surogate pair, ignored
|
| 71 |
+
return 0
|
| 72 |
+
elif utf < 0x10000:
|
| 73 |
+
if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
|
| 74 |
+
return -1
|
| 75 |
+
if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
| 76 |
+
return -1
|
| 77 |
+
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
| 78 |
+
elif utf > 0x10FFFF:
|
| 79 |
+
# symbol is too large
|
| 80 |
+
return 0
|
| 81 |
+
else:
|
| 82 |
+
if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
|
| 83 |
+
return -1
|
| 84 |
+
if _write_byte(writer,
|
| 85 |
+
<uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
|
| 86 |
+
return -1
|
| 87 |
+
if _write_byte(writer,
|
| 88 |
+
<uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
| 89 |
+
return -1
|
| 90 |
+
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
cdef inline int _write_str(Writer* writer, str s):
|
| 94 |
+
cdef Py_UCS4 ch
|
| 95 |
+
for ch in s:
|
| 96 |
+
if _write_utf8(writer, ch) < 0:
|
| 97 |
+
return -1
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
# --------------- _serialize_headers ----------------------
|
| 101 |
+
|
| 102 |
+
cdef str to_str(object s):
|
| 103 |
+
typ = type(s)
|
| 104 |
+
if typ is str:
|
| 105 |
+
return <str>s
|
| 106 |
+
elif typ is _istr:
|
| 107 |
+
return PyObject_Str(s)
|
| 108 |
+
elif not isinstance(s, str):
|
| 109 |
+
raise TypeError("Cannot serialize non-str key {!r}".format(s))
|
| 110 |
+
else:
|
| 111 |
+
return str(s)
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
cdef void _safe_header(str string) except *:
|
| 115 |
+
if "\r" in string or "\n" in string:
|
| 116 |
+
raise ValueError(
|
| 117 |
+
"Newline or carriage return character detected in HTTP status message or "
|
| 118 |
+
"header. This is a potential security issue."
|
| 119 |
+
)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _serialize_headers(str status_line, headers):
|
| 123 |
+
cdef Writer writer
|
| 124 |
+
cdef object key
|
| 125 |
+
cdef object val
|
| 126 |
+
cdef bytes ret
|
| 127 |
+
|
| 128 |
+
_init_writer(&writer)
|
| 129 |
+
|
| 130 |
+
for key, val in headers.items():
|
| 131 |
+
_safe_header(to_str(key))
|
| 132 |
+
_safe_header(to_str(val))
|
| 133 |
+
|
| 134 |
+
try:
|
| 135 |
+
if _write_str(&writer, status_line) < 0:
|
| 136 |
+
raise
|
| 137 |
+
if _write_byte(&writer, b'\r') < 0:
|
| 138 |
+
raise
|
| 139 |
+
if _write_byte(&writer, b'\n') < 0:
|
| 140 |
+
raise
|
| 141 |
+
|
| 142 |
+
for key, val in headers.items():
|
| 143 |
+
if _write_str(&writer, to_str(key)) < 0:
|
| 144 |
+
raise
|
| 145 |
+
if _write_byte(&writer, b':') < 0:
|
| 146 |
+
raise
|
| 147 |
+
if _write_byte(&writer, b' ') < 0:
|
| 148 |
+
raise
|
| 149 |
+
if _write_str(&writer, to_str(val)) < 0:
|
| 150 |
+
raise
|
| 151 |
+
if _write_byte(&writer, b'\r') < 0:
|
| 152 |
+
raise
|
| 153 |
+
if _write_byte(&writer, b'\n') < 0:
|
| 154 |
+
raise
|
| 155 |
+
|
| 156 |
+
if _write_byte(&writer, b'\r') < 0:
|
| 157 |
+
raise
|
| 158 |
+
if _write_byte(&writer, b'\n') < 0:
|
| 159 |
+
raise
|
| 160 |
+
|
| 161 |
+
return PyBytes_FromStringAndSize(writer.buf, writer.pos)
|
| 162 |
+
finally:
|
| 163 |
+
_release_writer(&writer)
|
parrot/lib/python3.10/site-packages/aiohttp/_websocket.pyx
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from cpython cimport PyBytes_AsString
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
#from cpython cimport PyByteArray_AsString # cython still not exports that
|
| 5 |
+
cdef extern from "Python.h":
|
| 6 |
+
char* PyByteArray_AsString(bytearray ba) except NULL
|
| 7 |
+
|
| 8 |
+
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def _websocket_mask_cython(object mask, object data):
|
| 12 |
+
"""Note, this function mutates its `data` argument
|
| 13 |
+
"""
|
| 14 |
+
cdef:
|
| 15 |
+
Py_ssize_t data_len, i
|
| 16 |
+
# bit operations on signed integers are implementation-specific
|
| 17 |
+
unsigned char * in_buf
|
| 18 |
+
const unsigned char * mask_buf
|
| 19 |
+
uint32_t uint32_msk
|
| 20 |
+
uint64_t uint64_msk
|
| 21 |
+
|
| 22 |
+
assert len(mask) == 4
|
| 23 |
+
|
| 24 |
+
if not isinstance(mask, bytes):
|
| 25 |
+
mask = bytes(mask)
|
| 26 |
+
|
| 27 |
+
if isinstance(data, bytearray):
|
| 28 |
+
data = <bytearray>data
|
| 29 |
+
else:
|
| 30 |
+
data = bytearray(data)
|
| 31 |
+
|
| 32 |
+
data_len = len(data)
|
| 33 |
+
in_buf = <unsigned char*>PyByteArray_AsString(data)
|
| 34 |
+
mask_buf = <const unsigned char*>PyBytes_AsString(mask)
|
| 35 |
+
uint32_msk = (<uint32_t*>mask_buf)[0]
|
| 36 |
+
|
| 37 |
+
# TODO: align in_data ptr to achieve even faster speeds
|
| 38 |
+
# does it need in python ?! malloc() always aligns to sizeof(long) bytes
|
| 39 |
+
|
| 40 |
+
if sizeof(size_t) >= 8:
|
| 41 |
+
uint64_msk = uint32_msk
|
| 42 |
+
uint64_msk = (uint64_msk << 32) | uint32_msk
|
| 43 |
+
|
| 44 |
+
while data_len >= 8:
|
| 45 |
+
(<uint64_t*>in_buf)[0] ^= uint64_msk
|
| 46 |
+
in_buf += 8
|
| 47 |
+
data_len -= 8
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
while data_len >= 4:
|
| 51 |
+
(<uint32_t*>in_buf)[0] ^= uint32_msk
|
| 52 |
+
in_buf += 4
|
| 53 |
+
data_len -= 4
|
| 54 |
+
|
| 55 |
+
for i in range(0, data_len):
|
| 56 |
+
in_buf[i] ^= mask_buf[i]
|
parrot/lib/python3.10/site-packages/aiohttp/abc.py
ADDED
|
@@ -0,0 +1,234 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import socket
|
| 4 |
+
from abc import ABC, abstractmethod
|
| 5 |
+
from collections.abc import Sized
|
| 6 |
+
from http.cookies import BaseCookie, Morsel
|
| 7 |
+
from typing import (
|
| 8 |
+
TYPE_CHECKING,
|
| 9 |
+
Any,
|
| 10 |
+
Awaitable,
|
| 11 |
+
Callable,
|
| 12 |
+
Dict,
|
| 13 |
+
Generator,
|
| 14 |
+
Iterable,
|
| 15 |
+
List,
|
| 16 |
+
Optional,
|
| 17 |
+
Tuple,
|
| 18 |
+
TypedDict,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from multidict import CIMultiDict
|
| 22 |
+
from yarl import URL
|
| 23 |
+
|
| 24 |
+
from .typedefs import LooseCookies
|
| 25 |
+
|
| 26 |
+
if TYPE_CHECKING:
|
| 27 |
+
from .web_app import Application
|
| 28 |
+
from .web_exceptions import HTTPException
|
| 29 |
+
from .web_request import BaseRequest, Request
|
| 30 |
+
from .web_response import StreamResponse
|
| 31 |
+
else:
|
| 32 |
+
BaseRequest = Request = Application = StreamResponse = None
|
| 33 |
+
HTTPException = None
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class AbstractRouter(ABC):
|
| 37 |
+
def __init__(self) -> None:
|
| 38 |
+
self._frozen = False
|
| 39 |
+
|
| 40 |
+
def post_init(self, app: Application) -> None:
|
| 41 |
+
"""Post init stage.
|
| 42 |
+
|
| 43 |
+
Not an abstract method for sake of backward compatibility,
|
| 44 |
+
but if the router wants to be aware of the application
|
| 45 |
+
it can override this.
|
| 46 |
+
"""
|
| 47 |
+
|
| 48 |
+
@property
|
| 49 |
+
def frozen(self) -> bool:
|
| 50 |
+
return self._frozen
|
| 51 |
+
|
| 52 |
+
def freeze(self) -> None:
|
| 53 |
+
"""Freeze router."""
|
| 54 |
+
self._frozen = True
|
| 55 |
+
|
| 56 |
+
@abstractmethod
|
| 57 |
+
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
| 58 |
+
"""Return MATCH_INFO for given request"""
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class AbstractMatchInfo(ABC):
|
| 62 |
+
@property # pragma: no branch
|
| 63 |
+
@abstractmethod
|
| 64 |
+
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
| 65 |
+
"""Execute matched request handler"""
|
| 66 |
+
|
| 67 |
+
@property
|
| 68 |
+
@abstractmethod
|
| 69 |
+
def expect_handler(
|
| 70 |
+
self,
|
| 71 |
+
) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
|
| 72 |
+
"""Expect handler for 100-continue processing"""
|
| 73 |
+
|
| 74 |
+
@property # pragma: no branch
|
| 75 |
+
@abstractmethod
|
| 76 |
+
def http_exception(self) -> Optional[HTTPException]:
|
| 77 |
+
"""HTTPException instance raised on router's resolving, or None"""
|
| 78 |
+
|
| 79 |
+
@abstractmethod # pragma: no branch
|
| 80 |
+
def get_info(self) -> Dict[str, Any]:
|
| 81 |
+
"""Return a dict with additional info useful for introspection"""
|
| 82 |
+
|
| 83 |
+
@property # pragma: no branch
|
| 84 |
+
@abstractmethod
|
| 85 |
+
def apps(self) -> Tuple[Application, ...]:
|
| 86 |
+
"""Stack of nested applications.
|
| 87 |
+
|
| 88 |
+
Top level application is left-most element.
|
| 89 |
+
|
| 90 |
+
"""
|
| 91 |
+
|
| 92 |
+
@abstractmethod
|
| 93 |
+
def add_app(self, app: Application) -> None:
|
| 94 |
+
"""Add application to the nested apps stack."""
|
| 95 |
+
|
| 96 |
+
@abstractmethod
|
| 97 |
+
def freeze(self) -> None:
|
| 98 |
+
"""Freeze the match info.
|
| 99 |
+
|
| 100 |
+
The method is called after route resolution.
|
| 101 |
+
|
| 102 |
+
After the call .add_app() is forbidden.
|
| 103 |
+
|
| 104 |
+
"""
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class AbstractView(ABC):
|
| 108 |
+
"""Abstract class based view."""
|
| 109 |
+
|
| 110 |
+
def __init__(self, request: Request) -> None:
|
| 111 |
+
self._request = request
|
| 112 |
+
|
| 113 |
+
@property
|
| 114 |
+
def request(self) -> Request:
|
| 115 |
+
"""Request instance."""
|
| 116 |
+
return self._request
|
| 117 |
+
|
| 118 |
+
@abstractmethod
|
| 119 |
+
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
| 120 |
+
"""Execute the view handler."""
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class ResolveResult(TypedDict):
|
| 124 |
+
"""Resolve result.
|
| 125 |
+
|
| 126 |
+
This is the result returned from an AbstractResolver's
|
| 127 |
+
resolve method.
|
| 128 |
+
|
| 129 |
+
:param hostname: The hostname that was provided.
|
| 130 |
+
:param host: The IP address that was resolved.
|
| 131 |
+
:param port: The port that was resolved.
|
| 132 |
+
:param family: The address family that was resolved.
|
| 133 |
+
:param proto: The protocol that was resolved.
|
| 134 |
+
:param flags: The flags that were resolved.
|
| 135 |
+
"""
|
| 136 |
+
|
| 137 |
+
hostname: str
|
| 138 |
+
host: str
|
| 139 |
+
port: int
|
| 140 |
+
family: int
|
| 141 |
+
proto: int
|
| 142 |
+
flags: int
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
class AbstractResolver(ABC):
|
| 146 |
+
"""Abstract DNS resolver."""
|
| 147 |
+
|
| 148 |
+
@abstractmethod
|
| 149 |
+
async def resolve(
|
| 150 |
+
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
|
| 151 |
+
) -> List[ResolveResult]:
|
| 152 |
+
"""Return IP address for given hostname"""
|
| 153 |
+
|
| 154 |
+
@abstractmethod
|
| 155 |
+
async def close(self) -> None:
|
| 156 |
+
"""Release resolver"""
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
if TYPE_CHECKING:
|
| 160 |
+
IterableBase = Iterable[Morsel[str]]
|
| 161 |
+
else:
|
| 162 |
+
IterableBase = Iterable
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
class AbstractCookieJar(Sized, IterableBase):
|
| 169 |
+
"""Abstract Cookie Jar."""
|
| 170 |
+
|
| 171 |
+
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 172 |
+
self._loop = loop or asyncio.get_running_loop()
|
| 173 |
+
|
| 174 |
+
@abstractmethod
|
| 175 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
| 176 |
+
"""Clear all cookies if no predicate is passed."""
|
| 177 |
+
|
| 178 |
+
@abstractmethod
|
| 179 |
+
def clear_domain(self, domain: str) -> None:
|
| 180 |
+
"""Clear all cookies for domain and all subdomains."""
|
| 181 |
+
|
| 182 |
+
@abstractmethod
|
| 183 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
| 184 |
+
"""Update cookies."""
|
| 185 |
+
|
| 186 |
+
@abstractmethod
|
| 187 |
+
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
| 188 |
+
"""Return the jar's cookies filtered by their attributes."""
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
class AbstractStreamWriter(ABC):
|
| 192 |
+
"""Abstract stream writer."""
|
| 193 |
+
|
| 194 |
+
buffer_size = 0
|
| 195 |
+
output_size = 0
|
| 196 |
+
length: Optional[int] = 0
|
| 197 |
+
|
| 198 |
+
@abstractmethod
|
| 199 |
+
async def write(self, chunk: bytes) -> None:
|
| 200 |
+
"""Write chunk into stream."""
|
| 201 |
+
|
| 202 |
+
@abstractmethod
|
| 203 |
+
async def write_eof(self, chunk: bytes = b"") -> None:
|
| 204 |
+
"""Write last chunk."""
|
| 205 |
+
|
| 206 |
+
@abstractmethod
|
| 207 |
+
async def drain(self) -> None:
|
| 208 |
+
"""Flush the write buffer."""
|
| 209 |
+
|
| 210 |
+
@abstractmethod
|
| 211 |
+
def enable_compression(self, encoding: str = "deflate") -> None:
|
| 212 |
+
"""Enable HTTP body compression"""
|
| 213 |
+
|
| 214 |
+
@abstractmethod
|
| 215 |
+
def enable_chunking(self) -> None:
|
| 216 |
+
"""Enable HTTP chunked mode"""
|
| 217 |
+
|
| 218 |
+
@abstractmethod
|
| 219 |
+
async def write_headers(
|
| 220 |
+
self, status_line: str, headers: "CIMultiDict[str]"
|
| 221 |
+
) -> None:
|
| 222 |
+
"""Write HTTP headers"""
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
class AbstractAccessLogger(ABC):
|
| 226 |
+
"""Abstract writer to access log."""
|
| 227 |
+
|
| 228 |
+
def __init__(self, logger: logging.Logger, log_format: str) -> None:
|
| 229 |
+
self.logger = logger
|
| 230 |
+
self.log_format = log_format
|
| 231 |
+
|
| 232 |
+
@abstractmethod
|
| 233 |
+
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
| 234 |
+
"""Emit log to logger."""
|
parrot/lib/python3.10/site-packages/aiohttp/base_protocol.py
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from typing import Optional, cast
|
| 3 |
+
|
| 4 |
+
from .client_exceptions import ClientConnectionResetError
|
| 5 |
+
from .helpers import set_exception
|
| 6 |
+
from .tcp_helpers import tcp_nodelay
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class BaseProtocol(asyncio.Protocol):
|
| 10 |
+
__slots__ = (
|
| 11 |
+
"_loop",
|
| 12 |
+
"_paused",
|
| 13 |
+
"_drain_waiter",
|
| 14 |
+
"_connection_lost",
|
| 15 |
+
"_reading_paused",
|
| 16 |
+
"transport",
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
| 20 |
+
self._loop: asyncio.AbstractEventLoop = loop
|
| 21 |
+
self._paused = False
|
| 22 |
+
self._drain_waiter: Optional[asyncio.Future[None]] = None
|
| 23 |
+
self._reading_paused = False
|
| 24 |
+
|
| 25 |
+
self.transport: Optional[asyncio.Transport] = None
|
| 26 |
+
|
| 27 |
+
@property
|
| 28 |
+
def connected(self) -> bool:
|
| 29 |
+
"""Return True if the connection is open."""
|
| 30 |
+
return self.transport is not None
|
| 31 |
+
|
| 32 |
+
def pause_writing(self) -> None:
|
| 33 |
+
assert not self._paused
|
| 34 |
+
self._paused = True
|
| 35 |
+
|
| 36 |
+
def resume_writing(self) -> None:
|
| 37 |
+
assert self._paused
|
| 38 |
+
self._paused = False
|
| 39 |
+
|
| 40 |
+
waiter = self._drain_waiter
|
| 41 |
+
if waiter is not None:
|
| 42 |
+
self._drain_waiter = None
|
| 43 |
+
if not waiter.done():
|
| 44 |
+
waiter.set_result(None)
|
| 45 |
+
|
| 46 |
+
def pause_reading(self) -> None:
|
| 47 |
+
if not self._reading_paused and self.transport is not None:
|
| 48 |
+
try:
|
| 49 |
+
self.transport.pause_reading()
|
| 50 |
+
except (AttributeError, NotImplementedError, RuntimeError):
|
| 51 |
+
pass
|
| 52 |
+
self._reading_paused = True
|
| 53 |
+
|
| 54 |
+
def resume_reading(self) -> None:
|
| 55 |
+
if self._reading_paused and self.transport is not None:
|
| 56 |
+
try:
|
| 57 |
+
self.transport.resume_reading()
|
| 58 |
+
except (AttributeError, NotImplementedError, RuntimeError):
|
| 59 |
+
pass
|
| 60 |
+
self._reading_paused = False
|
| 61 |
+
|
| 62 |
+
def connection_made(self, transport: asyncio.BaseTransport) -> None:
|
| 63 |
+
tr = cast(asyncio.Transport, transport)
|
| 64 |
+
tcp_nodelay(tr, True)
|
| 65 |
+
self.transport = tr
|
| 66 |
+
|
| 67 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
| 68 |
+
# Wake up the writer if currently paused.
|
| 69 |
+
self.transport = None
|
| 70 |
+
if not self._paused:
|
| 71 |
+
return
|
| 72 |
+
waiter = self._drain_waiter
|
| 73 |
+
if waiter is None:
|
| 74 |
+
return
|
| 75 |
+
self._drain_waiter = None
|
| 76 |
+
if waiter.done():
|
| 77 |
+
return
|
| 78 |
+
if exc is None:
|
| 79 |
+
waiter.set_result(None)
|
| 80 |
+
else:
|
| 81 |
+
set_exception(
|
| 82 |
+
waiter,
|
| 83 |
+
ConnectionError("Connection lost"),
|
| 84 |
+
exc,
|
| 85 |
+
)
|
| 86 |
+
|
| 87 |
+
async def _drain_helper(self) -> None:
|
| 88 |
+
if not self.connected:
|
| 89 |
+
raise ClientConnectionResetError("Connection lost")
|
| 90 |
+
if not self._paused:
|
| 91 |
+
return
|
| 92 |
+
waiter = self._drain_waiter
|
| 93 |
+
if waiter is None:
|
| 94 |
+
waiter = self._loop.create_future()
|
| 95 |
+
self._drain_waiter = waiter
|
| 96 |
+
await asyncio.shield(waiter)
|
parrot/lib/python3.10/site-packages/aiohttp/client.py
ADDED
|
@@ -0,0 +1,1506 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP Client for asyncio."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import hashlib
|
| 6 |
+
import json
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
import warnings
|
| 11 |
+
from contextlib import suppress
|
| 12 |
+
from types import TracebackType
|
| 13 |
+
from typing import (
|
| 14 |
+
TYPE_CHECKING,
|
| 15 |
+
Any,
|
| 16 |
+
Awaitable,
|
| 17 |
+
Callable,
|
| 18 |
+
Coroutine,
|
| 19 |
+
Final,
|
| 20 |
+
FrozenSet,
|
| 21 |
+
Generator,
|
| 22 |
+
Generic,
|
| 23 |
+
Iterable,
|
| 24 |
+
List,
|
| 25 |
+
Mapping,
|
| 26 |
+
Optional,
|
| 27 |
+
Set,
|
| 28 |
+
Tuple,
|
| 29 |
+
Type,
|
| 30 |
+
TypedDict,
|
| 31 |
+
TypeVar,
|
| 32 |
+
Union,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
import attr
|
| 36 |
+
from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
|
| 37 |
+
from yarl import URL
|
| 38 |
+
|
| 39 |
+
from . import hdrs, http, payload
|
| 40 |
+
from .abc import AbstractCookieJar
|
| 41 |
+
from .client_exceptions import (
|
| 42 |
+
ClientConnectionError,
|
| 43 |
+
ClientConnectionResetError,
|
| 44 |
+
ClientConnectorCertificateError,
|
| 45 |
+
ClientConnectorError,
|
| 46 |
+
ClientConnectorSSLError,
|
| 47 |
+
ClientError,
|
| 48 |
+
ClientHttpProxyError,
|
| 49 |
+
ClientOSError,
|
| 50 |
+
ClientPayloadError,
|
| 51 |
+
ClientProxyConnectionError,
|
| 52 |
+
ClientResponseError,
|
| 53 |
+
ClientSSLError,
|
| 54 |
+
ConnectionTimeoutError,
|
| 55 |
+
ContentTypeError,
|
| 56 |
+
InvalidURL,
|
| 57 |
+
InvalidUrlClientError,
|
| 58 |
+
InvalidUrlRedirectClientError,
|
| 59 |
+
NonHttpUrlClientError,
|
| 60 |
+
NonHttpUrlRedirectClientError,
|
| 61 |
+
RedirectClientError,
|
| 62 |
+
ServerConnectionError,
|
| 63 |
+
ServerDisconnectedError,
|
| 64 |
+
ServerFingerprintMismatch,
|
| 65 |
+
ServerTimeoutError,
|
| 66 |
+
SocketTimeoutError,
|
| 67 |
+
TooManyRedirects,
|
| 68 |
+
WSServerHandshakeError,
|
| 69 |
+
)
|
| 70 |
+
from .client_reqrep import (
|
| 71 |
+
ClientRequest as ClientRequest,
|
| 72 |
+
ClientResponse as ClientResponse,
|
| 73 |
+
Fingerprint as Fingerprint,
|
| 74 |
+
RequestInfo as RequestInfo,
|
| 75 |
+
_merge_ssl_params,
|
| 76 |
+
)
|
| 77 |
+
from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
|
| 78 |
+
from .connector import (
|
| 79 |
+
HTTP_AND_EMPTY_SCHEMA_SET,
|
| 80 |
+
BaseConnector as BaseConnector,
|
| 81 |
+
NamedPipeConnector as NamedPipeConnector,
|
| 82 |
+
TCPConnector as TCPConnector,
|
| 83 |
+
UnixConnector as UnixConnector,
|
| 84 |
+
)
|
| 85 |
+
from .cookiejar import CookieJar
|
| 86 |
+
from .helpers import (
|
| 87 |
+
_SENTINEL,
|
| 88 |
+
DEBUG,
|
| 89 |
+
BasicAuth,
|
| 90 |
+
TimeoutHandle,
|
| 91 |
+
ceil_timeout,
|
| 92 |
+
get_env_proxy_for_url,
|
| 93 |
+
method_must_be_empty_body,
|
| 94 |
+
sentinel,
|
| 95 |
+
strip_auth_from_url,
|
| 96 |
+
)
|
| 97 |
+
from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
|
| 98 |
+
from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
|
| 99 |
+
from .streams import FlowControlDataQueue
|
| 100 |
+
from .tracing import Trace, TraceConfig
|
| 101 |
+
from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL
|
| 102 |
+
|
| 103 |
+
__all__ = (
|
| 104 |
+
# client_exceptions
|
| 105 |
+
"ClientConnectionError",
|
| 106 |
+
"ClientConnectionResetError",
|
| 107 |
+
"ClientConnectorCertificateError",
|
| 108 |
+
"ClientConnectorError",
|
| 109 |
+
"ClientConnectorSSLError",
|
| 110 |
+
"ClientError",
|
| 111 |
+
"ClientHttpProxyError",
|
| 112 |
+
"ClientOSError",
|
| 113 |
+
"ClientPayloadError",
|
| 114 |
+
"ClientProxyConnectionError",
|
| 115 |
+
"ClientResponseError",
|
| 116 |
+
"ClientSSLError",
|
| 117 |
+
"ConnectionTimeoutError",
|
| 118 |
+
"ContentTypeError",
|
| 119 |
+
"InvalidURL",
|
| 120 |
+
"InvalidUrlClientError",
|
| 121 |
+
"RedirectClientError",
|
| 122 |
+
"NonHttpUrlClientError",
|
| 123 |
+
"InvalidUrlRedirectClientError",
|
| 124 |
+
"NonHttpUrlRedirectClientError",
|
| 125 |
+
"ServerConnectionError",
|
| 126 |
+
"ServerDisconnectedError",
|
| 127 |
+
"ServerFingerprintMismatch",
|
| 128 |
+
"ServerTimeoutError",
|
| 129 |
+
"SocketTimeoutError",
|
| 130 |
+
"TooManyRedirects",
|
| 131 |
+
"WSServerHandshakeError",
|
| 132 |
+
# client_reqrep
|
| 133 |
+
"ClientRequest",
|
| 134 |
+
"ClientResponse",
|
| 135 |
+
"Fingerprint",
|
| 136 |
+
"RequestInfo",
|
| 137 |
+
# connector
|
| 138 |
+
"BaseConnector",
|
| 139 |
+
"TCPConnector",
|
| 140 |
+
"UnixConnector",
|
| 141 |
+
"NamedPipeConnector",
|
| 142 |
+
# client_ws
|
| 143 |
+
"ClientWebSocketResponse",
|
| 144 |
+
# client
|
| 145 |
+
"ClientSession",
|
| 146 |
+
"ClientTimeout",
|
| 147 |
+
"request",
|
| 148 |
+
)
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
if TYPE_CHECKING:
|
| 152 |
+
from ssl import SSLContext
|
| 153 |
+
else:
|
| 154 |
+
SSLContext = None
|
| 155 |
+
|
| 156 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
|
| 157 |
+
from typing import Unpack
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
class _RequestOptions(TypedDict, total=False):
|
| 161 |
+
params: Query
|
| 162 |
+
data: Any
|
| 163 |
+
json: Any
|
| 164 |
+
cookies: Union[LooseCookies, None]
|
| 165 |
+
headers: Union[LooseHeaders, None]
|
| 166 |
+
skip_auto_headers: Union[Iterable[str], None]
|
| 167 |
+
auth: Union[BasicAuth, None]
|
| 168 |
+
allow_redirects: bool
|
| 169 |
+
max_redirects: int
|
| 170 |
+
compress: Union[str, bool, None]
|
| 171 |
+
chunked: Union[bool, None]
|
| 172 |
+
expect100: bool
|
| 173 |
+
raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]]
|
| 174 |
+
read_until_eof: bool
|
| 175 |
+
proxy: Union[StrOrURL, None]
|
| 176 |
+
proxy_auth: Union[BasicAuth, None]
|
| 177 |
+
timeout: "Union[ClientTimeout, _SENTINEL, None]"
|
| 178 |
+
ssl: Union[SSLContext, bool, Fingerprint]
|
| 179 |
+
server_hostname: Union[str, None]
|
| 180 |
+
proxy_headers: Union[LooseHeaders, None]
|
| 181 |
+
trace_request_ctx: Union[Mapping[str, str], None]
|
| 182 |
+
read_bufsize: Union[int, None]
|
| 183 |
+
auto_decompress: Union[bool, None]
|
| 184 |
+
max_line_size: Union[int, None]
|
| 185 |
+
max_field_size: Union[int, None]
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
| 189 |
+
class ClientTimeout:
|
| 190 |
+
total: Optional[float] = None
|
| 191 |
+
connect: Optional[float] = None
|
| 192 |
+
sock_read: Optional[float] = None
|
| 193 |
+
sock_connect: Optional[float] = None
|
| 194 |
+
ceil_threshold: float = 5
|
| 195 |
+
|
| 196 |
+
# pool_queue_timeout: Optional[float] = None
|
| 197 |
+
# dns_resolution_timeout: Optional[float] = None
|
| 198 |
+
# socket_connect_timeout: Optional[float] = None
|
| 199 |
+
# connection_acquiring_timeout: Optional[float] = None
|
| 200 |
+
# new_connection_timeout: Optional[float] = None
|
| 201 |
+
# http_header_timeout: Optional[float] = None
|
| 202 |
+
# response_body_timeout: Optional[float] = None
|
| 203 |
+
|
| 204 |
+
# to create a timeout specific for a single request, either
|
| 205 |
+
# - create a completely new one to overwrite the default
|
| 206 |
+
# - or use http://www.attrs.org/en/stable/api.html#attr.evolve
|
| 207 |
+
# to overwrite the defaults
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
# 5 Minute default read timeout
|
| 211 |
+
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
|
| 212 |
+
|
| 213 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2
|
| 214 |
+
IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"})
|
| 215 |
+
|
| 216 |
+
_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse)
|
| 217 |
+
_CharsetResolver = Callable[[ClientResponse, bytes], str]
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class ClientSession:
|
| 221 |
+
"""First-class interface for making HTTP requests."""
|
| 222 |
+
|
| 223 |
+
ATTRS = frozenset(
|
| 224 |
+
[
|
| 225 |
+
"_base_url",
|
| 226 |
+
"_source_traceback",
|
| 227 |
+
"_connector",
|
| 228 |
+
"requote_redirect_url",
|
| 229 |
+
"_loop",
|
| 230 |
+
"_cookie_jar",
|
| 231 |
+
"_connector_owner",
|
| 232 |
+
"_default_auth",
|
| 233 |
+
"_version",
|
| 234 |
+
"_json_serialize",
|
| 235 |
+
"_requote_redirect_url",
|
| 236 |
+
"_timeout",
|
| 237 |
+
"_raise_for_status",
|
| 238 |
+
"_auto_decompress",
|
| 239 |
+
"_trust_env",
|
| 240 |
+
"_default_headers",
|
| 241 |
+
"_skip_auto_headers",
|
| 242 |
+
"_request_class",
|
| 243 |
+
"_response_class",
|
| 244 |
+
"_ws_response_class",
|
| 245 |
+
"_trace_configs",
|
| 246 |
+
"_read_bufsize",
|
| 247 |
+
"_max_line_size",
|
| 248 |
+
"_max_field_size",
|
| 249 |
+
"_resolve_charset",
|
| 250 |
+
]
|
| 251 |
+
)
|
| 252 |
+
|
| 253 |
+
_source_traceback: Optional[traceback.StackSummary] = None
|
| 254 |
+
_connector: Optional[BaseConnector] = None
|
| 255 |
+
|
| 256 |
+
def __init__(
|
| 257 |
+
self,
|
| 258 |
+
base_url: Optional[StrOrURL] = None,
|
| 259 |
+
*,
|
| 260 |
+
connector: Optional[BaseConnector] = None,
|
| 261 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 262 |
+
cookies: Optional[LooseCookies] = None,
|
| 263 |
+
headers: Optional[LooseHeaders] = None,
|
| 264 |
+
skip_auto_headers: Optional[Iterable[str]] = None,
|
| 265 |
+
auth: Optional[BasicAuth] = None,
|
| 266 |
+
json_serialize: JSONEncoder = json.dumps,
|
| 267 |
+
request_class: Type[ClientRequest] = ClientRequest,
|
| 268 |
+
response_class: Type[ClientResponse] = ClientResponse,
|
| 269 |
+
ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
|
| 270 |
+
version: HttpVersion = http.HttpVersion11,
|
| 271 |
+
cookie_jar: Optional[AbstractCookieJar] = None,
|
| 272 |
+
connector_owner: bool = True,
|
| 273 |
+
raise_for_status: Union[
|
| 274 |
+
bool, Callable[[ClientResponse], Awaitable[None]]
|
| 275 |
+
] = False,
|
| 276 |
+
read_timeout: Union[float, _SENTINEL] = sentinel,
|
| 277 |
+
conn_timeout: Optional[float] = None,
|
| 278 |
+
timeout: Union[object, ClientTimeout] = sentinel,
|
| 279 |
+
auto_decompress: bool = True,
|
| 280 |
+
trust_env: bool = False,
|
| 281 |
+
requote_redirect_url: bool = True,
|
| 282 |
+
trace_configs: Optional[List[TraceConfig]] = None,
|
| 283 |
+
read_bufsize: int = 2**16,
|
| 284 |
+
max_line_size: int = 8190,
|
| 285 |
+
max_field_size: int = 8190,
|
| 286 |
+
fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
|
| 287 |
+
) -> None:
|
| 288 |
+
# We initialise _connector to None immediately, as it's referenced in __del__()
|
| 289 |
+
# and could cause issues if an exception occurs during initialisation.
|
| 290 |
+
self._connector: Optional[BaseConnector] = None
|
| 291 |
+
|
| 292 |
+
if loop is None:
|
| 293 |
+
if connector is not None:
|
| 294 |
+
loop = connector._loop
|
| 295 |
+
|
| 296 |
+
loop = loop or asyncio.get_running_loop()
|
| 297 |
+
|
| 298 |
+
if base_url is None or isinstance(base_url, URL):
|
| 299 |
+
self._base_url: Optional[URL] = base_url
|
| 300 |
+
else:
|
| 301 |
+
self._base_url = URL(base_url)
|
| 302 |
+
assert (
|
| 303 |
+
self._base_url.origin() == self._base_url
|
| 304 |
+
), "Only absolute URLs without path part are supported"
|
| 305 |
+
|
| 306 |
+
if timeout is sentinel or timeout is None:
|
| 307 |
+
self._timeout = DEFAULT_TIMEOUT
|
| 308 |
+
if read_timeout is not sentinel:
|
| 309 |
+
warnings.warn(
|
| 310 |
+
"read_timeout is deprecated, " "use timeout argument instead",
|
| 311 |
+
DeprecationWarning,
|
| 312 |
+
stacklevel=2,
|
| 313 |
+
)
|
| 314 |
+
self._timeout = attr.evolve(self._timeout, total=read_timeout)
|
| 315 |
+
if conn_timeout is not None:
|
| 316 |
+
self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
|
| 317 |
+
warnings.warn(
|
| 318 |
+
"conn_timeout is deprecated, " "use timeout argument instead",
|
| 319 |
+
DeprecationWarning,
|
| 320 |
+
stacklevel=2,
|
| 321 |
+
)
|
| 322 |
+
else:
|
| 323 |
+
if not isinstance(timeout, ClientTimeout):
|
| 324 |
+
raise ValueError(
|
| 325 |
+
f"timeout parameter cannot be of {type(timeout)} type, "
|
| 326 |
+
"please use 'timeout=ClientTimeout(...)'",
|
| 327 |
+
)
|
| 328 |
+
self._timeout = timeout
|
| 329 |
+
if read_timeout is not sentinel:
|
| 330 |
+
raise ValueError(
|
| 331 |
+
"read_timeout and timeout parameters "
|
| 332 |
+
"conflict, please setup "
|
| 333 |
+
"timeout.read"
|
| 334 |
+
)
|
| 335 |
+
if conn_timeout is not None:
|
| 336 |
+
raise ValueError(
|
| 337 |
+
"conn_timeout and timeout parameters "
|
| 338 |
+
"conflict, please setup "
|
| 339 |
+
"timeout.connect"
|
| 340 |
+
)
|
| 341 |
+
|
| 342 |
+
if connector is None:
|
| 343 |
+
connector = TCPConnector(loop=loop)
|
| 344 |
+
|
| 345 |
+
if connector._loop is not loop:
|
| 346 |
+
raise RuntimeError("Session and connector has to use same event loop")
|
| 347 |
+
|
| 348 |
+
self._loop = loop
|
| 349 |
+
|
| 350 |
+
if loop.get_debug():
|
| 351 |
+
self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
| 352 |
+
|
| 353 |
+
if cookie_jar is None:
|
| 354 |
+
cookie_jar = CookieJar(loop=loop)
|
| 355 |
+
self._cookie_jar = cookie_jar
|
| 356 |
+
|
| 357 |
+
if cookies is not None:
|
| 358 |
+
self._cookie_jar.update_cookies(cookies)
|
| 359 |
+
|
| 360 |
+
self._connector = connector
|
| 361 |
+
self._connector_owner = connector_owner
|
| 362 |
+
self._default_auth = auth
|
| 363 |
+
self._version = version
|
| 364 |
+
self._json_serialize = json_serialize
|
| 365 |
+
self._raise_for_status = raise_for_status
|
| 366 |
+
self._auto_decompress = auto_decompress
|
| 367 |
+
self._trust_env = trust_env
|
| 368 |
+
self._requote_redirect_url = requote_redirect_url
|
| 369 |
+
self._read_bufsize = read_bufsize
|
| 370 |
+
self._max_line_size = max_line_size
|
| 371 |
+
self._max_field_size = max_field_size
|
| 372 |
+
|
| 373 |
+
# Convert to list of tuples
|
| 374 |
+
if headers:
|
| 375 |
+
real_headers: CIMultiDict[str] = CIMultiDict(headers)
|
| 376 |
+
else:
|
| 377 |
+
real_headers = CIMultiDict()
|
| 378 |
+
self._default_headers: CIMultiDict[str] = real_headers
|
| 379 |
+
if skip_auto_headers is not None:
|
| 380 |
+
self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
|
| 381 |
+
else:
|
| 382 |
+
self._skip_auto_headers = frozenset()
|
| 383 |
+
|
| 384 |
+
self._request_class = request_class
|
| 385 |
+
self._response_class = response_class
|
| 386 |
+
self._ws_response_class = ws_response_class
|
| 387 |
+
|
| 388 |
+
self._trace_configs = trace_configs or []
|
| 389 |
+
for trace_config in self._trace_configs:
|
| 390 |
+
trace_config.freeze()
|
| 391 |
+
|
| 392 |
+
self._resolve_charset = fallback_charset_resolver
|
| 393 |
+
|
| 394 |
+
def __init_subclass__(cls: Type["ClientSession"]) -> None:
    """Emit a :class:`DeprecationWarning` whenever ClientSession is subclassed.

    Subclassing the session is discouraged by the library; this hook fires
    at class-creation time for every subclass.
    """
    warnings.warn(
        "Inheritance class {} from ClientSession "
        "is discouraged".format(cls.__name__),
        DeprecationWarning,
        stacklevel=2,
    )
|
| 401 |
+
|
| 402 |
+
if DEBUG:

    def __setattr__(self, name: str, val: Any) -> None:
        # Debug-mode guard: warn when callers set attributes that are not in
        # the session's declared attribute set (``self.ATTRS``).  The
        # assignment itself is still performed via the normal machinery.
        if name not in self.ATTRS:
            warnings.warn(
                "Setting custom ClientSession.{} attribute "
                "is discouraged".format(name),
                DeprecationWarning,
                stacklevel=2,
            )
        super().__setattr__(name, val)
|
| 413 |
+
|
| 414 |
+
def __del__(self, _warnings: Any = warnings) -> None:
    """Finalizer: report a session that was garbage-collected while open.

    NOTE(review): ``warnings`` is captured as a default argument, presumably
    so the module stays reachable if finalization runs during interpreter
    shutdown — confirm before relying on that.
    """
    if not self.closed:
        kwargs = {"source": self}
        _warnings.warn(
            f"Unclosed client session {self!r}", ResourceWarning, **kwargs
        )
        # Also hand the problem to the event loop's exception handler,
        # including the creation traceback when debug mode captured one.
        context = {"client_session": self, "message": "Unclosed client session"}
        if self._source_traceback is not None:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)
|
| 424 |
+
|
| 425 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
    # Typed stub: on Python 3.11+ type checkers see the precise keyword
    # arguments via ``Unpack[_RequestOptions]``.  This body never executes;
    # the runtime implementation is in the ``else`` branch below.

    def request(
        self,
        method: str,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

else:

    def request(
        self, method: str, url: StrOrURL, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP request."""
        # Delegates to the internal ``_request`` coroutine, wrapped in a
        # context-manager helper so the call supports ``async with``.
        return _RequestContextManager(self._request(method, url, **kwargs))
|
| 441 |
+
|
| 442 |
+
def _build_url(self, str_or_url: StrOrURL) -> URL:
    """Resolve *str_or_url* against the session's base URL, if one is set."""
    url = URL(str_or_url)
    if self._base_url is None:
        return url
    else:
        # With a base URL configured, only relative URLs whose path starts
        # with "/" are accepted (assert is stripped under -O).
        assert not url.is_absolute() and url.path.startswith("/")
        return self._base_url.join(url)
|
| 449 |
+
|
| 450 |
+
async def _request(
    self,
    method: str,
    str_or_url: StrOrURL,
    *,
    params: Query = None,
    data: Any = None,
    json: Any = None,
    cookies: Optional[LooseCookies] = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    auth: Optional[BasicAuth] = None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: Union[str, bool, None] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    raise_for_status: Union[
        None, bool, Callable[[ClientResponse], Awaitable[None]]
    ] = None,
    read_until_eof: bool = True,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
    verify_ssl: Optional[bool] = None,
    fingerprint: Optional[bytes] = None,
    ssl_context: Optional[SSLContext] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    server_hostname: Optional[str] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    trace_request_ctx: Optional[Mapping[str, str]] = None,
    read_bufsize: Optional[int] = None,
    auto_decompress: Optional[bool] = None,
    max_line_size: Optional[int] = None,
    max_field_size: Optional[int] = None,
) -> ClientResponse:
    """Core request machinery behind ``request()`` and the verb helpers.

    Validates arguments, merges per-call settings with session defaults,
    acquires a connection from the connector, sends the request, and loops
    to follow redirects (301/302/303/307/308) up to ``max_redirects``.
    Raises ``RuntimeError`` if the session is closed, ``ValueError`` on
    conflicting arguments, and the various ``Client*Error`` subclasses on
    URL/connection/redirect failures.
    """

    # NOTE: timeout clamps existing connect and read timeouts. We cannot
    # set the default to None because we need to detect if the user wants
    # to use the existing timeouts by setting timeout to None.

    if self.closed:
        raise RuntimeError("Session is closed")

    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    # ``data`` and ``json`` are mutually exclusive; ``json`` is serialized
    # into a payload using the session's configured serializer.
    if data is not None and json is not None:
        raise ValueError(
            "data and json parameters can not be used at the same time"
        )
    elif json is not None:
        data = payload.JsonPayload(json, dumps=self._json_serialize)

    if not isinstance(chunked, bool) and chunked is not None:
        warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

    redirects = 0
    history = []
    version = self._version
    params = params or {}

    # Merge with default headers and transform to CIMultiDict
    headers = self._prepare_headers(headers)
    proxy_headers = self._prepare_headers(proxy_headers)

    try:
        url = self._build_url(str_or_url)
    except ValueError as e:
        raise InvalidUrlClientError(str_or_url) from e

    assert self._connector is not None
    if url.scheme not in self._connector.allowed_protocol_schema_set:
        raise NonHttpUrlClientError(url)

    # Per-call skip list extends (does not replace) the session-level one.
    skip_headers = set(self._skip_auto_headers)
    if skip_auto_headers is not None:
        for i in skip_auto_headers:
            skip_headers.add(istr(i))

    if proxy is not None:
        try:
            proxy = URL(proxy)
        except ValueError as e:
            raise InvalidURL(proxy) from e

    # ``sentinel`` means "use the session default"; a bare number is wrapped
    # as a total timeout for backward compatibility.
    if timeout is sentinel:
        real_timeout: ClientTimeout = self._timeout
    else:
        if not isinstance(timeout, ClientTimeout):
            real_timeout = ClientTimeout(total=timeout)
        else:
            real_timeout = timeout
    # timeout is cumulative for all request operations
    # (request, redirects, responses, data consuming)
    tm = TimeoutHandle(
        self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
    )
    handle = tm.start()

    # Fall back to session-level defaults for any unset tuning knobs.
    if read_bufsize is None:
        read_bufsize = self._read_bufsize

    if auto_decompress is None:
        auto_decompress = self._auto_decompress

    if max_line_size is None:
        max_line_size = self._max_line_size

    if max_field_size is None:
        max_field_size = self._max_field_size

    traces = [
        Trace(
            self,
            trace_config,
            trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
        )
        for trace_config in self._trace_configs
    ]

    for trace in traces:
        await trace.send_request_start(method, url.update_query(params), headers)

    timer = tm.timer()
    try:
        with timer:
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests
            retry_persistent_connection = method in IDEMPOTENT_METHODS
            while True:
                url, auth_from_url = strip_auth_from_url(url)
                if not url.raw_host:
                    # NOTE: Bail early, otherwise, causes `InvalidURL` through
                    # NOTE: `self._request_class()` below.
                    err_exc_cls = (
                        InvalidUrlRedirectClientError
                        if redirects
                        else InvalidUrlClientError
                    )
                    raise err_exc_cls(url)
                if auth and auth_from_url:
                    raise ValueError(
                        "Cannot combine AUTH argument with "
                        "credentials encoded in URL"
                    )

                # Auth precedence: explicit argument > URL credentials >
                # session default.
                if auth is None:
                    auth = auth_from_url
                if auth is None:
                    auth = self._default_auth
                # It would be confusing if we support explicit
                # Authorization header with auth argument
                if (
                    headers is not None
                    and auth is not None
                    and hdrs.AUTHORIZATION in headers
                ):
                    raise ValueError(
                        "Cannot combine AUTHORIZATION header "
                        "with AUTH argument or credentials "
                        "encoded in URL"
                    )

                all_cookies = self._cookie_jar.filter_cookies(url)

                # Per-call cookies are filtered through a throwaway jar so
                # they obey the same domain/path matching as stored ones.
                if cookies is not None:
                    tmp_cookie_jar = CookieJar()
                    tmp_cookie_jar.update_cookies(cookies)
                    req_cookies = tmp_cookie_jar.filter_cookies(url)
                    if req_cookies:
                        all_cookies.load(req_cookies)

                if proxy is not None:
                    proxy = URL(proxy)
                elif self._trust_env:
                    with suppress(LookupError):
                        proxy, proxy_auth = get_env_proxy_for_url(url)

                req = self._request_class(
                    method,
                    url,
                    params=params,
                    headers=headers,
                    skip_auto_headers=skip_headers if skip_headers else None,
                    data=data,
                    cookies=all_cookies,
                    auth=auth,
                    version=version,
                    compress=compress,
                    chunked=chunked,
                    expect100=expect100,
                    loop=self._loop,
                    response_class=self._response_class,
                    proxy=proxy,
                    proxy_auth=proxy_auth,
                    timer=timer,
                    session=self,
                    ssl=ssl if ssl is not None else True,
                    server_hostname=server_hostname,
                    proxy_headers=proxy_headers,
                    traces=traces,
                    trust_env=self.trust_env,
                )

                # connection timeout
                try:
                    async with ceil_timeout(
                        real_timeout.connect,
                        ceil_threshold=real_timeout.ceil_threshold,
                    ):
                        conn = await self._connector.connect(
                            req, traces=traces, timeout=real_timeout
                        )
                except asyncio.TimeoutError as exc:
                    raise ConnectionTimeoutError(
                        f"Connection timeout to host {url}"
                    ) from exc

                assert conn.transport is not None

                assert conn.protocol is not None
                conn.protocol.set_response_params(
                    timer=timer,
                    skip_payload=method_must_be_empty_body(method),
                    read_until_eof=read_until_eof,
                    auto_decompress=auto_decompress,
                    read_timeout=real_timeout.sock_read,
                    read_bufsize=read_bufsize,
                    timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
                    max_line_size=max_line_size,
                    max_field_size=max_field_size,
                )

                try:
                    try:
                        resp = await req.send(conn)
                        try:
                            await resp.start(conn)
                        except BaseException:
                            resp.close()
                            raise
                    except BaseException:
                        conn.close()
                        raise
                except (ClientOSError, ServerDisconnectedError):
                    # One silent retry for idempotent methods when a pooled
                    # (persistent) connection died under us.
                    if retry_persistent_connection:
                        retry_persistent_connection = False
                        continue
                    raise
                except ClientError:
                    raise
                except OSError as exc:
                    if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                        raise
                    raise ClientOSError(*exc.args) from exc

                self._cookie_jar.update_cookies(resp.cookies, resp.url)

                # redirects
                if resp.status in (301, 302, 303, 307, 308) and allow_redirects:

                    for trace in traces:
                        await trace.send_request_redirect(
                            method, url.update_query(params), headers, resp
                        )

                    redirects += 1
                    history.append(resp)
                    if max_redirects and redirects >= max_redirects:
                        resp.close()
                        raise TooManyRedirects(
                            history[0].request_info, tuple(history)
                        )

                    # For 301 and 302, mimic IE, now changed in RFC
                    # https://github.com/kennethreitz/requests/pull/269
                    if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
                        resp.status in (301, 302) and resp.method == hdrs.METH_POST
                    ):
                        method = hdrs.METH_GET
                        data = None
                        if headers.get(hdrs.CONTENT_LENGTH):
                            headers.pop(hdrs.CONTENT_LENGTH)

                    r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
                        hdrs.URI
                    )
                    if r_url is None:
                        # see github.com/aio-libs/aiohttp/issues/2022
                        break
                    else:
                        # reading from correct redirection
                        # response is forbidden
                        resp.release()

                    try:
                        parsed_redirect_url = URL(
                            r_url, encoded=not self._requote_redirect_url
                        )
                    except ValueError as e:
                        raise InvalidUrlRedirectClientError(
                            r_url,
                            "Server attempted redirecting to a location that does not look like a URL",
                        ) from e

                    scheme = parsed_redirect_url.scheme
                    if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
                        resp.close()
                        raise NonHttpUrlRedirectClientError(r_url)
                    elif not scheme:
                        # Relative redirect target: resolve against the
                        # current URL.
                        parsed_redirect_url = url.join(parsed_redirect_url)

                    try:
                        redirect_origin = parsed_redirect_url.origin()
                    except ValueError as origin_val_err:
                        raise InvalidUrlRedirectClientError(
                            parsed_redirect_url,
                            "Invalid redirect URL origin",
                        ) from origin_val_err

                    # Drop credentials when the redirect crosses origins.
                    if url.origin() != redirect_origin:
                        auth = None
                        headers.pop(hdrs.AUTHORIZATION, None)

                    url = parsed_redirect_url
                    params = {}
                    resp.release()
                    continue

                break

        # check response status
        if raise_for_status is None:
            raise_for_status = self._raise_for_status

        if raise_for_status is None:
            pass
        elif callable(raise_for_status):
            await raise_for_status(resp)
        elif raise_for_status:
            resp.raise_for_status()

        # register connection
        if handle is not None:
            if resp.connection is not None:
                resp.connection.add_callback(handle.cancel)
            else:
                handle.cancel()

        resp._history = tuple(history)

        for trace in traces:
            await trace.send_request_end(
                method, url.update_query(params), headers, resp
            )
        return resp

    except BaseException as e:
        # cleanup timer
        tm.close()
        if handle:
            handle.cancel()
            handle = None

        for trace in traces:
            await trace.send_request_exception(
                method, url.update_query(params), headers, e
            )
        raise
|
| 818 |
+
|
| 819 |
+
def ws_connect(
    self,
    url: StrOrURL,
    *,
    method: str = hdrs.METH_GET,
    protocols: Iterable[str] = (),
    timeout: float = 10.0,
    receive_timeout: Optional[float] = None,
    autoclose: bool = True,
    autoping: bool = True,
    heartbeat: Optional[float] = None,
    auth: Optional[BasicAuth] = None,
    origin: Optional[str] = None,
    params: Query = None,
    headers: Optional[LooseHeaders] = None,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    verify_ssl: Optional[bool] = None,
    fingerprint: Optional[bytes] = None,
    ssl_context: Optional[SSLContext] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    compress: int = 0,
    max_msg_size: int = 4 * 1024 * 1024,
) -> "_WSRequestContextManager":
    """Initiate websocket connection."""
    # Thin wrapper: all work happens in ``_ws_connect``; the wrapper object
    # allows the call to be used with ``async with``.
    return _WSRequestContextManager(
        self._ws_connect(
            url,
            method=method,
            protocols=protocols,
            timeout=timeout,
            receive_timeout=receive_timeout,
            autoclose=autoclose,
            autoping=autoping,
            heartbeat=heartbeat,
            auth=auth,
            origin=origin,
            params=params,
            headers=headers,
            proxy=proxy,
            proxy_auth=proxy_auth,
            ssl=ssl,
            verify_ssl=verify_ssl,
            fingerprint=fingerprint,
            ssl_context=ssl_context,
            proxy_headers=proxy_headers,
            compress=compress,
            max_msg_size=max_msg_size,
        )
    )
|
| 870 |
+
|
| 871 |
+
async def _ws_connect(
    self,
    url: StrOrURL,
    *,
    method: str = hdrs.METH_GET,
    protocols: Iterable[str] = (),
    timeout: float = 10.0,
    receive_timeout: Optional[float] = None,
    autoclose: bool = True,
    autoping: bool = True,
    heartbeat: Optional[float] = None,
    auth: Optional[BasicAuth] = None,
    origin: Optional[str] = None,
    params: Query = None,
    headers: Optional[LooseHeaders] = None,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    ssl: Union[SSLContext, bool, Fingerprint] = True,
    verify_ssl: Optional[bool] = None,
    fingerprint: Optional[bytes] = None,
    ssl_context: Optional[SSLContext] = None,
    proxy_headers: Optional[LooseHeaders] = None,
    compress: int = 0,
    max_msg_size: int = 4 * 1024 * 1024,
) -> ClientWebSocketResponse:
    """Perform the websocket opening handshake over a regular HTTP request.

    Sends an Upgrade request, validates the 101 response (upgrade headers,
    Sec-WebSocket-Accept challenge, negotiated subprotocol and compression
    extension), then repurposes the underlying connection with websocket
    reader/writer objects.  Raises ``WSServerHandshakeError`` on any
    handshake violation.
    """

    if headers is None:
        real_headers: CIMultiDict[str] = CIMultiDict()
    else:
        real_headers = CIMultiDict(headers)

    # Caller-supplied headers win over these handshake defaults.
    default_headers = {
        hdrs.UPGRADE: "websocket",
        hdrs.CONNECTION: "Upgrade",
        hdrs.SEC_WEBSOCKET_VERSION: "13",
    }

    for key, value in default_headers.items():
        real_headers.setdefault(key, value)

    # 16-byte random nonce sent as Sec-WebSocket-Key; echoed back hashed
    # by the server (checked below).
    sec_key = base64.b64encode(os.urandom(16))
    real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

    if protocols:
        real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
    if origin is not None:
        real_headers[hdrs.ORIGIN] = origin
    if compress:
        extstr = ws_ext_gen(compress=compress)
        real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

    # For the sake of backward compatibility, if user passes in None, convert it to True
    if ssl is None:
        warnings.warn(
            "ssl=None is deprecated, please use ssl=True",
            DeprecationWarning,
            stacklevel=2,
        )
        ssl = True
    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    # send request
    resp = await self.request(
        method,
        url,
        params=params,
        headers=real_headers,
        read_until_eof=False,
        auth=auth,
        proxy=proxy,
        proxy_auth=proxy_auth,
        ssl=ssl,
        proxy_headers=proxy_headers,
    )

    try:
        # check handshake
        if resp.status != 101:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid response status",
                status=resp.status,
                headers=resp.headers,
            )

        if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid upgrade header",
                status=resp.status,
                headers=resp.headers,
            )

        if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid connection header",
                status=resp.status,
                headers=resp.headers,
            )

        # key calculation
        # Expected accept value: base64(sha1(sec_key + WS_KEY)).
        r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
        match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
        if r_key != match:
            raise WSServerHandshakeError(
                resp.request_info,
                resp.history,
                message="Invalid challenge response",
                status=resp.status,
                headers=resp.headers,
            )

        # websocket protocol
        # Pick the first server-announced subprotocol that we offered.
        protocol = None
        if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
            resp_protocols = [
                proto.strip()
                for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]

            for proto in resp_protocols:
                if proto in protocols:
                    protocol = proto
                    break

        # websocket compress
        notakeover = False
        if compress:
            compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
            if compress_hdrs:
                try:
                    compress, notakeover = ws_ext_parse(compress_hdrs)
                except WSHandshakeError as exc:
                    raise WSServerHandshakeError(
                        resp.request_info,
                        resp.history,
                        message=exc.args[0],
                        status=resp.status,
                        headers=resp.headers,
                    ) from exc
            else:
                # Server declined the extension: disable compression.
                compress = 0
                notakeover = False

        conn = resp.connection
        assert conn is not None
        conn_proto = conn.protocol
        assert conn_proto is not None

        # For WS connection the read_timeout must be either receive_timeout or greater
        # None == no timeout, i.e. infinite timeout, so None is the max timeout possible
        if receive_timeout is None:
            # Reset regardless
            conn_proto.read_timeout = receive_timeout
        elif conn_proto.read_timeout is not None:
            # If read_timeout was set check which wins
            conn_proto.read_timeout = max(receive_timeout, conn_proto.read_timeout)

        transport = conn.transport
        assert transport is not None
        reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
            conn_proto, 2**16, loop=self._loop
        )
        conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
        writer = WebSocketWriter(
            conn_proto,
            transport,
            use_mask=True,
            compress=compress,
            notakeover=notakeover,
        )
    except BaseException:
        # Any handshake failure must not leak the HTTP response/connection.
        resp.close()
        raise
    else:
        return self._ws_response_class(
            reader,
            writer,
            protocol,
            resp,
            timeout,
            autoclose,
            autoping,
            self._loop,
            receive_timeout=receive_timeout,
            heartbeat=heartbeat,
            compress=compress,
            client_notakeover=notakeover,
        )
|
| 1064 |
+
|
| 1065 |
+
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
    """Merge *headers* over the session's default headers.

    Returns a case-insensitive multidict.  The first occurrence of a key
    in *headers* replaces the session default; subsequent occurrences of
    the same key are kept as additional values.
    """
    merged = CIMultiDict(self._default_headers)
    if not headers:
        return merged
    if not isinstance(headers, (MultiDictProxy, MultiDict)):
        headers = CIMultiDict(headers)
    seen: Set[str] = set()
    for name, value in headers.items():
        if name in seen:
            merged.add(name, value)
        else:
            merged[name] = value
            seen.add(name)
    return merged
|
| 1080 |
+
|
| 1081 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
    # Typed stubs: on Python 3.11+ type checkers see precise keyword
    # signatures via ``Unpack[_RequestOptions]``.  These bodies never run;
    # the runtime implementations are in the ``else`` branch below.

    def get(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def options(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def head(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def post(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def put(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def patch(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

    def delete(
        self,
        url: StrOrURL,
        **kwargs: Unpack[_RequestOptions],
    ) -> "_RequestContextManager": ...

else:

    def get(
        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP GET request."""
        return _RequestContextManager(
            self._request(
                hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs
            )
        )

    def options(
        self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP OPTIONS request."""
        return _RequestContextManager(
            self._request(
                hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
            )
        )

    def head(
        self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP HEAD request."""
        # Note: unlike the other verbs, HEAD does not follow redirects by
        # default (``allow_redirects=False``).
        return _RequestContextManager(
            self._request(
                hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
            )
        )

    def post(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP POST request."""
        return _RequestContextManager(
            self._request(hdrs.METH_POST, url, data=data, **kwargs)
        )

    def put(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP PUT request."""
        return _RequestContextManager(
            self._request(hdrs.METH_PUT, url, data=data, **kwargs)
        )

    def patch(
        self, url: StrOrURL, *, data: Any = None, **kwargs: Any
    ) -> "_RequestContextManager":
        """Perform HTTP PATCH request."""
        return _RequestContextManager(
            self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
        )

    def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
        """Perform HTTP DELETE request."""
        return _RequestContextManager(
            self._request(hdrs.METH_DELETE, url, **kwargs)
        )
|
| 1186 |
+
|
| 1187 |
+
async def close(self) -> None:
    """Close underlying connector.

    Release all acquired resources.
    """
    if not self.closed:
        # Only close the connector when this session owns it; a shared
        # connector is merely detached so other sessions can keep using it.
        if self._connector is not None and self._connector_owner:
            await self._connector.close()
        self._connector = None
|
| 1196 |
+
|
| 1197 |
+
# Read-only accessors over the session's construction-time configuration.
@property
def closed(self) -> bool:
    """Is client session closed.

    A readonly property.
    """
    # Closed once the connector is detached (close()/detach()) or the
    # connector itself reports closed.
    return self._connector is None or self._connector.closed

@property
def connector(self) -> Optional[BaseConnector]:
    """Connector instance used for the session."""
    return self._connector

@property
def cookie_jar(self) -> AbstractCookieJar:
    """The session cookies."""
    return self._cookie_jar

@property
def version(self) -> Tuple[int, int]:
    """The session HTTP protocol version."""
    return self._version

@property
def requote_redirect_url(self) -> bool:
    """Do URL requoting on redirection handling."""
    return self._requote_redirect_url

@requote_redirect_url.setter
def requote_redirect_url(self, val: bool) -> None:
    """Do URL requoting on redirection handling."""
    # Mutation after construction is deprecated, but the assignment is
    # still honoured for backwards compatibility.
    warnings.warn(
        "session.requote_redirect_url modification " "is deprecated #2778",
        DeprecationWarning,
        stacklevel=2,
    )
    self._requote_redirect_url = val

@property
def loop(self) -> asyncio.AbstractEventLoop:
    """Session's loop."""
    # Deprecated accessor; still returns the loop captured at construction.
    warnings.warn(
        "client.loop property is deprecated", DeprecationWarning, stacklevel=2
    )
    return self._loop

@property
def timeout(self) -> ClientTimeout:
    """Timeout for the session."""
    return self._timeout

@property
def headers(self) -> "CIMultiDict[str]":
    """The default headers of the client session."""
    return self._default_headers
|
| 1252 |
+
|
| 1253 |
+
@property
|
| 1254 |
+
def skip_auto_headers(self) -> FrozenSet[istr]:
|
| 1255 |
+
"""Headers for which autogeneration should be skipped"""
|
| 1256 |
+
return self._skip_auto_headers
|
| 1257 |
+
|
| 1258 |
+
@property
|
| 1259 |
+
def auth(self) -> Optional[BasicAuth]:
|
| 1260 |
+
"""An object that represents HTTP Basic Authorization"""
|
| 1261 |
+
return self._default_auth
|
| 1262 |
+
|
| 1263 |
+
@property
|
| 1264 |
+
def json_serialize(self) -> JSONEncoder:
|
| 1265 |
+
"""Json serializer callable"""
|
| 1266 |
+
return self._json_serialize
|
| 1267 |
+
|
| 1268 |
+
@property
|
| 1269 |
+
def connector_owner(self) -> bool:
|
| 1270 |
+
"""Should connector be closed on session closing"""
|
| 1271 |
+
return self._connector_owner
|
| 1272 |
+
|
| 1273 |
+
@property
|
| 1274 |
+
def raise_for_status(
|
| 1275 |
+
self,
|
| 1276 |
+
) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
|
| 1277 |
+
"""Should `ClientResponse.raise_for_status()` be called for each response."""
|
| 1278 |
+
return self._raise_for_status
|
| 1279 |
+
|
| 1280 |
+
@property
def auto_decompress(self) -> bool:
    """Whether response bodies are transparently decompressed."""
    return self._auto_decompress
|
| 1284 |
+
|
| 1285 |
+
@property
def trust_env(self) -> bool:
    """Whether proxy settings from the environment or netrc are honoured.

    Sources are the HTTP_PROXY / HTTPS_PROXY environment variables and
    the ~/.netrc file when present.
    """
    return self._trust_env
|
| 1294 |
+
|
| 1295 |
+
@property
def trace_configs(self) -> List[TraceConfig]:
    """The TraceConfig instances driving client request tracing."""
    return self._trace_configs
|
| 1299 |
+
|
| 1300 |
+
def detach(self) -> None:
    """Release the connector without closing it.

    The session itself still ends up in the closed state.
    """
    self._connector = None
|
| 1306 |
+
|
| 1307 |
+
def __enter__(self) -> None:
    """Reject synchronous ``with``; the session is async-context only."""
    raise TypeError("Use async with instead")
|
| 1309 |
+
|
| 1310 |
+
def __exit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc_val: Optional[BaseException],
    exc_tb: Optional[TracebackType],
) -> None:
    """Paired with __enter__ for protocol completeness; unreachable since
    __enter__ always raises."""
    pass  # pragma: no cover
|
| 1318 |
+
|
| 1319 |
+
async def __aenter__(self) -> "ClientSession":
    """Enter the async context; yields the session itself."""
    return self
|
| 1321 |
+
|
| 1322 |
+
async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc_val: Optional[BaseException],
    exc_tb: Optional[TracebackType],
) -> None:
    """Close the session when the async context exits, error or not."""
    await self.close()
|
| 1329 |
+
|
| 1330 |
+
|
| 1331 |
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
    """Object returned by request-producing APIs that is both awaitable
    and usable as an async context manager.

    It wraps the request coroutine: ``await ctx`` simply runs the
    coroutine, while ``async with ctx as resp`` additionally enters the
    resulting response object and exits it on block exit.
    """

    __slots__ = ("_coro", "_resp")

    def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
        self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro

    # The three methods below make this object itself satisfy the
    # Coroutine protocol by delegating to the wrapped coroutine.
    def send(self, arg: None) -> "asyncio.Future[Any]":
        return self._coro.send(arg)

    def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
        return self._coro.throw(*args, **kwargs)

    def close(self) -> None:
        return self._coro.close()

    def __await__(self) -> Generator[Any, None, _RetType]:
        ret = self._coro.__await__()
        return ret

    def __iter__(self) -> Generator[Any, None, _RetType]:
        return self.__await__()

    async def __aenter__(self) -> _RetType:
        # Run the request coroutine and enter the response's own
        # async context so __aexit__ can release it.
        self._resp: _RetType = await self._coro
        return await self._resp.__aenter__()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self._resp.__aexit__(exc_type, exc, tb)
|
| 1365 |
+
|
| 1366 |
+
|
| 1367 |
+
# Concrete awaitable/async-context wrapper types for plain HTTP requests
# and for WebSocket handshakes respectively.
_RequestContextManager = _BaseRequestContextManager[ClientResponse]
_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse]
|
| 1369 |
+
|
| 1370 |
+
|
| 1371 |
+
class _SessionRequestContextManager:
    """Async context manager owning both a request coroutine and the
    throwaway session created for it by the module-level ``request()``.

    The session (and hence its connector) is closed when the context
    exits, or immediately if obtaining the response fails.
    """

    __slots__ = ("_coro", "_resp", "_session")

    def __init__(
        self,
        coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
        session: ClientSession,
    ) -> None:
        self._coro = coro
        self._resp: Optional[ClientResponse] = None
        self._session = session

    async def __aenter__(self) -> ClientResponse:
        try:
            self._resp = await self._coro
        except BaseException:
            # No response was produced; the one-shot session must not
            # leak its connector.
            await self._session.close()
            raise
        return self._resp

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        assert self._resp is not None
        self._resp.close()
        await self._session.close()
|
| 1402 |
+
|
| 1403 |
+
|
| 1404 |
+
def request(
    method: str,
    url: StrOrURL,
    *,
    params: Query = None,
    data: Any = None,
    json: Any = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    auth: Optional[BasicAuth] = None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    raise_for_status: Optional[bool] = None,
    read_until_eof: bool = True,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timeout: Union[ClientTimeout, object] = sentinel,
    cookies: Optional[LooseCookies] = None,
    version: HttpVersion = http.HttpVersion11,
    connector: Optional[BaseConnector] = None,
    read_bufsize: Optional[int] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
    max_line_size: int = 8190,
    max_field_size: int = 8190,
) -> _SessionRequestContextManager:
    """Constructs and sends a request.

    Returns response object.
    method - HTTP method
    url - request url
    params - (optional) Dictionary or bytes to be sent in the query
      string of the new request
    data - (optional) Dictionary, bytes, or file-like object to
      send in the body of the request
    json - (optional) Any json compatible python object
    headers - (optional) Dictionary of HTTP Headers to send with
      the request
    cookies - (optional) Dict object to send with the request
    auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
    auth - aiohttp.helpers.BasicAuth
    allow_redirects - (optional) If set to False, do not follow
      redirects
    version - Request HTTP version.
    compress - Set to True if request has to be compressed
      with deflate encoding.
    chunked - Set to chunk size for chunked transfer encoding.
    expect100 - Expect 100-continue response from server.
    connector - BaseConnector sub-class instance to support
      connection pooling.
    read_until_eof - Read response until eof if response
      does not have Content-Length header.
    loop - Optional event loop.
    timeout - Optional ClientTimeout settings structure, 5min
      total timeout by default.
    Usage::
      >>> import aiohttp
      >>> resp = await aiohttp.request('GET', 'http://python.org/')
      >>> resp
      <ClientResponse(python.org/) [200]>
      >>> data = await resp.read()
    """
    # If the caller did not supply a connector, build a one-shot one and
    # mark the session as its owner so it is closed with the session.
    connector_owner = False
    if connector is None:
        connector_owner = True
        connector = TCPConnector(loop=loop, force_close=True)

    # Throwaway session: closed by _SessionRequestContextManager on exit.
    session = ClientSession(
        loop=loop,
        cookies=cookies,
        version=version,
        timeout=timeout,
        connector=connector,
        connector_owner=connector_owner,
    )

    # Forward the remaining per-request options verbatim to the session's
    # internal _request() coroutine.
    return _SessionRequestContextManager(
        session._request(
            method,
            url,
            params=params,
            data=data,
            json=json,
            headers=headers,
            skip_auto_headers=skip_auto_headers,
            auth=auth,
            allow_redirects=allow_redirects,
            max_redirects=max_redirects,
            compress=compress,
            chunked=chunked,
            expect100=expect100,
            raise_for_status=raise_for_status,
            read_until_eof=read_until_eof,
            proxy=proxy,
            proxy_auth=proxy_auth,
            read_bufsize=read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
        ),
        session,
    )
|
parrot/lib/python3.10/site-packages/aiohttp/client_exceptions.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP related errors."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import TYPE_CHECKING, Optional, Tuple, Union
|
| 6 |
+
|
| 7 |
+
from multidict import MultiMapping
|
| 8 |
+
|
| 9 |
+
from .typedefs import StrOrURL
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import ssl
|
| 13 |
+
|
| 14 |
+
SSLContext = ssl.SSLContext
|
| 15 |
+
except ImportError: # pragma: no cover
|
| 16 |
+
ssl = SSLContext = None # type: ignore[assignment]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
if TYPE_CHECKING:
|
| 20 |
+
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
| 21 |
+
from .http_parser import RawResponseMessage
|
| 22 |
+
else:
|
| 23 |
+
RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None
|
| 24 |
+
|
| 25 |
+
__all__ = (
|
| 26 |
+
"ClientError",
|
| 27 |
+
"ClientConnectionError",
|
| 28 |
+
"ClientConnectionResetError",
|
| 29 |
+
"ClientOSError",
|
| 30 |
+
"ClientConnectorError",
|
| 31 |
+
"ClientProxyConnectionError",
|
| 32 |
+
"ClientSSLError",
|
| 33 |
+
"ClientConnectorSSLError",
|
| 34 |
+
"ClientConnectorCertificateError",
|
| 35 |
+
"ConnectionTimeoutError",
|
| 36 |
+
"SocketTimeoutError",
|
| 37 |
+
"ServerConnectionError",
|
| 38 |
+
"ServerTimeoutError",
|
| 39 |
+
"ServerDisconnectedError",
|
| 40 |
+
"ServerFingerprintMismatch",
|
| 41 |
+
"ClientResponseError",
|
| 42 |
+
"ClientHttpProxyError",
|
| 43 |
+
"WSServerHandshakeError",
|
| 44 |
+
"ContentTypeError",
|
| 45 |
+
"ClientPayloadError",
|
| 46 |
+
"InvalidURL",
|
| 47 |
+
"InvalidUrlClientError",
|
| 48 |
+
"RedirectClientError",
|
| 49 |
+
"NonHttpUrlClientError",
|
| 50 |
+
"InvalidUrlRedirectClientError",
|
| 51 |
+
"NonHttpUrlRedirectClientError",
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class ClientError(Exception):
    """Base class for client connection errors.

    Root of the aiohttp client exception hierarchy; catching this covers
    every failure raised by the classes defined in this module.
    """
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class ClientResponseError(ClientError):
    """Base class for exceptions that occur after getting a response.

    request_info: An instance of RequestInfo.
    history: A sequence of responses, if redirects occurred.
    status: HTTP status code.
    message: Error message.
    headers: Response headers.
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[MultiMapping[str]] = None,
    ) -> None:
        self.request_info = request_info
        if code is not None:
            # ``code`` is the legacy spelling; passing both is an error,
            # passing only ``code`` earns a deprecation warning.
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        if status is not None:
            self.status = status
        else:
            # Fall back to the deprecated value, then to 0 (unknown).
            self.status = code if code is not None else 0
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return (
            f"{self.status}, message={self.message!r}, "
            f"url={str(self.request_info.real_url)!r}"
        )

    def __repr__(self) -> str:
        parts = [f"{self.request_info!r}", f"{self.history!r}"]
        if self.status != 0:
            parts.append(f"status={self.status!r}")
        if self.message != "":
            parts.append(f"message={self.message!r}")
        if self.headers is not None:
            parts.append(f"headers={self.headers!r}")
        return f"{type(self).__name__}({', '.join(parts)})"

    @property
    def code(self) -> int:
        """Deprecated alias of :attr:`status`."""
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
# Marker subclass: carries the full ClientResponseError payload.
class ContentTypeError(ClientResponseError):
    """ContentType found is not valid."""
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
# Marker subclass raised when a WebSocket upgrade response is rejected.
class WSServerHandshakeError(ClientResponseError):
    """websocket server handshake error."""
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
# NOTE(review): not listed in __all__ above — presumably re-exported by the
# client module instead; confirm this is intentional.
class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
# Root of the connection-level (socket/transport) branch of the hierarchy.
class ClientConnectionError(ClientError):
    """Base class for client socket errors."""
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
# Dual inheritance lets callers catch either the aiohttp hierarchy or the
# builtin ConnectionResetError.
class ClientConnectionResetError(ClientConnectionError, ConnectionResetError):
    """ConnectionResetError"""
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
# Dual inheritance: catchable both as ClientConnectionError and as OSError.
class ClientOSError(ClientConnectionError, OSError):
    """OSError error."""
|
| 169 |
+
|
| 170 |
+
|
| 171 |
+
class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    a connection can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        # Forward errno/strerror to OSError so self.strerror etc. work.
        super().__init__(os_error.errno, os_error.strerror)
        # Override args so pickling/repr carry the original objects.
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        # The underlying OS-level failure.
        return self._os_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, "default" if self.ssl is True else self.ssl, self.strerror
        )

    # OSError.__reduce__ does too much black magic; use the plain
    # BaseException pickling protocol instead.
    __reduce__ = BaseException.__reduce__
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        # Remember the socket path before delegating the rest of the
        # bookkeeping to ClientConnectorError.
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, "default" if self.ssl is True else self.ssl, self.strerror
        )
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
# Root of the server-side branch (disconnects, timeouts, fingerprints).
class ServerConnectionError(ClientConnectionError):
    """Server connection errors."""
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        # ``message`` may also be a partially parsed RawResponseMessage
        # (the uncompleted response) supplied by the protocol layer.
        if message is None:
            message = "Server disconnected"

        self.args = (message,)
        self.message = message
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
# Also inherits asyncio.TimeoutError so generic timeout handlers catch it.
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error."""
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
# Timeout while establishing the connection (as opposed to reading data).
class ConnectionTimeoutError(ServerTimeoutError):
    """Connection timeout error."""
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
# Timeout while reading from an already-established connection.
class SocketTimeoutError(ServerTimeoutError):
    """Socket timeout error."""
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        self.expected = expected
        self.got = got
        self.host = host
        self.port = port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__} expected={self.expected!r} "
            f"got={self.got!r} host={self.host!r} port={self.port!r}>"
        )
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
# Raised when the response body cannot be read/parsed to completion.
class ClientPayloadError(ClientError):
    """Response payload error."""
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain the
    host part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        self._url = url
        self._description = description

        # A falsy (empty) description is treated the same as no description.
        args = (url, description) if description else (url,)
        super().__init__(*args)

    @property
    def url(self) -> StrOrURL:
        return self._url

    @property
    def description(self) -> "str | None":
        return self._description

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self}>"

    def __str__(self) -> str:
        if self._description:
            return f"{self._url} - {self._description}"
        return str(self._url)
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
# Marker: the URL supplied directly by the caller was invalid.
class InvalidUrlClientError(InvalidURL):
    """Invalid URL client error."""
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
# Mixin marker for errors that occurred while following a redirect.
class RedirectClientError(ClientError):
    """Client redirect error."""
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
# Marker: the URL scheme was not an HTTP(S) one.
class NonHttpUrlClientError(ClientError):
    """Non http URL client error."""
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
# Combination marker: a redirect target turned out to be an invalid URL.
class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError):
    """Invalid URL redirect client error."""
|
| 338 |
+
|
| 339 |
+
|
| 340 |
+
# Combination marker: a redirect target used a non-HTTP scheme.
class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError):
    """Non http URL redirect client error."""
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
# Common base injected into the dynamically-built SSL exception classes below.
class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
# Build the base-class tuples for the SSL-related exception classes.  When
# the interpreter lacks the ssl module (see the try/import at the top of
# this module), substitute ValueError so the hierarchy still exists.
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
|
| 366 |
+
|
| 367 |
+
|
| 368 |
+
# Bases are chosen at import time depending on ssl availability (above).
class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""
|
| 370 |
+
|
| 371 |
+
|
| 372 |
+
class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error.

    Bases are chosen at import time depending on ssl availability.
    """

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        return self._certificate_error

    @property
    def host(self) -> str:
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            f"Cannot connect to host {self.host}:{self.port} ssl:{self.ssl} "
            f"[{self.certificate_error.__class__.__name__}: "
            f"{self.certificate_error.args}]"
        )
|
parrot/lib/python3.10/site-packages/aiohttp/client_proto.py
ADDED
|
@@ -0,0 +1,310 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from contextlib import suppress
|
| 3 |
+
from typing import Any, Optional, Tuple
|
| 4 |
+
|
| 5 |
+
from .base_protocol import BaseProtocol
|
| 6 |
+
from .client_exceptions import (
|
| 7 |
+
ClientOSError,
|
| 8 |
+
ClientPayloadError,
|
| 9 |
+
ServerDisconnectedError,
|
| 10 |
+
SocketTimeoutError,
|
| 11 |
+
)
|
| 12 |
+
from .helpers import (
|
| 13 |
+
_EXC_SENTINEL,
|
| 14 |
+
BaseTimerContext,
|
| 15 |
+
set_exception,
|
| 16 |
+
status_code_must_be_empty_body,
|
| 17 |
+
)
|
| 18 |
+
from .http import HttpResponseParser, RawResponseMessage
|
| 19 |
+
from .http_exceptions import HttpProcessingError
|
| 20 |
+
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
|
| 24 |
+
"""Helper class to adapt between Protocol and StreamReader."""
|
| 25 |
+
|
| 26 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
    """Initialize protocol and queue state for one connection."""
    BaseProtocol.__init__(self, loop=loop)
    DataQueue.__init__(self, loop)

    # Set when the connection must not be reused (see should_close).
    self._should_close = False

    # Body stream of the response currently being read, if any.
    self._payload: Optional[StreamReader] = None
    self._skip_payload = False
    # Replacement parser installed after a protocol upgrade (WebSocket).
    self._payload_parser = None

    self._timer = None

    # Bytes received after the current message (replayed on upgrade).
    self._tail = b""
    self._upgraded = False
    self._parser: Optional[HttpResponseParser] = None

    # Read-timeout bookkeeping.
    self._read_timeout: Optional[float] = None
    self._read_timeout_handle: Optional[asyncio.TimerHandle] = None

    self._timeout_ceil_threshold: Optional[float] = 5
|
| 46 |
+
|
| 47 |
+
@property
def upgraded(self) -> bool:
    """True once the connection protocol has been upgraded."""
    return self._upgraded
|
| 50 |
+
|
| 51 |
+
@property
def should_close(self) -> bool:
    """Whether the connection must be closed rather than reused."""
    if self._should_close or self._upgraded:
        return True
    # An unfinished response body means the connection is mid-stream.
    if self._payload is not None and not self._payload.is_eof():
        return True
    if self._exception is not None or self._payload_parser is not None:
        return True
    # Leftover buffered/tail bytes would corrupt the next response.
    return bool(self._buffer) or bool(self._tail)
|
| 62 |
+
|
| 63 |
+
def force_close(self) -> None:
    """Mark the connection so it will never be returned to the pool."""
    self._should_close = True
|
| 65 |
+
|
| 66 |
+
def close(self) -> None:
    """Close the underlying transport (if any) and drop per-response state."""
    transport = self.transport
    if transport is not None:
        transport.close()
        self.transport = None
        self._payload = None
    # Always cancel any pending read timeout.
    self._drop_timeout()
|
| 73 |
+
|
| 74 |
+
def is_connected(self) -> bool:
    """True while a transport exists and is not shutting down."""
    transport = self.transport
    return transport is not None and not transport.is_closing()
|
| 76 |
+
|
| 77 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
    """Handle transport loss: flush parsers, translate the error, reset state.

    ``exc`` is None for a clean EOF close; otherwise it is the transport
    error.  The method decides what exception (if any) to surface to
    readers of this DataQueue, then clears all per-connection state.
    """
    self._drop_timeout()

    original_connection_error = exc
    reraised_exc = original_connection_error

    connection_closed_cleanly = original_connection_error is None

    # Give an upgraded-protocol parser (e.g. WebSocket) a chance to finish.
    if self._payload_parser is not None:
        with suppress(Exception):  # FIXME: log this somehow?
            self._payload_parser.feed_eof()

    uncompleted = None
    if self._parser is not None:
        try:
            # feed_eof raises if the response message was truncated.
            uncompleted = self._parser.feed_eof()
        except Exception as underlying_exc:
            if self._payload is not None:
                client_payload_exc_msg = (
                    f"Response payload is not completed: {underlying_exc !r}"
                )
                if not connection_closed_cleanly:
                    client_payload_exc_msg = (
                        f"{client_payload_exc_msg !s}. "
                        f"{original_connection_error !r}"
                    )
                # Propagate the truncation to whoever is reading the body.
                set_exception(
                    self._payload,
                    ClientPayloadError(client_payload_exc_msg),
                    underlying_exc,
                )

    if not self.is_eof():
        # Translate the raw error into the client exception hierarchy.
        if isinstance(original_connection_error, OSError):
            reraised_exc = ClientOSError(*original_connection_error.args)
        if connection_closed_cleanly:
            reraised_exc = ServerDisconnectedError(uncompleted)
        # assigns self._should_close to True as side effect,
        # we do it anyway below
        underlying_non_eof_exc = (
            _EXC_SENTINEL
            if connection_closed_cleanly
            else original_connection_error
        )
        assert underlying_non_eof_exc is not None
        assert reraised_exc is not None
        self.set_exception(reraised_exc, underlying_non_eof_exc)

    # Reset all per-connection state; this connection is done.
    self._should_close = True
    self._parser = None
    self._payload = None
    self._payload_parser = None
    self._reading_paused = False

    super().connection_lost(reraised_exc)
|
| 132 |
+
|
| 133 |
+
def eof_received(self) -> None:
    """Handle transport EOF; only stops the read timeout here.

    Actual teardown happens in ``connection_lost``, which the event loop
    calls right after EOF.
    """
    # should call parser.feed_eof() most likely
    self._drop_timeout()
|
| 136 |
+
|
| 137 |
+
def pause_reading(self) -> None:
    """Pause the transport and suspend the read timeout while paused."""
    super().pause_reading()
    self._drop_timeout()
|
| 140 |
+
|
| 141 |
+
def resume_reading(self) -> None:
    """Resume the transport and re-arm the read timeout."""
    super().resume_reading()
    self._reschedule_timeout()
|
| 144 |
+
|
| 145 |
+
def set_exception(
    self,
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Mark the connection as failed and propagate *exc* to readers.

    :param exc: exception delivered to consumers of this protocol.
    :param exc_cause: optional underlying cause chained onto *exc*;
        the ``_EXC_SENTINEL`` default means "no explicit cause".
    """
    # A connection that has seen an error must never be reused.
    self._should_close = True
    self._drop_timeout()
    super().set_exception(exc, exc_cause)
|
| 153 |
+
|
| 154 |
+
def set_parser(self, parser: Any, payload: Any) -> None:
    """Install a custom (non-HTTP) payload parser, e.g. for websockets.

    Bytes buffered in ``self._tail`` while no parser was attached are
    immediately replayed through ``data_received``.
    """
    # TODO: actual types are:
    # parser: WebSocketReader
    # payload: FlowControlDataQueue
    # but they are not generic enough
    # Need an ABC for both types
    self._payload = payload
    self._payload_parser = parser

    # A dedicated parser now owns the stream; the HTTP read timeout is off.
    self._drop_timeout()

    if self._tail:
        data, self._tail = self._tail, b""
        self.data_received(data)
|
| 168 |
+
|
| 169 |
+
def set_response_params(
    self,
    *,
    timer: Optional[BaseTimerContext] = None,
    skip_payload: bool = False,
    read_until_eof: bool = False,
    auto_decompress: bool = True,
    read_timeout: Optional[float] = None,
    read_bufsize: int = 2**16,
    timeout_ceil_threshold: float = 5,
    max_line_size: int = 8190,
    max_field_size: int = 8190,
) -> None:
    """Configure parsing for the next HTTP response.

    Stores the payload/timeout settings, builds a fresh
    ``HttpResponseParser`` (most parameters are forwarded to it verbatim;
    ``skip_payload`` becomes ``response_with_body=not skip_payload``), and
    replays any bytes that were buffered in ``self._tail`` while no parser
    was installed.
    """
    self._skip_payload = skip_payload

    self._read_timeout = read_timeout

    self._timeout_ceil_threshold = timeout_ceil_threshold

    self._parser = HttpResponseParser(
        self,
        self._loop,
        read_bufsize,
        timer=timer,
        payload_exception=ClientPayloadError,
        response_with_body=not skip_payload,
        read_until_eof=read_until_eof,
        auto_decompress=auto_decompress,
        max_line_size=max_line_size,
        max_field_size=max_field_size,
    )

    # Replay bytes received before the parser existed.
    if self._tail:
        data, self._tail = self._tail, b""
        self.data_received(data)
|
| 204 |
+
|
| 205 |
+
def _drop_timeout(self) -> None:
    """Cancel and forget the pending read-timeout timer, if any."""
    handle = self._read_timeout_handle
    if handle is not None:
        handle.cancel()
        self._read_timeout_handle = None
|
| 209 |
+
|
| 210 |
+
def _reschedule_timeout(self) -> None:
    """Restart the read timeout: cancel any pending timer, then re-arm.

    A falsy ``self._read_timeout`` (``None`` or ``0``) disables the timer.
    """
    if (old_handle := self._read_timeout_handle) is not None:
        old_handle.cancel()

    delay = self._read_timeout
    self._read_timeout_handle = (
        self._loop.call_later(delay, self._on_read_timeout) if delay else None
    )
|
| 221 |
+
|
| 222 |
+
def start_timeout(self) -> None:
    """Arm (or restart) the read timeout."""
    self._reschedule_timeout()
|
| 224 |
+
|
| 225 |
+
@property
def read_timeout(self) -> Optional[float]:
    """Current read timeout in seconds, or ``None`` when disabled."""
    return self._read_timeout
|
| 228 |
+
|
| 229 |
+
@read_timeout.setter
def read_timeout(self, read_timeout: Optional[float]) -> None:
    # Takes effect the next time the timeout is (re)scheduled.
    self._read_timeout = read_timeout
|
| 232 |
+
|
| 233 |
+
def _on_read_timeout(self) -> None:
    """Timer callback: fail the protocol and the pending payload (if any)."""
    timeout_exc = SocketTimeoutError("Timeout on reading data from socket")
    self.set_exception(timeout_exc)
    if (payload := self._payload) is not None:
        set_exception(payload, timeout_exc)
|
| 238 |
+
|
| 239 |
+
def data_received(self, data: bytes) -> None:
    """Feed raw bytes from the transport into the active parser.

    Depending on connection state the bytes go to the custom payload
    parser (e.g. websocket frames), are buffered in ``self._tail``
    (upgraded connection awaiting a parser), or are parsed as HTTP
    response messages which are then fed downstream.
    """
    # Any inbound data counts as activity for the read timeout.
    self._reschedule_timeout()

    if not data:
        return

    # custom payload parser
    if self._payload_parser is not None:
        eof, tail = self._payload_parser.feed_data(data)
        if eof:
            # Payload finished: detach parser/payload before handling
            # any leftover bytes.
            self._payload = None
            self._payload_parser = None

            if tail:
                self.data_received(tail)
        return
    else:
        if self._upgraded or self._parser is None:
            # i.e. websocket connection, websocket parser is not set yet
            self._tail += data
        else:
            # parse http messages
            try:
                messages, upgraded, tail = self._parser.feed_data(data)
            except BaseException as underlying_exc:
                if self.transport is not None:
                    # connection.release() could be called BEFORE
                    # data_received(), the transport is already
                    # closed in this case
                    self.transport.close()
                # should_close is True after the call
                # Re-wrap the parser error so the original is attached as
                # the cause via set_exception.
                if isinstance(underlying_exc, HttpProcessingError):
                    exc = HttpProcessingError(
                        code=underlying_exc.code,
                        message=underlying_exc.message,
                        headers=underlying_exc.headers,
                    )
                else:
                    exc = HttpProcessingError()
                self.set_exception(exc, underlying_exc)
                return

            self._upgraded = upgraded

            payload: Optional[StreamReader] = None
            for message, payload in messages:
                if message.should_close:
                    self._should_close = True

                self._payload = payload

                # Responses that must not carry a body get EMPTY_PAYLOAD.
                if self._skip_payload or status_code_must_be_empty_body(
                    message.code
                ):
                    self.feed_data((message, EMPTY_PAYLOAD), 0)
                else:
                    self.feed_data((message, payload), 0)
            if payload is not None:
                # new message(s) was processed
                # register timeout handler unsubscribing
                # either on end-of-stream or immediately for
                # EMPTY_PAYLOAD
                if payload is not EMPTY_PAYLOAD:
                    payload.on_eof(self._drop_timeout)
                else:
                    self._drop_timeout()

            if tail:
                if upgraded:
                    # Leftover bytes belong to the upgraded protocol.
                    self.data_received(tail)
                else:
                    self._tail = tail
|
parrot/lib/python3.10/site-packages/aiohttp/cookiejar.py
ADDED
|
@@ -0,0 +1,479 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import calendar
|
| 3 |
+
import contextlib
|
| 4 |
+
import datetime
|
| 5 |
+
import heapq
|
| 6 |
+
import itertools
|
| 7 |
+
import os # noqa
|
| 8 |
+
import pathlib
|
| 9 |
+
import pickle
|
| 10 |
+
import re
|
| 11 |
+
import time
|
| 12 |
+
from collections import defaultdict
|
| 13 |
+
from http.cookies import BaseCookie, Morsel, SimpleCookie
|
| 14 |
+
from typing import (
|
| 15 |
+
DefaultDict,
|
| 16 |
+
Dict,
|
| 17 |
+
Iterable,
|
| 18 |
+
Iterator,
|
| 19 |
+
List,
|
| 20 |
+
Mapping,
|
| 21 |
+
Optional,
|
| 22 |
+
Set,
|
| 23 |
+
Tuple,
|
| 24 |
+
Union,
|
| 25 |
+
cast,
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
from yarl import URL
|
| 29 |
+
|
| 30 |
+
from .abc import AbstractCookieJar, ClearCookiePredicate
|
| 31 |
+
from .helpers import is_ip_address
|
| 32 |
+
from .typedefs import LooseCookies, PathLike, StrOrURL
|
| 33 |
+
|
| 34 |
+
__all__ = ("CookieJar", "DummyCookieJar")


# A cookie may be supplied either as a raw string value or a parsed Morsel.
CookieItem = Union[str, "Morsel[str]"]

# We cache these string methods here as their use is in performance critical code.
_FORMAT_PATH = "{}/{}".format
_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format

# The minimum number of scheduled cookie expirations before we start cleaning up
# the expiration heap. This is a performance optimization to avoid cleaning up the
# heap too often when there are only a few scheduled expirations.
_MIN_SCHEDULED_COOKIE_EXPIRATION = 100
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    # Splits an RFC 6265 date string into candidate tokens (delimiters
    # per the RFC's cookie-date grammar).
    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    # HH:MM:SS time component.
    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    # 1- or 2-digit day of month.
    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    # Month name; the index of the matched alternation group (1..12) is
    # used as the month number.
    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    # 2- to 4-digit year.
    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    # calendar.timegm() fails for timestamps after datetime.datetime.max
    # Minus one as a loss of precision occurs when timestamp() is called.
    MAX_TIME = (
        int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
    )
    try:
        calendar.timegm(time.gmtime(MAX_TIME))
    except (OSError, ValueError):
        # Hit the maximum representable time on Windows
        # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
        # Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere
        MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
    except OverflowError:
        # #4515: datetime.max may not be representable on 32-bit platforms
        MAX_TIME = 2**31 - 1
    # Avoid minuses in the future, 3x faster
    SUB_MAX_TIME = MAX_TIME - 1
|
| 85 |
+
|
| 86 |
+
def __init__(
    self,
    *,
    unsafe: bool = False,
    quote_cookie: bool = True,
    treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Create an RFC 6265 cookie jar.

    :param unsafe: also accept/send cookies for IP-address hosts.
    :param quote_cookie: build outgoing cookies with ``SimpleCookie``
        (quoted) instead of ``BaseCookie``.
    :param treat_as_secure_origin: origin(s) for which non-HTTPS requests
        are still treated as secure; accepts a single URL/str or a list.
    :param loop: optional event loop, forwarded to the base class.
    """
    super().__init__(loop=loop)
    # (domain, path) -> SimpleCookie holding that site's morsels.
    self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
        SimpleCookie
    )
    # Cache of Morsels already built by filter_cookies(), keyed like
    # _cookies; invalidated when a cookie is re-set.
    self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = (
        defaultdict(dict)
    )
    # (domain, name) pairs for cookies that carried no Domain attribute.
    self._host_only_cookies: Set[Tuple[str, str]] = set()
    self._unsafe = unsafe
    self._quote_cookie = quote_cookie
    # Normalize treat_as_secure_origin into a list of URL origins.
    if treat_as_secure_origin is None:
        treat_as_secure_origin = []
    elif isinstance(treat_as_secure_origin, URL):
        treat_as_secure_origin = [treat_as_secure_origin.origin()]
    elif isinstance(treat_as_secure_origin, str):
        treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
    else:
        treat_as_secure_origin = [
            URL(url).origin() if isinstance(url, str) else url.origin()
            for url in treat_as_secure_origin
        ]
    self._treat_as_secure_origin = treat_as_secure_origin
    # Min-heap of (expiry, (domain, path, name)) entries; _expirations is
    # the authoritative expiry map, stale heap entries are skipped lazily.
    self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = []
    self._expirations: Dict[Tuple[str, str, str], float] = {}
|
| 118 |
+
|
| 119 |
+
def save(self, file_path: PathLike) -> None:
    """Pickle the jar's cookie table to *file_path*."""
    target = pathlib.Path(file_path)
    with target.open(mode="wb") as f:
        pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
|
| 123 |
+
|
| 124 |
+
def load(self, file_path: PathLike) -> None:
    """Replace the jar's cookie table with one unpickled from *file_path*.

    NOTE(review): ``pickle.load`` can execute arbitrary code; only load
    files produced by :meth:`save` from a trusted location.
    """
    source = pathlib.Path(file_path)
    with source.open(mode="rb") as f:
        self._cookies = pickle.load(f)
|
| 128 |
+
|
| 129 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
    """Remove cookies.

    With no *predicate*, drop everything (fast path that also clears all
    bookkeeping). Otherwise delete cookies that are already expired plus
    those for which ``predicate(morsel)`` is true.
    """
    if predicate is None:
        self._expire_heap.clear()
        self._cookies.clear()
        self._morsel_cache.clear()
        self._host_only_cookies.clear()
        self._expirations.clear()
        return

    now = time.time()
    # NOTE: the walrus assignment of ``key`` is always evaluated before
    # ``predicate(morsel)``, so ``key`` is bound on every iteration.
    to_del = [
        key
        for (domain, path), cookie in self._cookies.items()
        for name, morsel in cookie.items()
        if (
            (key := (domain, path, name)) in self._expirations
            and self._expirations[key] <= now
        )
        or predicate(morsel)
    ]
    if to_del:
        self._delete_cookies(to_del)
|
| 151 |
+
|
| 152 |
+
def clear_domain(self, domain: str) -> None:
    """Remove every cookie whose domain matches *domain* (RFC 6265 rules)."""

    def _matches(morsel: "Morsel[str]") -> bool:
        return self._is_domain_match(domain, morsel["domain"])

    self.clear(_matches)
|
| 154 |
+
|
| 155 |
+
def __iter__(self) -> "Iterator[Morsel[str]]":
    """Iterate over all live morsels, expiring stale cookies first."""
    self._do_expiration()
    yield from (
        morsel
        for site_cookies in self._cookies.values()
        for morsel in site_cookies.values()
    )
|
| 159 |
+
|
| 160 |
+
def __len__(self) -> int:
    """Return number of cookies.

    Deliberately avoids iterating ``self`` so that ``len(jar)`` stays
    cheap and free of expiration side effects.
    """
    total = 0
    for site_cookies in self._cookies.values():
        total += len(site_cookies.values())
    return total
|
| 167 |
+
|
| 168 |
+
def _do_expiration(self) -> None:
    """Remove expired cookies.

    Pops due entries off the expiration heap, skipping stale heap entries
    (cookies rescheduled with a different deadline), and occasionally
    compacts the heap to bound its memory use.
    """
    if not (expire_heap_len := len(self._expire_heap)):
        return

    # If the expiration heap grows larger than the number expirations
    # times two, we clean it up to avoid keeping expired entries in
    # the heap and consuming memory. We guard this with a minimum
    # threshold to avoid cleaning up the heap too often when there are
    # only a few scheduled expirations.
    if (
        expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION
        and expire_heap_len > len(self._expirations) * 2
    ):
        # Remove any expired entries from the expiration heap
        # that do not match the expiration time in the expirations
        # as it means the cookie has been re-added to the heap
        # with a different expiration time.
        self._expire_heap = [
            entry
            for entry in self._expire_heap
            if self._expirations.get(entry[1]) == entry[0]
        ]
        heapq.heapify(self._expire_heap)

    now = time.time()
    to_del: List[Tuple[str, str, str]] = []
    # Find any expired cookies and add them to the to-delete list
    while self._expire_heap:
        when, cookie_key = self._expire_heap[0]
        if when > now:
            # Heap top is in the future; nothing else can be due.
            break
        heapq.heappop(self._expire_heap)
        # Check if the cookie hasn't been re-added to the heap
        # with a different expiration time as it will be removed
        # later when it reaches the top of the heap and its
        # expiration time is met.
        if self._expirations.get(cookie_key) == when:
            to_del.append(cookie_key)

    if to_del:
        self._delete_cookies(to_del)
|
| 210 |
+
|
| 211 |
+
def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None:
    """Drop each (domain, path, name) cookie and all bookkeeping for it."""
    for domain, path, name in to_del:
        site_key = (domain, path)
        self._host_only_cookies.discard((domain, name))
        self._cookies[site_key].pop(name, None)
        self._morsel_cache[site_key].pop(name, None)
        self._expirations.pop((domain, path, name), None)
|
| 217 |
+
|
| 218 |
+
def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
    """Schedule the (domain, path, name) cookie to expire at *when*."""
    cookie_key = (domain, path, name)
    if self._expirations.get(cookie_key) == when:
        # Same deadline already queued; don't grow the heap with duplicates.
        return
    self._expirations[cookie_key] = when
    heapq.heappush(self._expire_heap, (when, cookie_key))
|
| 225 |
+
|
| 226 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
    """Update cookies.

    Normalizes each incoming cookie's domain and path per RFC 6265
    (host-only flag, leading/trailing dots, default path), schedules
    Max-Age / Expires expiration, and stores the morsel.
    """
    hostname = response_url.raw_host

    if not self._unsafe and is_ip_address(hostname):
        # Don't accept cookies from IPs
        return

    if isinstance(cookies, Mapping):
        cookies = cookies.items()

    for name, cookie in cookies:
        if not isinstance(cookie, Morsel):
            # Parse a raw string value into a Morsel.
            tmp = SimpleCookie()
            tmp[name] = cookie  # type: ignore[assignment]
            cookie = tmp[name]

        domain = cookie["domain"]

        # ignore domains with trailing dots
        if domain and domain[-1] == ".":
            domain = ""
            del cookie["domain"]

        if not domain and hostname is not None:
            # Set the cookie's domain to the response hostname
            # and set its host-only-flag
            self._host_only_cookies.add((hostname, name))
            domain = cookie["domain"] = hostname

        if domain and domain[0] == ".":
            # Remove leading dot
            domain = domain[1:]
            cookie["domain"] = domain

        if hostname and not self._is_domain_match(domain, hostname):
            # Setting cookies for different domains is not allowed
            continue

        path = cookie["path"]
        if not path or path[0] != "/":
            # Set the cookie's path to the response path
            path = response_url.path
            if not path.startswith("/"):
                path = "/"
            else:
                # Cut everything from the last slash to the end
                path = "/" + path[1 : path.rfind("/")]
            cookie["path"] = path
        path = path.rstrip("/")

        if max_age := cookie["max-age"]:
            try:
                delta_seconds = int(max_age)
                # Clamp to MAX_TIME so calendar/time math can't overflow.
                max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
                self._expire_cookie(max_age_expiration, domain, path, name)
            except ValueError:
                # Non-integer Max-Age: drop the attribute.
                cookie["max-age"] = ""

        elif expires := cookie["expires"]:
            if expire_time := self._parse_date(expires):
                self._expire_cookie(expire_time, domain, path, name)
            else:
                # Unparseable Expires: drop the attribute.
                cookie["expires"] = ""

        key = (domain, path)
        if self._cookies[key].get(name) != cookie:
            # Don't blow away the cache if the same
            # cookie gets set again
            self._cookies[key][name] = cookie
            self._morsel_cache[key].pop(name, None)

    self._do_expiration()
|
| 299 |
+
|
| 300 |
+
def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
    """Returns this jar's cookies filtered by their attributes.

    Selects cookies whose domain/path match *request_url* (RFC 6265
    section 5.4), honoring the host-only flag, the Secure attribute and
    the ``treat_as_secure_origin`` allowance.
    """
    filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
        SimpleCookie() if self._quote_cookie else BaseCookie()
    )
    if not self._cookies:
        # Skip do_expiration() if there are no cookies.
        return filtered
    self._do_expiration()
    if not self._cookies:
        # Skip rest of function if no non-expired cookies.
        return filtered
    request_url = URL(request_url)
    hostname = request_url.raw_host or ""

    is_not_secure = request_url.scheme not in ("https", "wss")
    if is_not_secure and self._treat_as_secure_origin:
        # A configured origin may whitelist this plain-HTTP request.
        request_origin = URL()
        with contextlib.suppress(ValueError):
            request_origin = request_url.origin()
        is_not_secure = request_origin not in self._treat_as_secure_origin

    # Send shared cookie
    for c in self._cookies[("", "")].values():
        filtered[c.key] = c.value

    if is_ip_address(hostname):
        if not self._unsafe:
            return filtered
        domains: Iterable[str] = (hostname,)
    else:
        # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com")
        domains = itertools.accumulate(
            reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED
        )

    # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar")
    paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH)
    # Create every combination of (domain, path) pairs.
    pairs = itertools.product(domains, paths)

    path_len = len(request_url.path)
    # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
    for p in pairs:
        for name, cookie in self._cookies[p].items():
            domain = cookie["domain"]

            # Host-only cookies are only sent back to the exact host.
            if (domain, name) in self._host_only_cookies and domain != hostname:
                continue

            # Skip edge case when the cookie has a trailing slash but request doesn't.
            if len(cookie["path"]) > path_len:
                continue

            if is_not_secure and cookie["secure"]:
                continue

            # We already built the Morsel so reuse it here
            if name in self._morsel_cache[p]:
                filtered[name] = self._morsel_cache[p][name]
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            self._morsel_cache[p][name] = mrsl_val
            filtered[name] = mrsl_val

    return filtered
|
| 370 |
+
|
| 371 |
+
@staticmethod
def _is_domain_match(domain: str, hostname: str) -> bool:
    """Implements domain matching adhering to RFC 6265 (section 5.1.3).

    True when *hostname* equals *domain*, or *domain* is a dot-separated
    suffix of *hostname* and *hostname* is not an IP address.
    """
    if hostname == domain:
        return True

    # Suffix match requires a "." immediately before the suffix,
    # e.g. "a.b.com" matches "b.com" but "ab.com" does not.
    if not (
        hostname.endswith(domain) and hostname[: -len(domain)].endswith(".")
    ):
        return False

    return not is_ip_address(hostname)
|
| 386 |
+
|
| 387 |
+
@classmethod
def _parse_date(cls, date_str: str) -> Optional[int]:
    """Implements date string parsing adhering to RFC 6265 (section 5.1.1).

    Scans the token list for (in any order) an HH:MM:SS time, a day of
    month, a month name and a year, applying the RFC's two-digit-year
    rules, then validates the ranges.

    :param date_str: raw value of an ``Expires`` cookie attribute.
    :return: POSIX timestamp, or ``None`` for an invalid/empty date.
    """
    if not date_str:
        return None

    found_time = False
    found_day = False
    found_month = False
    found_year = False

    hour = minute = second = 0
    day = 0
    month = 0
    year = 0

    for token_match in cls.DATE_TOKENS_RE.finditer(date_str):
        token = token_match.group("token")

        if not found_time:
            time_match = cls.DATE_HMS_TIME_RE.match(token)
            if time_match:
                found_time = True
                hour, minute, second = (int(s) for s in time_match.groups())
                continue

        if not found_day:
            day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
            if day_match:
                found_day = True
                day = int(day_match.group())
                continue

        if not found_month:
            month_match = cls.DATE_MONTH_RE.match(token)
            if month_match:
                found_month = True
                assert month_match.lastindex is not None
                # Month number == index of the alternation group that matched.
                month = month_match.lastindex
                continue

        if not found_year:
            year_match = cls.DATE_YEAR_RE.match(token)
            if year_match:
                found_year = True
                year = int(year_match.group())

    # RFC 6265 two-digit year handling.
    if 70 <= year <= 99:
        year += 1900
    elif 0 <= year <= 69:
        year += 2000

    # Idiomatic boolean check (was: ``False in (tuple-of-flags)``).
    if not (found_day and found_month and found_year and found_time):
        return None

    if not 1 <= day <= 31:
        return None

    if year < 1601 or hour > 23 or minute > 59 or second > 59:
        return None

    return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
|
| 450 |
+
|
| 451 |
+
|
| 452 |
+
class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    Accepts any update and always reports an empty cookie set; use it
    with the ClientSession when no cookie processing is needed.
    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        """Yield nothing: the jar is always empty."""
        yield from ()

    def __len__(self) -> int:
        """The jar never holds cookies."""
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Nothing to clear."""

    def clear_domain(self, domain: str) -> None:
        """Nothing to clear."""

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Ignore incoming cookies."""

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Always return an empty cookie set."""
        return SimpleCookie()
|
parrot/lib/python3.10/site-packages/aiohttp/formdata.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import warnings
|
| 3 |
+
from typing import Any, Iterable, List, Optional
|
| 4 |
+
from urllib.parse import urlencode
|
| 5 |
+
|
| 6 |
+
from multidict import MultiDict, MultiDictProxy
|
| 7 |
+
|
| 8 |
+
from . import hdrs, multipart, payload
|
| 9 |
+
from .helpers import guess_filename
|
| 10 |
+
from .payload import Payload
|
| 11 |
+
|
| 12 |
+
__all__ = ("FormData",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FormData:
|
| 16 |
+
"""Helper class for form body generation.
|
| 17 |
+
|
| 18 |
+
Supports multipart/form-data and application/x-www-form-urlencoded.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
def __init__(
    self,
    fields: Iterable[Any] = (),
    quote_fields: bool = True,
    charset: Optional[str] = None,
) -> None:
    """Collect form fields; accepts a dict, a list/tuple of entries, or a
    single field record.
    """
    self._writer = multipart.MultipartWriter("form-data")
    self._fields: List[Any] = []
    self._is_multipart = False
    self._is_processed = False
    self._quote_fields = quote_fields
    self._charset = charset

    # Normalize the supported input shapes into a sequence of records.
    if isinstance(fields, dict):
        normalized: Iterable[Any] = list(fields.items())
    elif isinstance(fields, (list, tuple)):
        normalized = fields
    else:
        normalized = (fields,)
    self.add_fields(*normalized)
|
| 39 |
+
|
| 40 |
+
@property
def is_multipart(self) -> bool:
    """Whether the form must be sent as ``multipart/form-data``.

    Becomes ``True`` once any field requires it (file object, filename,
    explicit content type or transfer encoding); otherwise the form is
    sent URL-encoded.
    """
    return self._is_multipart
|
| 43 |
+
|
| 44 |
+
def add_field(
    self,
    name: str,
    value: Any,
    *,
    content_type: Optional[str] = None,
    filename: Optional[str] = None,
    content_transfer_encoding: Optional[str] = None,
) -> None:
    """Add a single form field.

    A file object value, an explicit *filename*, *content_type* or
    *content_transfer_encoding* switches the form into multipart mode.

    :raises TypeError: if *filename*, *content_type* or
        *content_transfer_encoding* is given but is not a ``str``.
    """
    if isinstance(value, io.IOBase):
        self._is_multipart = True
    elif isinstance(value, (bytes, bytearray, memoryview)):
        # Deprecated v3 behavior: bare bytes become a file field.
        msg = (
            "In v4, passing bytes will no longer create a file field. "
            "Please explicitly use the filename parameter or pass a BytesIO object."
        )
        if filename is None and content_transfer_encoding is None:
            warnings.warn(msg, DeprecationWarning)
            filename = name

    type_options: MultiDict[str] = MultiDict({"name": name})
    if filename is not None and not isinstance(filename, str):
        raise TypeError(
            "filename must be an instance of str. " "Got: %s" % filename
        )
    if filename is None and isinstance(value, io.IOBase):
        # Fall back to a name guessed from the file object.
        filename = guess_filename(value, name)
    if filename is not None:
        type_options["filename"] = filename
        self._is_multipart = True

    headers = {}
    if content_type is not None:
        if not isinstance(content_type, str):
            raise TypeError(
                "content_type must be an instance of str. " "Got: %s" % content_type
            )
        headers[hdrs.CONTENT_TYPE] = content_type
        self._is_multipart = True
    if content_transfer_encoding is not None:
        if not isinstance(content_transfer_encoding, str):
            raise TypeError(
                "content_transfer_encoding must be an instance"
                " of str. Got: %s" % content_transfer_encoding
            )
        msg = (
            "content_transfer_encoding is deprecated. "
            "To maintain compatibility with v4 please pass a BytesPayload."
        )
        warnings.warn(msg, DeprecationWarning)
        self._is_multipart = True

    self._fields.append((type_options, headers, value))
|
| 98 |
+
|
| 99 |
+
def add_fields(self, *fields: Any) -> None:
|
| 100 |
+
to_add = list(fields)
|
| 101 |
+
|
| 102 |
+
while to_add:
|
| 103 |
+
rec = to_add.pop(0)
|
| 104 |
+
|
| 105 |
+
if isinstance(rec, io.IOBase):
|
| 106 |
+
k = guess_filename(rec, "unknown")
|
| 107 |
+
self.add_field(k, rec) # type: ignore[arg-type]
|
| 108 |
+
|
| 109 |
+
elif isinstance(rec, (MultiDictProxy, MultiDict)):
|
| 110 |
+
to_add.extend(rec.items())
|
| 111 |
+
|
| 112 |
+
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
|
| 113 |
+
k, fp = rec
|
| 114 |
+
self.add_field(k, fp) # type: ignore[arg-type]
|
| 115 |
+
|
| 116 |
+
else:
|
| 117 |
+
raise TypeError(
|
| 118 |
+
"Only io.IOBase, multidict and (name, file) "
|
| 119 |
+
"pairs allowed, use .add_field() for passing "
|
| 120 |
+
"more complex parameters, got {!r}".format(rec)
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
def _gen_form_urlencoded(self) -> payload.BytesPayload:
|
| 124 |
+
# form data (x-www-form-urlencoded)
|
| 125 |
+
data = []
|
| 126 |
+
for type_options, _, value in self._fields:
|
| 127 |
+
data.append((type_options["name"], value))
|
| 128 |
+
|
| 129 |
+
charset = self._charset if self._charset is not None else "utf-8"
|
| 130 |
+
|
| 131 |
+
if charset == "utf-8":
|
| 132 |
+
content_type = "application/x-www-form-urlencoded"
|
| 133 |
+
else:
|
| 134 |
+
content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset
|
| 135 |
+
|
| 136 |
+
return payload.BytesPayload(
|
| 137 |
+
urlencode(data, doseq=True, encoding=charset).encode(),
|
| 138 |
+
content_type=content_type,
|
| 139 |
+
)
|
| 140 |
+
|
| 141 |
+
    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        # One-shot: the underlying writer is consumed once it is returned.
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                # An explicit Content-Type on the field takes precedence;
                # otherwise get_payload picks one based on the value type.
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                # Re-raise as TypeError so callers get a uniform error for
                # unserializable values, with the original cause chained.
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer
|
| 177 |
+
|
| 178 |
+
def __call__(self) -> Payload:
|
| 179 |
+
if self._is_multipart:
|
| 180 |
+
return self._gen_form_data()
|
| 181 |
+
else:
|
| 182 |
+
return self._gen_form_urlencoded()
|
parrot/lib/python3.10/site-packages/aiohttp/helpers.py
ADDED
|
@@ -0,0 +1,1010 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Various helper functions"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import binascii
|
| 6 |
+
import contextlib
|
| 7 |
+
import datetime
|
| 8 |
+
import enum
|
| 9 |
+
import functools
|
| 10 |
+
import inspect
|
| 11 |
+
import netrc
|
| 12 |
+
import os
|
| 13 |
+
import platform
|
| 14 |
+
import re
|
| 15 |
+
import sys
|
| 16 |
+
import time
|
| 17 |
+
import weakref
|
| 18 |
+
from collections import namedtuple
|
| 19 |
+
from contextlib import suppress
|
| 20 |
+
from email.parser import HeaderParser
|
| 21 |
+
from email.utils import parsedate
|
| 22 |
+
from math import ceil
|
| 23 |
+
from pathlib import Path
|
| 24 |
+
from types import TracebackType
|
| 25 |
+
from typing import (
|
| 26 |
+
Any,
|
| 27 |
+
Callable,
|
| 28 |
+
ContextManager,
|
| 29 |
+
Dict,
|
| 30 |
+
Generator,
|
| 31 |
+
Generic,
|
| 32 |
+
Iterable,
|
| 33 |
+
Iterator,
|
| 34 |
+
List,
|
| 35 |
+
Mapping,
|
| 36 |
+
Optional,
|
| 37 |
+
Protocol,
|
| 38 |
+
Tuple,
|
| 39 |
+
Type,
|
| 40 |
+
TypeVar,
|
| 41 |
+
Union,
|
| 42 |
+
get_args,
|
| 43 |
+
overload,
|
| 44 |
+
)
|
| 45 |
+
from urllib.parse import quote
|
| 46 |
+
from urllib.request import getproxies, proxy_bypass
|
| 47 |
+
|
| 48 |
+
import attr
|
| 49 |
+
from multidict import MultiDict, MultiDictProxy, MultiMapping
|
| 50 |
+
from yarl import URL
|
| 51 |
+
|
| 52 |
+
from . import hdrs
|
| 53 |
+
from .log import client_logger
|
| 54 |
+
|
| 55 |
+
if sys.version_info >= (3, 11):
|
| 56 |
+
import asyncio as async_timeout
|
| 57 |
+
else:
|
| 58 |
+
import async_timeout
|
| 59 |
+
|
| 60 |
+
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
|
| 61 |
+
|
| 62 |
+
IS_MACOS = platform.system() == "Darwin"
|
| 63 |
+
IS_WINDOWS = platform.system() == "Windows"
|
| 64 |
+
|
| 65 |
+
PY_310 = sys.version_info >= (3, 10)
|
| 66 |
+
PY_311 = sys.version_info >= (3, 11)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
_T = TypeVar("_T")
|
| 70 |
+
_S = TypeVar("_S")
|
| 71 |
+
|
| 72 |
+
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
|
| 73 |
+
sentinel = _SENTINEL.sentinel
|
| 74 |
+
|
| 75 |
+
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
|
| 76 |
+
|
| 77 |
+
DEBUG = sys.flags.dev_mode or (
|
| 78 |
+
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
|
| 79 |
+
)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
CHAR = {chr(i) for i in range(0, 128)}
|
| 83 |
+
CTL = {chr(i) for i in range(0, 32)} | {
|
| 84 |
+
chr(127),
|
| 85 |
+
}
|
| 86 |
+
SEPARATORS = {
|
| 87 |
+
"(",
|
| 88 |
+
")",
|
| 89 |
+
"<",
|
| 90 |
+
">",
|
| 91 |
+
"@",
|
| 92 |
+
",",
|
| 93 |
+
";",
|
| 94 |
+
":",
|
| 95 |
+
"\\",
|
| 96 |
+
'"',
|
| 97 |
+
"/",
|
| 98 |
+
"[",
|
| 99 |
+
"]",
|
| 100 |
+
"?",
|
| 101 |
+
"=",
|
| 102 |
+
"{",
|
| 103 |
+
"}",
|
| 104 |
+
" ",
|
| 105 |
+
chr(9),
|
| 106 |
+
}
|
| 107 |
+
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class noop:
    """Awaitable that completes immediately and produces no value."""

    def __await__(self) -> Generator[None, None, None]:
        yield
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Reject values that cannot be represented in the auth header.
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header."""
        split = auth_header.split(" ", 1)
        if len(split) != 2:
            raise ValueError("Could not parse authorization header.")
        auth_type, credentials = split

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        # RFC 2617 HTTP Authentication
        # https://www.ietf.org/rfc/rfc2617.txt
        # the colon must be present, but the username and password may be
        # otherwise blank.
        user, sep, pwd = decoded.partition(":")
        if not sep:
            raise ValueError("Invalid credentials.")

        return cls(user, pwd, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url."""
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None and url.password is None:
            return None
        return cls(url.user or "", url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials."""
        raw = f"{self.login}:{self.password}".encode(self.encoding)
        return "Basic %s" % base64.b64encode(raw).decode(self.encoding)
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL and its BasicAuth, if any."""
    auth = BasicAuth.from_url(url)
    return (url, None) if auth is None else (url.with_user(None), auth)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        # Explicit override wins over the default location.
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows convention names the file "_netrc" instead of ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        netrc_exists = False
        with contextlib.suppress(OSError):
            netrc_exists = netrc_path.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_exists:
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    """Immutable pairing of a proxy URL with its optional credentials."""

    proxy: URL
    proxy_auth: Optional[BasicAuth]
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
        entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")

    login, account, password = entry

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    if login or account is None:
        username = login
    else:
        username = account

    # TODO(PY311): password will always be a string; until then it may
    # come back as None when absent.
    return BasicAuth(username, password or "")
|
| 260 |
+
|
| 261 |
+
|
| 262 |
+
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect proxy settings from the environment, keyed by scheme.

    Reads ``urllib.request.getproxies()``, keeps only http/https/ws/wss
    entries, strips inline credentials into a BasicAuth (falling back to
    the proxy host's ``.netrc`` entry), and skips TLS proxy schemes.
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            # TLS-to-proxy schemes are rejected with a warning.
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            # No inline credentials in the URL: try the .netrc entry for
            # the proxy host; missing entries simply leave auth as None.
            if proxy.host is not None:
                try:
                    auth = basicauth_from_netrc(netrc_obj, proxy.host)
                except LookupError:
                    auth = None
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
|
| 286 |
+
|
| 287 |
+
|
| 288 |
+
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    # Respect the platform/no_proxy bypass rules first.
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    env_proxies = proxies_from_env()
    proxy_info = env_proxies.get(url.scheme)
    if proxy_info is None:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    return proxy_info.proxy, proxy_info.proxy_auth
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    """Parsed representation of a MIME type (see ``parse_mimetype``)."""

    type: str  # e.g. "text" in "text/html"
    subtype: str  # e.g. "html" in "text/html"
    suffix: str  # part after "+" in the subtype, e.g. "xml" in "svg+xml"
    parameters: "MultiDictProxy[str]"  # e.g. {"charset": "utf-8"}
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parse a MIME type string into its components.

    Returns a MimeType object; results are memoized.

    Example:

        >>> parse_mimetype('text/html; charset=utf-8')
        MimeType(type='text', subtype='html', suffix='',
                 parameters={'charset': 'utf-8'})

    """
    # Empty input maps to an all-empty MimeType rather than an error.
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    main_part, *param_parts = mimetype.split(";")

    params: MultiDict[str] = MultiDict()
    for raw in param_parts:
        if not raw:
            continue
        key, _, value = raw.partition("=")
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = main_part.strip().lower()
    # Bare "*" is shorthand for the full wildcard.
    if fulltype == "*":
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Return the base name of *obj*'s ``name`` attribute, or *default*.

    Pseudo-names such as ``<stdin>`` (angle-bracketed) and non-string or
    empty names fall back to *default*.
    """
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # NOTE(review): strict-superset check rejects a string that uses every
    # allowed character; upstream behavior preserved here.
    if not QCONTENT > set(content):
        raise ValueError(f"bad content for quoted-string {content!r}")
    return not_qtext_re.sub(lambda match: "\\" + match.group(0), content)
|
| 372 |
+
|
| 373 |
+
|
| 374 |
+
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # The disposition type itself must be a valid header token.
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must also be valid header tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # Filenames are always percent-encoded and double-quoted.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer the plain RFC 5322 quoted-string form.
                        qval = quoted_string(val)
                    except ValueError:
                        # Non-7-bit value: fall back to the extended
                        # key*=charset''percent-encoded notation (RFC 2231).
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # Caller accepts 8-bit values: only escape backslash/quote.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
class _TSelf(Protocol, Generic[_T]):
    # Structural type for instances that carry a ``_cache`` dict
    # (the storage used by the ``reify`` descriptor below).
    _cache: Dict[str, _T]
|
| 429 |
+
|
| 430 |
+
|
| 431 |
+
class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                # Fast path: value already computed and cached.
                return inst._cache[self.name]
            except KeyError:
                # First access: compute once and memoize on the instance.
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            # Class-level access (inst is None) returns the descriptor
            # itself; instances lacking a ``_cache`` dict re-raise.
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        # Data descriptor: assignment is explicitly forbidden.
        raise AttributeError("reified property is read-only")
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
# Keep the pure-Python implementation importable under a stable name.
reify_py = reify

try:
    # Prefer the C-accelerated implementation unless extensions are
    # disabled via AIOHTTP_NO_EXTENSIONS.
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    # Extension module not built/available; stay with the Python version.
    pass
|
| 472 |
+
|
| 473 |
+
|
| 474 |
+
def is_ipv4_address(
    host: Optional[Union[str, bytes, bytearray, memoryview]]
) -> bool:
    """Check if host looks like an IPv4 address.

    This function does not validate that the format is correct, only that
    the host is a str or bytes-like object, and its all numeric.

    This check is only meant as a heuristic to ensure that
    a host is not a domain name.
    """
    if not host:
        return False
    # For a host to be an ipv4 address, it must be all numeric.
    if isinstance(host, str):
        return host.replace(".", "").isdigit()
    if isinstance(host, (bytes, bytearray, memoryview)):
        # bytes() is a no-op for bytes input and gives memoryview — which
        # has no .decode() method — a decodable form (the old code raised
        # AttributeError for memoryview, which is_ip_address advertises).
        return bytes(host).decode("ascii").replace(".", "").isdigit()
    raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
def is_ipv6_address(
    host: Optional[Union[str, bytes, bytearray, memoryview]]
) -> bool:
    """Check if host looks like an IPv6 address.

    This function does not validate that the format is correct, only that
    the host contains a colon and that it is a str or bytes-like object.

    This check is only meant as a heuristic to ensure that
    a host is not a domain name.
    """
    if not host:
        return False
    # The host must contain a colon to be an IPv6 address.
    if isinstance(host, str):
        return ":" in host
    if isinstance(host, (bytes, bytearray, memoryview)):
        # bytes() normalizes memoryview, whose element-wise containment
        # check (`b":" in mv` compares bytes against ints) was always
        # False, silently misclassifying memoryview hosts.
        return b":" in bytes(host)
    raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
|
| 510 |
+
|
| 511 |
+
|
| 512 |
+
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """Check if host looks like an IP Address.

    This check is only meant as a heuristic to ensure that
    a host is not a domain name.
    """
    if is_ipv4_address(host):
        return True
    return is_ipv6_address(host)
|
| 519 |
+
|
| 520 |
+
|
| 521 |
+
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    """Return the current UTC time as an RFC 822 date string.

    The formatted string is cached and only rebuilt when the whole
    second changes.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now != _cached_current_datetime:
        # Weekday and month names for HTTP date/time formatting;
        # always English!
        # Tuples are constants stored in codeobject!
        days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
        months = (
            "",  # Dummy so we can use 1-based month numbers
            "Jan",
            "Feb",
            "Mar",
            "Apr",
            "May",
            "Jun",
            "Jul",
            "Aug",
            "Sep",
            "Oct",
            "Nov",
            "Dec",
        )

        year, month, day, hh, mm, ss, wd, *_rest = time.gmtime(now)
        _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            days[wd],
            day,
            months[month],
            year,
            hh,
            mm,
            ss,
        )
        _cached_current_datetime = now
    return _cached_formatted_datetime
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    """Invoke the named method on a weakly referenced object, if still alive.

    Any exception raised by the method is swallowed (best-effort call).
    """
    ref, method_name = info
    target = ref()
    if target is None:
        return
    with suppress(Exception):
        getattr(target, method_name)()
|
| 571 |
+
|
| 572 |
+
|
| 573 |
+
def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule a best-effort call to ``ob.<name>()`` after *timeout* seconds.

    Holds only a weak reference to *ob*; returns None when no positive
    timeout is given.
    """
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    # Round long deadlines up to a whole second.
    if timeout >= timeout_ceil_threshold:
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
|
| 587 |
+
|
| 588 |
+
|
| 589 |
+
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* on *loop* after *timeout* seconds.

    Returns None when no positive timeout is given.
    """
    if timeout is None or timeout <= 0:
        return None
    deadline = calculate_timeout_when(loop.time(), timeout, timeout_ceil_threshold)
    return loop.call_at(deadline, cb)
|
| 600 |
+
|
| 601 |
+
|
| 602 |
+
def calculate_timeout_when(
    loop_time: float,
    timeout: float,
    timeout_ceiling_threshold: float,
) -> float:
    """Calculate when to execute a timeout.

    Deadlines for timeouts above the ceiling threshold are rounded up
    to a whole second; shorter ones fire exactly at
    ``loop_time + timeout``.
    """
    deadline = loop_time + timeout
    return ceil(deadline) if timeout > timeout_ceiling_threshold else deadline
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
class TimeoutHandle:
    """Timeout handle.

    Collects callbacks to fire when a timeout elapses; ``start`` arms a
    single loop timer that invokes all of them.
    """

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # Each entry: (callback, positional args, keyword args).
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add a callback to run when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks without firing them."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.TimerHandle]:
        """Arm the timeout on the loop; None when no positive timeout is set."""
        timeout = self._timeout
        if timeout is None or timeout <= 0:
            return None
        when = self._loop.time() + timeout
        # Round long deadlines up to a whole second.
        if timeout >= self._ceil_threshold:
            when = ceil(when)
        return self._loop.call_at(when, self.__call__)

    def timer(self) -> "BaseTimerContext":
        """Return a timer context; a no-op context when no timeout is set."""
        if self._timeout is None or self._timeout <= 0:
            return TimerNoop()
        ctx = TimerContext(self._loop)
        self.register(ctx.timeout)
        return ctx

    def __call__(self) -> None:
        # Fire every registered callback, ignoring individual failures,
        # then drop them so the timeout only fires once.
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)
        self._callbacks.clear()
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    # Base class for timer contexts: default assert_timeout never raises.
    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
class TimerNoop(BaseTimerContext):
    """Timer context used when no timeout is configured; never times out."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Nothing to clean up; returning None never suppresses exceptions.
        return
|
| 680 |
+
|
| 681 |
+
|
| 682 |
+
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context; cancelled when timeout() fires.
        self._tasks: List[asyncio.Task[Any]] = []
        # Latched once timeout() has fired; makes later entries/raises fail fast.
        self._cancelled = False

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = asyncio.current_task(loop=self._loop)

        # The context cancels the current task on timeout, so it is only
        # meaningful inside one.
        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        # Entering after the timer has already fired fails immediately.
        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        # Translate the CancelledError this context caused into TimeoutError.
        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        # Fire at most once: cancel every registered task, then latch the flag.
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
|
| 728 |
+
|
| 729 |
+
|
| 730 |
+
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an ``async_timeout`` context for ``delay`` seconds.

    None or a non-positive delay produces an unbounded timeout.  Deadlines
    for delays strictly above ``ceil_threshold`` are rounded up to a whole
    second.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    deadline = asyncio.get_running_loop().time() + delay
    if delay > ceil_threshold:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
|
| 742 |
+
|
| 743 |
+
|
| 744 |
+
class HeadersMixin:
    """Mixin providing lazily-parsed ``content_type`` / ``charset`` /
    ``content_length`` properties on top of a ``_headers`` multidict
    supplied by the subclass."""

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _headers: MultiMapping[str]

    # Cache of the last parsed Content-Type header value; ``sentinel``
    # marks "never parsed" so None (absent header) is distinguishable.
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        # Refresh the cache from the raw header value (or None when absent).
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            # Reuse the stdlib MIME header parser for value + parameters.
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params(())
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_type is not None
        return self._content_type

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        assert self._content_dict is not None
        return self._content_dict.get("charset")

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)

        # NOTE(review): a non-numeric Content-Length would raise ValueError
        # here — presumably validated upstream by the parser; confirm.
        if content_length is not None:
            return int(content_length)
        else:
            return None
|
| 792 |
+
|
| 793 |
+
|
| 794 |
+
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    """Set ``result`` on ``fut`` unless the future has already completed."""
    if fut.done():
        return
    fut.set_result(result)
|
| 797 |
+
|
| 798 |
+
|
| 799 |
+
_EXC_SENTINEL = BaseException()
|
| 800 |
+
|
| 801 |
+
|
| 802 |
+
class ErrorableProtocol(Protocol):
    """Structural type for anything accepting ``set_exception``."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None: ...  # pragma: no cover
|
| 808 |
+
|
| 809 |
+
|
| 810 |
+
def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
        Only set if provided.
    """
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause only when a real one was supplied and it is not
    # the exception itself.
    if exc_cause is not _EXC_SENTINEL and exc is not exc_cause:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
|
| 831 |
+
|
| 832 |
+
|
| 833 |
+
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with module name to help deduplicate key names.
        # Walk up the stack to the nearest module-level frame of the caller.
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module: str = frame.f_globals["__name__"]
                break
            frame = frame.f_back
        # NOTE(review): if no "<module>" frame exists, ``module`` is unbound
        # and the next line raises NameError — presumably keys are always
        # created at module level; confirm.

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        # Build a readable type name: unknown, bare builtin, dotted class,
        # or plain repr for non-class type objects.
        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
|
| 878 |
+
|
| 879 |
+
|
| 880 |
+
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only chained view over several mappings.

    Lookups try each mapping in order; the first hit wins, so earlier
    mappings shadow later ones.
    """

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        # The proxy is deliberately final.
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T: ...

    @overload
    def __getitem__(self, key: str) -> Any: ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        # First mapping that contains the key wins.
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any: ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        d: Dict[Union[str, AppKey[Any]], Any] = {}
        # Build in reverse so earlier maps overwrite later ones, matching
        # the lookup precedence in __getitem__.
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        # Truthy when any underlying mapping is non-empty.
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
|
| 941 |
+
|
| 942 |
+
|
| 943 |
+
# https://tools.ietf.org/html/rfc7232#section-2.3
|
| 944 |
+
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
|
| 945 |
+
_ETAGC_RE = re.compile(_ETAGC)
|
| 946 |
+
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
|
| 947 |
+
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
|
| 948 |
+
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
|
| 949 |
+
|
| 950 |
+
ETAG_ANY = "*"
|
| 951 |
+
|
| 952 |
+
|
| 953 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """An HTTP entity tag (RFC 7232 section 2.3)."""

    # The opaque tag without surrounding quotes; is_weak marks a "W/" tag
    # (see the _QUOTED_ETAG pattern above).
    value: str
    is_weak: bool = False
|
| 957 |
+
|
| 958 |
+
|
| 959 |
+
def validate_etag_value(value: str) -> None:
    """Raise ValueError unless ``value`` is ``*`` or a valid etag body."""
    if value == ETAG_ANY:
        return
    if _ETAGC_RE.fullmatch(value):
        return
    raise ValueError(
        f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
    )
|
| 964 |
+
|
| 965 |
+
|
| 966 |
+
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object.

    Returns None when the input is None or cannot be parsed as an
    RFC 2822-style date; successful results are UTC-aware.
    """
    if date_str is None:
        return None
    fields = parsedate(date_str)
    if fields is None:
        return None
    # Out-of-range components raise ValueError; treat them as unparsable.
    with suppress(ValueError):
        return datetime.datetime(*fields[:6], tzinfo=datetime.timezone.utc)
    return None
|
| 974 |
+
|
| 975 |
+
|
| 976 |
+
@functools.lru_cache
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if status_code_must_be_empty_body(code):
        return True
    if method_must_be_empty_body(method):
        return True
    # A successful CONNECT switches to tunnelling; the response has no body.
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
|
| 984 |
+
|
| 985 |
+
|
| 986 |
+
def method_must_be_empty_body(method: str) -> bool:
    """Check if a method must return an empty body."""
    # HEAD responses never carry a payload:
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
    normalized = method.upper()
    return normalized == hdrs.METH_HEAD
|
| 991 |
+
|
| 992 |
+
|
| 993 |
+
def status_code_must_be_empty_body(code: int) -> bool:
    """Check if a status code must return an empty body.

    True for 204 (No Content), 304 (Not Modified) and all 1xx codes.
    """
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    if code == 204 or code == 304:
        return True
    return 100 <= code < 200
|
| 997 |
+
|
| 998 |
+
|
| 999 |
+
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if code in {204, 304} or 100 <= code < 200:
        return True
    # Successful CONNECT responses also must not carry Content-Length.
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
|
parrot/lib/python3.10/site-packages/aiohttp/http.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
from http import HTTPStatus
|
| 3 |
+
from typing import Mapping, Tuple
|
| 4 |
+
|
| 5 |
+
from . import __version__
|
| 6 |
+
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
| 7 |
+
from .http_parser import (
|
| 8 |
+
HeadersParser as HeadersParser,
|
| 9 |
+
HttpParser as HttpParser,
|
| 10 |
+
HttpRequestParser as HttpRequestParser,
|
| 11 |
+
HttpResponseParser as HttpResponseParser,
|
| 12 |
+
RawRequestMessage as RawRequestMessage,
|
| 13 |
+
RawResponseMessage as RawResponseMessage,
|
| 14 |
+
)
|
| 15 |
+
from .http_websocket import (
|
| 16 |
+
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
|
| 17 |
+
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
|
| 18 |
+
WS_KEY as WS_KEY,
|
| 19 |
+
WebSocketError as WebSocketError,
|
| 20 |
+
WebSocketReader as WebSocketReader,
|
| 21 |
+
WebSocketWriter as WebSocketWriter,
|
| 22 |
+
WSCloseCode as WSCloseCode,
|
| 23 |
+
WSMessage as WSMessage,
|
| 24 |
+
WSMsgType as WSMsgType,
|
| 25 |
+
ws_ext_gen as ws_ext_gen,
|
| 26 |
+
ws_ext_parse as ws_ext_parse,
|
| 27 |
+
)
|
| 28 |
+
from .http_writer import (
|
| 29 |
+
HttpVersion as HttpVersion,
|
| 30 |
+
HttpVersion10 as HttpVersion10,
|
| 31 |
+
HttpVersion11 as HttpVersion11,
|
| 32 |
+
StreamWriter as StreamWriter,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
__all__ = (
|
| 36 |
+
"HttpProcessingError",
|
| 37 |
+
"RESPONSES",
|
| 38 |
+
"SERVER_SOFTWARE",
|
| 39 |
+
# .http_writer
|
| 40 |
+
"StreamWriter",
|
| 41 |
+
"HttpVersion",
|
| 42 |
+
"HttpVersion10",
|
| 43 |
+
"HttpVersion11",
|
| 44 |
+
# .http_parser
|
| 45 |
+
"HeadersParser",
|
| 46 |
+
"HttpParser",
|
| 47 |
+
"HttpRequestParser",
|
| 48 |
+
"HttpResponseParser",
|
| 49 |
+
"RawRequestMessage",
|
| 50 |
+
"RawResponseMessage",
|
| 51 |
+
# .http_websocket
|
| 52 |
+
"WS_CLOSED_MESSAGE",
|
| 53 |
+
"WS_CLOSING_MESSAGE",
|
| 54 |
+
"WS_KEY",
|
| 55 |
+
"WebSocketReader",
|
| 56 |
+
"WebSocketWriter",
|
| 57 |
+
"ws_ext_gen",
|
| 58 |
+
"ws_ext_parse",
|
| 59 |
+
"WSMessage",
|
| 60 |
+
"WebSocketError",
|
| 61 |
+
"WSMsgType",
|
| 62 |
+
"WSCloseCode",
|
| 63 |
+
)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
# Identification string combining the running Python version and the
# aiohttp version, e.g. "Python/3.10 aiohttp/3.9.0".
SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

# Status code -> (phrase, description) pairs derived from the stdlib
# http.HTTPStatus enum, e.g. 404 -> ("Not Found", "...").
RESPONSES: Mapping[int, Tuple[str, str]] = {
    v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
}
|
parrot/lib/python3.10/site-packages/aiohttp/http_exceptions.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low-level http related exceptions."""
|
| 2 |
+
|
| 3 |
+
from textwrap import indent
|
| 4 |
+
from typing import Optional, Union
|
| 5 |
+
|
| 6 |
+
from .typedefs import _CIMultiDict
|
| 7 |
+
|
| 8 |
+
__all__ = ("HttpProcessingError",)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; subclasses override these.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        # Only an explicit code overrides the class default.
        if code is not None:
            self.code = code
        self.message = message
        self.headers = headers

    def __str__(self) -> str:
        return f"{self.code}, message:\n{indent(self.message, '  ')}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class BadHttpMessage(HttpProcessingError):
    """Malformed HTTP message; maps to a 400 Bad Request response."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Keep the message in the standard Exception args tuple.
        self.args = (message,)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class HttpBadRequest(BadHttpMessage):
    """Alias of BadHttpMessage with the same 400 code and message."""

    code = 400
    message = "Bad Request"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload encoding errors."""
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""

    # NOTE(review): presumably raised when a Content-Encoding-compressed
    # payload fails to decode; confirm against the payload parser.
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class LineTooLong(BadHttpMessage):
    """A line in the message exceeded the configured size limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        # limit/actual_size are strings so "Unknown" can stand in for either.
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        self.args = (line, limit, actual_size)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class InvalidHeader(BadHttpMessage):
    """A syntactically invalid HTTP header line or header name."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        # Store a str form on .hdr; the error message uses the original
        # value's repr (bytes stay bytes there).
        hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
        super().__init__(f"Invalid HTTP header: {hdr!r}")
        self.hdr = hdr_s
        self.args = (hdr,)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class BadStatusLine(BadHttpMessage):
    """A malformed HTTP status / request line."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        # Defensive: stringify non-str input before embedding it anywhere.
        if not isinstance(line, str):
            line = repr(line)
        super().__init__(error or f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class InvalidURLError(BadHttpMessage):
    """Invalid URL in an HTTP message."""

    pass
|
parrot/lib/python3.10/site-packages/aiohttp/http_parser.py
ADDED
|
@@ -0,0 +1,1038 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import re
|
| 4 |
+
import string
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from enum import IntEnum
|
| 7 |
+
from typing import (
|
| 8 |
+
Any,
|
| 9 |
+
ClassVar,
|
| 10 |
+
Final,
|
| 11 |
+
Generic,
|
| 12 |
+
List,
|
| 13 |
+
Literal,
|
| 14 |
+
NamedTuple,
|
| 15 |
+
Optional,
|
| 16 |
+
Pattern,
|
| 17 |
+
Set,
|
| 18 |
+
Tuple,
|
| 19 |
+
Type,
|
| 20 |
+
TypeVar,
|
| 21 |
+
Union,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
| 25 |
+
from yarl import URL
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .base_protocol import BaseProtocol
|
| 29 |
+
from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
|
| 30 |
+
from .helpers import (
|
| 31 |
+
_EXC_SENTINEL,
|
| 32 |
+
DEBUG,
|
| 33 |
+
NO_EXTENSIONS,
|
| 34 |
+
BaseTimerContext,
|
| 35 |
+
method_must_be_empty_body,
|
| 36 |
+
set_exception,
|
| 37 |
+
status_code_must_be_empty_body,
|
| 38 |
+
)
|
| 39 |
+
from .http_exceptions import (
|
| 40 |
+
BadHttpMessage,
|
| 41 |
+
BadStatusLine,
|
| 42 |
+
ContentEncodingError,
|
| 43 |
+
ContentLengthError,
|
| 44 |
+
InvalidHeader,
|
| 45 |
+
InvalidURLError,
|
| 46 |
+
LineTooLong,
|
| 47 |
+
TransferEncodingError,
|
| 48 |
+
)
|
| 49 |
+
from .http_writer import HttpVersion, HttpVersion10
|
| 50 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 51 |
+
from .typedefs import RawHeaders
|
| 52 |
+
|
| 53 |
+
__all__ = (
|
| 54 |
+
"HeadersParser",
|
| 55 |
+
"HttpParser",
|
| 56 |
+
"HttpRequestParser",
|
| 57 |
+
"HttpResponseParser",
|
| 58 |
+
"RawRequestMessage",
|
| 59 |
+
"RawResponseMessage",
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
_SEP = Literal[b"\r\n", b"\n"]
|
| 63 |
+
|
| 64 |
+
ASCIISET: Final[Set[str]] = set(string.printable)
|
| 65 |
+
|
| 66 |
+
# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
|
| 67 |
+
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
|
| 68 |
+
#
|
| 69 |
+
# method = token
|
| 70 |
+
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
|
| 71 |
+
# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
|
| 72 |
+
# token = 1*tchar
|
| 73 |
+
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
|
| 74 |
+
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
|
| 75 |
+
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
|
| 76 |
+
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
|
| 77 |
+
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class RawRequestMessage(NamedTuple):
    """Immutable result of parsing an HTTP request head (start line + headers)."""

    # NOTE(review): the flag fields (should_close, compression, upgrade,
    # chunked) are populated by the message parser, which is outside this
    # view; their exact semantics are defined there.
    method: str
    path: str
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
    url: URL
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class RawResponseMessage(NamedTuple):
    """Immutable result of parsing an HTTP response head (status line + headers)."""

    # NOTE(review): flag fields mirror RawRequestMessage and are populated
    # by the response parser (not fully visible here).
    version: HttpVersion
    code: int
    reason: str
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders
    should_close: bool
    compression: Optional[str]
    upgrade: bool
    chunked: bool
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
class ParseState(IntEnum):
    """How the payload of the current HTTP message is framed."""

    PARSE_NONE = 0  # no payload expected
    PARSE_LENGTH = 1  # framed by Content-Length
    PARSE_CHUNKED = 2  # chunked transfer-encoding
    PARSE_UNTIL_EOF = 3  # body runs until the connection closes
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class ChunkState(IntEnum):
|
| 117 |
+
PARSE_CHUNKED_SIZE = 0
|
| 118 |
+
PARSE_CHUNKED_CHUNK = 1
|
| 119 |
+
PARSE_CHUNKED_CHUNK_EOF = 2
|
| 120 |
+
PARSE_MAYBE_TRAILERS = 3
|
| 121 |
+
PARSE_TRAILERS = 4
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
class HeadersParser:
|
| 125 |
+
def __init__(
|
| 126 |
+
self,
|
| 127 |
+
max_line_size: int = 8190,
|
| 128 |
+
max_headers: int = 32768,
|
| 129 |
+
max_field_size: int = 8190,
|
| 130 |
+
lax: bool = False,
|
| 131 |
+
) -> None:
|
| 132 |
+
self.max_line_size = max_line_size
|
| 133 |
+
self.max_headers = max_headers
|
| 134 |
+
self.max_field_size = max_field_size
|
| 135 |
+
self._lax = lax
|
| 136 |
+
|
| 137 |
+
def parse_headers(
|
| 138 |
+
self, lines: List[bytes]
|
| 139 |
+
) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
|
| 140 |
+
headers: CIMultiDict[str] = CIMultiDict()
|
| 141 |
+
# note: "raw" does not mean inclusion of OWS before/after the field value
|
| 142 |
+
raw_headers = []
|
| 143 |
+
|
| 144 |
+
lines_idx = 1
|
| 145 |
+
line = lines[1]
|
| 146 |
+
line_count = len(lines)
|
| 147 |
+
|
| 148 |
+
while line:
|
| 149 |
+
# Parse initial header name : value pair.
|
| 150 |
+
try:
|
| 151 |
+
bname, bvalue = line.split(b":", 1)
|
| 152 |
+
except ValueError:
|
| 153 |
+
raise InvalidHeader(line) from None
|
| 154 |
+
|
| 155 |
+
if len(bname) == 0:
|
| 156 |
+
raise InvalidHeader(bname)
|
| 157 |
+
|
| 158 |
+
# https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
|
| 159 |
+
if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
|
| 160 |
+
raise InvalidHeader(line)
|
| 161 |
+
|
| 162 |
+
bvalue = bvalue.lstrip(b" \t")
|
| 163 |
+
if len(bname) > self.max_field_size:
|
| 164 |
+
raise LineTooLong(
|
| 165 |
+
"request header name {}".format(
|
| 166 |
+
bname.decode("utf8", "backslashreplace")
|
| 167 |
+
),
|
| 168 |
+
str(self.max_field_size),
|
| 169 |
+
str(len(bname)),
|
| 170 |
+
)
|
| 171 |
+
name = bname.decode("utf-8", "surrogateescape")
|
| 172 |
+
if not TOKENRE.fullmatch(name):
|
| 173 |
+
raise InvalidHeader(bname)
|
| 174 |
+
|
| 175 |
+
header_length = len(bvalue)
|
| 176 |
+
|
| 177 |
+
# next line
|
| 178 |
+
lines_idx += 1
|
| 179 |
+
line = lines[lines_idx]
|
| 180 |
+
|
| 181 |
+
# consume continuation lines
|
| 182 |
+
continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t')
|
| 183 |
+
|
| 184 |
+
# Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
|
| 185 |
+
if continuation:
|
| 186 |
+
bvalue_lst = [bvalue]
|
| 187 |
+
while continuation:
|
| 188 |
+
header_length += len(line)
|
| 189 |
+
if header_length > self.max_field_size:
|
| 190 |
+
raise LineTooLong(
|
| 191 |
+
"request header field {}".format(
|
| 192 |
+
bname.decode("utf8", "backslashreplace")
|
| 193 |
+
),
|
| 194 |
+
str(self.max_field_size),
|
| 195 |
+
str(header_length),
|
| 196 |
+
)
|
| 197 |
+
bvalue_lst.append(line)
|
| 198 |
+
|
| 199 |
+
# next line
|
| 200 |
+
lines_idx += 1
|
| 201 |
+
if lines_idx < line_count:
|
| 202 |
+
line = lines[lines_idx]
|
| 203 |
+
if line:
|
| 204 |
+
continuation = line[0] in (32, 9) # (' ', '\t')
|
| 205 |
+
else:
|
| 206 |
+
line = b""
|
| 207 |
+
break
|
| 208 |
+
bvalue = b"".join(bvalue_lst)
|
| 209 |
+
else:
|
| 210 |
+
if header_length > self.max_field_size:
|
| 211 |
+
raise LineTooLong(
|
| 212 |
+
"request header field {}".format(
|
| 213 |
+
bname.decode("utf8", "backslashreplace")
|
| 214 |
+
),
|
| 215 |
+
str(self.max_field_size),
|
| 216 |
+
str(header_length),
|
| 217 |
+
)
|
| 218 |
+
|
| 219 |
+
bvalue = bvalue.strip(b" \t")
|
| 220 |
+
value = bvalue.decode("utf-8", "surrogateescape")
|
| 221 |
+
|
| 222 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
|
| 223 |
+
if "\n" in value or "\r" in value or "\x00" in value:
|
| 224 |
+
raise InvalidHeader(bvalue)
|
| 225 |
+
|
| 226 |
+
headers.add(name, value)
|
| 227 |
+
raw_headers.append((bname, bvalue))
|
| 228 |
+
|
| 229 |
+
return (CIMultiDictProxy(headers), tuple(raw_headers))
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
|
| 233 |
+
"""Check if the upgrade header is supported."""
|
| 234 |
+
return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class HttpParser(abc.ABC, Generic[_MsgT]):
|
| 238 |
+
lax: ClassVar[bool] = False
|
| 239 |
+
|
| 240 |
+
def __init__(
|
| 241 |
+
self,
|
| 242 |
+
protocol: Optional[BaseProtocol] = None,
|
| 243 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 244 |
+
limit: int = 2**16,
|
| 245 |
+
max_line_size: int = 8190,
|
| 246 |
+
max_headers: int = 32768,
|
| 247 |
+
max_field_size: int = 8190,
|
| 248 |
+
timer: Optional[BaseTimerContext] = None,
|
| 249 |
+
code: Optional[int] = None,
|
| 250 |
+
method: Optional[str] = None,
|
| 251 |
+
payload_exception: Optional[Type[BaseException]] = None,
|
| 252 |
+
response_with_body: bool = True,
|
| 253 |
+
read_until_eof: bool = False,
|
| 254 |
+
auto_decompress: bool = True,
|
| 255 |
+
) -> None:
|
| 256 |
+
self.protocol = protocol
|
| 257 |
+
self.loop = loop
|
| 258 |
+
self.max_line_size = max_line_size
|
| 259 |
+
self.max_headers = max_headers
|
| 260 |
+
self.max_field_size = max_field_size
|
| 261 |
+
self.timer = timer
|
| 262 |
+
self.code = code
|
| 263 |
+
self.method = method
|
| 264 |
+
self.payload_exception = payload_exception
|
| 265 |
+
self.response_with_body = response_with_body
|
| 266 |
+
self.read_until_eof = read_until_eof
|
| 267 |
+
|
| 268 |
+
self._lines: List[bytes] = []
|
| 269 |
+
self._tail = b""
|
| 270 |
+
self._upgraded = False
|
| 271 |
+
self._payload = None
|
| 272 |
+
self._payload_parser: Optional[HttpPayloadParser] = None
|
| 273 |
+
self._auto_decompress = auto_decompress
|
| 274 |
+
self._limit = limit
|
| 275 |
+
self._headers_parser = HeadersParser(
|
| 276 |
+
max_line_size, max_headers, max_field_size, self.lax
|
| 277 |
+
)
|
| 278 |
+
|
| 279 |
+
@abc.abstractmethod
|
| 280 |
+
def parse_message(self, lines: List[bytes]) -> _MsgT: ...
|
| 281 |
+
|
| 282 |
+
@abc.abstractmethod
|
| 283 |
+
def _is_chunked_te(self, te: str) -> bool: ...
|
| 284 |
+
|
| 285 |
+
def feed_eof(self) -> Optional[_MsgT]:
|
| 286 |
+
if self._payload_parser is not None:
|
| 287 |
+
self._payload_parser.feed_eof()
|
| 288 |
+
self._payload_parser = None
|
| 289 |
+
else:
|
| 290 |
+
# try to extract partial message
|
| 291 |
+
if self._tail:
|
| 292 |
+
self._lines.append(self._tail)
|
| 293 |
+
|
| 294 |
+
if self._lines:
|
| 295 |
+
if self._lines[-1] != "\r\n":
|
| 296 |
+
self._lines.append(b"")
|
| 297 |
+
with suppress(Exception):
|
| 298 |
+
return self.parse_message(self._lines)
|
| 299 |
+
return None
|
| 300 |
+
|
| 301 |
+
def feed_data(
|
| 302 |
+
self,
|
| 303 |
+
data: bytes,
|
| 304 |
+
SEP: _SEP = b"\r\n",
|
| 305 |
+
EMPTY: bytes = b"",
|
| 306 |
+
CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
|
| 307 |
+
METH_CONNECT: str = hdrs.METH_CONNECT,
|
| 308 |
+
SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
|
| 309 |
+
) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
|
| 310 |
+
|
| 311 |
+
messages = []
|
| 312 |
+
|
| 313 |
+
if self._tail:
|
| 314 |
+
data, self._tail = self._tail + data, b""
|
| 315 |
+
|
| 316 |
+
data_len = len(data)
|
| 317 |
+
start_pos = 0
|
| 318 |
+
loop = self.loop
|
| 319 |
+
|
| 320 |
+
should_close = False
|
| 321 |
+
while start_pos < data_len:
|
| 322 |
+
|
| 323 |
+
# read HTTP message (request/response line + headers), \r\n\r\n
|
| 324 |
+
# and split by lines
|
| 325 |
+
if self._payload_parser is None and not self._upgraded:
|
| 326 |
+
pos = data.find(SEP, start_pos)
|
| 327 |
+
# consume \r\n
|
| 328 |
+
if pos == start_pos and not self._lines:
|
| 329 |
+
start_pos = pos + len(SEP)
|
| 330 |
+
continue
|
| 331 |
+
|
| 332 |
+
if pos >= start_pos:
|
| 333 |
+
if should_close:
|
| 334 |
+
raise BadHttpMessage("Data after `Connection: close`")
|
| 335 |
+
|
| 336 |
+
# line found
|
| 337 |
+
line = data[start_pos:pos]
|
| 338 |
+
if SEP == b"\n": # For lax response parsing
|
| 339 |
+
line = line.rstrip(b"\r")
|
| 340 |
+
self._lines.append(line)
|
| 341 |
+
start_pos = pos + len(SEP)
|
| 342 |
+
|
| 343 |
+
# \r\n\r\n found
|
| 344 |
+
if self._lines[-1] == EMPTY:
|
| 345 |
+
try:
|
| 346 |
+
msg: _MsgT = self.parse_message(self._lines)
|
| 347 |
+
finally:
|
| 348 |
+
self._lines.clear()
|
| 349 |
+
|
| 350 |
+
def get_content_length() -> Optional[int]:
|
| 351 |
+
# payload length
|
| 352 |
+
length_hdr = msg.headers.get(CONTENT_LENGTH)
|
| 353 |
+
if length_hdr is None:
|
| 354 |
+
return None
|
| 355 |
+
|
| 356 |
+
# Shouldn't allow +/- or other number formats.
|
| 357 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
|
| 358 |
+
# msg.headers is already stripped of leading/trailing wsp
|
| 359 |
+
if not DIGITS.fullmatch(length_hdr):
|
| 360 |
+
raise InvalidHeader(CONTENT_LENGTH)
|
| 361 |
+
|
| 362 |
+
return int(length_hdr)
|
| 363 |
+
|
| 364 |
+
length = get_content_length()
|
| 365 |
+
# do not support old websocket spec
|
| 366 |
+
if SEC_WEBSOCKET_KEY1 in msg.headers:
|
| 367 |
+
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
| 368 |
+
|
| 369 |
+
self._upgraded = msg.upgrade and _is_supported_upgrade(
|
| 370 |
+
msg.headers
|
| 371 |
+
)
|
| 372 |
+
|
| 373 |
+
method = getattr(msg, "method", self.method)
|
| 374 |
+
# code is only present on responses
|
| 375 |
+
code = getattr(msg, "code", 0)
|
| 376 |
+
|
| 377 |
+
assert self.protocol is not None
|
| 378 |
+
# calculate payload
|
| 379 |
+
empty_body = status_code_must_be_empty_body(code) or bool(
|
| 380 |
+
method and method_must_be_empty_body(method)
|
| 381 |
+
)
|
| 382 |
+
if not empty_body and (
|
| 383 |
+
((length is not None and length > 0) or msg.chunked)
|
| 384 |
+
and not self._upgraded
|
| 385 |
+
):
|
| 386 |
+
payload = StreamReader(
|
| 387 |
+
self.protocol,
|
| 388 |
+
timer=self.timer,
|
| 389 |
+
loop=loop,
|
| 390 |
+
limit=self._limit,
|
| 391 |
+
)
|
| 392 |
+
payload_parser = HttpPayloadParser(
|
| 393 |
+
payload,
|
| 394 |
+
length=length,
|
| 395 |
+
chunked=msg.chunked,
|
| 396 |
+
method=method,
|
| 397 |
+
compression=msg.compression,
|
| 398 |
+
code=self.code,
|
| 399 |
+
response_with_body=self.response_with_body,
|
| 400 |
+
auto_decompress=self._auto_decompress,
|
| 401 |
+
lax=self.lax,
|
| 402 |
+
)
|
| 403 |
+
if not payload_parser.done:
|
| 404 |
+
self._payload_parser = payload_parser
|
| 405 |
+
elif method == METH_CONNECT:
|
| 406 |
+
assert isinstance(msg, RawRequestMessage)
|
| 407 |
+
payload = StreamReader(
|
| 408 |
+
self.protocol,
|
| 409 |
+
timer=self.timer,
|
| 410 |
+
loop=loop,
|
| 411 |
+
limit=self._limit,
|
| 412 |
+
)
|
| 413 |
+
self._upgraded = True
|
| 414 |
+
self._payload_parser = HttpPayloadParser(
|
| 415 |
+
payload,
|
| 416 |
+
method=msg.method,
|
| 417 |
+
compression=msg.compression,
|
| 418 |
+
auto_decompress=self._auto_decompress,
|
| 419 |
+
lax=self.lax,
|
| 420 |
+
)
|
| 421 |
+
elif not empty_body and length is None and self.read_until_eof:
|
| 422 |
+
payload = StreamReader(
|
| 423 |
+
self.protocol,
|
| 424 |
+
timer=self.timer,
|
| 425 |
+
loop=loop,
|
| 426 |
+
limit=self._limit,
|
| 427 |
+
)
|
| 428 |
+
payload_parser = HttpPayloadParser(
|
| 429 |
+
payload,
|
| 430 |
+
length=length,
|
| 431 |
+
chunked=msg.chunked,
|
| 432 |
+
method=method,
|
| 433 |
+
compression=msg.compression,
|
| 434 |
+
code=self.code,
|
| 435 |
+
response_with_body=self.response_with_body,
|
| 436 |
+
auto_decompress=self._auto_decompress,
|
| 437 |
+
lax=self.lax,
|
| 438 |
+
)
|
| 439 |
+
if not payload_parser.done:
|
| 440 |
+
self._payload_parser = payload_parser
|
| 441 |
+
else:
|
| 442 |
+
payload = EMPTY_PAYLOAD
|
| 443 |
+
|
| 444 |
+
messages.append((msg, payload))
|
| 445 |
+
should_close = msg.should_close
|
| 446 |
+
else:
|
| 447 |
+
self._tail = data[start_pos:]
|
| 448 |
+
data = EMPTY
|
| 449 |
+
break
|
| 450 |
+
|
| 451 |
+
# no parser, just store
|
| 452 |
+
elif self._payload_parser is None and self._upgraded:
|
| 453 |
+
assert not self._lines
|
| 454 |
+
break
|
| 455 |
+
|
| 456 |
+
# feed payload
|
| 457 |
+
elif data and start_pos < data_len:
|
| 458 |
+
assert not self._lines
|
| 459 |
+
assert self._payload_parser is not None
|
| 460 |
+
try:
|
| 461 |
+
eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
|
| 462 |
+
except BaseException as underlying_exc:
|
| 463 |
+
reraised_exc = underlying_exc
|
| 464 |
+
if self.payload_exception is not None:
|
| 465 |
+
reraised_exc = self.payload_exception(str(underlying_exc))
|
| 466 |
+
|
| 467 |
+
set_exception(
|
| 468 |
+
self._payload_parser.payload,
|
| 469 |
+
reraised_exc,
|
| 470 |
+
underlying_exc,
|
| 471 |
+
)
|
| 472 |
+
|
| 473 |
+
eof = True
|
| 474 |
+
data = b""
|
| 475 |
+
|
| 476 |
+
if eof:
|
| 477 |
+
start_pos = 0
|
| 478 |
+
data_len = len(data)
|
| 479 |
+
self._payload_parser = None
|
| 480 |
+
continue
|
| 481 |
+
else:
|
| 482 |
+
break
|
| 483 |
+
|
| 484 |
+
if data and start_pos < data_len:
|
| 485 |
+
data = data[start_pos:]
|
| 486 |
+
else:
|
| 487 |
+
data = EMPTY
|
| 488 |
+
|
| 489 |
+
return messages, self._upgraded, data
|
| 490 |
+
|
| 491 |
+
def parse_headers(
|
| 492 |
+
self, lines: List[bytes]
|
| 493 |
+
) -> Tuple[
|
| 494 |
+
"CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
|
| 495 |
+
]:
|
| 496 |
+
"""Parses RFC 5322 headers from a stream.
|
| 497 |
+
|
| 498 |
+
Line continuations are supported. Returns list of header name
|
| 499 |
+
and value pairs. Header name is in upper case.
|
| 500 |
+
"""
|
| 501 |
+
headers, raw_headers = self._headers_parser.parse_headers(lines)
|
| 502 |
+
close_conn = None
|
| 503 |
+
encoding = None
|
| 504 |
+
upgrade = False
|
| 505 |
+
chunked = False
|
| 506 |
+
|
| 507 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
|
| 508 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
|
| 509 |
+
singletons = (
|
| 510 |
+
hdrs.CONTENT_LENGTH,
|
| 511 |
+
hdrs.CONTENT_LOCATION,
|
| 512 |
+
hdrs.CONTENT_RANGE,
|
| 513 |
+
hdrs.CONTENT_TYPE,
|
| 514 |
+
hdrs.ETAG,
|
| 515 |
+
hdrs.HOST,
|
| 516 |
+
hdrs.MAX_FORWARDS,
|
| 517 |
+
hdrs.SERVER,
|
| 518 |
+
hdrs.TRANSFER_ENCODING,
|
| 519 |
+
hdrs.USER_AGENT,
|
| 520 |
+
)
|
| 521 |
+
bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
|
| 522 |
+
if bad_hdr is not None:
|
| 523 |
+
raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
|
| 524 |
+
|
| 525 |
+
# keep-alive
|
| 526 |
+
conn = headers.get(hdrs.CONNECTION)
|
| 527 |
+
if conn:
|
| 528 |
+
v = conn.lower()
|
| 529 |
+
if v == "close":
|
| 530 |
+
close_conn = True
|
| 531 |
+
elif v == "keep-alive":
|
| 532 |
+
close_conn = False
|
| 533 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
|
| 534 |
+
elif v == "upgrade" and headers.get(hdrs.UPGRADE):
|
| 535 |
+
upgrade = True
|
| 536 |
+
|
| 537 |
+
# encoding
|
| 538 |
+
enc = headers.get(hdrs.CONTENT_ENCODING)
|
| 539 |
+
if enc:
|
| 540 |
+
enc = enc.lower()
|
| 541 |
+
if enc in ("gzip", "deflate", "br"):
|
| 542 |
+
encoding = enc
|
| 543 |
+
|
| 544 |
+
# chunking
|
| 545 |
+
te = headers.get(hdrs.TRANSFER_ENCODING)
|
| 546 |
+
if te is not None:
|
| 547 |
+
if self._is_chunked_te(te):
|
| 548 |
+
chunked = True
|
| 549 |
+
|
| 550 |
+
if hdrs.CONTENT_LENGTH in headers:
|
| 551 |
+
raise BadHttpMessage(
|
| 552 |
+
"Transfer-Encoding can't be present with Content-Length",
|
| 553 |
+
)
|
| 554 |
+
|
| 555 |
+
return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
|
| 556 |
+
|
| 557 |
+
def set_upgraded(self, val: bool) -> None:
|
| 558 |
+
"""Set connection upgraded (to websocket) mode.
|
| 559 |
+
|
| 560 |
+
:param bool val: new state.
|
| 561 |
+
"""
|
| 562 |
+
self._upgraded = val
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
|
| 566 |
+
"""Read request status line.
|
| 567 |
+
|
| 568 |
+
Exception .http_exceptions.BadStatusLine
|
| 569 |
+
could be raised in case of any errors in status line.
|
| 570 |
+
Returns RawRequestMessage.
|
| 571 |
+
"""
|
| 572 |
+
|
| 573 |
+
def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
|
| 574 |
+
# request line
|
| 575 |
+
line = lines[0].decode("utf-8", "surrogateescape")
|
| 576 |
+
try:
|
| 577 |
+
method, path, version = line.split(" ", maxsplit=2)
|
| 578 |
+
except ValueError:
|
| 579 |
+
raise BadStatusLine(line) from None
|
| 580 |
+
|
| 581 |
+
if len(path) > self.max_line_size:
|
| 582 |
+
raise LineTooLong(
|
| 583 |
+
"Status line is too long", str(self.max_line_size), str(len(path))
|
| 584 |
+
)
|
| 585 |
+
|
| 586 |
+
# method
|
| 587 |
+
if not TOKENRE.fullmatch(method):
|
| 588 |
+
raise BadStatusLine(method)
|
| 589 |
+
|
| 590 |
+
# version
|
| 591 |
+
match = VERSRE.fullmatch(version)
|
| 592 |
+
if match is None:
|
| 593 |
+
raise BadStatusLine(line)
|
| 594 |
+
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
|
| 595 |
+
|
| 596 |
+
if method == "CONNECT":
|
| 597 |
+
# authority-form,
|
| 598 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
|
| 599 |
+
url = URL.build(authority=path, encoded=True)
|
| 600 |
+
elif path.startswith("/"):
|
| 601 |
+
# origin-form,
|
| 602 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
|
| 603 |
+
path_part, _hash_separator, url_fragment = path.partition("#")
|
| 604 |
+
path_part, _question_mark_separator, qs_part = path_part.partition("?")
|
| 605 |
+
|
| 606 |
+
# NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
|
| 607 |
+
# NOTE: parser does, otherwise it results into the same
|
| 608 |
+
# NOTE: HTTP Request-Line input producing different
|
| 609 |
+
# NOTE: `yarl.URL()` objects
|
| 610 |
+
url = URL.build(
|
| 611 |
+
path=path_part,
|
| 612 |
+
query_string=qs_part,
|
| 613 |
+
fragment=url_fragment,
|
| 614 |
+
encoded=True,
|
| 615 |
+
)
|
| 616 |
+
elif path == "*" and method == "OPTIONS":
|
| 617 |
+
# asterisk-form,
|
| 618 |
+
url = URL(path, encoded=True)
|
| 619 |
+
else:
|
| 620 |
+
# absolute-form for proxy maybe,
|
| 621 |
+
# https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
|
| 622 |
+
url = URL(path, encoded=True)
|
| 623 |
+
if url.scheme == "":
|
| 624 |
+
# not absolute-form
|
| 625 |
+
raise InvalidURLError(
|
| 626 |
+
path.encode(errors="surrogateescape").decode("latin1")
|
| 627 |
+
)
|
| 628 |
+
|
| 629 |
+
# read headers
|
| 630 |
+
(
|
| 631 |
+
headers,
|
| 632 |
+
raw_headers,
|
| 633 |
+
close,
|
| 634 |
+
compression,
|
| 635 |
+
upgrade,
|
| 636 |
+
chunked,
|
| 637 |
+
) = self.parse_headers(lines)
|
| 638 |
+
|
| 639 |
+
if close is None: # then the headers weren't set in the request
|
| 640 |
+
if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close
|
| 641 |
+
close = True
|
| 642 |
+
else: # HTTP 1.1 must ask to close.
|
| 643 |
+
close = False
|
| 644 |
+
|
| 645 |
+
return RawRequestMessage(
|
| 646 |
+
method,
|
| 647 |
+
path,
|
| 648 |
+
version_o,
|
| 649 |
+
headers,
|
| 650 |
+
raw_headers,
|
| 651 |
+
close,
|
| 652 |
+
compression,
|
| 653 |
+
upgrade,
|
| 654 |
+
chunked,
|
| 655 |
+
url,
|
| 656 |
+
)
|
| 657 |
+
|
| 658 |
+
def _is_chunked_te(self, te: str) -> bool:
|
| 659 |
+
if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
|
| 660 |
+
return True
|
| 661 |
+
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
|
| 662 |
+
raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
|
| 663 |
+
|
| 664 |
+
|
| 665 |
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
|
| 666 |
+
"""Read response status line and headers.
|
| 667 |
+
|
| 668 |
+
BadStatusLine could be raised in case of any errors in status line.
|
| 669 |
+
Returns RawResponseMessage.
|
| 670 |
+
"""
|
| 671 |
+
|
| 672 |
+
# Lax mode should only be enabled on response parser.
|
| 673 |
+
lax = not DEBUG
|
| 674 |
+
|
| 675 |
+
def feed_data(
|
| 676 |
+
self,
|
| 677 |
+
data: bytes,
|
| 678 |
+
SEP: Optional[_SEP] = None,
|
| 679 |
+
*args: Any,
|
| 680 |
+
**kwargs: Any,
|
| 681 |
+
) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
|
| 682 |
+
if SEP is None:
|
| 683 |
+
SEP = b"\r\n" if DEBUG else b"\n"
|
| 684 |
+
return super().feed_data(data, SEP, *args, **kwargs)
|
| 685 |
+
|
| 686 |
+
def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
|
| 687 |
+
line = lines[0].decode("utf-8", "surrogateescape")
|
| 688 |
+
try:
|
| 689 |
+
version, status = line.split(maxsplit=1)
|
| 690 |
+
except ValueError:
|
| 691 |
+
raise BadStatusLine(line) from None
|
| 692 |
+
|
| 693 |
+
try:
|
| 694 |
+
status, reason = status.split(maxsplit=1)
|
| 695 |
+
except ValueError:
|
| 696 |
+
status = status.strip()
|
| 697 |
+
reason = ""
|
| 698 |
+
|
| 699 |
+
if len(reason) > self.max_line_size:
|
| 700 |
+
raise LineTooLong(
|
| 701 |
+
"Status line is too long", str(self.max_line_size), str(len(reason))
|
| 702 |
+
)
|
| 703 |
+
|
| 704 |
+
# version
|
| 705 |
+
match = VERSRE.fullmatch(version)
|
| 706 |
+
if match is None:
|
| 707 |
+
raise BadStatusLine(line)
|
| 708 |
+
version_o = HttpVersion(int(match.group(1)), int(match.group(2)))
|
| 709 |
+
|
| 710 |
+
# The status code is a three-digit ASCII number, no padding
|
| 711 |
+
if len(status) != 3 or not DIGITS.fullmatch(status):
|
| 712 |
+
raise BadStatusLine(line)
|
| 713 |
+
status_i = int(status)
|
| 714 |
+
|
| 715 |
+
# read headers
|
| 716 |
+
(
|
| 717 |
+
headers,
|
| 718 |
+
raw_headers,
|
| 719 |
+
close,
|
| 720 |
+
compression,
|
| 721 |
+
upgrade,
|
| 722 |
+
chunked,
|
| 723 |
+
) = self.parse_headers(lines)
|
| 724 |
+
|
| 725 |
+
if close is None:
|
| 726 |
+
if version_o <= HttpVersion10:
|
| 727 |
+
close = True
|
| 728 |
+
# https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
|
| 729 |
+
elif 100 <= status_i < 200 or status_i in {204, 304}:
|
| 730 |
+
close = False
|
| 731 |
+
elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
|
| 732 |
+
close = False
|
| 733 |
+
else:
|
| 734 |
+
# https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
|
| 735 |
+
close = True
|
| 736 |
+
|
| 737 |
+
return RawResponseMessage(
|
| 738 |
+
version_o,
|
| 739 |
+
status_i,
|
| 740 |
+
reason.strip(),
|
| 741 |
+
headers,
|
| 742 |
+
raw_headers,
|
| 743 |
+
close,
|
| 744 |
+
compression,
|
| 745 |
+
upgrade,
|
| 746 |
+
chunked,
|
| 747 |
+
)
|
| 748 |
+
|
| 749 |
+
def _is_chunked_te(self, te: str) -> bool:
|
| 750 |
+
# https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
|
| 751 |
+
return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
|
| 752 |
+
|
| 753 |
+
|
| 754 |
+
class HttpPayloadParser:
|
| 755 |
+
def __init__(
|
| 756 |
+
self,
|
| 757 |
+
payload: StreamReader,
|
| 758 |
+
length: Optional[int] = None,
|
| 759 |
+
chunked: bool = False,
|
| 760 |
+
compression: Optional[str] = None,
|
| 761 |
+
code: Optional[int] = None,
|
| 762 |
+
method: Optional[str] = None,
|
| 763 |
+
response_with_body: bool = True,
|
| 764 |
+
auto_decompress: bool = True,
|
| 765 |
+
lax: bool = False,
|
| 766 |
+
) -> None:
|
| 767 |
+
self._length = 0
|
| 768 |
+
self._type = ParseState.PARSE_UNTIL_EOF
|
| 769 |
+
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
|
| 770 |
+
self._chunk_size = 0
|
| 771 |
+
self._chunk_tail = b""
|
| 772 |
+
self._auto_decompress = auto_decompress
|
| 773 |
+
self._lax = lax
|
| 774 |
+
self.done = False
|
| 775 |
+
|
| 776 |
+
# payload decompression wrapper
|
| 777 |
+
if response_with_body and compression and self._auto_decompress:
|
| 778 |
+
real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
|
| 779 |
+
payload, compression
|
| 780 |
+
)
|
| 781 |
+
else:
|
| 782 |
+
real_payload = payload
|
| 783 |
+
|
| 784 |
+
# payload parser
|
| 785 |
+
if not response_with_body:
|
| 786 |
+
# don't parse payload if it's not expected to be received
|
| 787 |
+
self._type = ParseState.PARSE_NONE
|
| 788 |
+
real_payload.feed_eof()
|
| 789 |
+
self.done = True
|
| 790 |
+
elif chunked:
|
| 791 |
+
self._type = ParseState.PARSE_CHUNKED
|
| 792 |
+
elif length is not None:
|
| 793 |
+
self._type = ParseState.PARSE_LENGTH
|
| 794 |
+
self._length = length
|
| 795 |
+
if self._length == 0:
|
| 796 |
+
real_payload.feed_eof()
|
| 797 |
+
self.done = True
|
| 798 |
+
|
| 799 |
+
self.payload = real_payload
|
| 800 |
+
|
| 801 |
+
def feed_eof(self) -> None:
|
| 802 |
+
if self._type == ParseState.PARSE_UNTIL_EOF:
|
| 803 |
+
self.payload.feed_eof()
|
| 804 |
+
elif self._type == ParseState.PARSE_LENGTH:
|
| 805 |
+
raise ContentLengthError(
|
| 806 |
+
"Not enough data for satisfy content length header."
|
| 807 |
+
)
|
| 808 |
+
elif self._type == ParseState.PARSE_CHUNKED:
|
| 809 |
+
raise TransferEncodingError(
|
| 810 |
+
"Not enough data for satisfy transfer length header."
|
| 811 |
+
)
|
| 812 |
+
|
| 813 |
+
def feed_data(
|
| 814 |
+
self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
|
| 815 |
+
) -> Tuple[bool, bytes]:
|
| 816 |
+
# Read specified amount of bytes
|
| 817 |
+
if self._type == ParseState.PARSE_LENGTH:
|
| 818 |
+
required = self._length
|
| 819 |
+
chunk_len = len(chunk)
|
| 820 |
+
|
| 821 |
+
if required >= chunk_len:
|
| 822 |
+
self._length = required - chunk_len
|
| 823 |
+
self.payload.feed_data(chunk, chunk_len)
|
| 824 |
+
if self._length == 0:
|
| 825 |
+
self.payload.feed_eof()
|
| 826 |
+
return True, b""
|
| 827 |
+
else:
|
| 828 |
+
self._length = 0
|
| 829 |
+
self.payload.feed_data(chunk[:required], required)
|
| 830 |
+
self.payload.feed_eof()
|
| 831 |
+
return True, chunk[required:]
|
| 832 |
+
|
| 833 |
+
# Chunked transfer encoding parser
|
| 834 |
+
elif self._type == ParseState.PARSE_CHUNKED:
|
| 835 |
+
if self._chunk_tail:
|
| 836 |
+
chunk = self._chunk_tail + chunk
|
| 837 |
+
self._chunk_tail = b""
|
| 838 |
+
|
| 839 |
+
while chunk:
|
| 840 |
+
|
| 841 |
+
# read next chunk size
|
| 842 |
+
if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
|
| 843 |
+
pos = chunk.find(SEP)
|
| 844 |
+
if pos >= 0:
|
| 845 |
+
i = chunk.find(CHUNK_EXT, 0, pos)
|
| 846 |
+
if i >= 0:
|
| 847 |
+
size_b = chunk[:i] # strip chunk-extensions
|
| 848 |
+
else:
|
| 849 |
+
size_b = chunk[:pos]
|
| 850 |
+
|
| 851 |
+
if self._lax: # Allow whitespace in lax mode.
|
| 852 |
+
size_b = size_b.strip()
|
| 853 |
+
|
| 854 |
+
if not re.fullmatch(HEXDIGITS, size_b):
|
| 855 |
+
exc = TransferEncodingError(
|
| 856 |
+
chunk[:pos].decode("ascii", "surrogateescape")
|
| 857 |
+
)
|
| 858 |
+
set_exception(self.payload, exc)
|
| 859 |
+
raise exc
|
| 860 |
+
size = int(bytes(size_b), 16)
|
| 861 |
+
|
| 862 |
+
chunk = chunk[pos + len(SEP) :]
|
| 863 |
+
if size == 0: # eof marker
|
| 864 |
+
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
|
| 865 |
+
if self._lax and chunk.startswith(b"\r"):
|
| 866 |
+
chunk = chunk[1:]
|
| 867 |
+
else:
|
| 868 |
+
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
|
| 869 |
+
self._chunk_size = size
|
| 870 |
+
self.payload.begin_http_chunk_receiving()
|
| 871 |
+
else:
|
| 872 |
+
self._chunk_tail = chunk
|
| 873 |
+
return False, b""
|
| 874 |
+
|
| 875 |
+
# read chunk and feed buffer
|
| 876 |
+
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
|
| 877 |
+
required = self._chunk_size
|
| 878 |
+
chunk_len = len(chunk)
|
| 879 |
+
|
| 880 |
+
if required > chunk_len:
|
| 881 |
+
self._chunk_size = required - chunk_len
|
| 882 |
+
self.payload.feed_data(chunk, chunk_len)
|
| 883 |
+
return False, b""
|
| 884 |
+
else:
|
| 885 |
+
self._chunk_size = 0
|
| 886 |
+
self.payload.feed_data(chunk[:required], required)
|
| 887 |
+
chunk = chunk[required:]
|
| 888 |
+
self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
|
| 889 |
+
self.payload.end_http_chunk_receiving()
|
| 890 |
+
|
| 891 |
+
# toss the CRLF at the end of the chunk
|
| 892 |
+
if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
|
| 893 |
+
if self._lax and chunk.startswith(b"\r"):
|
| 894 |
+
chunk = chunk[1:]
|
| 895 |
+
if chunk[: len(SEP)] == SEP:
|
| 896 |
+
chunk = chunk[len(SEP) :]
|
| 897 |
+
self._chunk = ChunkState.PARSE_CHUNKED_SIZE
|
| 898 |
+
else:
|
| 899 |
+
self._chunk_tail = chunk
|
| 900 |
+
return False, b""
|
| 901 |
+
|
| 902 |
+
# if stream does not contain trailer, after 0\r\n
|
| 903 |
+
# we should get another \r\n otherwise
|
| 904 |
+
# trailers needs to be skipped until \r\n\r\n
|
| 905 |
+
if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
|
| 906 |
+
head = chunk[: len(SEP)]
|
| 907 |
+
if head == SEP:
|
| 908 |
+
# end of stream
|
| 909 |
+
self.payload.feed_eof()
|
| 910 |
+
return True, chunk[len(SEP) :]
|
| 911 |
+
# Both CR and LF, or only LF may not be received yet. It is
|
| 912 |
+
# expected that CRLF or LF will be shown at the very first
|
| 913 |
+
# byte next time, otherwise trailers should come. The last
|
| 914 |
+
# CRLF which marks the end of response might not be
|
| 915 |
+
# contained in the same TCP segment which delivered the
|
| 916 |
+
# size indicator.
|
| 917 |
+
if not head:
|
| 918 |
+
return False, b""
|
| 919 |
+
if head == SEP[:1]:
|
| 920 |
+
self._chunk_tail = head
|
| 921 |
+
return False, b""
|
| 922 |
+
self._chunk = ChunkState.PARSE_TRAILERS
|
| 923 |
+
|
| 924 |
+
# read and discard trailer up to the CRLF terminator
|
| 925 |
+
if self._chunk == ChunkState.PARSE_TRAILERS:
|
| 926 |
+
pos = chunk.find(SEP)
|
| 927 |
+
if pos >= 0:
|
| 928 |
+
chunk = chunk[pos + len(SEP) :]
|
| 929 |
+
self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
|
| 930 |
+
else:
|
| 931 |
+
self._chunk_tail = chunk
|
| 932 |
+
return False, b""
|
| 933 |
+
|
| 934 |
+
# Read all bytes until eof
|
| 935 |
+
elif self._type == ParseState.PARSE_UNTIL_EOF:
|
| 936 |
+
self.payload.feed_data(chunk, len(chunk))
|
| 937 |
+
|
| 938 |
+
return False, b""
|
| 939 |
+
|
| 940 |
+
|
| 941 |
+
class DeflateBuffer:
|
| 942 |
+
"""DeflateStream decompress stream and feed data into specified stream."""
|
| 943 |
+
|
| 944 |
+
decompressor: Any
|
| 945 |
+
|
| 946 |
+
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
|
| 947 |
+
self.out = out
|
| 948 |
+
self.size = 0
|
| 949 |
+
self.encoding = encoding
|
| 950 |
+
self._started_decoding = False
|
| 951 |
+
|
| 952 |
+
self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
|
| 953 |
+
if encoding == "br":
|
| 954 |
+
if not HAS_BROTLI: # pragma: no cover
|
| 955 |
+
raise ContentEncodingError(
|
| 956 |
+
"Can not decode content-encoding: brotli (br). "
|
| 957 |
+
"Please install `Brotli`"
|
| 958 |
+
)
|
| 959 |
+
self.decompressor = BrotliDecompressor()
|
| 960 |
+
else:
|
| 961 |
+
self.decompressor = ZLibDecompressor(encoding=encoding)
|
| 962 |
+
|
| 963 |
+
def set_exception(
|
| 964 |
+
self,
|
| 965 |
+
exc: BaseException,
|
| 966 |
+
exc_cause: BaseException = _EXC_SENTINEL,
|
| 967 |
+
) -> None:
|
| 968 |
+
set_exception(self.out, exc, exc_cause)
|
| 969 |
+
|
| 970 |
+
def feed_data(self, chunk: bytes, size: int) -> None:
|
| 971 |
+
if not size:
|
| 972 |
+
return
|
| 973 |
+
|
| 974 |
+
self.size += size
|
| 975 |
+
|
| 976 |
+
# RFC1950
|
| 977 |
+
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
|
| 978 |
+
# bits 4..7 = CINFO = 1..7 = windows size.
|
| 979 |
+
if (
|
| 980 |
+
not self._started_decoding
|
| 981 |
+
and self.encoding == "deflate"
|
| 982 |
+
and chunk[0] & 0xF != 8
|
| 983 |
+
):
|
| 984 |
+
# Change the decoder to decompress incorrectly compressed data
|
| 985 |
+
# Actually we should issue a warning about non-RFC-compliant data.
|
| 986 |
+
self.decompressor = ZLibDecompressor(
|
| 987 |
+
encoding=self.encoding, suppress_deflate_header=True
|
| 988 |
+
)
|
| 989 |
+
|
| 990 |
+
try:
|
| 991 |
+
chunk = self.decompressor.decompress_sync(chunk)
|
| 992 |
+
except Exception:
|
| 993 |
+
raise ContentEncodingError(
|
| 994 |
+
"Can not decode content-encoding: %s" % self.encoding
|
| 995 |
+
)
|
| 996 |
+
|
| 997 |
+
self._started_decoding = True
|
| 998 |
+
|
| 999 |
+
if chunk:
|
| 1000 |
+
self.out.feed_data(chunk, len(chunk))
|
| 1001 |
+
|
| 1002 |
+
def feed_eof(self) -> None:
|
| 1003 |
+
chunk = self.decompressor.flush()
|
| 1004 |
+
|
| 1005 |
+
if chunk or self.size > 0:
|
| 1006 |
+
self.out.feed_data(chunk, len(chunk))
|
| 1007 |
+
if self.encoding == "deflate" and not self.decompressor.eof:
|
| 1008 |
+
raise ContentEncodingError("deflate")
|
| 1009 |
+
|
| 1010 |
+
self.out.feed_eof()
|
| 1011 |
+
|
| 1012 |
+
def begin_http_chunk_receiving(self) -> None:
|
| 1013 |
+
self.out.begin_http_chunk_receiving()
|
| 1014 |
+
|
| 1015 |
+
def end_http_chunk_receiving(self) -> None:
|
| 1016 |
+
self.out.end_http_chunk_receiving()
|
| 1017 |
+
|
| 1018 |
+
|
| 1019 |
+
HttpRequestParserPy = HttpRequestParser
|
| 1020 |
+
HttpResponseParserPy = HttpResponseParser
|
| 1021 |
+
RawRequestMessagePy = RawRequestMessage
|
| 1022 |
+
RawResponseMessagePy = RawResponseMessage
|
| 1023 |
+
|
| 1024 |
+
try:
|
| 1025 |
+
if not NO_EXTENSIONS:
|
| 1026 |
+
from ._http_parser import ( # type: ignore[import-not-found,no-redef]
|
| 1027 |
+
HttpRequestParser,
|
| 1028 |
+
HttpResponseParser,
|
| 1029 |
+
RawRequestMessage,
|
| 1030 |
+
RawResponseMessage,
|
| 1031 |
+
)
|
| 1032 |
+
|
| 1033 |
+
HttpRequestParserC = HttpRequestParser
|
| 1034 |
+
HttpResponseParserC = HttpResponseParser
|
| 1035 |
+
RawRequestMessageC = RawRequestMessage
|
| 1036 |
+
RawResponseMessageC = RawResponseMessage
|
| 1037 |
+
except ImportError: # pragma: no cover
|
| 1038 |
+
pass
|
parrot/lib/python3.10/site-packages/aiohttp/http_websocket.py
ADDED
|
@@ -0,0 +1,761 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""WebSocket protocol versions 13 and 8."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import functools
|
| 5 |
+
import json
|
| 6 |
+
import random
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
import zlib
|
| 10 |
+
from enum import IntEnum
|
| 11 |
+
from functools import partial
|
| 12 |
+
from struct import Struct
|
| 13 |
+
from typing import (
|
| 14 |
+
Any,
|
| 15 |
+
Callable,
|
| 16 |
+
Final,
|
| 17 |
+
List,
|
| 18 |
+
NamedTuple,
|
| 19 |
+
Optional,
|
| 20 |
+
Pattern,
|
| 21 |
+
Set,
|
| 22 |
+
Tuple,
|
| 23 |
+
Union,
|
| 24 |
+
cast,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
from .base_protocol import BaseProtocol
|
| 28 |
+
from .client_exceptions import ClientConnectionResetError
|
| 29 |
+
from .compression_utils import ZLibCompressor, ZLibDecompressor
|
| 30 |
+
from .helpers import NO_EXTENSIONS, set_exception
|
| 31 |
+
from .streams import DataQueue
|
| 32 |
+
|
| 33 |
+
__all__ = (
|
| 34 |
+
"WS_CLOSED_MESSAGE",
|
| 35 |
+
"WS_CLOSING_MESSAGE",
|
| 36 |
+
"WS_KEY",
|
| 37 |
+
"WebSocketReader",
|
| 38 |
+
"WebSocketWriter",
|
| 39 |
+
"WSMessage",
|
| 40 |
+
"WebSocketError",
|
| 41 |
+
"WSMsgType",
|
| 42 |
+
"WSCloseCode",
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class WSCloseCode(IntEnum):
|
| 47 |
+
OK = 1000
|
| 48 |
+
GOING_AWAY = 1001
|
| 49 |
+
PROTOCOL_ERROR = 1002
|
| 50 |
+
UNSUPPORTED_DATA = 1003
|
| 51 |
+
ABNORMAL_CLOSURE = 1006
|
| 52 |
+
INVALID_TEXT = 1007
|
| 53 |
+
POLICY_VIOLATION = 1008
|
| 54 |
+
MESSAGE_TOO_BIG = 1009
|
| 55 |
+
MANDATORY_EXTENSION = 1010
|
| 56 |
+
INTERNAL_ERROR = 1011
|
| 57 |
+
SERVICE_RESTART = 1012
|
| 58 |
+
TRY_AGAIN_LATER = 1013
|
| 59 |
+
BAD_GATEWAY = 1014
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
|
| 63 |
+
|
| 64 |
+
# For websockets, keeping latency low is extremely important as implementations
|
| 65 |
+
# generally expect to be able to send and receive messages quickly. We use a
|
| 66 |
+
# larger chunk size than the default to reduce the number of executor calls
|
| 67 |
+
# since the executor is a significant source of latency and overhead when
|
| 68 |
+
# the chunks are small. A size of 5KiB was chosen because it is also the
|
| 69 |
+
# same value python-zlib-ng choose to use as the threshold to release the GIL.
|
| 70 |
+
|
| 71 |
+
WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class WSMsgType(IntEnum):
|
| 75 |
+
# websocket spec types
|
| 76 |
+
CONTINUATION = 0x0
|
| 77 |
+
TEXT = 0x1
|
| 78 |
+
BINARY = 0x2
|
| 79 |
+
PING = 0x9
|
| 80 |
+
PONG = 0xA
|
| 81 |
+
CLOSE = 0x8
|
| 82 |
+
|
| 83 |
+
# aiohttp specific types
|
| 84 |
+
CLOSING = 0x100
|
| 85 |
+
CLOSED = 0x101
|
| 86 |
+
ERROR = 0x102
|
| 87 |
+
|
| 88 |
+
text = TEXT
|
| 89 |
+
binary = BINARY
|
| 90 |
+
ping = PING
|
| 91 |
+
pong = PONG
|
| 92 |
+
close = CLOSE
|
| 93 |
+
closing = CLOSING
|
| 94 |
+
closed = CLOSED
|
| 95 |
+
error = ERROR
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
MESSAGE_TYPES_WITH_CONTENT: Final = frozenset(
|
| 99 |
+
{
|
| 100 |
+
WSMsgType.BINARY,
|
| 101 |
+
WSMsgType.TEXT,
|
| 102 |
+
WSMsgType.CONTINUATION,
|
| 103 |
+
}
|
| 104 |
+
)
|
| 105 |
+
|
| 106 |
+
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
UNPACK_LEN2 = Struct("!H").unpack_from
|
| 110 |
+
UNPACK_LEN3 = Struct("!Q").unpack_from
|
| 111 |
+
UNPACK_CLOSE_CODE = Struct("!H").unpack
|
| 112 |
+
PACK_LEN1 = Struct("!BB").pack
|
| 113 |
+
PACK_LEN2 = Struct("!BBH").pack
|
| 114 |
+
PACK_LEN3 = Struct("!BBQ").pack
|
| 115 |
+
PACK_CLOSE_CODE = Struct("!H").pack
|
| 116 |
+
PACK_RANDBITS = Struct("!L").pack
|
| 117 |
+
MSG_SIZE: Final[int] = 2**14
|
| 118 |
+
DEFAULT_LIMIT: Final[int] = 2**16
|
| 119 |
+
MASK_LEN: Final[int] = 4
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class WSMessage(NamedTuple):
|
| 123 |
+
type: WSMsgType
|
| 124 |
+
# To type correctly, this would need some kind of tagged union for each type.
|
| 125 |
+
data: Any
|
| 126 |
+
extra: Optional[str]
|
| 127 |
+
|
| 128 |
+
def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
|
| 129 |
+
"""Return parsed JSON data.
|
| 130 |
+
|
| 131 |
+
.. versionadded:: 0.22
|
| 132 |
+
"""
|
| 133 |
+
return loads(self.data)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
|
| 137 |
+
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class WebSocketError(Exception):
|
| 141 |
+
"""WebSocket protocol parser error."""
|
| 142 |
+
|
| 143 |
+
def __init__(self, code: int, message: str) -> None:
|
| 144 |
+
self.code = code
|
| 145 |
+
super().__init__(code, message)
|
| 146 |
+
|
| 147 |
+
def __str__(self) -> str:
|
| 148 |
+
return cast(str, self.args[1])
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class WSHandshakeError(Exception):
|
| 152 |
+
"""WebSocket protocol handshake error."""
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
native_byteorder: Final[str] = sys.byteorder
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
# Used by _websocket_mask_python
|
| 159 |
+
@functools.lru_cache
|
| 160 |
+
def _xor_table() -> List[bytes]:
|
| 161 |
+
return [bytes(a ^ b for a in range(256)) for b in range(256)]
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
|
| 165 |
+
"""Websocket masking function.
|
| 166 |
+
|
| 167 |
+
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
|
| 168 |
+
object of any length. The contents of `data` are masked with `mask`,
|
| 169 |
+
as specified in section 5.3 of RFC 6455.
|
| 170 |
+
|
| 171 |
+
Note that this function mutates the `data` argument.
|
| 172 |
+
|
| 173 |
+
This pure-python implementation may be replaced by an optimized
|
| 174 |
+
version when available.
|
| 175 |
+
|
| 176 |
+
"""
|
| 177 |
+
assert isinstance(data, bytearray), data
|
| 178 |
+
assert len(mask) == 4, mask
|
| 179 |
+
|
| 180 |
+
if data:
|
| 181 |
+
_XOR_TABLE = _xor_table()
|
| 182 |
+
a, b, c, d = (_XOR_TABLE[n] for n in mask)
|
| 183 |
+
data[::4] = data[::4].translate(a)
|
| 184 |
+
data[1::4] = data[1::4].translate(b)
|
| 185 |
+
data[2::4] = data[2::4].translate(c)
|
| 186 |
+
data[3::4] = data[3::4].translate(d)
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
if NO_EXTENSIONS: # pragma: no cover
|
| 190 |
+
_websocket_mask = _websocket_mask_python
|
| 191 |
+
else:
|
| 192 |
+
try:
|
| 193 |
+
from ._websocket import _websocket_mask_cython # type: ignore[import-not-found]
|
| 194 |
+
|
| 195 |
+
_websocket_mask = _websocket_mask_cython
|
| 196 |
+
except ImportError: # pragma: no cover
|
| 197 |
+
_websocket_mask = _websocket_mask_python
|
| 198 |
+
|
| 199 |
+
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
|
| 203 |
+
r"^(?:;\s*(?:"
|
| 204 |
+
r"(server_no_context_takeover)|"
|
| 205 |
+
r"(client_no_context_takeover)|"
|
| 206 |
+
r"(server_max_window_bits(?:=(\d+))?)|"
|
| 207 |
+
r"(client_max_window_bits(?:=(\d+))?)))*$"
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
|
| 214 |
+
if not extstr:
|
| 215 |
+
return 0, False
|
| 216 |
+
|
| 217 |
+
compress = 0
|
| 218 |
+
notakeover = False
|
| 219 |
+
for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
|
| 220 |
+
defext = ext.group(1)
|
| 221 |
+
# Return compress = 15 when get `permessage-deflate`
|
| 222 |
+
if not defext:
|
| 223 |
+
compress = 15
|
| 224 |
+
break
|
| 225 |
+
match = _WS_EXT_RE.match(defext)
|
| 226 |
+
if match:
|
| 227 |
+
compress = 15
|
| 228 |
+
if isserver:
|
| 229 |
+
# Server never fail to detect compress handshake.
|
| 230 |
+
# Server does not need to send max wbit to client
|
| 231 |
+
if match.group(4):
|
| 232 |
+
compress = int(match.group(4))
|
| 233 |
+
# Group3 must match if group4 matches
|
| 234 |
+
# Compress wbit 8 does not support in zlib
|
| 235 |
+
# If compress level not support,
|
| 236 |
+
# CONTINUE to next extension
|
| 237 |
+
if compress > 15 or compress < 9:
|
| 238 |
+
compress = 0
|
| 239 |
+
continue
|
| 240 |
+
if match.group(1):
|
| 241 |
+
notakeover = True
|
| 242 |
+
# Ignore regex group 5 & 6 for client_max_window_bits
|
| 243 |
+
break
|
| 244 |
+
else:
|
| 245 |
+
if match.group(6):
|
| 246 |
+
compress = int(match.group(6))
|
| 247 |
+
# Group5 must match if group6 matches
|
| 248 |
+
# Compress wbit 8 does not support in zlib
|
| 249 |
+
# If compress level not support,
|
| 250 |
+
# FAIL the parse progress
|
| 251 |
+
if compress > 15 or compress < 9:
|
| 252 |
+
raise WSHandshakeError("Invalid window size")
|
| 253 |
+
if match.group(2):
|
| 254 |
+
notakeover = True
|
| 255 |
+
# Ignore regex group 5 & 6 for client_max_window_bits
|
| 256 |
+
break
|
| 257 |
+
# Return Fail if client side and not match
|
| 258 |
+
elif not isserver:
|
| 259 |
+
raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))
|
| 260 |
+
|
| 261 |
+
return compress, notakeover
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def ws_ext_gen(
|
| 265 |
+
compress: int = 15, isserver: bool = False, server_notakeover: bool = False
|
| 266 |
+
) -> str:
|
| 267 |
+
# client_notakeover=False not used for server
|
| 268 |
+
# compress wbit 8 does not support in zlib
|
| 269 |
+
if compress < 9 or compress > 15:
|
| 270 |
+
raise ValueError(
|
| 271 |
+
"Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
|
| 272 |
+
)
|
| 273 |
+
enabledext = ["permessage-deflate"]
|
| 274 |
+
if not isserver:
|
| 275 |
+
enabledext.append("client_max_window_bits")
|
| 276 |
+
|
| 277 |
+
if compress < 15:
|
| 278 |
+
enabledext.append("server_max_window_bits=" + str(compress))
|
| 279 |
+
if server_notakeover:
|
| 280 |
+
enabledext.append("server_no_context_takeover")
|
| 281 |
+
# if client_notakeover:
|
| 282 |
+
# enabledext.append('client_no_context_takeover')
|
| 283 |
+
return "; ".join(enabledext)
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
class WSParserState(IntEnum):
|
| 287 |
+
READ_HEADER = 1
|
| 288 |
+
READ_PAYLOAD_LENGTH = 2
|
| 289 |
+
READ_PAYLOAD_MASK = 3
|
| 290 |
+
READ_PAYLOAD = 4
|
| 291 |
+
|
| 292 |
+
|
| 293 |
+
class WebSocketReader:
|
| 294 |
+
def __init__(
|
| 295 |
+
self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
|
| 296 |
+
) -> None:
|
| 297 |
+
self.queue = queue
|
| 298 |
+
self._max_msg_size = max_msg_size
|
| 299 |
+
|
| 300 |
+
self._exc: Optional[BaseException] = None
|
| 301 |
+
self._partial = bytearray()
|
| 302 |
+
self._state = WSParserState.READ_HEADER
|
| 303 |
+
|
| 304 |
+
self._opcode: Optional[int] = None
|
| 305 |
+
self._frame_fin = False
|
| 306 |
+
self._frame_opcode: Optional[int] = None
|
| 307 |
+
self._frame_payload = bytearray()
|
| 308 |
+
|
| 309 |
+
self._tail: bytes = b""
|
| 310 |
+
self._has_mask = False
|
| 311 |
+
self._frame_mask: Optional[bytes] = None
|
| 312 |
+
self._payload_length = 0
|
| 313 |
+
self._payload_length_flag = 0
|
| 314 |
+
self._compressed: Optional[bool] = None
|
| 315 |
+
self._decompressobj: Optional[ZLibDecompressor] = None
|
| 316 |
+
self._compress = compress
|
| 317 |
+
|
| 318 |
+
def feed_eof(self) -> None:
|
| 319 |
+
self.queue.feed_eof()
|
| 320 |
+
|
| 321 |
+
def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
|
| 322 |
+
if self._exc:
|
| 323 |
+
return True, data
|
| 324 |
+
|
| 325 |
+
try:
|
| 326 |
+
self._feed_data(data)
|
| 327 |
+
except Exception as exc:
|
| 328 |
+
self._exc = exc
|
| 329 |
+
set_exception(self.queue, exc)
|
| 330 |
+
return True, b""
|
| 331 |
+
|
| 332 |
+
return False, b""
|
| 333 |
+
|
| 334 |
+
def _feed_data(self, data: bytes) -> None:
|
| 335 |
+
for fin, opcode, payload, compressed in self.parse_frame(data):
|
| 336 |
+
if opcode in MESSAGE_TYPES_WITH_CONTENT:
|
| 337 |
+
# load text/binary
|
| 338 |
+
is_continuation = opcode == WSMsgType.CONTINUATION
|
| 339 |
+
if not fin:
|
| 340 |
+
# got partial frame payload
|
| 341 |
+
if not is_continuation:
|
| 342 |
+
self._opcode = opcode
|
| 343 |
+
self._partial += payload
|
| 344 |
+
if self._max_msg_size and len(self._partial) >= self._max_msg_size:
|
| 345 |
+
raise WebSocketError(
|
| 346 |
+
WSCloseCode.MESSAGE_TOO_BIG,
|
| 347 |
+
"Message size {} exceeds limit {}".format(
|
| 348 |
+
len(self._partial), self._max_msg_size
|
| 349 |
+
),
|
| 350 |
+
)
|
| 351 |
+
continue
|
| 352 |
+
|
| 353 |
+
has_partial = bool(self._partial)
|
| 354 |
+
if is_continuation:
|
| 355 |
+
if self._opcode is None:
|
| 356 |
+
raise WebSocketError(
|
| 357 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 358 |
+
"Continuation frame for non started message",
|
| 359 |
+
)
|
| 360 |
+
opcode = self._opcode
|
| 361 |
+
self._opcode = None
|
| 362 |
+
# previous frame was non finished
|
| 363 |
+
# we should get continuation opcode
|
| 364 |
+
elif has_partial:
|
| 365 |
+
raise WebSocketError(
|
| 366 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 367 |
+
"The opcode in non-fin frame is expected "
|
| 368 |
+
"to be zero, got {!r}".format(opcode),
|
| 369 |
+
)
|
| 370 |
+
|
| 371 |
+
if has_partial:
|
| 372 |
+
assembled_payload = self._partial + payload
|
| 373 |
+
self._partial.clear()
|
| 374 |
+
else:
|
| 375 |
+
assembled_payload = payload
|
| 376 |
+
|
| 377 |
+
if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
|
| 378 |
+
raise WebSocketError(
|
| 379 |
+
WSCloseCode.MESSAGE_TOO_BIG,
|
| 380 |
+
"Message size {} exceeds limit {}".format(
|
| 381 |
+
len(assembled_payload), self._max_msg_size
|
| 382 |
+
),
|
| 383 |
+
)
|
| 384 |
+
|
| 385 |
+
# Decompress process must to be done after all packets
|
| 386 |
+
# received.
|
| 387 |
+
if compressed:
|
| 388 |
+
if not self._decompressobj:
|
| 389 |
+
self._decompressobj = ZLibDecompressor(
|
| 390 |
+
suppress_deflate_header=True
|
| 391 |
+
)
|
| 392 |
+
payload_merged = self._decompressobj.decompress_sync(
|
| 393 |
+
assembled_payload + _WS_DEFLATE_TRAILING, self._max_msg_size
|
| 394 |
+
)
|
| 395 |
+
if self._decompressobj.unconsumed_tail:
|
| 396 |
+
left = len(self._decompressobj.unconsumed_tail)
|
| 397 |
+
raise WebSocketError(
|
| 398 |
+
WSCloseCode.MESSAGE_TOO_BIG,
|
| 399 |
+
"Decompressed message size {} exceeds limit {}".format(
|
| 400 |
+
self._max_msg_size + left, self._max_msg_size
|
| 401 |
+
),
|
| 402 |
+
)
|
| 403 |
+
else:
|
| 404 |
+
payload_merged = bytes(assembled_payload)
|
| 405 |
+
|
| 406 |
+
if opcode == WSMsgType.TEXT:
|
| 407 |
+
try:
|
| 408 |
+
text = payload_merged.decode("utf-8")
|
| 409 |
+
except UnicodeDecodeError as exc:
|
| 410 |
+
raise WebSocketError(
|
| 411 |
+
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
|
| 412 |
+
) from exc
|
| 413 |
+
|
| 414 |
+
self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text))
|
| 415 |
+
continue
|
| 416 |
+
|
| 417 |
+
self.queue.feed_data(
|
| 418 |
+
WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged)
|
| 419 |
+
)
|
| 420 |
+
elif opcode == WSMsgType.CLOSE:
|
| 421 |
+
if len(payload) >= 2:
|
| 422 |
+
close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
|
| 423 |
+
if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
|
| 424 |
+
raise WebSocketError(
|
| 425 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 426 |
+
f"Invalid close code: {close_code}",
|
| 427 |
+
)
|
| 428 |
+
try:
|
| 429 |
+
close_message = payload[2:].decode("utf-8")
|
| 430 |
+
except UnicodeDecodeError as exc:
|
| 431 |
+
raise WebSocketError(
|
| 432 |
+
WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
|
| 433 |
+
) from exc
|
| 434 |
+
msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
|
| 435 |
+
elif payload:
|
| 436 |
+
raise WebSocketError(
|
| 437 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 438 |
+
f"Invalid close frame: {fin} {opcode} {payload!r}",
|
| 439 |
+
)
|
| 440 |
+
else:
|
| 441 |
+
msg = WSMessage(WSMsgType.CLOSE, 0, "")
|
| 442 |
+
|
| 443 |
+
self.queue.feed_data(msg, 0)
|
| 444 |
+
|
| 445 |
+
elif opcode == WSMsgType.PING:
|
| 446 |
+
self.queue.feed_data(
|
| 447 |
+
WSMessage(WSMsgType.PING, payload, ""), len(payload)
|
| 448 |
+
)
|
| 449 |
+
|
| 450 |
+
elif opcode == WSMsgType.PONG:
|
| 451 |
+
self.queue.feed_data(
|
| 452 |
+
WSMessage(WSMsgType.PONG, payload, ""), len(payload)
|
| 453 |
+
)
|
| 454 |
+
|
| 455 |
+
else:
|
| 456 |
+
raise WebSocketError(
|
| 457 |
+
WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
|
| 458 |
+
)
|
| 459 |
+
|
| 460 |
+
def parse_frame(
|
| 461 |
+
self, buf: bytes
|
| 462 |
+
) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
|
| 463 |
+
"""Return the next frame from the socket."""
|
| 464 |
+
frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = []
|
| 465 |
+
if self._tail:
|
| 466 |
+
buf, self._tail = self._tail + buf, b""
|
| 467 |
+
|
| 468 |
+
start_pos: int = 0
|
| 469 |
+
buf_length = len(buf)
|
| 470 |
+
|
| 471 |
+
while True:
|
| 472 |
+
# read header
|
| 473 |
+
if self._state is WSParserState.READ_HEADER:
|
| 474 |
+
if buf_length - start_pos < 2:
|
| 475 |
+
break
|
| 476 |
+
data = buf[start_pos : start_pos + 2]
|
| 477 |
+
start_pos += 2
|
| 478 |
+
first_byte, second_byte = data
|
| 479 |
+
|
| 480 |
+
fin = (first_byte >> 7) & 1
|
| 481 |
+
rsv1 = (first_byte >> 6) & 1
|
| 482 |
+
rsv2 = (first_byte >> 5) & 1
|
| 483 |
+
rsv3 = (first_byte >> 4) & 1
|
| 484 |
+
opcode = first_byte & 0xF
|
| 485 |
+
|
| 486 |
+
# frame-fin = %x0 ; more frames of this message follow
|
| 487 |
+
# / %x1 ; final frame of this message
|
| 488 |
+
# frame-rsv1 = %x0 ;
|
| 489 |
+
# 1 bit, MUST be 0 unless negotiated otherwise
|
| 490 |
+
# frame-rsv2 = %x0 ;
|
| 491 |
+
# 1 bit, MUST be 0 unless negotiated otherwise
|
| 492 |
+
# frame-rsv3 = %x0 ;
|
| 493 |
+
# 1 bit, MUST be 0 unless negotiated otherwise
|
| 494 |
+
#
|
| 495 |
+
# Remove rsv1 from this test for deflate development
|
| 496 |
+
if rsv2 or rsv3 or (rsv1 and not self._compress):
|
| 497 |
+
raise WebSocketError(
|
| 498 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 499 |
+
"Received frame with non-zero reserved bits",
|
| 500 |
+
)
|
| 501 |
+
|
| 502 |
+
if opcode > 0x7 and fin == 0:
|
| 503 |
+
raise WebSocketError(
|
| 504 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 505 |
+
"Received fragmented control frame",
|
| 506 |
+
)
|
| 507 |
+
|
| 508 |
+
has_mask = (second_byte >> 7) & 1
|
| 509 |
+
length = second_byte & 0x7F
|
| 510 |
+
|
| 511 |
+
# Control frames MUST have a payload
|
| 512 |
+
# length of 125 bytes or less
|
| 513 |
+
if opcode > 0x7 and length > 125:
|
| 514 |
+
raise WebSocketError(
|
| 515 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 516 |
+
"Control frame payload cannot be " "larger than 125 bytes",
|
| 517 |
+
)
|
| 518 |
+
|
| 519 |
+
# Set compress status if last package is FIN
|
| 520 |
+
# OR set compress status if this is first fragment
|
| 521 |
+
# Raise error if not first fragment with rsv1 = 0x1
|
| 522 |
+
if self._frame_fin or self._compressed is None:
|
| 523 |
+
self._compressed = True if rsv1 else False
|
| 524 |
+
elif rsv1:
|
| 525 |
+
raise WebSocketError(
|
| 526 |
+
WSCloseCode.PROTOCOL_ERROR,
|
| 527 |
+
"Received frame with non-zero reserved bits",
|
| 528 |
+
)
|
| 529 |
+
|
| 530 |
+
self._frame_fin = bool(fin)
|
| 531 |
+
self._frame_opcode = opcode
|
| 532 |
+
self._has_mask = bool(has_mask)
|
| 533 |
+
self._payload_length_flag = length
|
| 534 |
+
self._state = WSParserState.READ_PAYLOAD_LENGTH
|
| 535 |
+
|
| 536 |
+
# read payload length
|
| 537 |
+
if self._state is WSParserState.READ_PAYLOAD_LENGTH:
|
| 538 |
+
length_flag = self._payload_length_flag
|
| 539 |
+
if length_flag == 126:
|
| 540 |
+
if buf_length - start_pos < 2:
|
| 541 |
+
break
|
| 542 |
+
data = buf[start_pos : start_pos + 2]
|
| 543 |
+
start_pos += 2
|
| 544 |
+
self._payload_length = UNPACK_LEN2(data)[0]
|
| 545 |
+
elif length_flag > 126:
|
| 546 |
+
if buf_length - start_pos < 8:
|
| 547 |
+
break
|
| 548 |
+
data = buf[start_pos : start_pos + 8]
|
| 549 |
+
start_pos += 8
|
| 550 |
+
self._payload_length = UNPACK_LEN3(data)[0]
|
| 551 |
+
else:
|
| 552 |
+
self._payload_length = length_flag
|
| 553 |
+
|
| 554 |
+
self._state = (
|
| 555 |
+
WSParserState.READ_PAYLOAD_MASK
|
| 556 |
+
if self._has_mask
|
| 557 |
+
else WSParserState.READ_PAYLOAD
|
| 558 |
+
)
|
| 559 |
+
|
| 560 |
+
# read payload mask
|
| 561 |
+
if self._state is WSParserState.READ_PAYLOAD_MASK:
|
| 562 |
+
if buf_length - start_pos < 4:
|
| 563 |
+
break
|
| 564 |
+
self._frame_mask = buf[start_pos : start_pos + 4]
|
| 565 |
+
start_pos += 4
|
| 566 |
+
self._state = WSParserState.READ_PAYLOAD
|
| 567 |
+
|
| 568 |
+
if self._state is WSParserState.READ_PAYLOAD:
|
| 569 |
+
length = self._payload_length
|
| 570 |
+
payload = self._frame_payload
|
| 571 |
+
|
| 572 |
+
chunk_len = buf_length - start_pos
|
| 573 |
+
if length >= chunk_len:
|
| 574 |
+
self._payload_length = length - chunk_len
|
| 575 |
+
payload += buf[start_pos:]
|
| 576 |
+
start_pos = buf_length
|
| 577 |
+
else:
|
| 578 |
+
self._payload_length = 0
|
| 579 |
+
payload += buf[start_pos : start_pos + length]
|
| 580 |
+
start_pos = start_pos + length
|
| 581 |
+
|
| 582 |
+
if self._payload_length != 0:
|
| 583 |
+
break
|
| 584 |
+
|
| 585 |
+
if self._has_mask:
|
| 586 |
+
assert self._frame_mask is not None
|
| 587 |
+
_websocket_mask(self._frame_mask, payload)
|
| 588 |
+
|
| 589 |
+
frames.append(
|
| 590 |
+
(self._frame_fin, self._frame_opcode, payload, self._compressed)
|
| 591 |
+
)
|
| 592 |
+
self._frame_payload = bytearray()
|
| 593 |
+
self._state = WSParserState.READ_HEADER
|
| 594 |
+
|
| 595 |
+
self._tail = buf[start_pos:]
|
| 596 |
+
|
| 597 |
+
return frames
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
class WebSocketWriter:
|
| 601 |
+
def __init__(
|
| 602 |
+
self,
|
| 603 |
+
protocol: BaseProtocol,
|
| 604 |
+
transport: asyncio.Transport,
|
| 605 |
+
*,
|
| 606 |
+
use_mask: bool = False,
|
| 607 |
+
limit: int = DEFAULT_LIMIT,
|
| 608 |
+
random: random.Random = random.Random(),
|
| 609 |
+
compress: int = 0,
|
| 610 |
+
notakeover: bool = False,
|
| 611 |
+
) -> None:
|
| 612 |
+
self.protocol = protocol
|
| 613 |
+
self.transport = transport
|
| 614 |
+
self.use_mask = use_mask
|
| 615 |
+
self.get_random_bits = partial(random.getrandbits, 32)
|
| 616 |
+
self.compress = compress
|
| 617 |
+
self.notakeover = notakeover
|
| 618 |
+
self._closing = False
|
| 619 |
+
self._limit = limit
|
| 620 |
+
self._output_size = 0
|
| 621 |
+
self._compressobj: Any = None # actually compressobj
|
| 622 |
+
|
| 623 |
+
async def _send_frame(
|
| 624 |
+
self, message: bytes, opcode: int, compress: Optional[int] = None
|
| 625 |
+
) -> None:
|
| 626 |
+
"""Send a frame over the websocket with message as its payload."""
|
| 627 |
+
if self._closing and not (opcode & WSMsgType.CLOSE):
|
| 628 |
+
raise ClientConnectionResetError("Cannot write to closing transport")
|
| 629 |
+
|
| 630 |
+
# RSV are the reserved bits in the frame header. They are used to
|
| 631 |
+
# indicate that the frame is using an extension.
|
| 632 |
+
# https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
|
| 633 |
+
rsv = 0
|
| 634 |
+
# Only compress larger packets (disabled)
|
| 635 |
+
# Does small packet needs to be compressed?
|
| 636 |
+
# if self.compress and opcode < 8 and len(message) > 124:
|
| 637 |
+
if (compress or self.compress) and opcode < 8:
|
| 638 |
+
# RSV1 (rsv = 0x40) is set for compressed frames
|
| 639 |
+
# https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1
|
| 640 |
+
rsv = 0x40
|
| 641 |
+
|
| 642 |
+
if compress:
|
| 643 |
+
# Do not set self._compress if compressing is for this frame
|
| 644 |
+
compressobj = self._make_compress_obj(compress)
|
| 645 |
+
else: # self.compress
|
| 646 |
+
if not self._compressobj:
|
| 647 |
+
self._compressobj = self._make_compress_obj(self.compress)
|
| 648 |
+
compressobj = self._compressobj
|
| 649 |
+
|
| 650 |
+
message = await compressobj.compress(message)
|
| 651 |
+
# Its critical that we do not return control to the event
|
| 652 |
+
# loop until we have finished sending all the compressed
|
| 653 |
+
# data. Otherwise we could end up mixing compressed frames
|
| 654 |
+
# if there are multiple coroutines compressing data.
|
| 655 |
+
message += compressobj.flush(
|
| 656 |
+
zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
|
| 657 |
+
)
|
| 658 |
+
if message.endswith(_WS_DEFLATE_TRAILING):
|
| 659 |
+
message = message[:-4]
|
| 660 |
+
|
| 661 |
+
msg_length = len(message)
|
| 662 |
+
|
| 663 |
+
use_mask = self.use_mask
|
| 664 |
+
mask_bit = 0x80 if use_mask else 0
|
| 665 |
+
|
| 666 |
+
# Depending on the message length, the header is assembled differently.
|
| 667 |
+
# The first byte is reserved for the opcode and the RSV bits.
|
| 668 |
+
first_byte = 0x80 | rsv | opcode
|
| 669 |
+
if msg_length < 126:
|
| 670 |
+
header = PACK_LEN1(first_byte, msg_length | mask_bit)
|
| 671 |
+
header_len = 2
|
| 672 |
+
elif msg_length < (1 << 16):
|
| 673 |
+
header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length)
|
| 674 |
+
header_len = 4
|
| 675 |
+
else:
|
| 676 |
+
header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length)
|
| 677 |
+
header_len = 10
|
| 678 |
+
|
| 679 |
+
# https://datatracker.ietf.org/doc/html/rfc6455#section-5.3
|
| 680 |
+
# If we are using a mask, we need to generate it randomly
|
| 681 |
+
# and apply it to the message before sending it. A mask is
|
| 682 |
+
# a 32-bit value that is applied to the message using a
|
| 683 |
+
# bitwise XOR operation. It is used to prevent certain types
|
| 684 |
+
# of attacks on the websocket protocol. The mask is only used
|
| 685 |
+
# when aiohttp is acting as a client. Servers do not use a mask.
|
| 686 |
+
if use_mask:
|
| 687 |
+
mask = PACK_RANDBITS(self.get_random_bits())
|
| 688 |
+
message = bytearray(message)
|
| 689 |
+
_websocket_mask(mask, message)
|
| 690 |
+
self._write(header + mask + message)
|
| 691 |
+
self._output_size += header_len + MASK_LEN + msg_length
|
| 692 |
+
|
| 693 |
+
else:
|
| 694 |
+
if msg_length > MSG_SIZE:
|
| 695 |
+
self._write(header)
|
| 696 |
+
self._write(message)
|
| 697 |
+
else:
|
| 698 |
+
self._write(header + message)
|
| 699 |
+
|
| 700 |
+
self._output_size += header_len + msg_length
|
| 701 |
+
|
| 702 |
+
# It is safe to return control to the event loop when using compression
|
| 703 |
+
# after this point as we have already sent or buffered all the data.
|
| 704 |
+
|
| 705 |
+
# Once we have written output_size up to the limit, we call the
|
| 706 |
+
# drain helper which waits for the transport to be ready to accept
|
| 707 |
+
# more data. This is a flow control mechanism to prevent the buffer
|
| 708 |
+
# from growing too large. The drain helper will return right away
|
| 709 |
+
# if the writer is not paused.
|
| 710 |
+
if self._output_size > self._limit:
|
| 711 |
+
self._output_size = 0
|
| 712 |
+
await self.protocol._drain_helper()
|
| 713 |
+
|
| 714 |
+
def _make_compress_obj(self, compress: int) -> ZLibCompressor:
    """Build a per-message deflate compressor.

    The negative window size requests a raw deflate stream (no zlib
    header/trailer), which is what permessage-deflate requires.
    """
    window_bits = -compress
    return ZLibCompressor(
        level=zlib.Z_BEST_SPEED,
        wbits=window_bits,
        max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
    )
|
| 720 |
+
|
| 721 |
+
def _write(self, data: bytes) -> None:
    """Write raw bytes to the transport, failing fast when it is gone."""
    transport = self.transport
    if transport is None or transport.is_closing():
        raise ClientConnectionResetError("Cannot write to closing transport")
    transport.write(data)
|
| 725 |
+
|
| 726 |
+
async def pong(self, message: Union[bytes, str] = b"") -> None:
    """Send pong message."""
    payload = message.encode("utf-8") if isinstance(message, str) else message
    await self._send_frame(payload, WSMsgType.PONG)
|
| 731 |
+
|
| 732 |
+
async def ping(self, message: Union[bytes, str] = b"") -> None:
    """Send ping message."""
    payload = message.encode("utf-8") if isinstance(message, str) else message
    await self._send_frame(payload, WSMsgType.PING)
|
| 737 |
+
|
| 738 |
+
async def send(
    self,
    message: Union[str, bytes],
    binary: bool = False,
    compress: Optional[int] = None,
) -> None:
    """Send a frame over the websocket with message as its payload."""
    if isinstance(message, str):
        message = message.encode("utf-8")
    opcode = WSMsgType.BINARY if binary else WSMsgType.TEXT
    await self._send_frame(message, opcode, compress)
|
| 751 |
+
|
| 752 |
+
async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
    """Close the websocket, sending the specified code and message."""
    payload = message.encode("utf-8") if isinstance(message, str) else message
    try:
        await self._send_frame(
            PACK_CLOSE_CODE(code) + payload, opcode=WSMsgType.CLOSE
        )
    finally:
        # Mark the writer as closing even when sending the close frame
        # fails, so the connection is never reused afterwards.
        self._closing = True
|
parrot/lib/python3.10/site-packages/aiohttp/locks.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections
|
| 3 |
+
from typing import Any, Deque, Optional
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class EventResultOrError:
    """An asyncio.Event wrapper that wakes waiters with a result or an error.

    ``set()`` releases every task blocked in ``wait()``; when called with
    an exception, that exception is re-raised inside each waiter instead.

    thanks to @vorpalsmith for the simple design.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._exc: Optional[BaseException] = None
        self._event = asyncio.Event()
        # Tracked so cancel() can abort everything currently waiting.
        self._waiters: Deque[asyncio.Future[Any]] = collections.deque()

    def set(self, exc: Optional[BaseException] = None) -> None:
        """Wake all waiters; raise *exc* in them when one is given."""
        self._exc = exc
        self._event.set()

    async def wait(self) -> Any:
        """Block until set() fires, then return or re-raise accordingly."""
        waiter = self._loop.create_task(self._event.wait())
        self._waiters.append(waiter)
        try:
            result = await waiter
        finally:
            self._waiters.remove(waiter)

        exc = self._exc
        if exc is not None:
            raise exc

        return result

    def cancel(self) -> None:
        """Cancel all waiters"""
        for fut in self._waiters:
            fut.cancel()
|
parrot/lib/python3.10/site-packages/aiohttp/log.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
|
| 3 |
+
# Named loggers used across the aiohttp package.  The names form the
# "aiohttp.*" hierarchy, so applications can attach handlers and levels
# via logging.getLogger("aiohttp...") / dictConfig without importing
# this module.
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
|
parrot/lib/python3.10/site-packages/aiohttp/multipart.py
ADDED
|
@@ -0,0 +1,1073 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import base64
|
| 2 |
+
import binascii
|
| 3 |
+
import json
|
| 4 |
+
import re
|
| 5 |
+
import sys
|
| 6 |
+
import uuid
|
| 7 |
+
import warnings
|
| 8 |
+
import zlib
|
| 9 |
+
from collections import deque
|
| 10 |
+
from types import TracebackType
|
| 11 |
+
from typing import (
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Deque,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterator,
|
| 17 |
+
List,
|
| 18 |
+
Mapping,
|
| 19 |
+
Optional,
|
| 20 |
+
Sequence,
|
| 21 |
+
Tuple,
|
| 22 |
+
Type,
|
| 23 |
+
Union,
|
| 24 |
+
cast,
|
| 25 |
+
)
|
| 26 |
+
from urllib.parse import parse_qsl, unquote, urlencode
|
| 27 |
+
|
| 28 |
+
from multidict import CIMultiDict, CIMultiDictProxy
|
| 29 |
+
|
| 30 |
+
from .compression_utils import ZLibCompressor, ZLibDecompressor
|
| 31 |
+
from .hdrs import (
|
| 32 |
+
CONTENT_DISPOSITION,
|
| 33 |
+
CONTENT_ENCODING,
|
| 34 |
+
CONTENT_LENGTH,
|
| 35 |
+
CONTENT_TRANSFER_ENCODING,
|
| 36 |
+
CONTENT_TYPE,
|
| 37 |
+
)
|
| 38 |
+
from .helpers import CHAR, TOKEN, parse_mimetype, reify
|
| 39 |
+
from .http import HeadersParser
|
| 40 |
+
from .payload import (
|
| 41 |
+
JsonPayload,
|
| 42 |
+
LookupError,
|
| 43 |
+
Order,
|
| 44 |
+
Payload,
|
| 45 |
+
StringPayload,
|
| 46 |
+
get_payload,
|
| 47 |
+
payload_type,
|
| 48 |
+
)
|
| 49 |
+
from .streams import StreamReader
|
| 50 |
+
|
| 51 |
+
if sys.version_info >= (3, 11):
|
| 52 |
+
from typing import Self
|
| 53 |
+
else:
|
| 54 |
+
from typing import TypeVar
|
| 55 |
+
|
| 56 |
+
Self = TypeVar("Self", bound="BodyPartReader")
|
| 57 |
+
|
| 58 |
+
__all__ = (
|
| 59 |
+
"MultipartReader",
|
| 60 |
+
"MultipartWriter",
|
| 61 |
+
"BodyPartReader",
|
| 62 |
+
"BadContentDispositionHeader",
|
| 63 |
+
"BadContentDispositionParam",
|
| 64 |
+
"parse_content_disposition",
|
| 65 |
+
"content_disposition_filename",
|
| 66 |
+
)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
if TYPE_CHECKING:
|
| 70 |
+
from .client_reqrep import ClientResponse
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class BadContentDispositionHeader(RuntimeWarning):
    """Warning: the Content-Disposition header as a whole is malformed."""

    pass
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class BadContentDispositionParam(RuntimeWarning):
    """Warning: a single Content-Disposition parameter is malformed."""

    pass
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    """Parse a Content-Disposition header into ``(disptype, params)``.

    Returns the disposition type lowercased plus a dict of parameters.
    A malformed header yields ``(None, {})`` after emitting a
    ``BadContentDispositionHeader`` warning; individually malformed
    parameters are skipped with a ``BadContentDispositionParam`` warning.
    """

    def is_token(string: str) -> bool:
        # True when the string is non-empty and every char is in TOKEN.
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        # RFC 5987 ext-value has the shape charset'language'value
        # (exactly two single quotes).
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        # RFC 2231 continuation: name*0, name*1, ... (optionally name*0*).
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        # Undo backslash-escaping inside a quoted-string value.
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params: Dict[str, str] = {}
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        if key in params:
            # Duplicate parameter names invalidate the whole header.
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            if is_rfc5987(value):
                # Split off the charset prefix, then percent-decode below.
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename, in any case this is just
                # one case fix, for proper fix we need to redesign parser
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
|
| 178 |
+
|
| 179 |
+
|
| 180 |
+
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    """Extract the value for *name* from parsed Content-Disposition params.

    Resolution order: the extended form (``name*``), the plain form
    (``name``), then RFC 2231 continuations (``name*0``, ``name*1``, ...),
    which are joined in order and percent-decoded when an RFC 5987
    charset prefix is present.  Returns None when nothing matches.
    """
    ext_key = "%s*" % name
    if not params:
        return None
    if ext_key in params:
        return params[ext_key]
    if name in params:
        return params[name]

    # Collect RFC 2231 continuation segments in sorted key order, keeping
    # only the prefix whose numeric suffixes run 0, 1, 2, ... without gaps.
    segments: List[str] = []
    candidates = sorted(
        (key, value) for key, value in params.items() if key.startswith(ext_key)
    )
    for expected, (key, value) in enumerate(candidates):
        _, tail = key.split("*", 1)
        tail = tail.removesuffix("*")
        if tail != str(expected):
            break
        segments.append(value)

    if not segments:
        return None
    joined = "".join(segments)
    if "'" not in joined:
        return joined
    # RFC 5987 ext-value: charset'language'percent-encoded-value.
    encoding, _, encoded = joined.split("'", 2)
    return unquote(encoded, encoding or "utf-8", "strict")
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    It takes care about
    underlying connection and close it when it needs in.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        nxt = await self.next()
        if nxt is None:
            raise StopAsyncIteration
        return nxt

    def at_eof(self) -> bool:
        """Returns True when all response data had been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits next multipart reader object."""
        part = await self.stream.next()
        if self.stream.at_eof():
            # Out of parts: give the underlying connection back right away.
            await self.release()
        return part

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
class BodyPartReader:
    """Multipart reader for single body part."""

    # Default read granularity for read()/release(); must stay large
    # enough to cover the boundary length (asserted in
    # _read_chunk_from_stream).
    chunk_size = 8192

    def __init__(
        self,
        boundary: bytes,
        headers: "CIMultiDictProxy[str]",
        content: StreamReader,
        *,
        subtype: str = "mixed",
        default_charset: Optional[str] = None,
    ) -> None:
        self.headers = headers
        self._boundary = boundary
        self._boundary_len = len(boundary) + 2  # Boundary + \r\n
        self._content = content
        self._default_charset = default_charset
        self._at_eof = False
        self._is_form_data = subtype == "form-data"
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
        length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
        self._length = int(length) if length is not None else None
        self._read_bytes = 0
        # Lines pushed back by readline() when it over-reads the boundary.
        self._unread: Deque[bytes] = deque()
        # Look-behind buffer used by _read_chunk_from_stream so a boundary
        # split across two reads can still be detected.
        self._prev_chunk: Optional[bytes] = None
        self._content_eof = 0
        self._cache: Dict[str, Any] = {}

    def __aiter__(self: Self) -> Self:
        return self

    async def __anext__(self) -> bytes:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    async def next(self) -> Optional[bytes]:
        # Returns the whole remaining body, or None once the part is
        # exhausted (so async-iteration terminates).
        item = await self.read()
        if not item:
            return None
        return item

    async def read(self, *, decode: bool = False) -> bytes:
        """Reads body part data.

        decode: Decodes data following by encoding
                method from Content-Encoding header. If it missed
                data remains untouched
        """
        if self._at_eof:
            return b""
        data = bytearray()
        while not self._at_eof:
            data.extend(await self.read_chunk(self.chunk_size))
        if decode:
            return self.decode(data)
        return data

    async def read_chunk(self, size: int = chunk_size) -> bytes:
        """Reads body part content chunk of the specified size.

        size: chunk size
        """
        if self._at_eof:
            return b""
        if self._length:
            chunk = await self._read_chunk_from_length(size)
        else:
            chunk = await self._read_chunk_from_stream(size)

        # For the case of base64 data, we must read a fragment of size with a
        # remainder of 0 by dividing by 4 for string without symbols \n or \r
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING)
        if encoding and encoding.lower() == "base64":
            stripped_chunk = b"".join(chunk.split())
            remainder = len(stripped_chunk) % 4

            while remainder != 0 and not self.at_eof():
                over_chunk_size = 4 - remainder
                over_chunk = b""

                if self._prev_chunk:
                    over_chunk = self._prev_chunk[:over_chunk_size]
                    self._prev_chunk = self._prev_chunk[len(over_chunk) :]

                if len(over_chunk) != over_chunk_size:
                    over_chunk += await self._content.read(4 - len(over_chunk))

                if not over_chunk:
                    self._at_eof = True

                stripped_chunk += b"".join(over_chunk.split())
                chunk += over_chunk
                remainder = len(stripped_chunk) % 4

        self._read_bytes += len(chunk)
        if self._read_bytes == self._length:
            self._at_eof = True
        if self._at_eof:
            # Consume the CRLF that terminates the part body before the
            # next boundary line.
            clrf = await self._content.readline()
            assert (
                b"\r\n" == clrf
            ), "reader did not read all the data or it is malformed"
        return chunk

    async def _read_chunk_from_length(self, size: int) -> bytes:
        # Reads body part content chunk of the specified size.
        # The body part must has Content-Length header with proper value.
        assert self._length is not None, "Content-Length required for chunked read"
        chunk_size = min(size, self._length - self._read_bytes)
        chunk = await self._content.read(chunk_size)
        if self._content.at_eof():
            self._at_eof = True
        return chunk

    async def _read_chunk_from_stream(self, size: int) -> bytes:
        # Reads content chunk of body part with unknown length.
        # The Content-Length header for body part is not necessary.
        assert (
            size >= self._boundary_len
        ), "Chunk size must be greater or equal than boundary length + 2"
        first_chunk = self._prev_chunk is None
        if first_chunk:
            self._prev_chunk = await self._content.read(size)

        chunk = b""
        # content.read() may return less than size, so we need to loop to ensure
        # we have enough data to detect the boundary.
        while len(chunk) < self._boundary_len:
            chunk += await self._content.read(size)
            self._content_eof += int(self._content.at_eof())
            assert self._content_eof < 3, "Reading after EOF"
            if self._content_eof:
                break
        if len(chunk) > size:
            self._content.unread_data(chunk[size:])
            chunk = chunk[:size]

        assert self._prev_chunk is not None
        # Search the previous chunk plus the new one so a boundary that
        # straddles the read border is still found.
        window = self._prev_chunk + chunk
        sub = b"\r\n" + self._boundary
        if first_chunk:
            idx = window.find(sub)
        else:
            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
        if idx >= 0:
            # pushing boundary back to content
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore", category=DeprecationWarning)
                self._content.unread_data(window[idx:])
            if size > idx:
                self._prev_chunk = self._prev_chunk[:idx]
            chunk = window[len(self._prev_chunk) : idx]
            if not chunk:
                self._at_eof = True
        # Emit the buffered chunk and keep the newest one as look-behind.
        result = self._prev_chunk
        self._prev_chunk = chunk
        return result

    async def readline(self) -> bytes:
        """Reads body part by line by line."""
        if self._at_eof:
            return b""

        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                self._unread.append(line)
                return b""
        else:
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line

    async def release(self) -> None:
        """Like read(), but reads all the data to the void."""
        if self._at_eof:
            return
        while not self._at_eof:
            await self.read_chunk(self.chunk_size)

    async def text(self, *, encoding: Optional[str] = None) -> str:
        """Like read(), but assumes that body part contains text data."""
        data = await self.read(decode=True)
        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
        encoding = encoding or self.get_charset(default="utf-8")
        return data.decode(encoding)

    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
        """Like read(), but assumes that body parts contains JSON data."""
        data = await self.read(decode=True)
        if not data:
            return None
        encoding = encoding or self.get_charset(default="utf-8")
        return cast(Dict[str, Any], json.loads(data.decode(encoding)))

    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
        """Like read(), but assumes that body parts contain form urlencoded data."""
        data = await self.read(decode=True)
        if not data:
            return []
        if encoding is not None:
            real_encoding = encoding
        else:
            real_encoding = self.get_charset(default="utf-8")
        try:
            decoded_data = data.rstrip().decode(real_encoding)
        except UnicodeDecodeError:
            raise ValueError("data cannot be decoded with %s encoding" % real_encoding)

        return parse_qsl(
            decoded_data,
            keep_blank_values=True,
            encoding=real_encoding,
        )

    def at_eof(self) -> bool:
        """Returns True if the boundary was reached or False otherwise."""
        return self._at_eof

    def decode(self, data: bytes) -> bytes:
        """Decodes data.

        Decoding is done according the specified Content-Encoding
        or Content-Transfer-Encoding headers value.
        """
        if CONTENT_TRANSFER_ENCODING in self.headers:
            data = self._decode_content_transfer(data)
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
        if not self._is_form_data and CONTENT_ENCODING in self.headers:
            return self._decode_content(data)
        return data

    def _decode_content(self, data: bytes) -> bytes:
        # Undo Content-Encoding (identity/deflate/gzip only).
        encoding = self.headers.get(CONTENT_ENCODING, "").lower()
        if encoding == "identity":
            return data
        if encoding in {"deflate", "gzip"}:
            return ZLibDecompressor(
                encoding=encoding,
                suppress_deflate_header=True,
            ).decompress_sync(data)

        raise RuntimeError(f"unknown content encoding: {encoding}")

    def _decode_content_transfer(self, data: bytes) -> bytes:
        # Undo Content-Transfer-Encoding (base64/quoted-printable or
        # pass-through for binary/8bit/7bit).
        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()

        if encoding == "base64":
            return base64.b64decode(data)
        elif encoding == "quoted-printable":
            return binascii.a2b_qp(data)
        elif encoding in ("binary", "8bit", "7bit"):
            return data
        else:
            raise RuntimeError(
                "unknown content transfer encoding: {}" "".format(encoding)
            )

    def get_charset(self, default: str) -> str:
        """Returns charset parameter from Content-Type header or default."""
        ctype = self.headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)
        return mimetype.parameters.get("charset", self._default_charset or default)

    @reify
    def name(self) -> Optional[str]:
        """Returns name specified in Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")

    @reify
    def filename(self) -> Optional[str]:
        """Returns filename specified in Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")
|
| 560 |
+
|
| 561 |
+
|
| 562 |
+
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    """Payload adapter that streams a BodyPartReader as request data."""

    _value: BodyPartReader

    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        disposition: Dict[str, str] = {}
        if value.name is not None:
            disposition["name"] = value.name
        if value.filename is not None:
            disposition["filename"] = value.filename

        if disposition:
            self.set_content_disposition("attachment", True, **disposition)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # A body part is a stream; there is no whole value to decode.
        raise TypeError("Unable to decode.")

    async def write(self, writer: Any) -> None:
        """Copy the wrapped part to *writer* in 64 KiB chunks."""
        source = self._value
        while True:
            chunk = await source.read_chunk(size=2**16)
            if not chunk:
                break
            await writer.write(source.decode(chunk))
|
| 587 |
+
|
| 588 |
+
|
| 589 |
+
class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when multipart readers constructs from response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls: Optional[Type["MultipartReader"]] = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        self._mimetype = parse_mimetype(headers[CONTENT_TYPE])
        assert self._mimetype.type == "multipart", "multipart/* content type expected"
        if "boundary" not in self._mimetype.parameters:
            raise ValueError(
                "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE]
            )

        self.headers = headers
        # Boundary as it appears on the wire, including the leading "--".
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        # Charset from a leading "_charset_" form field, if any (RFC 7578 4.6).
        self._default_charset: Optional[str] = None
        self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None
        self._at_eof = False
        self._at_bof = True
        # Lines handed back by nested readers, consumed before the stream.
        self._unread: List[bytes] = []

    def __aiter__(self: Self) -> Self:
        return self

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Async-iteration step; stops when no part remains."""
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs reader instance from HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, false otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None

        part = await self.fetch_next_part()
        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6
        if (
            self._last_part is None
            and self._mimetype.subtype == "form-data"
            and isinstance(part, BodyPartReader)
        ):
            _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION))
            if params.get("name") == "_charset_":
                # Longest encoding in https://encoding.spec.whatwg.org/encodings.json
                # is 19 characters, so 32 should be more than enough for any valid encoding.
                charset = await part.read_chunk(32)
                if len(charset) > 31:
                    raise RuntimeError("Invalid default charset")
                self._default_charset = charset.strip().decode()
                # The _charset_ field itself is not surfaced to the caller.
                part = await self.fetch_next_part()
        self._last_part = part
        return self._last_part

    async def release(self) -> None:
        """Reads all the body parts to the void till the final boundary."""
        while not self._at_eof:
            item = await self.next()
            if item is None:
                break
            await item.release()

    async def fetch_next_part(
        self,
    ) -> Union["MultipartReader", BodyPartReader]:
        """Returns the next body part reader."""
        headers = await self._read_headers()
        return self._get_part_reader(headers)

    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            # Nested multipart: recurse with the configured reader class.
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(
                self._boundary,
                headers,
                self._content,
                subtype=self._mimetype.subtype,
                default_charset=self._default_charset,
            )

    def _get_boundary(self) -> str:
        """Return the boundary parameter, enforcing the RFC 2046 70-char cap."""
        boundary = self._mimetype.parameters["boundary"]
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary

    async def _readline(self) -> bytes:
        """Next line, preferring lines pushed back by nested readers."""
        if self._unread:
            return self._unread.pop()
        return await self._content.readline()

    async def _read_until_first_boundary(self) -> None:
        """Skip any preamble until the first (or final) boundary line."""
        while True:
            chunk = await self._readline()
            if chunk == b"":
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                self._at_eof = True
                return

    async def _read_boundary(self) -> None:
        """Consume the boundary between parts; detect the closing boundary."""
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")

    async def _read_headers(self) -> "CIMultiDictProxy[str]":
        """Read part headers up to the blank separator line."""
        lines = [b""]
        while True:
            chunk = await self._content.readline()
            chunk = chunk.strip()
            lines.append(chunk)
            if not chunk:
                break
        parser = HeadersParser()
        headers, raw_headers = parser.parse_headers(lines)
        return headers

    async def _maybe_release_last_part(self) -> None:
        """Ensures that the last read body part is read completely."""
        if self._last_part is not None:
            if not self._last_part.at_eof():
                await self._last_part.release()
                # Adopt any lines the nested reader pushed back.
                self._unread.extend(self._last_part._unread)
            self._last_part = None
|
| 790 |
+
|
| 791 |
+
|
| 792 |
+
# (payload, content-encoding, transfer-encoding) triple stored per appended part.
_Part = Tuple[Payload, str, str]
|
| 793 |
+
|
| 794 |
+
|
| 795 |
+
class MultipartWriter(Payload):
    """Multipart body writer."""

    # A multipart body has no single underlying value; parts are streamed.
    _value: None

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.
        # In both situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        self._parts: List[_Part] = []
        # form-data parts get extra RFC 7578 validation on append/write.
        self._is_form_data = subtype == "form-data"

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        # Always truthy, even with zero parts appended.
        return True

    _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter = token "=" ( token / quoted-string )
        # token = 1*tchar
        # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text = %x80-FF
        # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #         / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #         / DIGIT / ALPHA
        #         ; any VCHAR, except delimiters
        # VCHAR = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        """Boundary as a unicode string (without the leading dashes)."""
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload:
        """Append *obj* as a body part, adapting it via the payload registry."""
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to multipart writer."""
        encoding: Optional[str] = None
        te_encoding: Optional[str] = None
        if self._is_form_data:
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7
            # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
            assert (
                not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING}
                & payload.headers.keys()
            )
            # Set default Content-Disposition in case user doesn't create one
            if CONTENT_DISPOSITION not in payload.headers:
                name = f"section-{len(self._parts)}"
                payload.set_content_disposition("form-data", name=name)
        else:
            # compression
            encoding = payload.headers.get(CONTENT_ENCODING, "").lower()
            if encoding and encoding not in ("deflate", "gzip", "identity"):
                raise RuntimeError(f"unknown content encoding: {encoding}")
            if encoding == "identity":
                # "identity" means no encoding at all.
                encoding = None

            # te encoding
            te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
            if te_encoding not in ("", "base64", "quoted-printable", "binary"):
                raise RuntimeError(f"unknown content transfer encoding: {te_encoding}")
            if te_encoding == "binary":
                # "binary" is a pass-through; treat like no TE.
                te_encoding = None

            # size
            size = payload.size
            if size is not None and not (encoding or te_encoding):
                # Only meaningful when bytes go out unmodified.
                payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[Mapping[str, str]] = None
    ) -> Payload:
        """Helper to append JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[Mapping[str, str]] = None,
    ) -> Payload:
        """Helper to append form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        total = 0
        for part, encoding, te_encoding in self._parts:
            # Encoded parts have unknown on-the-wire size.
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Debug/string rendering of all parts; not the wire format."""
        return "".join(
            "--"
            + self.boundary
            + "\n"
            + part._binary_headers.decode(encoding, errors)
            + part.decode()
            for part, _e, _te in self._parts
        )

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            if self._is_form_data:
                # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2
                assert CONTENT_DISPOSITION in part.headers
                assert "name=" in part.headers[CONTENT_DISPOSITION]

            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                # Route the part through an encoding/compressing adapter.
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")
|
| 1016 |
+
|
| 1017 |
+
|
| 1018 |
+
class MultipartPayloadWriter:
    """Writer adapter applying compression and content-transfer-encoding.

    Bytes passed to :meth:`write` are optionally compressed first, then
    transfer-encoded (base64 or quoted-printable) before reaching the
    wrapped writer.
    """

    def __init__(self, writer: Any) -> None:
        self._writer = writer
        # Active transfer encoding: "base64", "quoted-printable", or None.
        self._encoding: Optional[str] = None
        self._compress: Optional[ZLibCompressor] = None
        # Pending bytes not yet forming a whole base64 3-byte group.
        self._encoding_buffer: Optional[bytearray] = None

    def enable_encoding(self, encoding: str) -> None:
        """Turn on a transfer encoding; unknown values are silently ignored."""
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Turn on zlib-based compression ("deflate" or "gzip")."""
        self._compress = ZLibCompressor(
            encoding=encoding,
            suppress_deflate_header=True,
            strategy=strategy,
        )

    async def write_eof(self) -> None:
        """Flush the compressor tail and any buffered base64 remainder."""
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                # Disable compression before re-entering write() so the
                # flushed tail is not compressed a second time.
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                # Encode the final (< 3 byte) remainder with padding.
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        """Compress/encode *chunk* and forward it to the wrapped writer."""
        if self._compress is not None:
            if chunk:
                # Waiting until we get any real data
                chunk = await self._compress.compress(chunk)
                if not chunk:
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # base64 works on 3-byte groups; keep the remainder buffered
                # until more data (or write_eof) arrives.
                div, mod = divmod(len(buf), 3)
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
|
parrot/lib/python3.10/site-packages/aiohttp/payload.py
ADDED
|
@@ -0,0 +1,498 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import enum
|
| 3 |
+
import io
|
| 4 |
+
import json
|
| 5 |
+
import mimetypes
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
from abc import ABC, abstractmethod
|
| 9 |
+
from itertools import chain
|
| 10 |
+
from typing import (
|
| 11 |
+
IO,
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Dict,
|
| 15 |
+
Final,
|
| 16 |
+
Iterable,
|
| 17 |
+
Optional,
|
| 18 |
+
TextIO,
|
| 19 |
+
Tuple,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict
|
| 25 |
+
|
| 26 |
+
from . import hdrs
|
| 27 |
+
from .abc import AbstractStreamWriter
|
| 28 |
+
from .helpers import (
|
| 29 |
+
_SENTINEL,
|
| 30 |
+
content_disposition_header,
|
| 31 |
+
guess_filename,
|
| 32 |
+
parse_mimetype,
|
| 33 |
+
sentinel,
|
| 34 |
+
)
|
| 35 |
+
from .streams import StreamReader
|
| 36 |
+
from .typedefs import JSONEncoder, _CIMultiDict
|
| 37 |
+
|
| 38 |
+
# Public API of this module.
__all__ = (
    "PAYLOAD_REGISTRY",
    "get_payload",
    "payload_type",
    "Payload",
    "BytesPayload",
    "StringPayload",
    "IOBasePayload",
    "BytesIOPayload",
    "BufferedReaderPayload",
    "TextIOPayload",
    "StringIOPayload",
    "JsonPayload",
    "AsyncIterablePayload",
)

# Threshold above which BytesPayload warns that a raw-bytes body may
# block the event loop.
TOO_LARGE_BYTES_BODY: Final[int] = 2**20  # 1 MB

if TYPE_CHECKING:
    from typing import List
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class LookupError(Exception):
    """Raised by the payload registry when no factory matches the value.

    NOTE: deliberately shadows the builtin ``LookupError`` name within this
    module; it subclasses ``Exception``, not the builtin ``LookupError``.
    """

    pass
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class Order(str, enum.Enum):
    """Priority bucket a payload factory is registered under."""

    # Checked after try_first, before try_last.
    normal = "normal"
    # Checked before all other factories.
    try_first = "try_first"
    # Checked only if nothing else matched.
    try_last = "try_last"
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
    """Return a Payload adapter for *data* from the global registry.

    Extra positional/keyword arguments are forwarded to the factory.
    """
    return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def register_payload(
    factory: Type["Payload"], type: Any, *, order: Order = Order.normal
) -> None:
    """Register *factory* for values of *type* in the global registry."""
    PAYLOAD_REGISTRY.register(factory, type, order=order)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class payload_type:
    """Class decorator registering the decorated Payload for *type*."""

    def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
        self.type = type
        self.order = order

    def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
        # Register, then return the class unchanged.
        register_payload(factory, self.type, order=self.order)
        return factory
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
# A Payload subclass used as a factory.
PayloadType = Type["Payload"]
# (factory, accepted value type) pair stored in the registry buckets.
_PayloadRegistryItem = Tuple[PayloadType, Any]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class PayloadRegistry:
    """Payload registry.

    note: we need zope.interface for more efficient adapter search
    """

    def __init__(self) -> None:
        # Three priority buckets, scanned in this order by get().
        self._first: List[_PayloadRegistryItem] = []
        self._normal: List[_PayloadRegistryItem] = []
        self._last: List[_PayloadRegistryItem] = []

    def get(
        self,
        data: Any,
        *args: Any,
        _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
        **kwargs: Any,
    ) -> "Payload":
        """Adapt *data* via the first factory whose type matches it."""
        if isinstance(data, Payload):
            # Already adapted; pass through untouched.
            return data
        for factory, accepted_type in _CHAIN(self._first, self._normal, self._last):
            if isinstance(data, accepted_type):
                return factory(data, *args, **kwargs)

        raise LookupError()

    def register(
        self, factory: PayloadType, type: Any, *, order: Order = Order.normal
    ) -> None:
        """File (factory, type) into the bucket selected by *order*."""
        for known_order, bucket in (
            (Order.try_first, self._first),
            (Order.normal, self._normal),
            (Order.try_last, self._last),
        ):
            # Identity comparison: only genuine Order members are accepted.
            if order is known_order:
                bucket.append((factory, type))
                return
        raise ValueError(f"Unsupported order {order!r}")
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class Payload(ABC):
    """Base class for HTTP body payload adapters.

    Subclasses wrap a concrete value type and implement :meth:`decode`
    and :meth:`write`.
    """

    # Content-Type used when none is supplied and none can be guessed.
    _default_content_type: str = "application/octet-stream"
    # Payload size in bytes, or None when unknown (e.g. streams).
    _size: Optional[int] = None

    def __init__(
        self,
        value: Any,
        headers: Optional[
            Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
        ] = None,
        content_type: Union[str, None, _SENTINEL] = sentinel,
        filename: Optional[str] = None,
        encoding: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        self._encoding = encoding
        self._filename = filename
        self._headers: _CIMultiDict = CIMultiDict()
        self._value = value
        if content_type is not sentinel and content_type is not None:
            # An explicit content type always wins.
            self._headers[hdrs.CONTENT_TYPE] = content_type
        elif self._filename is not None:
            # Guess from the filename; fall back to the class default.
            content_type = mimetypes.guess_type(self._filename)[0]
            if content_type is None:
                content_type = self._default_content_type
            self._headers[hdrs.CONTENT_TYPE] = content_type
        else:
            self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
        # Caller-supplied headers may override the Content-Type set above.
        self._headers.update(headers or {})

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        return self._size

    @property
    def filename(self) -> Optional[str]:
        """Filename of the payload."""
        return self._filename

    @property
    def headers(self) -> _CIMultiDict:
        """Custom item headers"""
        return self._headers

    @property
    def _binary_headers(self) -> bytes:
        # Serialized header block followed by the blank separator line.
        return (
            "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
                "utf-8"
            )
            + b"\r\n"
        )

    @property
    def encoding(self) -> Optional[str]:
        """Payload encoding"""
        return self._encoding

    @property
    def content_type(self) -> str:
        """Content type"""
        return self._headers[hdrs.CONTENT_TYPE]

    def set_content_disposition(
        self,
        disptype: str,
        quote_fields: bool = True,
        _charset: str = "utf-8",
        **params: Any,
    ) -> None:
        """Sets ``Content-Disposition`` header."""
        self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
            disptype, quote_fields=quote_fields, _charset=_charset, **params
        )

    @abstractmethod
    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Return string representation of the value.

        This is named decode() to allow compatibility with bytes objects.
        """

    @abstractmethod
    async def write(self, writer: AbstractStreamWriter) -> None:
        """Write payload.

        writer is an AbstractStreamWriter instance:
        """
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
class BytesPayload(Payload):
    """Payload wrapping an in-memory bytes-like object."""

    _value: bytes

    def __init__(
        self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
    ) -> None:
        if not isinstance(value, (bytes, bytearray, memoryview)):
            raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")

        kwargs.setdefault("content_type", "application/octet-stream")

        super().__init__(value, *args, **kwargs)

        # memoryview has no len(); use nbytes for the true byte count.
        self._size = value.nbytes if isinstance(value, memoryview) else len(value)

        if self._size > TOO_LARGE_BYTES_BODY:
            warnings.warn(
                "Sending a large body directly with raw bytes might"
                " lock the event loop. You should probably pass an "
                "io.BytesIO object instead",
                ResourceWarning,
                source=self,
            )

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        return self._value.decode(encoding, errors)

    async def write(self, writer: AbstractStreamWriter) -> None:
        await writer.write(self._value)
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
class StringPayload(BytesPayload):
    """Payload for a text string, encoded per the resolved charset."""

    def __init__(
        self,
        value: str,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # Resolve the effective encoding and content type together:
        # an explicit encoding wins; otherwise the charset is taken from
        # content_type, falling back to utf-8 text/plain defaults.
        if encoding is not None:
            real_encoding = encoding
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding
        elif content_type is not None:
            mimetype = parse_mimetype(content_type)
            real_encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            real_encoding = "utf-8"
            content_type = "text/plain; charset=utf-8"

        super().__init__(
            value.encode(real_encoding),
            *args,
            encoding=real_encoding,
            content_type=content_type,
            **kwargs,
        )
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
class StringIOPayload(StringPayload):
    """Payload that drains an in-memory text stream (``io.StringIO``)."""

    def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
        # The stream is consumed eagerly; the resulting str is handled
        # exactly like a plain StringPayload.
        text = value.read()
        super().__init__(text, *args, **kwargs)
|
| 295 |
+
|
| 296 |
+
|
| 297 |
+
class IOBasePayload(Payload):
    """Payload backed by a (binary) file-like object.

    The file is read in 64 KiB chunks in the default executor so blocking
    reads do not stall the event loop; the file is closed after writing.
    """

    _value: io.IOBase

    def __init__(
        self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
    ) -> None:
        # Derive a filename from the file object unless the caller gave one.
        if "filename" not in kwargs:
            kwargs["filename"] = guess_filename(value)

        super().__init__(value, *args, **kwargs)

        # Only set Content-Disposition when we have a filename and the
        # caller has not already provided the header explicitly.
        if self._filename is not None and disposition is not None:
            if hdrs.CONTENT_DISPOSITION not in self.headers:
                self.set_content_disposition(disposition, filename=self._filename)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the file to ``writer`` in 64 KiB chunks, then close it."""
        # get_running_loop() is the correct API inside a coroutine;
        # get_event_loop() is deprecated in this context since Python 3.10.
        loop = asyncio.get_running_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                await writer.write(chunk)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            # Always close, even if the writer raised mid-stream.
            await loop.run_in_executor(None, self._value.close)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Read the remaining lines and decode them as one string."""
        return "".join(r.decode(encoding, errors) for r in self._value.readlines())
|
| 324 |
+
|
| 325 |
+
|
| 326 |
+
class TextIOPayload(IOBasePayload):
    """Payload backed by a text-mode file; chunks are encoded on write."""

    _value: io.TextIOBase

    def __init__(
        self,
        value: TextIO,
        *args: Any,
        encoding: Optional[str] = None,
        content_type: Optional[str] = None,
        **kwargs: Any,
    ) -> None:
        # Same charset/content-type resolution as StringPayload: explicit
        # encoding wins, else the charset parameter of content_type, else
        # utf-8 text/plain defaults.
        if encoding is None:
            if content_type is None:
                encoding = "utf-8"
                content_type = "text/plain; charset=utf-8"
            else:
                mimetype = parse_mimetype(content_type)
                encoding = mimetype.parameters.get("charset", "utf-8")
        else:
            if content_type is None:
                content_type = "text/plain; charset=%s" % encoding

        super().__init__(
            value,
            content_type=content_type,
            encoding=encoding,
            *args,
            **kwargs,
        )

    @property
    def size(self) -> Optional[int]:
        """Remaining byte count per fstat, or None if fileno() fails.

        NOTE(review): this is the size of the remaining *raw* file bytes;
        for multi-byte encodings it may differ from the encoded output size.
        """
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except OSError:
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        # Text files are already decoded; the arguments are ignored.
        return self._value.read()

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Read text chunks in the executor, encode, and stream them."""
        # get_running_loop() is the correct API inside a coroutine;
        # get_event_loop() is deprecated in this context since Python 3.10.
        loop = asyncio.get_running_loop()
        try:
            chunk = await loop.run_in_executor(None, self._value.read, 2**16)
            while chunk:
                data = (
                    chunk.encode(encoding=self._encoding)
                    if self._encoding
                    else chunk.encode()
                )
                await writer.write(data)
                chunk = await loop.run_in_executor(None, self._value.read, 2**16)
        finally:
            await loop.run_in_executor(None, self._value.close)
|
| 381 |
+
|
| 382 |
+
|
| 383 |
+
class BytesIOPayload(IOBasePayload):
    """Payload over an ``io.BytesIO`` buffer with an exact known size."""

    _value: io.BytesIO

    @property
    def size(self) -> int:
        """Bytes remaining from the current position to the buffer end."""
        pos = self._value.tell()
        end = self._value.seek(0, os.SEEK_END)
        # Restore the read position so a later write() starts where we were.
        self._value.seek(pos)
        return end - pos

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Read the remaining buffer and decode it."""
        return self._value.read().decode(encoding, errors)
|
| 395 |
+
|
| 396 |
+
|
| 397 |
+
class BufferedReaderPayload(IOBasePayload):
    """Payload over a buffered binary reader; size is best-effort."""

    _value: io.BufferedIOBase

    @property
    def size(self) -> Optional[int]:
        """Remaining byte count per fstat, or None when it cannot be known."""
        try:
            return os.fstat(self._value.fileno()).st_size - self._value.tell()
        except (OSError, AttributeError):
            # data.fileno() is not supported, e.g.
            # io.BufferedReader(io.BytesIO(b'data'))
            # For some file-like objects (e.g. tarfile), the fileno() attribute may
            # not exist at all, and will instead raise an AttributeError.
            return None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Read the remaining data and decode it."""
        return self._value.read().decode(encoding, errors)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
class JsonPayload(BytesPayload):
    """Payload that serializes a Python object to JSON bytes."""

    def __init__(
        self,
        value: Any,
        encoding: str = "utf-8",
        content_type: str = "application/json",
        dumps: JSONEncoder = json.dumps,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        # Serialize eagerly; the result is then a plain bytes payload.
        encoded = dumps(value).encode(encoding)
        super().__init__(
            encoded,
            *args,
            content_type=content_type,
            encoding=encoding,
            **kwargs,
        )
|
| 433 |
+
|
| 434 |
+
|
| 435 |
+
# Parametrized async-iterator aliases for type checkers; at runtime the
# plain collections.abc classes are used instead.
if TYPE_CHECKING:
    from typing import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator[bytes]
    _AsyncIterable = AsyncIterable[bytes]
else:
    from collections.abc import AsyncIterable, AsyncIterator

    _AsyncIterator = AsyncIterator
    _AsyncIterable = AsyncIterable
|
| 445 |
+
|
| 446 |
+
|
| 447 |
+
class AsyncIterablePayload(Payload):
    """Payload that streams chunks from an async iterable of bytes."""

    _iter: Optional[_AsyncIterator] = None
    _value: _AsyncIterable

    def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
        if not isinstance(value, AsyncIterable):
            raise TypeError(
                "value argument must support "
                "collections.abc.AsyncIterable interface, "
                "got {!r}".format(type(value))
            )

        kwargs.setdefault("content_type", "application/octet-stream")

        super().__init__(value, *args, **kwargs)

        # Grab the iterator up front; it is consumed exactly once.
        self._iter = value.__aiter__()

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Drain the iterator into the writer; later calls are no-ops."""
        # The None check prevents rare cases where the iterable is used twice.
        if self._iter is None:
            return
        try:
            while True:
                chunk = await self._iter.__anext__()
                await writer.write(chunk)
        except StopAsyncIteration:
            self._iter = None

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Async streams cannot be decoded synchronously."""
        raise TypeError("Unable to decode.")
|
| 480 |
+
|
| 481 |
+
|
| 482 |
+
class StreamReaderPayload(AsyncIterablePayload):
    """Payload adapting an aiohttp StreamReader via its iter_any() iterator."""

    def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
        # iter_any() exposes the reader as an async iterable of byte chunks.
        super().__init__(value.iter_any(), *args, **kwargs)
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
# Default dispatch table mapping Python value types to Payload classes.
PAYLOAD_REGISTRY = PayloadRegistry()
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
PAYLOAD_REGISTRY.register(StringPayload, str)
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last for giving a chance to more specialized async iterables like
# multidict.BodyPartReaderPayload override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
|
parrot/lib/python3.10/site-packages/aiohttp/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Marker
|
parrot/lib/python3.10/site-packages/aiohttp/pytest_plugin.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import contextlib
|
| 3 |
+
import inspect
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import (
|
| 6 |
+
Any,
|
| 7 |
+
Awaitable,
|
| 8 |
+
Callable,
|
| 9 |
+
Dict,
|
| 10 |
+
Iterator,
|
| 11 |
+
Optional,
|
| 12 |
+
Protocol,
|
| 13 |
+
Type,
|
| 14 |
+
Union,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
import pytest
|
| 18 |
+
|
| 19 |
+
from .test_utils import (
|
| 20 |
+
BaseTestServer,
|
| 21 |
+
RawTestServer,
|
| 22 |
+
TestClient,
|
| 23 |
+
TestServer,
|
| 24 |
+
loop_context,
|
| 25 |
+
setup_test_loop,
|
| 26 |
+
teardown_test_loop,
|
| 27 |
+
unused_port as _unused_port,
|
| 28 |
+
)
|
| 29 |
+
from .web import Application
|
| 30 |
+
from .web_protocol import _RequestHandler
|
| 31 |
+
|
| 32 |
+
try:
|
| 33 |
+
import uvloop
|
| 34 |
+
except ImportError: # pragma: no cover
|
| 35 |
+
uvloop = None # type: ignore[assignment]
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class AiohttpClient(Protocol):
    """Callable signature of the ``aiohttp_client`` fixture's factory."""

    def __call__(
        self,
        __param: Union[Application, BaseTestServer],
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> Awaitable[TestClient]: ...
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class AiohttpServer(Protocol):
    """Callable signature of the ``aiohttp_server`` fixture's factory."""

    def __call__(
        self, app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[TestServer]: ...
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class AiohttpRawServer(Protocol):
    """Callable signature of the ``aiohttp_raw_server`` fixture's factory."""

    def __call__(
        self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[RawTestServer]: ...
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register the aiohttp-specific pytest command-line options."""
    parser.addoption(
        "--aiohttp-fast",
        action="store_true",
        default=False,
        help="run tests faster by disabling extra checks",
    )
    parser.addoption(
        "--aiohttp-loop",
        action="store",
        default="pyloop",
        help="run tests with specific loop: pyloop, uvloop or all",
    )
    parser.addoption(
        "--aiohttp-enable-loop-debug",
        action="store_true",
        default=False,
        help="enable event loop debug mode",
    )
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if inspect.isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    # The wrapper needs access to the pytest `request` object to reach the
    # 'loop' fixture; inject it if the fixture did not declare it itself,
    # and remember to strip it back out before calling the real function.
    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    # Expected: the generator yields exactly once.
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    # Replace the fixture function so pytest invokes the sync wrapper.
    fixturedef.func = wrapper
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """Return the value of the --aiohttp-fast config option."""
    return request.config.getoption("--aiohttp-fast")
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """Return the value of the --aiohttp-enable-loop-debug config option."""
    return request.config.getoption("--aiohttp-enable-loop-debug")
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
@contextlib.contextmanager
|
| 151 |
+
def _runtime_warning_context(): # type: ignore[no-untyped-def]
|
| 152 |
+
"""Context manager which checks for RuntimeWarnings.
|
| 153 |
+
|
| 154 |
+
This exists specifically to
|
| 155 |
+
avoid "coroutine 'X' was never awaited" warnings being missed.
|
| 156 |
+
|
| 157 |
+
If RuntimeWarnings occur in the context a RuntimeError is raised.
|
| 158 |
+
"""
|
| 159 |
+
with warnings.catch_warnings(record=True) as _warnings:
|
| 160 |
+
yield
|
| 161 |
+
rw = [
|
| 162 |
+
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
|
| 163 |
+
for w in _warnings
|
| 164 |
+
if w.category == RuntimeWarning
|
| 165 |
+
]
|
| 166 |
+
if rw:
|
| 167 |
+
raise RuntimeError(
|
| 168 |
+
"{} Runtime Warning{},\n{}".format(
|
| 169 |
+
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
|
| 170 |
+
)
|
| 171 |
+
)
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
    """Passthrough loop context.

    Sets up and tears down a loop unless one is passed in via the loop
    argument when it's passed straight through.
    """
    if loop:
        # loop already exists, pass it straight through
        # (no teardown: the owner of the loop is responsible for it)
        yield loop
    else:
        # this shadows loop_context's standard behavior
        loop = setup_test_loop()
        yield loop
        teardown_test_loop(loop, fast=fast)
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    # Collect async test functions as ordinary test items;
    # pytest_pyfunc_call below takes care of actually running them.
    if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
        return list(collector._genfunctions(name, obj))
|
| 195 |
+
|
| 196 |
+
|
| 197 |
+
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        # Prefer a proactor loop when the test requested one, else reuse
        # an already-created 'loop' fixture if present.
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                # Pass through only the fixtures the test function declared.
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        # Returning True tells pytest the call was handled here.
        return True
|
| 213 |
+
|
| 214 |
+
|
| 215 |
+
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from the --aiohttp-loop option.

    Loop names may carry a trailing ``?`` meaning "use if available";
    unknown required names raise ValueError.
    """
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                raise ValueError(
                    # BUG FIX: report the loops that actually exist
                    # (avail_factories), not the partially built selection.
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    # loop_factory is an event-loop *policy* class (see pytest_generate_tests).
    policy = loop_factory()
    asyncio.set_event_loop_policy(policy)
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Yield a Windows proactor event loop (Windows-only fixture)."""
    policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@pytest.fixture
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
    """Deprecated alias for the aiohttp_unused_port fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
@pytest.fixture
def aiohttp_unused_port() -> Callable[[], int]:
    """Return a port that is unused on the current host."""
    return _unused_port
|
| 283 |
+
|
| 284 |
+
|
| 285 |
+
@pytest.fixture
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    # Every server started through the factory; all are closed on teardown.
    servers = []

    async def go(
        app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> TestServer:
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    # Teardown: close the servers created by the test, newest first.
    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the aiohttp_server fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
@pytest.fixture
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    # Every raw server started through the factory; closed on teardown.
    servers = []

    async def go(
        handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> RawTestServer:
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        servers.append(server)
        return server

    yield go

    # Teardown: close the servers created by the test, newest first.
    async def finalize() -> None:
        while servers:
            await servers.pop().close()

    loop.run_until_complete(finalize())
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias for the aiohttp_raw_server fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
@pytest.fixture
def aiohttp_client(
    loop: asyncio.AbstractEventLoop,
) -> Iterator[AiohttpClient]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    # Every client started through the factory; all are closed on teardown.
    clients = []

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> TestClient:

        # Legacy calling convention: a factory callable that builds the
        # app/server is invoked with the loop and the extra arguments.
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        # Dispatch on what we were given: wrap an Application in a
        # TestServer, or use a provided test server directly.
        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    # Teardown: close the clients created by the test, newest first.
    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the aiohttp_client fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
|
parrot/lib/python3.10/site-packages/aiohttp/typedefs.py
ADDED
|
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import os
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Awaitable,
|
| 7 |
+
Callable,
|
| 8 |
+
Iterable,
|
| 9 |
+
Mapping,
|
| 10 |
+
Protocol,
|
| 11 |
+
Sequence,
|
| 12 |
+
Tuple,
|
| 13 |
+
Union,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
|
| 17 |
+
from yarl import URL
|
| 18 |
+
|
| 19 |
+
try:
    # Available in yarl>=1.10.0
    from yarl import Query as _Query
except ImportError:  # pragma: no cover
    # Fallback definition mirroring yarl's Query union for older yarl.
    SimpleQuery = Union[str, int, float]  # pragma: no cover
    QueryVariable = Union[SimpleQuery, "Sequence[SimpleQuery]"]  # pragma: no cover
    _Query = Union[  # type: ignore[misc] # pragma: no cover
        None, str, "Mapping[str, QueryVariable]", "Sequence[Tuple[str, QueryVariable]]"
    ]

# Public alias: the accepted types for URL query parameters.
Query = _Query
|
| 30 |
+
|
| 31 |
+
# Default (de)serializers used when the caller does not supply their own.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:
    # Parametrized multidict aliases for type checkers only; at runtime the
    # unsubscripted classes are used.
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

# Convenience unions accepted by aiohttp's public APIs.
Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[
    Mapping[str, str],
    Mapping[istr, str],
    _CIMultiDict,
    _CIMultiDictProxy,
    Iterable[Tuple[Union[str, istr], str]],
]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# Cookie inputs: a mapping, an iterable of pairs, or a BaseCookie.
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# A web handler: called with a Request, awaits to a StreamResponse.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class Middleware(Protocol):
    """Structural type of an aiohttp middleware.

    A middleware receives the request and the next handler in the chain
    and returns an awaitable producing the response.
    """

    def __call__(
        self, request: "Request", handler: Handler
    ) -> Awaitable["StreamResponse"]: ...
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
# Filesystem path argument: a plain string or any os.PathLike of str.
PathLike = Union[str, "os.PathLike[str]"]
|
parrot/lib/python3.10/site-packages/aiohttp/web.py
ADDED
|
@@ -0,0 +1,595 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import socket
|
| 5 |
+
import sys
|
| 6 |
+
import warnings
|
| 7 |
+
from argparse import ArgumentParser
|
| 8 |
+
from collections.abc import Iterable
|
| 9 |
+
from contextlib import suppress
|
| 10 |
+
from importlib import import_module
|
| 11 |
+
from typing import (
|
| 12 |
+
Any,
|
| 13 |
+
Awaitable,
|
| 14 |
+
Callable,
|
| 15 |
+
Iterable as TypingIterable,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Set,
|
| 19 |
+
Type,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from .abc import AbstractAccessLogger
|
| 25 |
+
from .helpers import AppKey as AppKey
|
| 26 |
+
from .log import access_logger
|
| 27 |
+
from .typedefs import PathLike
|
| 28 |
+
from .web_app import Application as Application, CleanupError as CleanupError
|
| 29 |
+
from .web_exceptions import (
|
| 30 |
+
HTTPAccepted as HTTPAccepted,
|
| 31 |
+
HTTPBadGateway as HTTPBadGateway,
|
| 32 |
+
HTTPBadRequest as HTTPBadRequest,
|
| 33 |
+
HTTPClientError as HTTPClientError,
|
| 34 |
+
HTTPConflict as HTTPConflict,
|
| 35 |
+
HTTPCreated as HTTPCreated,
|
| 36 |
+
HTTPError as HTTPError,
|
| 37 |
+
HTTPException as HTTPException,
|
| 38 |
+
HTTPExpectationFailed as HTTPExpectationFailed,
|
| 39 |
+
HTTPFailedDependency as HTTPFailedDependency,
|
| 40 |
+
HTTPForbidden as HTTPForbidden,
|
| 41 |
+
HTTPFound as HTTPFound,
|
| 42 |
+
HTTPGatewayTimeout as HTTPGatewayTimeout,
|
| 43 |
+
HTTPGone as HTTPGone,
|
| 44 |
+
HTTPInsufficientStorage as HTTPInsufficientStorage,
|
| 45 |
+
HTTPInternalServerError as HTTPInternalServerError,
|
| 46 |
+
HTTPLengthRequired as HTTPLengthRequired,
|
| 47 |
+
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
|
| 48 |
+
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
|
| 49 |
+
HTTPMove as HTTPMove,
|
| 50 |
+
HTTPMovedPermanently as HTTPMovedPermanently,
|
| 51 |
+
HTTPMultipleChoices as HTTPMultipleChoices,
|
| 52 |
+
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
|
| 53 |
+
HTTPNoContent as HTTPNoContent,
|
| 54 |
+
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
|
| 55 |
+
HTTPNotAcceptable as HTTPNotAcceptable,
|
| 56 |
+
HTTPNotExtended as HTTPNotExtended,
|
| 57 |
+
HTTPNotFound as HTTPNotFound,
|
| 58 |
+
HTTPNotImplemented as HTTPNotImplemented,
|
| 59 |
+
HTTPNotModified as HTTPNotModified,
|
| 60 |
+
HTTPOk as HTTPOk,
|
| 61 |
+
HTTPPartialContent as HTTPPartialContent,
|
| 62 |
+
HTTPPaymentRequired as HTTPPaymentRequired,
|
| 63 |
+
HTTPPermanentRedirect as HTTPPermanentRedirect,
|
| 64 |
+
HTTPPreconditionFailed as HTTPPreconditionFailed,
|
| 65 |
+
HTTPPreconditionRequired as HTTPPreconditionRequired,
|
| 66 |
+
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
|
| 67 |
+
HTTPRedirection as HTTPRedirection,
|
| 68 |
+
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
|
| 69 |
+
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
|
| 70 |
+
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
|
| 71 |
+
HTTPRequestTimeout as HTTPRequestTimeout,
|
| 72 |
+
HTTPRequestURITooLong as HTTPRequestURITooLong,
|
| 73 |
+
HTTPResetContent as HTTPResetContent,
|
| 74 |
+
HTTPSeeOther as HTTPSeeOther,
|
| 75 |
+
HTTPServerError as HTTPServerError,
|
| 76 |
+
HTTPServiceUnavailable as HTTPServiceUnavailable,
|
| 77 |
+
HTTPSuccessful as HTTPSuccessful,
|
| 78 |
+
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
|
| 79 |
+
HTTPTooManyRequests as HTTPTooManyRequests,
|
| 80 |
+
HTTPUnauthorized as HTTPUnauthorized,
|
| 81 |
+
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
|
| 82 |
+
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
|
| 83 |
+
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
|
| 84 |
+
HTTPUpgradeRequired as HTTPUpgradeRequired,
|
| 85 |
+
HTTPUseProxy as HTTPUseProxy,
|
| 86 |
+
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
|
| 87 |
+
HTTPVersionNotSupported as HTTPVersionNotSupported,
|
| 88 |
+
NotAppKeyWarning as NotAppKeyWarning,
|
| 89 |
+
)
|
| 90 |
+
from .web_fileresponse import FileResponse as FileResponse
|
| 91 |
+
from .web_log import AccessLogger
|
| 92 |
+
from .web_middlewares import (
|
| 93 |
+
middleware as middleware,
|
| 94 |
+
normalize_path_middleware as normalize_path_middleware,
|
| 95 |
+
)
|
| 96 |
+
from .web_protocol import (
|
| 97 |
+
PayloadAccessError as PayloadAccessError,
|
| 98 |
+
RequestHandler as RequestHandler,
|
| 99 |
+
RequestPayloadError as RequestPayloadError,
|
| 100 |
+
)
|
| 101 |
+
from .web_request import (
|
| 102 |
+
BaseRequest as BaseRequest,
|
| 103 |
+
FileField as FileField,
|
| 104 |
+
Request as Request,
|
| 105 |
+
)
|
| 106 |
+
from .web_response import (
|
| 107 |
+
ContentCoding as ContentCoding,
|
| 108 |
+
Response as Response,
|
| 109 |
+
StreamResponse as StreamResponse,
|
| 110 |
+
json_response as json_response,
|
| 111 |
+
)
|
| 112 |
+
from .web_routedef import (
|
| 113 |
+
AbstractRouteDef as AbstractRouteDef,
|
| 114 |
+
RouteDef as RouteDef,
|
| 115 |
+
RouteTableDef as RouteTableDef,
|
| 116 |
+
StaticDef as StaticDef,
|
| 117 |
+
delete as delete,
|
| 118 |
+
get as get,
|
| 119 |
+
head as head,
|
| 120 |
+
options as options,
|
| 121 |
+
patch as patch,
|
| 122 |
+
post as post,
|
| 123 |
+
put as put,
|
| 124 |
+
route as route,
|
| 125 |
+
static as static,
|
| 126 |
+
view as view,
|
| 127 |
+
)
|
| 128 |
+
from .web_runner import (
|
| 129 |
+
AppRunner as AppRunner,
|
| 130 |
+
BaseRunner as BaseRunner,
|
| 131 |
+
BaseSite as BaseSite,
|
| 132 |
+
GracefulExit as GracefulExit,
|
| 133 |
+
NamedPipeSite as NamedPipeSite,
|
| 134 |
+
ServerRunner as ServerRunner,
|
| 135 |
+
SockSite as SockSite,
|
| 136 |
+
TCPSite as TCPSite,
|
| 137 |
+
UnixSite as UnixSite,
|
| 138 |
+
)
|
| 139 |
+
from .web_server import Server as Server
|
| 140 |
+
from .web_urldispatcher import (
|
| 141 |
+
AbstractResource as AbstractResource,
|
| 142 |
+
AbstractRoute as AbstractRoute,
|
| 143 |
+
DynamicResource as DynamicResource,
|
| 144 |
+
PlainResource as PlainResource,
|
| 145 |
+
PrefixedSubAppResource as PrefixedSubAppResource,
|
| 146 |
+
Resource as Resource,
|
| 147 |
+
ResourceRoute as ResourceRoute,
|
| 148 |
+
StaticResource as StaticResource,
|
| 149 |
+
UrlDispatcher as UrlDispatcher,
|
| 150 |
+
UrlMappingMatchInfo as UrlMappingMatchInfo,
|
| 151 |
+
View as View,
|
| 152 |
+
)
|
| 153 |
+
from .web_ws import (
|
| 154 |
+
WebSocketReady as WebSocketReady,
|
| 155 |
+
WebSocketResponse as WebSocketResponse,
|
| 156 |
+
WSMsgType as WSMsgType,
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
# Explicit public API of aiohttp.web, grouped by the submodule each name is
# re-exported from.  Keep in sync with the import block above.
__all__ = (
    # web_app
    "AppKey",
    "Application",
    "CleanupError",
    # web_exceptions
    "NotAppKeyWarning",
    "HTTPAccepted",
    "HTTPBadGateway",
    "HTTPBadRequest",
    "HTTPClientError",
    "HTTPConflict",
    "HTTPCreated",
    "HTTPError",
    "HTTPException",
    "HTTPExpectationFailed",
    "HTTPFailedDependency",
    "HTTPForbidden",
    "HTTPFound",
    "HTTPGatewayTimeout",
    "HTTPGone",
    "HTTPInsufficientStorage",
    "HTTPInternalServerError",
    "HTTPLengthRequired",
    "HTTPMethodNotAllowed",
    "HTTPMisdirectedRequest",
    "HTTPMove",
    "HTTPMovedPermanently",
    "HTTPMultipleChoices",
    "HTTPNetworkAuthenticationRequired",
    "HTTPNoContent",
    "HTTPNonAuthoritativeInformation",
    "HTTPNotAcceptable",
    "HTTPNotExtended",
    "HTTPNotFound",
    "HTTPNotImplemented",
    "HTTPNotModified",
    "HTTPOk",
    "HTTPPartialContent",
    "HTTPPaymentRequired",
    "HTTPPermanentRedirect",
    "HTTPPreconditionFailed",
    "HTTPPreconditionRequired",
    "HTTPProxyAuthenticationRequired",
    "HTTPRedirection",
    "HTTPRequestEntityTooLarge",
    "HTTPRequestHeaderFieldsTooLarge",
    "HTTPRequestRangeNotSatisfiable",
    "HTTPRequestTimeout",
    "HTTPRequestURITooLong",
    "HTTPResetContent",
    "HTTPSeeOther",
    "HTTPServerError",
    "HTTPServiceUnavailable",
    "HTTPSuccessful",
    "HTTPTemporaryRedirect",
    "HTTPTooManyRequests",
    "HTTPUnauthorized",
    "HTTPUnavailableForLegalReasons",
    "HTTPUnprocessableEntity",
    "HTTPUnsupportedMediaType",
    "HTTPUpgradeRequired",
    "HTTPUseProxy",
    "HTTPVariantAlsoNegotiates",
    "HTTPVersionNotSupported",
    # web_fileresponse
    "FileResponse",
    # web_middlewares
    "middleware",
    "normalize_path_middleware",
    # web_protocol
    "PayloadAccessError",
    "RequestHandler",
    "RequestPayloadError",
    # web_request
    "BaseRequest",
    "FileField",
    "Request",
    # web_response
    "ContentCoding",
    "Response",
    "StreamResponse",
    "json_response",
    # web_routedef
    "AbstractRouteDef",
    "RouteDef",
    "RouteTableDef",
    "StaticDef",
    "delete",
    "get",
    "head",
    "options",
    "patch",
    "post",
    "put",
    "route",
    "static",
    "view",
    # web_runner
    "AppRunner",
    "BaseRunner",
    "BaseSite",
    "GracefulExit",
    "ServerRunner",
    "SockSite",
    "TCPSite",
    "UnixSite",
    "NamedPipeSite",
    # web_server
    "Server",
    # web_urldispatcher
    "AbstractResource",
    "AbstractRoute",
    "DynamicResource",
    "PlainResource",
    "PrefixedSubAppResource",
    "Resource",
    "ResourceRoute",
    "StaticResource",
    "UrlDispatcher",
    "UrlMappingMatchInfo",
    "View",
    # web_ws
    "WebSocketReady",
    "WebSocketResponse",
    "WSMsgType",
    # web
    "run_app",
)


# ssl is optional (CPython may be built without it); fall back to Any so the
# annotations below still evaluate.
try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    SSLContext = Any  # type: ignore[misc,assignment]

# Only display warning when using -Wdefault, -We, -X dev or similar.
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)

# One or many hostnames, as accepted by run_app()'s ``host`` argument.
HostSequence = TypingIterable[str]
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
) -> None:
    """Async core of :func:`run_app`.

    Sets up an :class:`AppRunner`, binds one site per requested endpoint
    (TCP host(s), Unix path(s), and/or pre-made socket(s)), starts them,
    then sleeps until cancelled.  Cleanup of the runner is guaranteed by
    the ``finally`` clause.  Expected to be cancelled from outside
    (run_app cancels the task on KeyboardInterrupt / GracefulExit).
    """
    # An internal function to actually do all dirty job for application running
    if asyncio.iscoroutine(app):
        app = await app

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
        shutdown_timeout=shutdown_timeout,
        handler_cancellation=handler_cancellation,
    )

    await runner.setup()

    sites: List[BaseSite] = []

    try:
        if host is not None:
            # A single string binds one TCP site; any other iterable binds
            # one site per hostname, all sharing the same port/options.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # Note: `and` binds tighter than `or`, so this reads as
        # "(no path and no sock) or an explicit port was given" — i.e. bind a
        # default TCP site unless the caller supplied only path/sock endpoints.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # Single path vs iterable of paths, mirroring the host handling.
            if isinstance(path, (str, os.PathLike)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # A socket object is not Iterable; an iterable yields many sockets.
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        # the loop only exits via cancellation from run_app()
        while True:
            await asyncio.sleep(3600)
    finally:
        await runner.cleanup()
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def _cancel_tasks(
|
| 440 |
+
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
|
| 441 |
+
) -> None:
|
| 442 |
+
if not to_cancel:
|
| 443 |
+
return
|
| 444 |
+
|
| 445 |
+
for task in to_cancel:
|
| 446 |
+
task.cancel()
|
| 447 |
+
|
| 448 |
+
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
|
| 449 |
+
|
| 450 |
+
for task in to_cancel:
|
| 451 |
+
if task.cancelled():
|
| 452 |
+
continue
|
| 453 |
+
if task.exception() is not None:
|
| 454 |
+
loop.call_exception_handler(
|
| 455 |
+
{
|
| 456 |
+
"message": "unhandled exception during asyncio.run() shutdown",
|
| 457 |
+
"exception": task.exception(),
|
| 458 |
+
"task": task,
|
| 459 |
+
}
|
| 460 |
+
)
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally.

    Blocking entry point: creates (or reuses) an event loop, runs
    :func:`_run_app` on it until interrupted, then tears everything down —
    cancelling the main task, draining remaining tasks, shutting down async
    generators and closing the loop.
    """
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
            handler_cancellation=handler_cancellation,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        # Shutdown ordering matters: first let the main task unwind (which
        # runs runner.cleanup()), then cancel whatever is still pending,
        # and only then close the loop.
        try:
            main_task.cancel()
            with suppress(asyncio.CancelledError):
                loop.run_until_complete(main_task)
        finally:
            _cancel_tasks(asyncio.all_tasks(loop), loop)
            loop.run_until_complete(loop.shutdown_asyncgens())
            loop.close()
|
| 531 |
+
|
| 532 |
+
|
| 533 |
+
def main(argv: List[str]) -> None:
    """CLI entry point (``python -m aiohttp.web module:function [options]``).

    Parses the command line, imports the given ``module:function`` entry
    point, calls it with any unrecognised arguments, and serves the returned
    Application via :func:`run_app`.  All validation failures are reported
    through ``arg_parser.error`` which prints a message and raises SystemExit.
    """
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost",
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080",
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
        "hostname and port arguments to be ignored.",
    )
    # parse_known_args: leftover argv is forwarded to the entry function below.
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating" " environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    # The entry function receives the unparsed remainder of the command line.
    app = func(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    arg_parser.exit(message="Stopped\n")
|
| 592 |
+
|
| 593 |
+
|
| 594 |
+
if __name__ == "__main__": # pragma: no branch
|
| 595 |
+
main(sys.argv[1:]) # pragma: no cover
|
parrot/lib/python3.10/site-packages/aiohttp/web_fileresponse.py
ADDED
|
@@ -0,0 +1,364 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import os
|
| 3 |
+
import pathlib
|
| 4 |
+
import sys
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from mimetypes import MimeTypes
|
| 7 |
+
from stat import S_ISREG
|
| 8 |
+
from types import MappingProxyType
|
| 9 |
+
from typing import ( # noqa
|
| 10 |
+
IO,
|
| 11 |
+
TYPE_CHECKING,
|
| 12 |
+
Any,
|
| 13 |
+
Awaitable,
|
| 14 |
+
Callable,
|
| 15 |
+
Final,
|
| 16 |
+
Iterator,
|
| 17 |
+
List,
|
| 18 |
+
Optional,
|
| 19 |
+
Tuple,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from . import hdrs
|
| 25 |
+
from .abc import AbstractStreamWriter
|
| 26 |
+
from .helpers import ETAG_ANY, ETag, must_be_empty_body
|
| 27 |
+
from .typedefs import LooseHeaders, PathLike
|
| 28 |
+
from .web_exceptions import (
|
| 29 |
+
HTTPForbidden,
|
| 30 |
+
HTTPNotFound,
|
| 31 |
+
HTTPNotModified,
|
| 32 |
+
HTTPPartialContent,
|
| 33 |
+
HTTPPreconditionFailed,
|
| 34 |
+
HTTPRequestRangeNotSatisfiable,
|
| 35 |
+
)
|
| 36 |
+
from .web_response import StreamResponse
|
| 37 |
+
|
| 38 |
+
__all__ = ("FileResponse",)

if TYPE_CHECKING:
    from .web_request import BaseRequest


# Optional per-chunk callback signature (awaited with each chunk of bytes).
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


# Escape hatch: set the AIOHTTP_NOSENDFILE env var to force the chunked
# read/write fallback instead of loop.sendfile().
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))

CONTENT_TYPES: Final[MimeTypes] = MimeTypes()

# ".br" (Brotli) was only added to mimetypes' encodings_map in Python 3.9.
if sys.version_info < (3, 9):
    CONTENT_TYPES.encodings_map[".br"] = "br"

# File extension to IANA encodings map that will be checked in the order defined.
ENCODING_EXTENSIONS = MappingProxyType(
    {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")}
)

FALLBACK_CONTENT_TYPE = "application/octet-stream"

# Provide additional MIME type/extension pairs to be recognized.
# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only
ADDITIONAL_CONTENT_TYPES = MappingProxyType(
    {
        "application/gzip": ".gz",
        "application/x-brotli": ".br",
        "application/x-compress": ".Z",
        "application/x-bzip2": ".bz2",
        "application/x-xz": ".xz",
    }
)

# Add custom pairs and clear the encodings map so guess_type ignores them.
CONTENT_TYPES.encodings_map.clear()
for content_type, extension in ADDITIONAL_CONTENT_TYPES.items():
    CONTENT_TYPES.add_type(content_type, extension)  # type: ignore[attr-defined]
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class FileResponse(StreamResponse):
|
| 80 |
+
"""A response object can be used to send files."""
|
| 81 |
+
|
| 82 |
+
    def __init__(
        self,
        path: PathLike,
        chunk_size: int = 256 * 1024,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        """Create a response that serves the file at *path*.

        :param path: filesystem path of the file to send (str or os.PathLike).
        :param chunk_size: read size used by the non-sendfile fallback.
        :param status: initial HTTP status (may be changed during prepare,
            e.g. to 304/412/206 depending on conditional/range headers).
        :param reason: optional custom reason phrase.
        :param headers: extra response headers.
        """
        super().__init__(status=status, reason=reason, headers=headers)

        # Normalise to pathlib.Path; stat/open happen later during prepare.
        self._path = pathlib.Path(path)
        self._chunk_size = chunk_size
|
| 95 |
+
async def _sendfile_fallback(
|
| 96 |
+
self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
|
| 97 |
+
) -> AbstractStreamWriter:
|
| 98 |
+
# To keep memory usage low,fobj is transferred in chunks
|
| 99 |
+
# controlled by the constructor's chunk_size argument.
|
| 100 |
+
|
| 101 |
+
chunk_size = self._chunk_size
|
| 102 |
+
loop = asyncio.get_event_loop()
|
| 103 |
+
|
| 104 |
+
await loop.run_in_executor(None, fobj.seek, offset)
|
| 105 |
+
|
| 106 |
+
chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
|
| 107 |
+
while chunk:
|
| 108 |
+
await writer.write(chunk)
|
| 109 |
+
count = count - chunk_size
|
| 110 |
+
if count <= 0:
|
| 111 |
+
break
|
| 112 |
+
chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
|
| 113 |
+
|
| 114 |
+
await writer.drain()
|
| 115 |
+
return writer
|
| 116 |
+
|
| 117 |
+
    async def _sendfile(
        self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        """Send *count* bytes of *fobj* from *offset*, preferring os-level
        sendfile and falling back to chunked writes when it is unavailable.
        """
        # prepare() writes the headers and yields the stream writer.
        writer = await super().prepare(request)
        assert writer is not None

        # Compressed responses must go through the fallback: sendfile copies
        # raw file bytes and cannot apply the content encoding.
        if NOSENDFILE or self.compression:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        loop = request._loop
        transport = request.transport
        assert transport is not None

        try:
            await loop.sendfile(transport, fobj, offset, count)
        except NotImplementedError:
            # Event loop/transport combination lacks sendfile support.
            return await self._sendfile_fallback(writer, fobj, offset, count)

        await super().write_eof()
        return writer
|
| 137 |
+
|
| 138 |
+
@staticmethod
def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool:
    """Return True if *etag_value* matches any entry in *etags*.

    A single ``*`` entry matches everything. Weak validators are only
    considered when *weak* comparison is requested (If-None-Match uses
    weak comparison, If-Match uses strong comparison).
    """
    if len(etags) == 1 and etags[0].value == ETAG_ANY:
        return True
    for candidate in etags:
        if candidate.is_weak and not weak:
            continue
        if candidate.value == etag_value:
            return True
    return False
|
| 145 |
+
|
| 146 |
+
async def _not_modified(
    self, request: "BaseRequest", etag_value: str, last_modified: float
) -> Optional[AbstractStreamWriter]:
    """Reply with 304 Not Modified, carrying the current validators.

    The ETag and Last-Modified headers are still sent so caches can
    refresh their stored validators.
    """
    self.set_status(HTTPNotModified.status_code)
    # A 304 body is always empty; skip the Content-Length consistency check.
    self._length_check = False
    self.etag = etag_value  # type: ignore[assignment]
    self.last_modified = last_modified  # type: ignore[assignment]
    # Delete any Content-Length headers provided by user. HTTP 304
    # should always have empty response body
    return await super().prepare(request)
|
| 156 |
+
|
| 157 |
+
async def _precondition_failed(
    self, request: "BaseRequest"
) -> Optional[AbstractStreamWriter]:
    """Reply with 412 Precondition Failed and an empty body.

    Used when If-Match / If-Unmodified-Since preconditions do not hold.
    """
    self.set_status(HTTPPreconditionFailed.status_code)
    self.content_length = 0
    return await super().prepare(request)
|
| 163 |
+
|
| 164 |
+
def _get_file_path_stat_encoding(
    self, accept_encoding: str
) -> Tuple[pathlib.Path, os.stat_result, Optional[str]]:
    """Return the file path, stat result, and encoding.

    If an uncompressed file is returned, the encoding is set to
    :py:data:`None`.

    This method should be called from a thread executor
    since it calls os.stat which may block.
    """
    file_path = self._path
    for file_extension, file_encoding in ENCODING_EXTENSIONS.items():
        # NOTE: substring containment is a cheap heuristic over the
        # (lower-cased) Accept-Encoding header rather than a full parse.
        if file_encoding not in accept_encoding:
            continue

        # Look for a sibling pre-compressed file, e.g. "app.js" -> "app.js.gz".
        compressed_path = file_path.with_suffix(file_path.suffix + file_extension)
        with suppress(OSError):
            # Do not follow symlinks and ignore any non-regular files.
            st = compressed_path.lstat()
            if S_ISREG(st.st_mode):
                return compressed_path, st, file_encoding

    # Fallback to the uncompressed file
    return file_path, file_path.stat(), None
|
| 189 |
+
|
| 190 |
+
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
    """Evaluate conditional/range headers and send the file response.

    Order of evaluation follows RFC 9110: stat the file (404/403 on
    failure), then If-Match, If-Unmodified-Since, If-None-Match,
    If-Modified-Since, then If-Range/Range, and finally stream the
    selected byte range.
    """
    loop = asyncio.get_running_loop()
    # Encoding comparisons should be case-insensitive
    # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
    accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
    try:
        # stat() blocks, so it runs in the default executor.
        file_path, st, file_encoding = await loop.run_in_executor(
            None, self._get_file_path_stat_encoding, accept_encoding
        )
    except OSError:
        # Most likely to be FileNotFoundError or OSError for circular
        # symlinks in python >= 3.13, so respond with 404.
        self.set_status(HTTPNotFound.status_code)
        return await super().prepare(request)

    # Forbid special files like sockets, pipes, devices, etc.
    if not S_ISREG(st.st_mode):
        self.set_status(HTTPForbidden.status_code)
        return await super().prepare(request)

    # Validator derived from mtime (ns) and size, hex-encoded.
    etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
    last_modified = st.st_mtime

    # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2
    ifmatch = request.if_match
    if ifmatch is not None and not self._etag_match(
        etag_value, ifmatch, weak=False
    ):
        return await self._precondition_failed(request)

    unmodsince = request.if_unmodified_since
    if (
        unmodsince is not None
        and ifmatch is None
        and st.st_mtime > unmodsince.timestamp()
    ):
        return await self._precondition_failed(request)

    # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2
    ifnonematch = request.if_none_match
    if ifnonematch is not None and self._etag_match(
        etag_value, ifnonematch, weak=True
    ):
        return await self._not_modified(request, etag_value, last_modified)

    modsince = request.if_modified_since
    if (
        modsince is not None
        and ifnonematch is None
        and st.st_mtime <= modsince.timestamp()
    ):
        return await self._not_modified(request, etag_value, last_modified)

    status = self._status
    file_size = st.st_size
    count = file_size

    start = None

    ifrange = request.if_range
    if ifrange is None or st.st_mtime <= ifrange.timestamp():
        # If-Range header check:
        # condition = cached date >= last modification date
        # return 206 if True else 200.
        # if False:
        #   Range header would not be processed, return 200
        # if True but Range header missing
        #   return 200
        try:
            rng = request.http_range
            start = rng.start
            end = rng.stop
        except ValueError:
            # https://tools.ietf.org/html/rfc7233:
            # A server generating a 416 (Range Not Satisfiable) response to
            # a byte-range request SHOULD send a Content-Range header field
            # with an unsatisfied-range value.
            # The complete-length in a 416 response indicates the current
            # length of the selected representation.
            #
            # Will do the same below. Many servers ignore this and do not
            # send a Content-Range header with HTTP 416
            self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
            self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
            return await super().prepare(request)

        # If a range request has been made, convert start, end slice
        # notation into file pointer offset and count
        if start is not None or end is not None:
            if start < 0 and end is None:  # return tail of file
                start += file_size
                if start < 0:
                    # if Range:bytes=-1000 in request header but file size
                    # is only 200, there would be trouble without this
                    start = 0
                count = file_size - start
            else:
                # rfc7233:If the last-byte-pos value is
                # absent, or if the value is greater than or equal to
                # the current length of the representation data,
                # the byte range is interpreted as the remainder
                # of the representation (i.e., the server replaces the
                # value of last-byte-pos with a value that is one less than
                # the current length of the selected representation).
                count = (
                    min(end if end is not None else file_size, file_size) - start
                )

            if start >= file_size:
                # HTTP 416 should be returned in this case.
                #
                # According to https://tools.ietf.org/html/rfc7233:
                # If a valid byte-range-set includes at least one
                # byte-range-spec with a first-byte-pos that is less than
                # the current length of the representation, or at least one
                # suffix-byte-range-spec with a non-zero suffix-length,
                # then the byte-range-set is satisfiable. Otherwise, the
                # byte-range-set is unsatisfiable.
                self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                return await super().prepare(request)

            status = HTTPPartialContent.status_code
            # Even though you are sending the whole file, you should still
            # return a HTTP 206 for a Range request.
    self.set_status(status)

    # If the Content-Type header is not already set, guess it based on the
    # extension of the request path. The encoding returned by guess_type
    # can be ignored since the map was cleared above.
    if hdrs.CONTENT_TYPE not in self.headers:
        self.content_type = (
            CONTENT_TYPES.guess_type(self._path)[0] or FALLBACK_CONTENT_TYPE
        )

    if file_encoding:
        self.headers[hdrs.CONTENT_ENCODING] = file_encoding
        self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
        # Disable compression if we are already sending
        # a compressed file since we don't want to double
        # compress.
        self._compression = False

    self.etag = etag_value  # type: ignore[assignment]
    self.last_modified = st.st_mtime  # type: ignore[assignment]
    self.content_length = count

    self.headers[hdrs.ACCEPT_RANGES] = "bytes"

    # start is only None on the 200 path; the cast is for the type checker.
    real_start = cast(int, start)

    if status == HTTPPartialContent.status_code:
        self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
            real_start, real_start + count - 1, file_size
        )

    # If we are sending 0 bytes calling sendfile() will throw a ValueError
    if count == 0 or must_be_empty_body(request.method, self.status):
        return await super().prepare(request)

    try:
        # open() blocks, so it runs in the default executor.
        fobj = await loop.run_in_executor(None, file_path.open, "rb")
    except PermissionError:
        self.set_status(HTTPForbidden.status_code)
        return await super().prepare(request)

    if start:  # be aware that start could be None or int=0 here.
        offset = start
    else:
        offset = 0

    try:
        return await self._sendfile(request, fobj, offset, count)
    finally:
        # Shield the close so cancellation cannot leak the file handle.
        await asyncio.shield(loop.run_in_executor(None, fobj.close))
|
parrot/lib/python3.10/site-packages/aiohttp/web_middlewares.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from typing import TYPE_CHECKING, Tuple, Type, TypeVar
|
| 3 |
+
|
| 4 |
+
from .typedefs import Handler, Middleware
|
| 5 |
+
from .web_exceptions import HTTPMove, HTTPPermanentRedirect
|
| 6 |
+
from .web_request import Request
|
| 7 |
+
from .web_response import StreamResponse
|
| 8 |
+
from .web_urldispatcher import SystemRoute
|
| 9 |
+
|
| 10 |
+
# Public API of this module.
__all__ = (
    "middleware",
    "normalize_path_middleware",
)

# Application is only needed for type annotations; avoid an import cycle.
if TYPE_CHECKING:
    from .web_app import Application

# Generic placeholder for the decorated callable in middleware().
_Func = TypeVar("_Func")
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    """Check whether *request*, re-targeted at *path*, resolves to a route.

    Returns ``(True, rerouted_request)`` when the alternate path matches a
    real handler, otherwise ``(False, original_request)``.
    """
    candidate = request.clone(rel_url=path)

    resolved = await request.app.router.resolve(candidate)
    candidate._match_info = resolved

    if resolved.http_exception is not None:
        return False, request

    return True, candidate
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def middleware(f: _Func) -> _Func:
    """Mark *f* as a new-style (version 1) middleware and return it unchanged."""
    setattr(f, "__middleware_version__", 1)
    return f
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[HTTPMove] = HTTPPermanentRedirect,
) -> Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
      - Add or remove a trailing slash to the path.
      - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order when both merge and append/remove are enabled is
      1) merge slashes
      2) append/remove slash
      3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
    the middleware will remove trailing slashes and redirect if the resource
    is defined

    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        # Only attempt normalization when routing failed (SystemRoute is
        # the synthetic route used for 404/405 responses).
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            # Split off the query string; it is re-appended to the redirect
            # target untouched.
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            # Candidate rewrites, tried in documented order.
            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                # Collapse leading slashes so the redirect Location cannot
                # be interpreted as a protocol-relative URL (open redirect).
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
def _fix_request_current_app(app: "Application") -> Middleware:
    """Return a middleware that pins ``match_info.current_app`` to *app*.

    The previous value is restored after the handler finishes, even if it
    raises.
    """

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        match_info = request.match_info
        saved_app = match_info.current_app
        match_info.current_app = app
        try:
            return await handler(request)
        finally:
            # Restore whatever app was current before this sub-app handled
            # the request.
            match_info.current_app = saved_app

    return impl
|
parrot/lib/python3.10/site-packages/aiohttp/web_protocol.py
ADDED
|
@@ -0,0 +1,736 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import asyncio.streams
|
| 3 |
+
import sys
|
| 4 |
+
import traceback
|
| 5 |
+
import warnings
|
| 6 |
+
from collections import deque
|
| 7 |
+
from contextlib import suppress
|
| 8 |
+
from html import escape as html_escape
|
| 9 |
+
from http import HTTPStatus
|
| 10 |
+
from logging import Logger
|
| 11 |
+
from typing import (
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Awaitable,
|
| 15 |
+
Callable,
|
| 16 |
+
Deque,
|
| 17 |
+
Optional,
|
| 18 |
+
Sequence,
|
| 19 |
+
Tuple,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
import attr
|
| 26 |
+
import yarl
|
| 27 |
+
|
| 28 |
+
from .abc import AbstractAccessLogger, AbstractStreamWriter
|
| 29 |
+
from .base_protocol import BaseProtocol
|
| 30 |
+
from .helpers import ceil_timeout
|
| 31 |
+
from .http import (
|
| 32 |
+
HttpProcessingError,
|
| 33 |
+
HttpRequestParser,
|
| 34 |
+
HttpVersion10,
|
| 35 |
+
RawRequestMessage,
|
| 36 |
+
StreamWriter,
|
| 37 |
+
)
|
| 38 |
+
from .log import access_logger, server_logger
|
| 39 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 40 |
+
from .tcp_helpers import tcp_keepalive
|
| 41 |
+
from .web_exceptions import HTTPException, HTTPInternalServerError
|
| 42 |
+
from .web_log import AccessLogger
|
| 43 |
+
from .web_request import BaseRequest
|
| 44 |
+
from .web_response import Response, StreamResponse
|
| 45 |
+
|
| 46 |
+
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")

# Server is only needed for type annotations; avoid an import cycle.
if TYPE_CHECKING:
    from .web_server import Server


# Factory signature used by the Server to build BaseRequest objects.
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Coroutine that turns a request into a response.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

# Placeholder request message used when parsing fails before a real
# request line/headers could be read.
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)
|
| 77 |
+
|
| 78 |
+
|
| 79 |
+
class RequestPayloadError(Exception):
    """Payload parsing error."""
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
# Shared singleton set as the payload exception once a response is sent.
_PAYLOAD_ACCESS_ERROR = PayloadAccessError()
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    """Immutable record describing a parse failure to report to the client."""

    status: int  # HTTP status code to answer with
    exc: BaseException  # the original parser exception
    message: str  # human-readable error text
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
# One queued item: a parsed request (or error info) plus its payload stream.
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.

    RequestHandler handles incoming HTTP request. It reads request line,
    request headers and request payload and calls handle_request() method.
    By default it always returns with 404 response.

    RequestHandler handles errors in incoming request, like bad
    status line, bad headers or incomplete payload. If any error occurs,
    connection gets closed.

    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    tcp_keepalive -- TCP keep-alive is on, default is on

    debug -- enable debug mode

    logger -- custom logger object

    access_log_class -- custom class for access_logger

    access_log -- custom logging object

    access_log_format -- access log format string

    loop -- Optional event loop

    max_line_size -- Optional maximum header line size

    max_field_size -- Optional maximum header field size

    max_headers -- Optional maximum header size

    timeout_ceil_threshold -- Optional value to specify
                              threshold to ceil() timeout
                              values

    """

    # __slots__ keeps per-connection instances small; one RequestHandler
    # exists per client connection.
    __slots__ = (
        "_request_count",
        "_keepalive",
        "_manager",
        "_request_handler",
        "_request_factory",
        "_tcp_keepalive",
        "_next_keepalive_close_time",
        "_keepalive_handle",
        "_keepalive_timeout",
        "_lingering_time",
        "_messages",
        "_message_tail",
        "_handler_waiter",
        "_waiter",
        "_task_handler",
        "_upgrade",
        "_payload_parser",
        "_request_parser",
        "_reading_paused",
        "logger",
        "debug",
        "access_log",
        "access_logger",
        "_close",
        "_force_close",
        "_current_request",
        "_timeout_ceil_threshold",
        "_request_in_progress",
    )
|
| 170 |
+
|
| 171 |
+
def __init__(
    self,
    manager: "Server",
    *,
    loop: asyncio.AbstractEventLoop,
    keepalive_timeout: float = 75.0,  # NGINX default is 75 secs
    tcp_keepalive: bool = True,
    logger: Logger = server_logger,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log: Logger = access_logger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    debug: bool = False,
    max_line_size: int = 8190,
    max_headers: int = 32768,
    max_field_size: int = 8190,
    lingering_time: float = 10.0,
    read_bufsize: int = 2**16,
    auto_decompress: bool = True,
    timeout_ceil_threshold: float = 5,
):
    """Initialize per-connection state; see the class docstring for args."""
    super().__init__(loop)

    self._request_count = 0
    self._keepalive = False
    self._current_request: Optional[BaseRequest] = None
    # The owning Server supplies the request factory and handler callables.
    self._manager: Optional[Server] = manager
    self._request_handler: Optional[_RequestHandler] = manager.request_handler
    self._request_factory: Optional[_RequestFactory] = manager.request_factory

    self._tcp_keepalive = tcp_keepalive
    # placeholder to be replaced on keepalive timeout setup
    self._next_keepalive_close_time = 0.0
    self._keepalive_handle: Optional[asyncio.Handle] = None
    self._keepalive_timeout = keepalive_timeout
    self._lingering_time = float(lingering_time)

    # Parsed-but-not-yet-handled messages, plus bytes received after an
    # upgrade/parser switch that belong to the next parser.
    self._messages: Deque[_MsgType] = deque()
    self._message_tail = b""

    self._waiter: Optional[asyncio.Future[None]] = None
    self._handler_waiter: Optional[asyncio.Future[None]] = None
    self._task_handler: Optional[asyncio.Task[None]] = None

    self._upgrade = False
    self._payload_parser: Any = None
    self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
        self,
        loop,
        read_bufsize,
        max_line_size=max_line_size,
        max_field_size=max_field_size,
        max_headers=max_headers,
        payload_exception=RequestPayloadError,
        auto_decompress=auto_decompress,
    )

    # Fall back to 5 if the provided threshold is not a number.
    self._timeout_ceil_threshold: float = 5
    try:
        self._timeout_ceil_threshold = float(timeout_ceil_threshold)
    except (TypeError, ValueError):
        pass

    self.logger = logger
    self.debug = debug
    self.access_log = access_log
    if access_log:
        self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
            access_log, access_log_format
        )
    else:
        # Access logging disabled.
        self.access_logger = None

    self._close = False
    self._force_close = False
    self._request_in_progress = False
|
| 246 |
+
|
| 247 |
+
def __repr__(self) -> str:
    """Short debug representation showing the connection state."""
    state = "connected" if self.transport is not None else "disconnected"
    return "<{} {}>".format(self.__class__.__name__, state)
|
| 252 |
+
|
| 253 |
+
@property
def keepalive_timeout(self) -> float:
    """Seconds an idle keep-alive connection stays open."""
    return self._keepalive_timeout
|
| 256 |
+
|
| 257 |
+
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
    """Do worker process exit preparations.

    We need to clean up everything and stop accepting requests.
    It is especially important for keep-alive connections.
    """
    self._force_close = True

    if self._keepalive_handle is not None:
        self._keepalive_handle.cancel()

    # Wait for graceful handler completion
    if self._request_in_progress:
        # The future is only created when we are shutting
        # down while the handler is still processing a request
        # to avoid creating a future for every request.
        self._handler_waiter = self._loop.create_future()
        try:
            async with ceil_timeout(timeout):
                await self._handler_waiter
        except (asyncio.CancelledError, asyncio.TimeoutError):
            self._handler_waiter = None
            # On 3.11+ re-raise if an outer scope is cancelling us, so
            # external cancellation is not swallowed by the timeout.
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise
    # Then cancel handler and wait
    try:
        async with ceil_timeout(timeout):
            if self._current_request is not None:
                self._current_request._cancel(asyncio.CancelledError())

            if self._task_handler is not None and not self._task_handler.done():
                # Shield so our own timeout does not cancel the handler task.
                await asyncio.shield(self._task_handler)
    except (asyncio.CancelledError, asyncio.TimeoutError):
        # Same external-cancellation propagation rule as above.
        if (
            sys.version_info >= (3, 11)
            and (task := asyncio.current_task())
            and task.cancelling()
        ):
            raise

    # force-close non-idle handler
    if self._task_handler is not None:
        self._task_handler.cancel()

    self.force_close()
|
| 306 |
+
|
| 307 |
+
def connection_made(self, transport: asyncio.BaseTransport) -> None:
    """Register the new connection and start the request-processing task."""
    super().connection_made(transport)

    real_transport = cast(asyncio.Transport, transport)
    if self._tcp_keepalive:
        tcp_keepalive(real_transport)

    assert self._manager is not None
    self._manager.connection_made(self, real_transport)

    loop = self._loop
    if sys.version_info >= (3, 12):
        # Eager start (3.12+) lets start() run synchronously up to its
        # first suspension point.
        task = asyncio.Task(self.start(), loop=loop, eager_start=True)
    else:
        task = loop.create_task(self.start())
    self._task_handler = task
|
| 323 |
+
|
| 324 |
+
    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Tear down per-connection state when the transport drops.

        :param exc: transport-level error, or None for a clean close.
        """
        # Already torn down (connection_lost may only run once usefully).
        if self._manager is None:
            return
        self._manager.connection_lost(self, exc)

        # Grab value before setting _manager to None.
        handler_cancellation = self._manager.handler_cancellation

        self.force_close()
        super().connection_lost(exc)
        # Drop references so the handler/parser objects can be collected.
        self._manager = None
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            # Make the in-flight request fail with a concrete exception.
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if handler_cancellation and self._task_handler is not None:
            self._task_handler.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
| 355 |
+
|
| 356 |
+
def set_parser(self, parser: Any) -> None:
|
| 357 |
+
# Actual type is WebReader
|
| 358 |
+
assert self._payload_parser is None
|
| 359 |
+
|
| 360 |
+
self._payload_parser = parser
|
| 361 |
+
|
| 362 |
+
if self._message_tail:
|
| 363 |
+
self._payload_parser.feed_data(self._message_tail)
|
| 364 |
+
self._message_tail = b""
|
| 365 |
+
|
| 366 |
+
    def eof_received(self) -> None:
        # EOF from the peer is deliberately ignored here; teardown is driven
        # by connection_lost() instead.
        pass
| 368 |
+
|
| 369 |
+
    def data_received(self, data: bytes) -> None:
        """Feed raw transport bytes into the HTTP state machine.

        Depending on connection state the bytes are parsed into request
        messages, buffered for a pending protocol upgrade, or forwarded to
        the active payload parser.
        """
        # Closing/closed connections ignore any further input.
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # Malformed request: synthesize a 400 message so the normal
                # handler pipeline produces the error response.
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            # Wake start() if it is parked waiting for the next request.
            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()
| 407 |
+
|
| 408 |
+
def keep_alive(self, val: bool) -> None:
|
| 409 |
+
"""Set keep-alive connection mode.
|
| 410 |
+
|
| 411 |
+
:param bool val: new state.
|
| 412 |
+
"""
|
| 413 |
+
self._keepalive = val
|
| 414 |
+
if self._keepalive_handle:
|
| 415 |
+
self._keepalive_handle.cancel()
|
| 416 |
+
self._keepalive_handle = None
|
| 417 |
+
|
| 418 |
+
def close(self) -> None:
|
| 419 |
+
"""Close connection.
|
| 420 |
+
|
| 421 |
+
Stop accepting new pipelining messages and close
|
| 422 |
+
connection when handlers done processing messages.
|
| 423 |
+
"""
|
| 424 |
+
self._close = True
|
| 425 |
+
if self._waiter:
|
| 426 |
+
self._waiter.cancel()
|
| 427 |
+
|
| 428 |
+
def force_close(self) -> None:
|
| 429 |
+
"""Forcefully close connection."""
|
| 430 |
+
self._force_close = True
|
| 431 |
+
if self._waiter:
|
| 432 |
+
self._waiter.cancel()
|
| 433 |
+
if self.transport is not None:
|
| 434 |
+
self.transport.close()
|
| 435 |
+
self.transport = None
|
| 436 |
+
|
| 437 |
+
    def log_access(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> None:
        # `time` is the loop timestamp when handling started; the logger
        # receives the elapsed duration. No-op when access logging is off.
        if self.access_logger is not None:
            self.access_logger.log(request, response, self._loop.time() - time)
| 442 |
+
|
| 443 |
+
    def log_debug(self, *args: Any, **kw: Any) -> None:
        # Debug messages are emitted only when the server runs in debug mode.
        if self.debug:
            self.logger.debug(*args, **kw)
| 446 |
+
|
| 447 |
+
    def log_exception(self, *args: Any, **kw: Any) -> None:
        # Always logged (with traceback), regardless of debug mode.
        self.logger.exception(*args, **kw)
| 449 |
+
|
| 450 |
+
def _process_keepalive(self) -> None:
|
| 451 |
+
self._keepalive_handle = None
|
| 452 |
+
if self._force_close or not self._keepalive:
|
| 453 |
+
return
|
| 454 |
+
|
| 455 |
+
loop = self._loop
|
| 456 |
+
now = loop.time()
|
| 457 |
+
close_time = self._next_keepalive_close_time
|
| 458 |
+
if now <= close_time:
|
| 459 |
+
# Keep alive close check fired too early, reschedule
|
| 460 |
+
self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
|
| 461 |
+
return
|
| 462 |
+
|
| 463 |
+
# handler in idle state
|
| 464 |
+
if self._waiter and not self._waiter.done():
|
| 465 |
+
self.force_close()
|
| 466 |
+
|
| 467 |
+
    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        """Run one request through the handler and finish the response.

        :param request: the parsed request object.
        :param start_time: loop timestamp when handling started (for logging).
        :param request_handler: coroutine producing the response.
        :return: the (possibly replacement) response and a reset flag that is
            True when the client disconnected before the response was written.
        """
        self._request_in_progress = True
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            # HTTP exceptions double as responses; finish them normally.
            resp = exc
            resp, reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            resp, reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            # Any other failure becomes a 500 error response.
            resp = self.handle_error(request, 500, exc)
            resp, reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            resp, reset = await self.finish_response(request, resp, start_time)
        finally:
            self._request_in_progress = False
            if self._handler_waiter is not None:
                # Unblock a shutdown() that is waiting on the in-flight request.
                self._handler_waiter.set_result(None)

        return resp, reset
| 509 |
+
|
| 510 |
+
    async def start(self) -> None:
        """Process incoming request.

        It reads request line, request headers and request payload, then
        calls handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. Connection is being closed always unless
        keep_alive(True) specified.
        """
        loop = self._loop
        handler = asyncio.current_task(loop)
        assert handler is not None
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            start = loop.time()

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                coro = self._handle_request(request, start, request_handler)
                if sys.version_info >= (3, 12):
                    # Eager start avoids an extra event-loop iteration (3.12+).
                    task = asyncio.Task(coro, loop=loop, eager_start=True)
                else:
                    task = loop.create_task(coro)
                try:
                    resp, reset = await task
                except ConnectionError:
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        try:
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()
                        except (asyncio.CancelledError, asyncio.TimeoutError):
                            # Re-raise only if an outer cancellation is in
                            # progress (3.11+ cancellation accounting).
                            if (
                                sys.version_info >= (3, 11)
                                and (t := asyncio.current_task())
                                and t.cancelling()
                            ):
                                raise

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                    payload.set_exception(_PAYLOAD_ACCESS_ERROR)

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                raise
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            finally:
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")
                elif not self._force_close:
                    if self._keepalive and not self._close:
                        # start keep-alive timer
                        if keepalive_timeout is not None:
                            now = loop.time()
                            close_time = now + keepalive_timeout
                            self._next_keepalive_close_time = close_time
                            if self._keepalive_handle is None:
                                self._keepalive_handle = loop.call_at(
                                    close_time, self._process_keepalive
                                )
                    else:
                        break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()
| 634 |
+
|
| 635 |
+
    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: float
    ) -> Tuple[StreamResponse, bool]:
        """Prepare the response and write_eof, then log access.

        This has to
        be called within the context of any exception so the access logger
        can get exception information. Returns True if the client disconnects
        prematurely.
        """
        request._finish()
        if self._request_parser is not None:
            # Leave upgrade mode and replay any bytes buffered during it.
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            if self._message_tail:
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            # Handler returned something that is not a response (or nothing);
            # substitute a 500 so the client still gets a valid reply.
            if resp is None:
                self.log_exception("Missing return statement on request handler")
            else:
                self.log_exception(
                    "Web-handler should return a response instance, "
                    "got {!r}".format(resp)
                )
            exc = HTTPInternalServerError()
            resp = Response(
                status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
            )
            prepare_meth = resp.prepare
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            # Client went away mid-write: log access and report the reset.
            self.log_access(request, resp, start_time)
            return resp, True

        self.log_access(request, resp, start_time)
        return resp, False
| 676 |
+
|
| 677 |
+
    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        self.log_exception("Error handling request", exc_info=exc)

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            # Tracebacks are exposed to the client only in debug mode.
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    # Escape the traceback before embedding it in HTML.
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        # Error responses never keep the connection alive.
        resp.force_close()

        return resp
| 727 |
+
|
| 728 |
+
def _make_error_handler(
|
| 729 |
+
self, err_info: _ErrInfo
|
| 730 |
+
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
|
| 731 |
+
async def handler(request: BaseRequest) -> StreamResponse:
|
| 732 |
+
return self.handle_error(
|
| 733 |
+
request, err_info.status, err_info.exc, err_info.message
|
| 734 |
+
)
|
| 735 |
+
|
| 736 |
+
return handler
|
parrot/lib/python3.10/site-packages/aiohttp/web_response.py
ADDED
|
@@ -0,0 +1,820 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections.abc
|
| 3 |
+
import datetime
|
| 4 |
+
import enum
|
| 5 |
+
import json
|
| 6 |
+
import math
|
| 7 |
+
import time
|
| 8 |
+
import warnings
|
| 9 |
+
from concurrent.futures import Executor
|
| 10 |
+
from http import HTTPStatus
|
| 11 |
+
from http.cookies import SimpleCookie
|
| 12 |
+
from typing import (
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterator,
|
| 17 |
+
MutableMapping,
|
| 18 |
+
Optional,
|
| 19 |
+
Union,
|
| 20 |
+
cast,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from multidict import CIMultiDict, istr
|
| 24 |
+
|
| 25 |
+
from . import hdrs, payload
|
| 26 |
+
from .abc import AbstractStreamWriter
|
| 27 |
+
from .compression_utils import ZLibCompressor
|
| 28 |
+
from .helpers import (
|
| 29 |
+
ETAG_ANY,
|
| 30 |
+
QUOTED_ETAG_RE,
|
| 31 |
+
ETag,
|
| 32 |
+
HeadersMixin,
|
| 33 |
+
must_be_empty_body,
|
| 34 |
+
parse_http_date,
|
| 35 |
+
rfc822_formatted_time,
|
| 36 |
+
sentinel,
|
| 37 |
+
should_remove_content_length,
|
| 38 |
+
validate_etag_value,
|
| 39 |
+
)
|
| 40 |
+
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
|
| 41 |
+
from .payload import Payload
|
| 42 |
+
from .typedefs import JSONEncoder, LooseHeaders
|
| 43 |
+
|
| 44 |
+
# Pre-computed map of status code -> standard reason phrase (e.g. 200 -> "OK").
REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}

__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
| 47 |
+
|
| 48 |
+
|
| 49 |
+
if TYPE_CHECKING:
    from .web_request import BaseRequest

    # Type checkers see the fully-parameterized mapping interface.
    BaseClass = MutableMapping[str, Any]
else:
    # At runtime the unsubscripted ABC is sufficient (and cheaper).
    BaseClass = collections.abc.MutableMapping
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# TODO(py311): Convert to StrEnum for wider use
|
| 58 |
+
class ContentCoding(enum.Enum):
    # The content codings that we have support for.
    #
    # Additional registered codings are listed at:
    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
    deflate = "deflate"
    gzip = "gzip"
    identity = "identity"  # i.e. no compression
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# Reverse lookup: header token (e.g. "gzip") -> ContentCoding member.
CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}
| 69 |
+
|
| 70 |
+
############################################################
|
| 71 |
+
# HTTP Response classes
|
| 72 |
+
############################################################
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class StreamResponse(BaseClass, HeadersMixin):
|
| 76 |
+
|
| 77 |
+
    # Presumably gates Content-Length bookkeeping during prepare(); streaming
    # subclasses appear to override it. TODO(review): confirm against prepare().
    _length_check = True

    # Response body: unset (None), raw bytes/bytearray, or a Payload object.
    _body: Union[None, bytes, bytearray, Payload]
| 80 |
+
|
| 81 |
+
    def __init__(
        self,
        *,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        """Create a streaming response.

        :param status: HTTP status code (default 200).
        :param reason: optional reason phrase; derived from status when None.
        :param headers: optional initial headers.
        """
        # Body / keep-alive / encoding state, filled in later by setters.
        self._body = None
        self._keep_alive: Optional[bool] = None
        self._chunked = False
        self._compression = False
        self._compression_force: Optional[ContentCoding] = None
        self._cookies = SimpleCookie()

        # Populated when the response is prepared against a concrete request.
        self._req: Optional[BaseRequest] = None
        self._payload_writer: Optional[AbstractStreamWriter] = None
        self._eof_sent = False
        self._must_be_empty_body: Optional[bool] = None
        self._body_length = 0
        self._state: Dict[str, Any] = {}

        if headers is not None:
            self._headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            self._headers = CIMultiDict()

        self._set_status(status, reason)
| 108 |
+
|
| 109 |
+
    @property
    def prepared(self) -> bool:
        # True once prepare() has produced a payload writer, or after EOF.
        return self._eof_sent or self._payload_writer is not None
| 112 |
+
|
| 113 |
+
@property
|
| 114 |
+
def task(self) -> "Optional[asyncio.Task[None]]":
|
| 115 |
+
if self._req:
|
| 116 |
+
return self._req.task
|
| 117 |
+
else:
|
| 118 |
+
return None
|
| 119 |
+
|
| 120 |
+
    @property
    def status(self) -> int:
        # HTTP status code set at construction or via set_status().
        return self._status
| 123 |
+
|
| 124 |
+
    @property
    def chunked(self) -> bool:
        # True when chunked transfer encoding was enabled.
        return self._chunked
| 127 |
+
|
| 128 |
+
    @property
    def compression(self) -> bool:
        # True when response compression was enabled.
        return self._compression
| 131 |
+
|
| 132 |
+
    @property
    def reason(self) -> str:
        # Reason phrase accompanying the status line.
        return self._reason
| 135 |
+
|
| 136 |
+
    def set_status(
        self,
        status: int,
        reason: Optional[str] = None,
    ) -> None:
        """Set the status code (and optionally the reason phrase).

        Must be called before the headers are sent.
        """
        assert (
            not self.prepared
        ), "Cannot change the response status code after the headers have been sent"
        self._set_status(status, reason)
| 145 |
+
|
| 146 |
+
def _set_status(self, status: int, reason: Optional[str]) -> None:
|
| 147 |
+
self._status = int(status)
|
| 148 |
+
if reason is None:
|
| 149 |
+
reason = REASON_PHRASES.get(self._status, "")
|
| 150 |
+
elif "\n" in reason:
|
| 151 |
+
raise ValueError("Reason cannot contain \\n")
|
| 152 |
+
self._reason = reason
|
| 153 |
+
|
| 154 |
+
    @property
    def keep_alive(self) -> Optional[bool]:
        # None = not decided yet; False after force_close().
        return self._keep_alive
| 157 |
+
|
| 158 |
+
    def force_close(self) -> None:
        # Disable keep-alive: the connection closes after this response.
        self._keep_alive = False
| 160 |
+
|
| 161 |
+
    @property
    def body_length(self) -> int:
        # Number of body bytes written so far.
        return self._body_length
| 164 |
+
|
| 165 |
+
    @property
    def output_length(self) -> int:
        # Deprecated accessor; exposes the payload writer's buffer size.
        warnings.warn("output_length is deprecated", DeprecationWarning)
        assert self._payload_writer
        return self._payload_writer.buffer_size
| 170 |
+
|
| 171 |
+
def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
|
| 172 |
+
"""Enables automatic chunked transfer encoding."""
|
| 173 |
+
self._chunked = True
|
| 174 |
+
|
| 175 |
+
if hdrs.CONTENT_LENGTH in self._headers:
|
| 176 |
+
raise RuntimeError(
|
| 177 |
+
"You can't enable chunked encoding when " "a content length is set"
|
| 178 |
+
)
|
| 179 |
+
if chunk_size is not None:
|
| 180 |
+
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
|
| 181 |
+
|
| 182 |
+
def enable_compression(
|
| 183 |
+
self, force: Optional[Union[bool, ContentCoding]] = None
|
| 184 |
+
) -> None:
|
| 185 |
+
"""Enables response compression encoding."""
|
| 186 |
+
# Backwards compatibility for when force was a bool <0.17.
|
| 187 |
+
if isinstance(force, bool):
|
| 188 |
+
force = ContentCoding.deflate if force else ContentCoding.identity
|
| 189 |
+
warnings.warn(
|
| 190 |
+
"Using boolean for force is deprecated #3318", DeprecationWarning
|
| 191 |
+
)
|
| 192 |
+
elif force is not None:
|
| 193 |
+
assert isinstance(force, ContentCoding), (
|
| 194 |
+
"force should one of " "None, bool or " "ContentEncoding"
|
| 195 |
+
)
|
| 196 |
+
|
| 197 |
+
self._compression = True
|
| 198 |
+
self._compression_force = force
|
| 199 |
+
|
| 200 |
+
    @property
    def headers(self) -> "CIMultiDict[str]":
        # Mutable, case-insensitive response headers.
        return self._headers
| 203 |
+
|
| 204 |
+
    @property
    def cookies(self) -> SimpleCookie:
        # Cookies to be sent with the response (Set-Cookie headers).
        return self._cookies
| 207 |
+
|
| 208 |
+
    def set_cookie(
        self,
        name: str,
        value: str,
        *,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        max_age: Optional[Union[int, str]] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        version: Optional[str] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == "":
            # deleted cookie
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c["expires"] = expires
        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
            # Clear the deletion sentinel set by del_cookie().
            del c["expires"]

        if domain is not None:
            c["domain"] = domain

        if max_age is not None:
            c["max-age"] = str(max_age)
        elif "max-age" in c:
            del c["max-age"]

        c["path"] = path

        if secure is not None:
            c["secure"] = secure
        if httponly is not None:
            c["httponly"] = httponly
        if version is not None:
            c["version"] = version
        if samesite is not None:
            c["samesite"] = samesite
| 258 |
+
|
| 259 |
+
    def del_cookie(
        self, name: str, *, domain: Optional[str] = None, path: str = "/"
    ) -> None:
        """Delete cookie.

        Creates new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        self._cookies.pop(name, None)
        # An empty value with the epoch expiry tells the client to discard it.
        self.set_cookie(
            name,
            "",
            max_age=0,
            expires="Thu, 01 Jan 1970 00:00:00 GMT",
            domain=domain,
            path=path,
        )
| 276 |
+
|
| 277 |
+
@property
|
| 278 |
+
def content_length(self) -> Optional[int]:
|
| 279 |
+
# Just a placeholder for adding setter
|
| 280 |
+
return super().content_length
|
| 281 |
+
|
| 282 |
+
@content_length.setter
|
| 283 |
+
def content_length(self, value: Optional[int]) -> None:
|
| 284 |
+
if value is not None:
|
| 285 |
+
value = int(value)
|
| 286 |
+
if self._chunked:
|
| 287 |
+
raise RuntimeError(
|
| 288 |
+
"You can't set content length when " "chunked encoding is enable"
|
| 289 |
+
)
|
| 290 |
+
self._headers[hdrs.CONTENT_LENGTH] = str(value)
|
| 291 |
+
else:
|
| 292 |
+
self._headers.pop(hdrs.CONTENT_LENGTH, None)
|
| 293 |
+
|
| 294 |
+
    @property
    def content_type(self) -> str:
        """Media type from the Content-Type header (without parameters)."""
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value: str) -> None:
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()
| 304 |
+
|
| 305 |
+
    @property
    def charset(self) -> Optional[str]:
        """Charset parameter of the Content-Type header, or None."""
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value: Optional[str]) -> None:
        ctype = self.content_type  # read header values if needed
        if ctype == "application/octet-stream":
            raise RuntimeError(
                "Setting charset for application/octet-stream "
                "doesn't make sense, setup content_type first"
            )
        assert self._content_dict is not None
        if value is None:
            self._content_dict.pop("charset", None)
        else:
            # Charset values are case-insensitive; normalize to lowercase.
            self._content_dict["charset"] = str(value).lower()
        self._generate_content_type_header()
| 324 |
+
|
| 325 |
+
@property
|
| 326 |
+
def last_modified(self) -> Optional[datetime.datetime]:
|
| 327 |
+
"""The value of Last-Modified HTTP header, or None.
|
| 328 |
+
|
| 329 |
+
This header is represented as a `datetime` object.
|
| 330 |
+
"""
|
| 331 |
+
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
|
| 332 |
+
|
| 333 |
+
@last_modified.setter
|
| 334 |
+
def last_modified(
|
| 335 |
+
self, value: Optional[Union[int, float, datetime.datetime, str]]
|
| 336 |
+
) -> None:
|
| 337 |
+
if value is None:
|
| 338 |
+
self._headers.pop(hdrs.LAST_MODIFIED, None)
|
| 339 |
+
elif isinstance(value, (int, float)):
|
| 340 |
+
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
|
| 341 |
+
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
|
| 342 |
+
)
|
| 343 |
+
elif isinstance(value, datetime.datetime):
|
| 344 |
+
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
|
| 345 |
+
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
|
| 346 |
+
)
|
| 347 |
+
elif isinstance(value, str):
|
| 348 |
+
self._headers[hdrs.LAST_MODIFIED] = value
|
| 349 |
+
|
| 350 |
+
@property
|
| 351 |
+
def etag(self) -> Optional[ETag]:
|
| 352 |
+
quoted_value = self._headers.get(hdrs.ETAG)
|
| 353 |
+
if not quoted_value:
|
| 354 |
+
return None
|
| 355 |
+
elif quoted_value == ETAG_ANY:
|
| 356 |
+
return ETag(value=ETAG_ANY)
|
| 357 |
+
match = QUOTED_ETAG_RE.fullmatch(quoted_value)
|
| 358 |
+
if not match:
|
| 359 |
+
return None
|
| 360 |
+
is_weak, value = match.group(1, 2)
|
| 361 |
+
return ETag(
|
| 362 |
+
is_weak=bool(is_weak),
|
| 363 |
+
value=value,
|
| 364 |
+
)
|
| 365 |
+
|
| 366 |
+
@etag.setter
|
| 367 |
+
def etag(self, value: Optional[Union[ETag, str]]) -> None:
|
| 368 |
+
if value is None:
|
| 369 |
+
self._headers.pop(hdrs.ETAG, None)
|
| 370 |
+
elif (isinstance(value, str) and value == ETAG_ANY) or (
|
| 371 |
+
isinstance(value, ETag) and value.value == ETAG_ANY
|
| 372 |
+
):
|
| 373 |
+
self._headers[hdrs.ETAG] = ETAG_ANY
|
| 374 |
+
elif isinstance(value, str):
|
| 375 |
+
validate_etag_value(value)
|
| 376 |
+
self._headers[hdrs.ETAG] = f'"{value}"'
|
| 377 |
+
elif isinstance(value, ETag) and isinstance(value.value, str):
|
| 378 |
+
validate_etag_value(value.value)
|
| 379 |
+
hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
|
| 380 |
+
self._headers[hdrs.ETAG] = hdr_value
|
| 381 |
+
else:
|
| 382 |
+
raise ValueError(
|
| 383 |
+
f"Unsupported etag type: {type(value)}. "
|
| 384 |
+
f"etag must be str, ETag or None"
|
| 385 |
+
)
|
| 386 |
+
|
| 387 |
+
def _generate_content_type_header(
|
| 388 |
+
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
|
| 389 |
+
) -> None:
|
| 390 |
+
assert self._content_dict is not None
|
| 391 |
+
assert self._content_type is not None
|
| 392 |
+
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
|
| 393 |
+
if params:
|
| 394 |
+
ctype = self._content_type + "; " + params
|
| 395 |
+
else:
|
| 396 |
+
ctype = self._content_type
|
| 397 |
+
self._headers[CONTENT_TYPE] = ctype
|
| 398 |
+
|
| 399 |
+
async def _do_start_compression(self, coding: ContentCoding) -> None:
|
| 400 |
+
if coding != ContentCoding.identity:
|
| 401 |
+
assert self._payload_writer is not None
|
| 402 |
+
self._headers[hdrs.CONTENT_ENCODING] = coding.value
|
| 403 |
+
self._payload_writer.enable_compression(coding.value)
|
| 404 |
+
# Compressed payload may have different content length,
|
| 405 |
+
# remove the header
|
| 406 |
+
self._headers.popall(hdrs.CONTENT_LENGTH, None)
|
| 407 |
+
|
| 408 |
+
async def _start_compression(self, request: "BaseRequest") -> None:
|
| 409 |
+
if self._compression_force:
|
| 410 |
+
await self._do_start_compression(self._compression_force)
|
| 411 |
+
else:
|
| 412 |
+
# Encoding comparisons should be case-insensitive
|
| 413 |
+
# https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
|
| 414 |
+
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
|
| 415 |
+
for value, coding in CONTENT_CODINGS.items():
|
| 416 |
+
if value in accept_encoding:
|
| 417 |
+
await self._do_start_compression(coding)
|
| 418 |
+
return
|
| 419 |
+
|
| 420 |
+
    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        """Send the response headers for *request*.

        Returns None when EOF has already been sent, the existing writer
        when the response was prepared earlier (idempotent), and otherwise
        starts the response and returns the new payload writer.
        """
        if self._eof_sent:
            return None
        if self._payload_writer is not None:
            # Already prepared; return the same writer.
            return self._payload_writer
        # Decide up front whether this method/status combination forbids
        # a body (e.g. HEAD, 1xx/204/304) — used by the header logic.
        self._must_be_empty_body = must_be_empty_body(request.method, self.status)
        return await self._start(request)
|
| 427 |
+
|
| 428 |
+
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
|
| 429 |
+
self._req = request
|
| 430 |
+
writer = self._payload_writer = request._payload_writer
|
| 431 |
+
|
| 432 |
+
await self._prepare_headers()
|
| 433 |
+
await request._prepare_hook(self)
|
| 434 |
+
await self._write_headers()
|
| 435 |
+
|
| 436 |
+
return writer
|
| 437 |
+
|
| 438 |
+
    async def _prepare_headers(self) -> None:
        """Finalize the response headers before they are written.

        Resolves keep-alive, serializes cookies, enables compression and
        chunking as negotiated, and fills in default Content-Type, Date,
        Server and Connection headers.
        """
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # If keep_alive was not set explicitly on the response,
        # inherit the request's preference.
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        # One Set-Cookie header per cookie; output(header="") yields
        # ": <value>", so [1:] strips the leading colon/space artifact
        # of the empty header name.
        for cookie in self._cookies.values():
            value = cookie.output(header="")[1:]
            headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            # Chunked transfer encoding only exists in HTTP/1.1.
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
                # Transfer-Encoding and Content-Length are mutually
                # exclusive; the former wins.
                if hdrs.CONTENT_LENGTH in headers:
                    del headers[hdrs.CONTENT_LENGTH]
        elif self._length_check:
            writer.length = self.content_length
            if writer.length is None:
                # Unknown length: chunk on HTTP/1.1, otherwise fall back
                # to closing the connection to delimit the body.
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
                    # NOTE(review): only the local flag is cleared here;
                    # self._keep_alive keeps its earlier value — confirm
                    # this asymmetry is intended.
                    keep_alive = False

        # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
        # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
        if self._must_be_empty_body:
            if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
                request.method, self.status
            ):
                del headers[hdrs.CONTENT_LENGTH]
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
            if hdrs.TRANSFER_ENCODING in headers:
                del headers[hdrs.TRANSFER_ENCODING]
        elif self.content_length != 0:
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header: only emitted when it differs from the
        # version's default (keep-alive is default in 1.1, close in 1.0).
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            else:
                if version == HttpVersion11:
                    headers[hdrs.CONNECTION] = "close"
|
| 504 |
+
|
| 505 |
+
async def _write_headers(self) -> None:
|
| 506 |
+
request = self._req
|
| 507 |
+
assert request is not None
|
| 508 |
+
writer = self._payload_writer
|
| 509 |
+
assert writer is not None
|
| 510 |
+
# status line
|
| 511 |
+
version = request.version
|
| 512 |
+
status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"
|
| 513 |
+
await writer.write_headers(status_line, self._headers)
|
| 514 |
+
|
| 515 |
+
    async def write(self, data: bytes) -> None:
        """Send *data* as part of the response body.

        Raises:
            RuntimeError: if called after write_eof() or before prepare().
        """
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            raise RuntimeError("Cannot call write() after write_eof()")
        if self._payload_writer is None:
            raise RuntimeError("Cannot call write() before prepare()")

        await self._payload_writer.write(data)
|
| 526 |
+
|
| 527 |
+
    async def drain(self) -> None:
        """Deprecated: flush the payload writer's buffer.

        ``await resp.write()`` already applies backpressure; use that
        instead of calling drain() separately.
        """
        assert not self._eof_sent, "EOF has already been sent"
        assert self._payload_writer is not None, "Response has not been started"
        warnings.warn(
            "drain method is deprecated, use await resp.write()",
            DeprecationWarning,
            stacklevel=2,
        )
        await self._payload_writer.drain()
|
| 536 |
+
|
| 537 |
+
    async def write_eof(self, data: bytes = b"") -> None:
        """Finish the response body, optionally sending a final chunk.

        Safe to call multiple times: subsequent calls are no-ops once
        EOF has been sent.
        """
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            return

        assert self._payload_writer is not None, "Response has not been started"

        await self._payload_writer.write_eof(data)
        self._eof_sent = True
        # Drop the request/writer references so they can be collected;
        # record the final body size first.
        self._req = None
        self._body_length = self._payload_writer.output_size
        self._payload_writer = None
|
| 552 |
+
|
| 553 |
+
def __repr__(self) -> str:
|
| 554 |
+
if self._eof_sent:
|
| 555 |
+
info = "eof"
|
| 556 |
+
elif self.prepared:
|
| 557 |
+
assert self._req is not None
|
| 558 |
+
info = f"{self._req.method} {self._req.path} "
|
| 559 |
+
else:
|
| 560 |
+
info = "not prepared"
|
| 561 |
+
return f"<{self.__class__.__name__} {self.reason} {info}>"
|
| 562 |
+
|
| 563 |
+
    def __getitem__(self, key: str) -> Any:
        # Mapping-style access to the per-response state dict.
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    def __hash__(self) -> int:
        # Identity-based hash: responses are mutable objects, so
        # equality (below) and hashing both use object identity.
        return hash(id(self))

    def __eq__(self, other: object) -> bool:
        return self is other
|
| 583 |
+
|
| 584 |
+
|
| 585 |
+
class Response(StreamResponse):
    """A StreamResponse whose body is held in memory.

    The body may be given as raw bytes, as text (encoded using the
    charset), or as a registered Payload object.
    """

    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        # body and text are mutually exclusive ways to supply the payload.
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        # Normalize headers to a CIMultiDict without copying when possible.
        if headers is None:
            real_headers: CIMultiDict[str] = CIMultiDict()
        elif not isinstance(headers, CIMultiDict):
            real_headers = CIMultiDict(headers)
        else:
            real_headers = headers  # = cast('CIMultiDict[str]', headers)

        # charset must be passed via the dedicated parameter, not embedded.
        if content_type is not None and "charset" in content_type:
            raise ValueError("charset must not be in content_type " "argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                # An explicit Content-Type header and the content_type/
                # charset parameters are mutually exclusive.
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                # Pre-encode here and clear text so the cheaper body
                # path below is taken.
                body = text.encode(charset)
                text = None
        else:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type is not None or charset is not None:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                if content_type is not None:
                    if charset is not None:
                        content_type += "; charset=" + charset
                    real_headers[hdrs.CONTENT_TYPE] = content_type

        super().__init__(status=status, reason=reason, headers=real_headers)

        if text is not None:
            self.text = text
        else:
            self.body = body

        # _compressed_body caches the zlib-compressed body, if any.
        self._compressed_body: Optional[bytes] = None
        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        """The response body: bytes, a Payload, or None."""
        return self._body

    @body.setter
    def body(self, body: Any) -> None:
        """Replace the body; non-bytes values go through the payload registry."""
        if body is None:
            self._body = None
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
        else:
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            headers = self._headers

            # set content-type
            if hdrs.CONTENT_TYPE not in headers:
                headers[hdrs.CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for key, value in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        # Any cached compressed form is stale once the body changes.
        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        """The body decoded using the response charset (utf-8 default)."""
        if self._body is None:
            return None
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        """Replace the body with encoded *text*, defaulting to text/plain utf-8."""
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        self._body = text.encode(self.charset)
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        """Effective body length, accounting for chunking and compression."""
        if self._chunked:
            # Chunked responses have no declared length.
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return super().content_length

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif isinstance(self._body, Payload):
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        # Unlike StreamResponse, the length is derived from the body.
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        """Write the (possibly compressed) body and finish the response."""
        if self._eof_sent:
            return
        # Prefer the compressed form when one has been produced.
        if self._compressed_body is None:
            body: Optional[Union[bytes, Payload]] = self._body
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is not None:
            if self._must_be_empty_body:
                # Headers only (e.g. HEAD/304): drop the body.
                await super().write_eof()
            elif isinstance(self._body, Payload):
                await self._body.write(self._payload_writer)
                await super().write_eof()
            else:
                await super().write_eof(cast(bytes, body))
        else:
            await super().write_eof()

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        """Fill in Content-Length from the in-memory body, then start."""
        if hdrs.CONTENT_LENGTH in self._headers:
            if should_remove_content_length(request.method, self.status):
                del self._headers[hdrs.CONTENT_LENGTH]
        elif not self._chunked:
            if isinstance(self._body, Payload):
                if self._body.size is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size)
            else:
                body_len = len(self._body) if self._body else "0"
                # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
                if body_len != "0" or (
                    self.status != 304 and request.method.upper() != hdrs.METH_HEAD
                ):
                    self._headers[hdrs.CONTENT_LENGTH] = str(body_len)

        return await super()._start(request)

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        """Compress the whole in-memory body eagerly when possible."""
        if self._chunked or isinstance(self._body, Payload):
            # Streaming cases: fall back to on-the-fly compression.
            return await super()._do_start_compression(coding)

        if coding != ContentCoding.identity:
            # Instead of using _payload_writer.enable_compression,
            # compress the whole body
            compressor = ZLibCompressor(
                encoding=str(coding.value),
                max_sync_chunk_size=self._zlib_executor_size,
                executor=self._zlib_executor,
            )
            assert self._body is not None
            if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
                warnings.warn(
                    "Synchronous compression of large response bodies "
                    f"({len(self._body)} bytes) might block the async event loop. "
                    "Consider providing a custom value to zlib_executor_size/"
                    "zlib_executor response properties or disabling compression on it."
                )
            self._compressed_body = (
                await compressor.compress(self._body) + compressor.flush()
            )
            assert self._compressed_body is not None

            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Build a Response whose body is *data* serialized via *dumps*.

    Exactly one of data, text or body may be supplied.
    """
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        # The raise above exits, so serialization needs no else-branch.
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
|
parrot/lib/python3.10/site-packages/aiohttp/worker.py
ADDED
|
@@ -0,0 +1,247 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Async gunicorn worker for aiohttp.web"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
from types import FrameType
|
| 9 |
+
from typing import Any, Awaitable, Callable, Optional, Union # noqa
|
| 10 |
+
|
| 11 |
+
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
|
| 12 |
+
from gunicorn.workers import base
|
| 13 |
+
|
| 14 |
+
from aiohttp import web
|
| 15 |
+
|
| 16 |
+
from .helpers import set_result
|
| 17 |
+
from .web_app import Application
|
| 18 |
+
from .web_log import AccessLogger
|
| 19 |
+
|
| 20 |
+
try:
|
| 21 |
+
import ssl
|
| 22 |
+
|
| 23 |
+
SSLContext = ssl.SSLContext
|
| 24 |
+
except ImportError: # pragma: no cover
|
| 25 |
+
ssl = None # type: ignore[assignment]
|
| 26 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
    """Gunicorn worker that serves an aiohttp Application on asyncio.

    The worker creates its own event loop after fork, runs the app via
    an AppRunner, and polls liveness/max-request limits once per second.
    """

    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default

    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
        super().__init__(*args, **kw)

        self._task: Optional[asyncio.Task[None]] = None  # main serving task
        self.exit_code = 0  # set by signal handlers, used by run()
        self._notify_waiter: Optional[asyncio.Future[bool]] = None

    def init_process(self) -> None:
        """Replace the inherited event loop with a fresh one after fork."""
        # create new event_loop after fork
        asyncio.get_event_loop().close()

        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        super().init_process()

    def run(self) -> None:
        """Run the serve coroutine to completion, then exit the process."""
        self._task = self.loop.create_task(self._run())

        try:  # ignore all finalization problems
            self.loop.run_until_complete(self._task)
        except Exception:
            self.log.exception("Exception in gunicorn worker")
        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
        self.loop.close()

        sys.exit(self.exit_code)

    async def _run(self) -> None:
        """Set up the AppRunner and sockets, then serve until shutdown.

        self.wsgi may be an Application, or an async factory returning
        either an Application or a preconfigured AppRunner.
        """
        runner = None
        if isinstance(self.wsgi, Application):
            app = self.wsgi
        elif asyncio.iscoroutinefunction(self.wsgi):
            wsgi = await self.wsgi()
            if isinstance(wsgi, web.AppRunner):
                runner = wsgi
                app = runner.app
            else:
                app = wsgi
        else:
            raise RuntimeError(
                "wsgi app should be either Application or "
                "async function returning Application, got {}".format(self.wsgi)
            )

        if runner is None:
            access_log = self.log.access_log if self.cfg.accesslog else None
            runner = web.AppRunner(
                app,
                logger=self.log,
                keepalive_timeout=self.cfg.keepalive,
                access_log=access_log,
                access_log_format=self._get_valid_log_format(
                    self.cfg.access_log_format
                ),
                # leave a 5% safety margin of the graceful timeout
                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
            )
        await runner.setup()

        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None

        # (removed a dead no-op `runner = runner` self-assignment here)
        assert runner is not None
        server = runner.server
        assert server is not None
        for sock in self.sockets:
            site = web.SockSite(
                runner,
                sock,
                ssl_context=ctx,
            )
            await site.start()

        # If our parent changed then we shut down.
        pid = os.getpid()
        try:
            while self.alive:  # type: ignore[has-type]
                self.notify()

                cnt = server.requests_count
                if self.max_requests and cnt > self.max_requests:
                    self.alive = False
                    self.log.info("Max requests, shutting down: %s", self)

                elif pid == os.getpid() and self.ppid != os.getppid():
                    self.alive = False
                    self.log.info("Parent changed, shutting down: %s", self)
                else:
                    await self._wait_next_notify()
        except BaseException:
            # Deliberate best-effort: any error ends the serve loop and
            # falls through to cleanup below.
            pass

        await runner.cleanup()

    def _wait_next_notify(self) -> "asyncio.Future[bool]":
        """Return a future resolved by a notification or a 1-second timer."""
        self._notify_waiter_done()

        loop = self.loop
        assert loop is not None
        self._notify_waiter = waiter = loop.create_future()
        self.loop.call_later(1.0, self._notify_waiter_done, waiter)

        return waiter

    def _notify_waiter_done(
        self, waiter: Optional["asyncio.Future[bool]"] = None
    ) -> None:
        """Resolve *waiter* (default: the current one) at most once."""
        if waiter is None:
            waiter = self._notify_waiter
        if waiter is not None:
            set_result(waiter, True)

        if waiter is self._notify_waiter:
            self._notify_waiter = None

    def init_signals(self) -> None:
        """Install signal handlers through the event loop API."""
        self.loop.add_signal_handler(
            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
        )

        self.loop.add_signal_handler(
            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
        )

        self.loop.add_signal_handler(
            signal.SIGINT, self.handle_quit, signal.SIGINT, None
        )

        self.loop.add_signal_handler(
            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
        )

        self.loop.add_signal_handler(
            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
        )

        self.loop.add_signal_handler(
            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
        )

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)
        # Reset signals so Gunicorn doesn't swallow subprocess return codes
        # See: https://github.com/aio-libs/aiohttp/issues/6130

    def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
        """Graceful shutdown: stop the serve loop and wake it up."""
        self.alive = False

        # worker_int callback
        self.cfg.worker_int(self)

        # wakeup closing process
        self._notify_waiter_done()

    def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
        """Forced shutdown: run the abort callback and exit immediately."""
        self.alive = False
        self.exit_code = 1
        self.cfg.worker_abort(self)
        sys.exit(1)

    @staticmethod
    def _create_ssl_context(cfg: Any) -> "SSLContext":
        """Creates SSLContext instance for usage in asyncio.create_server.

        See ssl.SSLSocket.__init__ for more details.
        """
        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")

        ctx = ssl.SSLContext(cfg.ssl_version)
        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
        ctx.verify_mode = cfg.cert_reqs
        if cfg.ca_certs:
            ctx.load_verify_locations(cfg.ca_certs)
        if cfg.ciphers:
            ctx.set_ciphers(cfg.ciphers)
        return ctx

    def _get_valid_log_format(self, source_format: str) -> str:
        """Translate gunicorn's default log format; reject %()-style formats."""
        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
            return self.DEFAULT_AIOHTTP_LOG_FORMAT
        elif re.search(r"%\([^\)]+\)", source_format):
            raise ValueError(
                "Gunicorn's style options in form of `%(name)s` are not "
                "supported for the log formatting. Please use aiohttp's "
                "format specification to configure access log formatting: "
                "http://docs.aiohttp.org/en/stable/logging.html"
                "#format-specification"
            )
        else:
            return source_format
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
class GunicornUVLoopWebWorker(GunicornWebWorker):
    """Variant of GunicornWebWorker that installs the uvloop event loop policy."""

    def init_process(self) -> None:
        import uvloop

        # Close any existing event loop before setting a
        # new policy.
        asyncio.get_event_loop().close()

        # Setup uvloop policy, so that every
        # asyncio.get_event_loop() will create an instance
        # of uvloop event loop.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        super().init_process()
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (188 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/__pycache__/colon_fence.cpython-310.pyc
ADDED
|
Binary file (2.12 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/__pycache__/substitution.cpython-310.pyc
ADDED
|
Binary file (2.46 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/LICENSE
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2015 Vitaly Puzrin, Alex Kocharin.
|
| 2 |
+
Copyright (c) 2018 jebbs
|
| 3 |
+
Copyright (c) 2021- commenthol
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person
|
| 6 |
+
obtaining a copy of this software and associated documentation
|
| 7 |
+
files (the "Software"), to deal in the Software without
|
| 8 |
+
restriction, including without limitation the rights to use,
|
| 9 |
+
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 10 |
+
copies of the Software, and to permit persons to whom the
|
| 11 |
+
Software is furnished to do so, subject to the following
|
| 12 |
+
conditions:
|
| 13 |
+
|
| 14 |
+
The above copyright notice and this permission notice shall be
|
| 15 |
+
included in all copies or substantial portions of the Software.
|
| 16 |
+
|
| 17 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 18 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
| 19 |
+
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 20 |
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
| 21 |
+
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
| 22 |
+
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
| 23 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 24 |
+
OTHER DEALINGS IN THE SOFTWARE.
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
from .index import admon_plugin # noqa: F401
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (215 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/__pycache__/index.cpython-310.pyc
ADDED
|
Binary file (3.45 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/index.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Process admonitions and pass to cb.
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
from typing import Callable, Optional, Tuple
|
| 5 |
+
|
| 6 |
+
from markdown_it import MarkdownIt
|
| 7 |
+
from markdown_it.rules_block import StateBlock
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def get_tag(params: str) -> Tuple[str, str]:
    """Split an admonition parameter string into ``(tag, title)``.

    The first word is the tag (returned lower-cased).  The remainder is
    the title; when absent, the title-cased tag is used, and an explicit
    ``""`` yields an empty title.
    """
    stripped = params.strip()
    if not stripped:
        return "", ""

    tag, _, rest = stripped.partition(" ")
    if not rest:
        # No explicit title: derive one from the tag itself.
        title = tag.title()
    elif rest == '""':
        # An explicit empty title suppresses the title entirely.
        title = ""
    else:
        title = rest
    return tag.lower(), title
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def validate(params: str) -> bool:
    """Return ``True`` if the opening line carries any admonition text."""
    remainder = params.strip().split(" ", 1)[-1] or ""
    return bool(remainder)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# An admonition opening fence is at least MIN_MARKERS repetitions of
# MARKER_STR, e.g. ``!!! note``.
MIN_MARKERS = 3
MARKER_STR = "!"
MARKER_CHAR = ord(MARKER_STR)  # code point compared against a line's first char
MARKER_LEN = len(MARKER_STR)
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def admonition(state: StateBlock, startLine: int, endLine: int, silent: bool) -> bool:
    """Block rule: parse a ``!!! tag title`` admonition and tokenize its body.

    :param state: block parser state (mutated: tokens are pushed, indent and
        line bookkeeping temporarily adjusted while the body is tokenized)
    :param startLine: index of the candidate opening line
    :param endLine: last line available to this rule
    :param silent: validation-only mode; no tokens are produced
    :returns: True if an admonition was recognised (and, unless silent, consumed)
    """
    start = state.bMarks[startLine] + state.tShift[startLine]
    maximum = state.eMarks[startLine]

    # Check out the first character quickly, which should filter out most of non-containers
    if MARKER_CHAR != ord(state.src[start]):
        return False

    # Check out the rest of the marker string
    # NOTE(review): this reads state.src[pos] at pos == maximum; it relies on
    # src having a character (e.g. a newline) at that index -- confirm.
    pos = start + 1
    while pos <= maximum and MARKER_STR[(pos - start) % MARKER_LEN] == state.src[pos]:
        pos += 1

    marker_count = math.floor((pos - start) / MARKER_LEN)
    if marker_count < MIN_MARKERS:
        return False
    # Trim any trailing partial marker so the markup is a whole number of markers.
    marker_pos = pos - ((pos - start) % MARKER_LEN)
    params = state.src[marker_pos:maximum]  # everything after the fence on this line
    markup = state.src[start:marker_pos]    # the literal "!!!..." fence

    if not validate(params):
        return False

    # Since start is found, we can report success here in validation mode
    if silent:
        return True

    # Save parser state that is temporarily overridden for the nested body.
    old_parent = state.parentType
    old_line_max = state.lineMax
    old_indent = state.blkIndent

    # Skip spaces after the fence; the body's indent is measured from here.
    blk_start = pos
    while blk_start < maximum and state.src[blk_start] == " ":
        blk_start += 1

    state.parentType = "admonition"
    state.blkIndent += blk_start - start

    was_empty = False

    # Search for the end of the block
    next_line = startLine
    while True:
        next_line += 1
        if next_line >= endLine:
            # unclosed block should be autoclosed by end of document.
            # also block seems to be autoclosed by end of parent
            break
        pos = state.bMarks[next_line] + state.tShift[next_line]
        maximum = state.eMarks[next_line]
        # A line indented less than the body indent counts as "empty" here.
        is_empty = state.sCount[next_line] < state.blkIndent

        # two consecutive empty lines autoclose the block
        if is_empty and was_empty:
            break
        was_empty = is_empty

        if pos < maximum and state.sCount[next_line] < state.blkIndent:
            # non-empty line with negative indent should stop the block:
            # - !!!
            #  test
            break

    # this will prevent lazy continuations from ever going past our end marker
    state.lineMax = next_line

    tag, title = get_tag(params)

    token = state.push("admonition_open", "div", 1)
    token.markup = markup
    token.block = True
    token.attrs = {"class": f"admonition {tag}"}
    token.meta = {"tag": tag}
    token.content = title
    token.info = params
    token.map = [startLine, next_line]

    # Emit a title paragraph only when a (possibly derived) title exists.
    if title:
        title_markup = f"{markup} {tag}"
        token = state.push("admonition_title_open", "p", 1)
        token.markup = title_markup
        token.attrs = {"class": "admonition-title"}
        token.map = [startLine, startLine + 1]

        token = state.push("inline", "", 0)
        token.content = title
        token.map = [startLine, startLine + 1]
        token.children = []

        token = state.push("admonition_title_close", "p", -1)
        token.markup = title_markup

    # Tokenize the body lines as regular block-level markdown.
    state.md.block.tokenize(state, startLine + 1, next_line)

    token = state.push("admonition_close", "div", -1)
    token.markup = state.src[start:pos]
    token.block = True

    # Restore the saved parser state and advance past the consumed lines.
    state.parentType = old_parent
    state.lineMax = old_line_max
    state.blkIndent = old_indent
    state.line = next_line

    return True
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def admon_plugin(md: MarkdownIt, render: Optional[Callable] = None) -> None:
    """Plugin to use
    `python-markdown style admonitions
    <https://python-markdown.github.io/extensions/admonition>`_.

    .. code-block:: md

        !!! note
            *content*

    Note, this is ported from
    `markdown-it-admon
    <https://github.com/commenthol/markdown-it-admon>`_.
    """

    def renderDefault(self, tokens, idx, _options, env):
        # Fall back to the renderer's generic token handling.
        return self.renderToken(tokens, idx, _options, env)

    renderer = render or renderDefault

    # All four admonition token types share the same renderer.
    for rule_name in (
        "admonition_open",
        "admonition_close",
        "admonition_title_open",
        "admonition_title_close",
    ):
        md.add_render_rule(rule_name, renderer)

    md.block.ruler.before(
        "fence",
        "admonition",
        admonition,
        {"alt": ["paragraph", "reference", "blockquote", "list"]},
    )
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/admon/port.yaml
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
- package: markdown-it-admon
|
| 2 |
+
commit: 9820ba89415c464a3cc18a780f222a0ceb3e18bd
|
| 3 |
+
date: Jul 3, 2021
|
| 4 |
+
version: 1.0.0
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/anchors/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (219 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/anchors/__pycache__/index.cpython-310.pyc
ADDED
|
Binary file (3.69 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/anchors/index.py
ADDED
|
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from typing import Callable, List, Optional, Set
|
| 3 |
+
|
| 4 |
+
from markdown_it import MarkdownIt
|
| 5 |
+
from markdown_it.rules_core import StateCore
|
| 6 |
+
from markdown_it.token import Token
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
def anchors_plugin(
    md: MarkdownIt,
    min_level: int = 1,
    max_level: int = 2,
    slug_func: Optional[Callable[[str], str]] = None,
    permalink: bool = False,
    permalinkSymbol: str = "¶",
    permalinkBefore: bool = False,
    permalinkSpace: bool = True,
):
    """Plugin for adding header anchors, based on
    `markdown-it-anchor <https://github.com/valeriangalliat/markdown-it-anchor>`__

    .. code-block:: md

        # Title String

    renders as:

    .. code-block:: html

        <h1 id="title-string">Title String <a class="header-anchor" href="#title-string">¶</a></h1>

    :param min_level: minimum header level to apply anchors
    :param max_level: maximum header level to apply anchors
    :param slug_func: function to convert title text to id slug.
    :param permalink: Add a permalink next to the title
    :param permalinkSymbol: the symbol to show
    :param permalinkBefore: Add the permalink before the title, otherwise after
    :param permalinkSpace: Add a space between the permalink and the title

    Note, the default slug function aims to mimic the GitHub Markdown format, see:

    - https://github.com/jch/html-pipeline/blob/master/lib/html/pipeline/toc_filter.rb
    - https://gist.github.com/asabaylus/3071099

    """
    # Heading levels (h<min>..h<max>) the anchor rule should act on.
    levels = list(range(min_level, max_level + 1))
    anchor_rule = _make_anchors_func(
        levels,
        slug_func or slugify,
        permalink,
        permalinkSymbol,
        permalinkBefore,
        permalinkSpace,
    )
    md.core.ruler.push("anchor", anchor_rule)
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def _make_anchors_func(
    selected_levels: List[int],
    slug_func: Callable[[str], str],
    permalink: bool,
    permalinkSymbol: str,
    permalinkBefore: bool,
    permalinkSpace: bool,
):
    """Build the core rule that slugs selected headings and adds permalinks.

    Returns a closure over the configuration; the closure mutates
    ``state.tokens`` in place.
    """

    def _anchor_func(state: StateCore):
        # Slugs already assigned in this document, to keep ids unique.
        slugs: Set[str] = set()
        for (idx, token) in enumerate(state.tokens):
            if token.type != "heading_open":
                continue
            # Heading level is the digit in the tag name, e.g. "h2" -> 2.
            level = int(token.tag[1])
            if level not in selected_levels:
                continue
            # The inline token holding the heading text follows heading_open.
            inline_token = state.tokens[idx + 1]
            assert inline_token.children is not None
            # Title text is built from text and inline-code children only.
            title = "".join(
                child.content
                for child in inline_token.children
                if child.type in ["text", "code_inline"]
            )
            slug = unique_slug(slug_func(title), slugs)
            token.attrSet("id", slug)

            if permalink:
                link_open = Token(
                    "link_open",
                    "a",
                    1,
                )
                link_open.attrSet("class", "header-anchor")
                link_open.attrSet("href", f"#{slug}")
                link_tokens = [
                    link_open,
                    Token("html_block", "", 0, content=permalinkSymbol),
                    Token("link_close", "a", -1),
                ]
                # Splice the permalink before or after the heading text,
                # optionally separated by a single space.
                if permalinkBefore:
                    inline_token.children = (
                        link_tokens
                        + (
                            [Token("text", "", 0, content=" ")]
                            if permalinkSpace
                            else []
                        )
                        + inline_token.children
                    )
                else:
                    inline_token.children.extend(
                        ([Token("text", "", 0, content=" ")] if permalinkSpace else [])
                        + link_tokens
                    )

    return _anchor_func
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
def slugify(title: str):
    """Convert a heading title to a GitHub-style id slug.

    Lower-cases, turns spaces into hyphens, then drops every character
    that is not a word character, a CJK ideograph, a hyphen, or a space.
    """
    normalized = title.strip().lower().replace(" ", "-")
    return re.sub(r"[^\w\u4e00-\u9fff\- ]", "", normalized)
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def unique_slug(slug: str, slugs: set):
    """Return *slug*, de-duplicated against *slugs* with a ``-N`` suffix.

    The chosen slug is recorded in *slugs* (mutated in place).
    """
    candidate = slug
    counter = 1
    while candidate in slugs:
        candidate = f"{slug}-{counter}"
        counter += 1
    slugs.add(candidate)
    return candidate
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/attrs/parse.py
ADDED
|
@@ -0,0 +1,265 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Parser for attributes::
|
| 2 |
+
|
| 3 |
+
attributes { id = "foo", class = "bar baz",
|
| 4 |
+
key1 = "val1", key2 = "val2" }
|
| 5 |
+
|
| 6 |
+
Adapted from:
|
| 7 |
+
https://github.com/jgm/djot/blob/fae7364b86bfce69bc6d5b5eede1f5196d845fd6/djot/attributes.lua#L1
|
| 8 |
+
|
| 9 |
+
syntax:
|
| 10 |
+
|
| 11 |
+
attributes <- '{' whitespace* attribute (whitespace attribute)* whitespace* '}'
|
| 12 |
+
attribute <- identifier | class | keyval
|
| 13 |
+
identifier <- '#' name
|
| 14 |
+
class <- '.' name
|
| 15 |
+
name <- (nonspace, nonpunctuation other than ':', '_', '-')+
|
| 16 |
+
keyval <- key '=' val
|
| 17 |
+
key <- (ASCII_ALPHANUM | ':' | '_' | '-')+
|
| 18 |
+
val <- bareval | quotedval
|
| 19 |
+
bareval <- (ASCII_ALPHANUM | ':' | '_' | '-')+
|
| 20 |
+
quotedval <- '"' ([^"] | '\"') '"'
|
| 21 |
+
"""
|
| 22 |
+
from __future__ import annotations
|
| 23 |
+
|
| 24 |
+
from enum import Enum
|
| 25 |
+
import re
|
| 26 |
+
from typing import Callable
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class State(Enum):
    """States of the attribute-string finite-state machine (see HANDLERS)."""

    START = 0
    SCANNING = 1
    SCANNING_ID = 2
    SCANNING_CLASS = 3
    SCANNING_KEY = 4
    SCANNING_VALUE = 5
    SCANNING_BARE_VALUE = 6
    SCANNING_QUOTED_VALUE = 7
    SCANNING_COMMENT = 8
    SCANNING_ESCAPED = 9
    DONE = 10
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
# Any single whitespace character.
REGEX_SPACE = re.compile(r"\s")
# Whitespace or punctuation (excluding ':', '_', '-'): terminates a name.
REGEX_SPACE_PUNCTUATION = re.compile(r"[\s!\"#$%&'()*+,./;<=>?@[\]^`{|}~]")
# Characters permitted in keys and bare values.
REGEX_KEY_CHARACTERS = re.compile(r"[a-zA-Z\d_:-]")
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TokenState:
    """Accumulates ``(start, end, type)`` spans emitted by the scanner.

    ``start`` marks where the token currently being scanned began in the
    input string; ``compile`` turns the collected spans into an attribute
    dictionary.
    """

    def __init__(self):
        # Collected (start, end, ttype) spans, in scan order.
        self._tokens: list = []
        self.start: int = 0

    def set_start(self, start: int) -> None:
        # Remember where the token now being scanned begins.
        self.start = start

    def append(self, start: int, end: int, ttype: str):
        self._tokens.append((start, end, ttype))

    def compile(self, string: str) -> dict[str, str]:
        """compile the tokens into a dictionary"""
        attributes = {}
        classes = []
        idx = 0
        while idx < len(self._tokens):
            start, end, ttype = self._tokens[idx]
            if ttype == "id":
                attributes["id"] = string[start:end]
            elif ttype == "class":
                classes.append(string[start:end])
            elif ttype == "key":
                key = string[start:end]
                # A key consumes the immediately following "value" token.
                if idx + 1 < len(self._tokens):
                    start, end, ttype = self._tokens[idx + 1]
                    if ttype == "value":
                        # ``class=...`` merges into the class list instead
                        # of overwriting other classes.
                        if key == "class":
                            classes.append(string[start:end])
                        else:
                            attributes[key] = string[start:end]
                        idx += 1  # skip the value token we just consumed
            idx += 1
        if classes:
            attributes["class"] = " ".join(classes)
        return attributes

    def __str__(self) -> str:
        return str(self._tokens)

    def __repr__(self) -> str:
        return repr(self._tokens)
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class ParseError(Exception):
    """Raised when the attribute string is malformed.

    The offending input offset is available as ``pos``.
    """

    def __init__(self, msg: str, pos: int) -> None:
        self.pos = pos
        super().__init__(f"{msg} at position {pos}")
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
def parse(string: str) -> tuple[int, dict[str, str]]:
    """Parse attributes from start of string.

    Drives the state machine in ``HANDLERS`` one character at a time.

    :raises ParseError: propagated from a handler on malformed input
    :returns: (length of parsed string, dict of attributes)

    NOTE(review): when the closing ``}`` is found, the returned position is
    the index of the ``}`` itself, not the count of consumed characters --
    confirm against callers.
    """
    pos = 0
    state: State = State.START
    tokens = TokenState()
    while pos < len(string):
        state = HANDLERS[state](string[pos], pos, tokens)
        if state == State.DONE:
            return pos, tokens.compile(string)
        pos = pos + 1

    # Input exhausted without reaching DONE: compile whatever was collected.
    return pos, tokens.compile(string)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def handle_start(char: str, pos: int, tokens: TokenState) -> State:
    """Initial state: require the opening ``{``."""
    if char != "{":
        raise ParseError("Attributes must start with '{'", pos)
    return State.SCANNING
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def handle_scanning(char: str, pos: int, tokens: TokenState) -> State:
    """Between attributes: dispatch on the first character of the next one."""
    if char in (" ", "\t", "\n", "\r"):
        return State.SCANNING
    if char == "}":
        return State.DONE

    # Sigil characters open a specific sub-scanner.
    sigils = {
        "#": State.SCANNING_ID,
        "%": State.SCANNING_COMMENT,
        ".": State.SCANNING_CLASS,
    }
    if char in sigils:
        tokens.set_start(pos)
        return sigils[char]

    if REGEX_KEY_CHARACTERS.fullmatch(char):
        tokens.set_start(pos)
        return State.SCANNING_KEY

    raise ParseError(f"Unexpected character whilst scanning: {char}", pos)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def handle_scanning_comment(char: str, pos: int, tokens: TokenState) -> State:
    """Inside a ``%...%`` comment: skip everything until the closing ``%``."""
    return State.SCANNING if char == "%" else State.SCANNING_COMMENT
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def handle_scanning_id(char: str, pos: int, tokens: TokenState) -> State:
    """Inside a ``#identifier``: emit an "id" span when it terminates."""
    if not REGEX_SPACE_PUNCTUATION.fullmatch(char):
        # Still inside the identifier.
        return State.SCANNING_ID

    if char == "}" or REGEX_SPACE.fullmatch(char):
        # Only emit when at least one character followed the '#'.
        if (pos - 1) > tokens.start:
            tokens.append(tokens.start + 1, pos, "id")
        return State.DONE if char == "}" else State.SCANNING

    raise ParseError(f"Unexpected character whilst scanning id: {char}", pos)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def handle_scanning_class(char: str, pos: int, tokens: TokenState) -> State:
    """Inside a ``.class``: emit a "class" span when it terminates."""
    if not REGEX_SPACE_PUNCTUATION.fullmatch(char):
        # Still inside the class name.
        return State.SCANNING_CLASS

    if char == "}" or REGEX_SPACE.fullmatch(char):
        # Only emit when at least one character followed the '.'.
        if (pos - 1) > tokens.start:
            tokens.append(tokens.start + 1, pos, "class")
        return State.DONE if char == "}" else State.SCANNING

    raise ParseError(f"Unexpected character whilst scanning class: {char}", pos)
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
def handle_scanning_key(char: str, pos: int, tokens: TokenState) -> State:
    """Inside a key: ``=`` ends the key and begins its value."""
    if char == "=":
        tokens.append(tokens.start, pos, "key")
        return State.SCANNING_VALUE

    if not REGEX_KEY_CHARACTERS.fullmatch(char):
        raise ParseError(f"Unexpected character whilst scanning key: {char}", pos)
    return State.SCANNING_KEY
|
| 198 |
+
|
| 199 |
+
|
| 200 |
+
def handle_scanning_value(char: str, pos: int, tokens: TokenState) -> State:
    """First character after ``=``: decide between quoted and bare value."""
    if char == '"':
        tokens.set_start(pos)
        return State.SCANNING_QUOTED_VALUE

    if not REGEX_KEY_CHARACTERS.fullmatch(char):
        raise ParseError(f"Unexpected character whilst scanning value: {char}", pos)

    tokens.set_start(pos)
    return State.SCANNING_BARE_VALUE
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def handle_scanning_bare_value(char: str, pos: int, tokens: TokenState) -> State:
    """Inside an unquoted value: emit a "value" span when it terminates."""
    if REGEX_KEY_CHARACTERS.fullmatch(char):
        # Still inside the bare value.
        return State.SCANNING_BARE_VALUE

    if char == "}" or REGEX_SPACE.fullmatch(char):
        tokens.append(tokens.start, pos, "value")
        return State.DONE if char == "}" else State.SCANNING

    raise ParseError(f"Unexpected character whilst scanning bare value: {char}", pos)
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def handle_scanning_escaped(char: str, pos: int, tokens: TokenState) -> State:
    """After a backslash in a quoted value: accept any character literally."""
    return State.SCANNING_QUOTED_VALUE
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def handle_scanning_quoted_value(char: str, pos: int, tokens: TokenState) -> State:
    """Inside a double-quoted value.

    The closing quote emits a "value" span; backslash starts an escape;
    braces are forbidden inside quotes.
    """

    if char == '"':
        # Closing quote: emit the span between the quotes.
        tokens.append(tokens.start + 1, pos, "value")
        return State.SCANNING

    if char == "\\":
        return State.SCANNING_ESCAPED

    if char == "{" or char == "}":
        raise ParseError(
            f"Unexpected character whilst scanning quoted value: {char}", pos
        )

    if char == "\n":
        # NOTE(review): a newline emits the value scanned so far but keeps
        # scanning without advancing ``tokens.start``, so a later closing
        # quote emits a second, longer "value" span -- confirm intended.
        tokens.append(tokens.start + 1, pos, "value")
        return State.SCANNING_QUOTED_VALUE

    return State.SCANNING_QUOTED_VALUE
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
# Transition table: current state -> handler(char, pos, tokens) -> next state.
HANDLERS: dict[State, Callable[[str, int, TokenState], State]] = {
    State.START: handle_start,
    State.SCANNING: handle_scanning,
    State.SCANNING_COMMENT: handle_scanning_comment,
    State.SCANNING_ID: handle_scanning_id,
    State.SCANNING_CLASS: handle_scanning_class,
    State.SCANNING_KEY: handle_scanning_key,
    State.SCANNING_VALUE: handle_scanning_value,
    State.SCANNING_BARE_VALUE: handle_scanning_bare_value,
    State.SCANNING_QUOTED_VALUE: handle_scanning_quoted_value,
    State.SCANNING_ESCAPED: handle_scanning_escaped,
}
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/container/README.md
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# markdown-it-container
|
| 2 |
+
|
| 3 |
+
[](https://travis-ci.org/markdown-it/markdown-it-container)
|
| 4 |
+
[](https://www.npmjs.org/package/markdown-it-container)
|
| 5 |
+
[](https://coveralls.io/r/markdown-it/markdown-it-container?branch=master)
|
| 6 |
+
|
| 7 |
+
> Plugin for creating block-level custom containers for [markdown-it](https://github.com/markdown-it/markdown-it) markdown parser.
|
| 8 |
+
|
| 9 |
+
__v2.+ requires `markdown-it` v5.+, see changelog.__
|
| 10 |
+
|
| 11 |
+
With this plugin you can create block containers like:
|
| 12 |
+
|
| 13 |
+
```
|
| 14 |
+
::: warning
|
| 15 |
+
*here be dragons*
|
| 16 |
+
:::
|
| 17 |
+
```
|
| 18 |
+
|
| 19 |
+
.... and specify how they should be rendered. If no renderer defined, `<div>` with
|
| 20 |
+
container name class will be created:
|
| 21 |
+
|
| 22 |
+
```html
|
| 23 |
+
<div class="warning">
|
| 24 |
+
<em>here be dragons</em>
|
| 25 |
+
</div>
|
| 26 |
+
```
|
| 27 |
+
|
| 28 |
+
Markup is the same as for [fenced code blocks](http://spec.commonmark.org/0.18/#fenced-code-blocks).
|
| 29 |
+
Difference is, that marker use another character and content is rendered as markdown markup.
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
## Installation
|
| 33 |
+
|
| 34 |
+
node.js, browser:
|
| 35 |
+
|
| 36 |
+
```bash
|
| 37 |
+
$ npm install markdown-it-container --save
|
| 38 |
+
$ bower install markdown-it-container --save
|
| 39 |
+
```
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
## API
|
| 43 |
+
|
| 44 |
+
```js
|
| 45 |
+
var md = require('markdown-it')()
|
| 46 |
+
.use(require('markdown-it-container'), name [, options]);
|
| 47 |
+
```
|
| 48 |
+
|
| 49 |
+
Params:
|
| 50 |
+
|
| 51 |
+
- __name__ - container name (mandatory)
|
| 52 |
+
- __options:__
|
| 53 |
+
- __validate__ - optional, function to validate tail after opening marker, should
|
| 54 |
+
return `true` on success.
|
| 55 |
+
- __render__ - optional, renderer function for opening/closing tokens.
|
| 56 |
+
- __marker__ - optional (`:`), character to use in delimiter.
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
## Example
|
| 60 |
+
|
| 61 |
+
```js
|
| 62 |
+
var md = require('markdown-it')();
|
| 63 |
+
|
| 64 |
+
md.use(require('markdown-it-container'), 'spoiler', {
|
| 65 |
+
|
| 66 |
+
validate: function(params) {
|
| 67 |
+
return params.trim().match(/^spoiler\s+(.*)$/);
|
| 68 |
+
},
|
| 69 |
+
|
| 70 |
+
render: function (tokens, idx) {
|
| 71 |
+
var m = tokens[idx].info.trim().match(/^spoiler\s+(.*)$/);
|
| 72 |
+
|
| 73 |
+
if (tokens[idx].nesting === 1) {
|
| 74 |
+
// opening tag
|
| 75 |
+
return '<details><summary>' + md.utils.escapeHtml(m[1]) + '</summary>\n';
|
| 76 |
+
|
| 77 |
+
} else {
|
| 78 |
+
// closing tag
|
| 79 |
+
return '</details>\n';
|
| 80 |
+
}
|
| 81 |
+
}
|
| 82 |
+
});
|
| 83 |
+
|
| 84 |
+
console.log(md.render('::: spoiler click me\n*content*\n:::\n'));
|
| 85 |
+
|
| 86 |
+
// Output:
|
| 87 |
+
//
|
| 88 |
+
// <details><summary>click me</summary>
|
| 89 |
+
// <p><em>content</em></p>
|
| 90 |
+
// </details>
|
| 91 |
+
```
|
| 92 |
+
|
| 93 |
+
## License
|
| 94 |
+
|
| 95 |
+
[MIT](https://github.com/markdown-it/markdown-it-container/blob/master/LICENSE)
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/container/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (223 Bytes). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/container/__pycache__/index.cpython-310.pyc
ADDED
|
Binary file (3.46 kB). View file
|
|
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/container/index.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Process block-level custom containers."""
|
| 2 |
+
from math import floor
|
| 3 |
+
from typing import Callable, Optional
|
| 4 |
+
|
| 5 |
+
from markdown_it import MarkdownIt
|
| 6 |
+
from markdown_it.common.utils import charCodeAt
|
| 7 |
+
from markdown_it.rules_block import StateBlock
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def container_plugin(
    md: MarkdownIt,
    name: str,
    marker: str = ":",
    validate: Optional[Callable[[str, str], bool]] = None,
    render=None,
):
    """Plugin ported from
    `markdown-it-container <https://github.com/markdown-it/markdown-it-container>`__.

    It is a plugin for creating block-level custom containers:

    .. code-block:: md

        :::: name
        ::: name
        *markdown*
        :::
        ::::

    :param name: the name of the container to parse
    :param marker: the marker character to use
    :param validate: func(marker, param) -> bool, default matches against the name
    :param render: render func

    """

    def validateDefault(params: str, *args):
        # Default validator: the first whitespace-separated word after the
        # opening fence must equal the registered container name.
        return params.strip().split(" ", 2)[0] == name

    def renderDefault(self, tokens, idx, _options, env):
        # add a class to the opening tag
        if tokens[idx].nesting == 1:
            tokens[idx].attrJoin("class", name)

        return self.renderToken(tokens, idx, _options, env)

    # An opening/closing fence needs at least 3 repetitions of the marker.
    min_markers = 3
    marker_str = marker
    # Code point of the first marker character, used for the fast-path check
    # against state.srcCharCode below.
    marker_char = charCodeAt(marker_str, 0)
    marker_len = len(marker_str)
    # Fall back to the defaults when the caller supplied neither.
    validate = validate or validateDefault
    render = render or renderDefault

    def container_func(state: StateBlock, startLine: int, endLine: int, silent: bool):
        # Block rule: recognise an opening fence on `startLine`, locate the
        # matching closing fence, tokenize the interior, and emit
        # container_<name>_open / container_<name>_close tokens.
        # Returns False when this line is not a container opening.

        auto_closed = False
        # Absolute offsets of the first non-indent char / end of line.
        start = state.bMarks[startLine] + state.tShift[startLine]
        maximum = state.eMarks[startLine]

        # Check out the first character quickly,
        # this should filter out most of non-containers
        if marker_char != state.srcCharCode[start]:
            return False

        # Check out the rest of the marker string
        pos = start + 1
        while pos <= maximum:
            try:
                # src may be exactly `pos` chars long; treat running off the
                # end the same as a non-matching character.
                character = state.src[pos]
            except IndexError:
                break
            if marker_str[(pos - start) % marker_len] != character:
                break
            pos += 1

        # Number of complete marker repetitions found.
        marker_count = floor((pos - start) / marker_len)
        if marker_count < min_markers:
            return False
        # Trim any trailing partial repetition of a multi-char marker.
        pos -= (pos - start) % marker_len

        markup = state.src[start:pos]    # the literal fence, e.g. ":::"
        params = state.src[pos:maximum]  # everything after the fence ("info string")
        assert validate is not None
        if not validate(params, markup):
            return False

        # Since start is found, we can report success here in validation mode
        if silent:
            return True

        # Search for the end of the block
        nextLine = startLine

        while True:
            nextLine += 1
            if nextLine >= endLine:
                # unclosed block should be autoclosed by end of document.
                # also block seems to be autoclosed by end of parent
                break

            start = state.bMarks[nextLine] + state.tShift[nextLine]
            maximum = state.eMarks[nextLine]

            if start < maximum and state.sCount[nextLine] < state.blkIndent:
                # non-empty line with negative indent should stop the list:
                # - ```
                #  test
                break

            # Quick reject: line does not begin with the marker character.
            if marker_char != state.srcCharCode[start]:
                continue

            if state.sCount[nextLine] - state.blkIndent >= 4:
                # closing fence should be indented less than 4 spaces
                continue

            # Count marker repetitions on this candidate closing line
            # (same scan as for the opening fence).
            pos = start + 1
            while pos <= maximum:
                try:
                    character = state.src[pos]
                except IndexError:
                    break
                if marker_str[(pos - start) % marker_len] != character:
                    break
                pos += 1

            # closing code fence must be at least as long as the opening one
            if floor((pos - start) / marker_len) < marker_count:
                continue

            # make sure tail has spaces only
            pos -= (pos - start) % marker_len
            pos = state.skipSpaces(pos)

            if pos < maximum:
                continue

            # found!
            auto_closed = True
            break

        # Save parser state so nested tokenization can be sandboxed.
        old_parent = state.parentType
        old_line_max = state.lineMax
        state.parentType = "container"

        # this will prevent lazy continuations from ever going past our end marker
        state.lineMax = nextLine

        token = state.push(f"container_{name}_open", "div", 1)
        token.markup = markup
        token.block = True
        token.info = params
        token.map = [startLine, nextLine]

        # Recursively parse the container body as block-level markdown.
        state.md.block.tokenize(state, startLine + 1, nextLine)

        token = state.push(f"container_{name}_close", "div", -1)
        # NOTE: start/pos still refer to the last line examined by the search
        # loop, so this is the closing fence text (or empty if auto-closed).
        token.markup = state.src[start:pos]
        token.block = True

        # Restore the saved parser state and advance past the block;
        # skip the closing-fence line only when one was actually found.
        state.parentType = old_parent
        state.lineMax = old_line_max
        state.line = nextLine + (1 if auto_closed else 0)

        return True

    # Register ahead of the "fence" rule so ::: fences win over ``` handling;
    # the alt list lets the rule interrupt paragraphs, references, etc.
    md.block.ruler.before(
        "fence",
        "container_" + name,
        container_func,
        {"alt": ["paragraph", "reference", "blockquote", "list"]},
    )
    md.add_render_rule(f"container_{name}_open", render)
    md.add_render_rule(f"container_{name}_close", render)
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/container/port.yaml
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
- package: markdown-it-container
|
| 2 |
+
commit: adb3defde3a1c56015895b47ce4c6591b8b1e3a2
|
| 3 |
+
date: Jun 2, 2020
|
| 4 |
+
version: 3.0.0
|
| 5 |
+
changes:
|
parrot/lib/python3.10/site-packages/mdit_py_plugins/deflist/LICENSE
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) 2014-2015 Vitaly Puzrin, Alex Kocharin.
|
| 2 |
+
|
| 3 |
+
Permission is hereby granted, free of charge, to any person
|
| 4 |
+
obtaining a copy of this software and associated documentation
|
| 5 |
+
files (the "Software"), to deal in the Software without
|
| 6 |
+
restriction, including without limitation the rights to use,
|
| 7 |
+
copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 8 |
+
copies of the Software, and to permit persons to whom the
|
| 9 |
+
Software is furnished to do so, subject to the following
|
| 10 |
+
conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be
|
| 13 |
+
included in all copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
| 16 |
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
| 17 |
+
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
| 18 |
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
|
| 19 |
+
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
| 20 |
+
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
| 21 |
+
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
|
| 22 |
+
OTHER DEALINGS IN THE SOFTWARE.
|