diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash b/deepseek/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash new file mode 100644 index 0000000000000000000000000000000000000000..c8d55240e6a55c305c99244966dee5615565716b --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/.hash/hdrs.py.hash @@ -0,0 +1 @@ +dab8f933203eeb245d60f856e542a45b888d5a110094620e4811f90f816628d1 /home/runner/work/aiohttp/aiohttp/aiohttp/hdrs.py diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__init__.py b/deepseek/lib/python3.10/site-packages/aiohttp/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8c80ff3ab7dc68819f340ce9e59da890284792a5 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/__init__.py @@ -0,0 +1,264 @@ +__version__ = "3.11.10" + +from typing import TYPE_CHECKING, Tuple + +from . import hdrs as hdrs +from .client import ( + BaseConnector, + ClientConnectionError, + ClientConnectionResetError, + ClientConnectorCertificateError, + ClientConnectorDNSError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientRequest, + ClientResponse, + ClientResponseError, + ClientSession, + ClientSSLError, + ClientTimeout, + ClientWebSocketResponse, + ClientWSTimeout, + ConnectionTimeoutError, + ContentTypeError, + Fingerprint, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NamedPipeConnector, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, + RequestInfo, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TCPConnector, + TooManyRedirects, + UnixConnector, + WSMessageTypeError, + WSServerHandshakeError, + request, +) +from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar +from .formdata import FormData as FormData +from .helpers import BasicAuth, ChainMapProxy, ETag +from .http import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + WebSocketError as WebSocketError, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, +) +from .multipart import ( + BadContentDispositionHeader as BadContentDispositionHeader, + BadContentDispositionParam as BadContentDispositionParam, + BodyPartReader as BodyPartReader, + MultipartReader as MultipartReader, + MultipartWriter as MultipartWriter, + content_disposition_filename as content_disposition_filename, + parse_content_disposition as parse_content_disposition, +) +from .payload import ( + PAYLOAD_REGISTRY as PAYLOAD_REGISTRY, + AsyncIterablePayload as AsyncIterablePayload, + BufferedReaderPayload as BufferedReaderPayload, + BytesIOPayload as BytesIOPayload, + BytesPayload as BytesPayload, + IOBasePayload as IOBasePayload, + JsonPayload as JsonPayload, + Payload as Payload, + StringIOPayload as StringIOPayload, + StringPayload as StringPayload, + TextIOPayload as TextIOPayload, + get_payload as get_payload, + payload_type as payload_type, +) +from .payload_streamer import streamer as streamer +from .resolver import ( + AsyncResolver as AsyncResolver, + DefaultResolver as DefaultResolver, + ThreadedResolver as ThreadedResolver, +) +from .streams import ( + EMPTY_PAYLOAD as EMPTY_PAYLOAD, + DataQueue as DataQueue, + EofStream as EofStream, + FlowControlDataQueue as FlowControlDataQueue, + StreamReader as StreamReader, +) +from 
.tracing import ( + TraceConfig as TraceConfig, + TraceConnectionCreateEndParams as TraceConnectionCreateEndParams, + TraceConnectionCreateStartParams as TraceConnectionCreateStartParams, + TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams, + TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams, + TraceConnectionReuseconnParams as TraceConnectionReuseconnParams, + TraceDnsCacheHitParams as TraceDnsCacheHitParams, + TraceDnsCacheMissParams as TraceDnsCacheMissParams, + TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams, + TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams, + TraceRequestChunkSentParams as TraceRequestChunkSentParams, + TraceRequestEndParams as TraceRequestEndParams, + TraceRequestExceptionParams as TraceRequestExceptionParams, + TraceRequestHeadersSentParams as TraceRequestHeadersSentParams, + TraceRequestRedirectParams as TraceRequestRedirectParams, + TraceRequestStartParams as TraceRequestStartParams, + TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams, +) + +if TYPE_CHECKING: + # At runtime these are lazy-loaded at the bottom of the file. + from .worker import ( + GunicornUVLoopWebWorker as GunicornUVLoopWebWorker, + GunicornWebWorker as GunicornWebWorker, + ) + +__all__: Tuple[str, ...] = ( + "hdrs", + # client + "BaseConnector", + "ClientConnectionError", + "ClientConnectionResetError", + "ClientConnectorCertificateError", + "ClientConnectorDNSError", + "ClientConnectorError", + "ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponse", + "ClientRequest", + "ClientResponseError", + "ClientSSLError", + "ClientSession", + "ClientTimeout", + "ClientWebSocketResponse", + "ClientWSTimeout", + "ConnectionTimeoutError", + "ContentTypeError", + "Fingerprint", + "FlowControlDataQueue", + "InvalidURL", + "InvalidUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlClientError", + "NonHttpUrlRedirectClientError", + "RedirectClientError", + "RequestInfo", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "SocketTimeoutError", + "TCPConnector", + "TooManyRedirects", + "UnixConnector", + "NamedPipeConnector", + "WSServerHandshakeError", + "request", + # cookiejar + "CookieJar", + "DummyCookieJar", + # formdata + "FormData", + # helpers + "BasicAuth", + "ChainMapProxy", + "ETag", + # http + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + "WSMsgType", + "WSCloseCode", + "WSMessage", + "WebSocketError", + # multipart + "BadContentDispositionHeader", + "BadContentDispositionParam", + "BodyPartReader", + "MultipartReader", + "MultipartWriter", + "content_disposition_filename", + "parse_content_disposition", + # payload + "AsyncIterablePayload", + "BufferedReaderPayload", + "BytesIOPayload", + "BytesPayload", + "IOBasePayload", + "JsonPayload", + "PAYLOAD_REGISTRY", + "Payload", + "StringIOPayload", + "StringPayload", + "TextIOPayload", + "get_payload", + "payload_type", + # payload_streamer + "streamer", + # resolver + "AsyncResolver", + "DefaultResolver", + "ThreadedResolver", + # streams + "DataQueue", + "EMPTY_PAYLOAD", + "EofStream", + "StreamReader", + # tracing + "TraceConfig", + "TraceConnectionCreateEndParams", + "TraceConnectionCreateStartParams", + "TraceConnectionQueuedEndParams", + "TraceConnectionQueuedStartParams", + "TraceConnectionReuseconnParams", + "TraceDnsCacheHitParams", + "TraceDnsCacheMissParams", + 
"TraceDnsResolveHostEndParams", + "TraceDnsResolveHostStartParams", + "TraceRequestChunkSentParams", + "TraceRequestEndParams", + "TraceRequestExceptionParams", + "TraceRequestHeadersSentParams", + "TraceRequestRedirectParams", + "TraceRequestStartParams", + "TraceResponseChunkReceivedParams", + # workers (imported lazily with __getattr__) + "GunicornUVLoopWebWorker", + "GunicornWebWorker", + "WSMessageTypeError", +) + + +def __dir__() -> Tuple[str, ...]: + return __all__ + ("__doc__",) + + +def __getattr__(name: str) -> object: + global GunicornUVLoopWebWorker, GunicornWebWorker + + # Importing gunicorn takes a long time (>100ms), so only import if actually needed. + if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"): + try: + from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw + except ImportError: + return None + + GunicornUVLoopWebWorker = guv # type: ignore[misc] + GunicornWebWorker = gw # type: ignore[misc] + return guv if name == "GunicornUVLoopWebWorker" else gw + + raise AttributeError(f"module {__name__} has no attribute {name}") diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..93de7e8fa182a891b8c39aa29df043a15aabe4a8 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2cd5572c584f0531178cef62ee186d5d82ed9a4 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..18a3900f6d31900b5814e316fb9d49879fcb3617 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9d9eb2001f16417401907f27f93adccddf1de569 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be7177d68277cc6b3a53c2d190c5dc74e52bfb7c Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/hdrs.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6400f4a242ec527a1754c8944fb85902a2506d5 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc 
differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c44403d794ba1df986434c9790bfcdb498ebc1e8 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c271feaff17360a334851176c98f56b080751b1f Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/resolver.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1bbedd5689b9589e9fd1021608fa9ecf170a6b02 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85baebf4503018fa5e1894071d0b4e9870a06e0d Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79b8d8df2e14945542118397dadf588e49226283 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..03a91d3192e04bd70118333954e2bba797f672e1 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6f9b863f6449b202fc0dd1e39ebcf0aa66bf7a9 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..551a3ca5169d2c873d6e551c485af2877032e66b Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/_headers.pxi b/deepseek/lib/python3.10/site-packages/aiohttp/_headers.pxi new file mode 100644 index 
0000000000000000000000000000000000000000..3744721d4786a6c79b90aa349c8d02fa66204ecc --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/_headers.pxi @@ -0,0 +1,83 @@ +# The file is autogenerated from aiohttp/hdrs.py +# Run ./tools/gen.py to update it after the origin changes. + +from . import hdrs +cdef tuple headers = ( + hdrs.ACCEPT, + hdrs.ACCEPT_CHARSET, + hdrs.ACCEPT_ENCODING, + hdrs.ACCEPT_LANGUAGE, + hdrs.ACCEPT_RANGES, + hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS, + hdrs.ACCESS_CONTROL_ALLOW_HEADERS, + hdrs.ACCESS_CONTROL_ALLOW_METHODS, + hdrs.ACCESS_CONTROL_ALLOW_ORIGIN, + hdrs.ACCESS_CONTROL_EXPOSE_HEADERS, + hdrs.ACCESS_CONTROL_MAX_AGE, + hdrs.ACCESS_CONTROL_REQUEST_HEADERS, + hdrs.ACCESS_CONTROL_REQUEST_METHOD, + hdrs.AGE, + hdrs.ALLOW, + hdrs.AUTHORIZATION, + hdrs.CACHE_CONTROL, + hdrs.CONNECTION, + hdrs.CONTENT_DISPOSITION, + hdrs.CONTENT_ENCODING, + hdrs.CONTENT_LANGUAGE, + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_MD5, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TRANSFER_ENCODING, + hdrs.CONTENT_TYPE, + hdrs.COOKIE, + hdrs.DATE, + hdrs.DESTINATION, + hdrs.DIGEST, + hdrs.ETAG, + hdrs.EXPECT, + hdrs.EXPIRES, + hdrs.FORWARDED, + hdrs.FROM, + hdrs.HOST, + hdrs.IF_MATCH, + hdrs.IF_MODIFIED_SINCE, + hdrs.IF_NONE_MATCH, + hdrs.IF_RANGE, + hdrs.IF_UNMODIFIED_SINCE, + hdrs.KEEP_ALIVE, + hdrs.LAST_EVENT_ID, + hdrs.LAST_MODIFIED, + hdrs.LINK, + hdrs.LOCATION, + hdrs.MAX_FORWARDS, + hdrs.ORIGIN, + hdrs.PRAGMA, + hdrs.PROXY_AUTHENTICATE, + hdrs.PROXY_AUTHORIZATION, + hdrs.RANGE, + hdrs.REFERER, + hdrs.RETRY_AFTER, + hdrs.SEC_WEBSOCKET_ACCEPT, + hdrs.SEC_WEBSOCKET_EXTENSIONS, + hdrs.SEC_WEBSOCKET_KEY, + hdrs.SEC_WEBSOCKET_KEY1, + hdrs.SEC_WEBSOCKET_PROTOCOL, + hdrs.SEC_WEBSOCKET_VERSION, + hdrs.SERVER, + hdrs.SET_COOKIE, + hdrs.TE, + hdrs.TRAILER, + hdrs.TRANSFER_ENCODING, + hdrs.URI, + hdrs.UPGRADE, + hdrs.USER_AGENT, + hdrs.VARY, + hdrs.VIA, + hdrs.WWW_AUTHENTICATE, + hdrs.WANT_DIGEST, + hdrs.WARNING, + hdrs.X_FORWARDED_FOR, + hdrs.X_FORWARDED_HOST, + hdrs.X_FORWARDED_PROTO, +) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/_http_writer.pyx b/deepseek/lib/python3.10/site-packages/aiohttp/_http_writer.pyx new file mode 100644 index 0000000000000000000000000000000000000000..287371334f8fe2ba83c24e880b4dba8951f15f8f --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/_http_writer.pyx @@ -0,0 +1,162 @@ +from cpython.bytes cimport PyBytes_FromStringAndSize +from cpython.exc cimport PyErr_NoMemory +from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc +from cpython.object cimport PyObject_Str +from libc.stdint cimport uint8_t, uint64_t +from libc.string cimport memcpy + +from multidict import istr + +DEF BUF_SIZE = 16 * 1024 # 16KiB +cdef char BUFFER[BUF_SIZE] + +cdef object _istr = istr + + +# ----------------- writer --------------------------- + +cdef struct Writer: + char *buf + Py_ssize_t size + Py_ssize_t pos + + +cdef inline void _init_writer(Writer* writer): + writer.buf = &BUFFER[0] + writer.size = BUF_SIZE + writer.pos = 0 + + +cdef inline void _release_writer(Writer* writer): + if writer.buf != BUFFER: + PyMem_Free(writer.buf) + + +cdef inline int _write_byte(Writer* writer, uint8_t ch): + cdef char * buf + cdef Py_ssize_t size + + if writer.pos == writer.size: + # reallocate + size = writer.size + BUF_SIZE + if writer.buf == BUFFER: + buf = PyMem_Malloc(size) + if buf == NULL: + PyErr_NoMemory() + return -1 + memcpy(buf, writer.buf, writer.size) + else: + buf = PyMem_Realloc(writer.buf, size) + if buf == NULL: + 
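+                # realloc failed: writer.buf still points at the old, valid
+                # buffer, which the caller's finally block frees via
+                # _release_writer(), so nothing leaks on this error path.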
PyErr_NoMemory() + return -1 + writer.buf = buf + writer.size = size + writer.buf[writer.pos] = ch + writer.pos += 1 + return 0 + + +cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol): + cdef uint64_t utf = symbol + + if utf < 0x80: + return _write_byte(writer, utf) + elif utf < 0x800: + if _write_byte(writer, (0xc0 | (utf >> 6))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + elif 0xD800 <= utf <= 0xDFFF: + # surrogate code point, ignored + return 0 + elif utf < 0x10000: + if _write_byte(writer, (0xe0 | (utf >> 12))) < 0: + return -1 + if _write_byte(writer, (0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + elif utf > 0x10FFFF: + # symbol is too large + return 0 + else: + if _write_byte(writer, (0xf0 | (utf >> 18))) < 0: + return -1 + if _write_byte(writer, + (0x80 | ((utf >> 12) & 0x3f))) < 0: + return -1 + if _write_byte(writer, + (0x80 | ((utf >> 6) & 0x3f))) < 0: + return -1 + return _write_byte(writer, (0x80 | (utf & 0x3f))) + + +cdef inline int _write_str(Writer* writer, str s): + cdef Py_UCS4 ch + for ch in s: + if _write_utf8(writer, ch) < 0: + return -1 + return 0 + + +# --------------- _serialize_headers ---------------------- + +cdef str to_str(object s): + if type(s) is str: + return s + elif type(s) is _istr: + return PyObject_Str(s) + elif not isinstance(s, str): + raise TypeError("Cannot serialize non-str key {!r}".format(s)) + else: + return str(s) + + + +def _serialize_headers(str status_line, headers): + cdef Writer writer + cdef object key + cdef object val + cdef bytes ret + cdef str key_str + cdef str val_str + + _init_writer(&writer) + + try: + if _write_str(&writer, status_line) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + for key, val in headers.items(): + key_str = to_str(key) + val_str = to_str(val) + + if "\r" in key_str or "\n" in key_str or "\r" in val_str or "\n" in val_str: + raise ValueError( + "Newline or carriage return character detected in HTTP status message or " + "header. This is a potential security issue." 
+ ) + + if _write_str(&writer, key_str) < 0: + raise + if _write_byte(&writer, b':') < 0: + raise + if _write_byte(&writer, b' ') < 0: + raise + if _write_str(&writer, val_str) < 0: + raise + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + if _write_byte(&writer, b'\r') < 0: + raise + if _write_byte(&writer, b'\n') < 0: + raise + + return PyBytes_FromStringAndSize(writer.buf, writer.pos) + finally: + _release_writer(&writer) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/abc.py b/deepseek/lib/python3.10/site-packages/aiohttp/abc.py new file mode 100644 index 0000000000000000000000000000000000000000..d6f9f782b0fb0c87cca849bb5a7b17331cc8f77f --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/abc.py @@ -0,0 +1,247 @@ +import asyncio +import logging +import socket +import zlib +from abc import ABC, abstractmethod +from collections.abc import Sized +from http.cookies import BaseCookie, Morsel +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Dict, + Generator, + Iterable, + List, + Optional, + Tuple, + TypedDict, +) + +from multidict import CIMultiDict +from yarl import URL + +from .typedefs import LooseCookies + +if TYPE_CHECKING: + from .web_app import Application + from .web_exceptions import HTTPException + from .web_request import BaseRequest, Request + from .web_response import StreamResponse +else: + BaseRequest = Request = Application = StreamResponse = None + HTTPException = None + + +class AbstractRouter(ABC): + def __init__(self) -> None: + self._frozen = False + + def post_init(self, app: Application) -> None: + """Post init stage. + + Not an abstract method for the sake of backward compatibility, + but if the router wants to be aware of the application + it can override this. + """ + + @property + def frozen(self) -> bool: + return self._frozen + + def freeze(self) -> None: + """Freeze router.""" + self._frozen = True + + @abstractmethod + async def resolve(self, request: Request) -> "AbstractMatchInfo": + """Return MATCH_INFO for the given request""" + + +class AbstractMatchInfo(ABC): + + __slots__ = () + + @property # pragma: no branch + @abstractmethod + def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: + """Execute matched request handler""" + + @property + @abstractmethod + def expect_handler( + self, + ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]: + """Expect handler for 100-continue processing""" + + @property # pragma: no branch + @abstractmethod + def http_exception(self) -> Optional[HTTPException]: + """HTTPException instance raised on router's resolving, or None""" + + @abstractmethod # pragma: no branch + def get_info(self) -> Dict[str, Any]: + """Return a dict with additional info useful for introspection""" + + @property # pragma: no branch + @abstractmethod + def apps(self) -> Tuple[Application, ...]: + """Stack of nested applications. + + The top-level application is the left-most element. + + """ + + @abstractmethod + def add_app(self, app: Application) -> None: + """Add application to the nested apps stack.""" + + @abstractmethod + def freeze(self) -> None: + """Freeze the match info. + + The method is called after route resolution. + + After the call .add_app() is forbidden. 
+ + """ + + +class AbstractView(ABC): + """Abstract class based view.""" + + def __init__(self, request: Request) -> None: + self._request = request + + @property + def request(self) -> Request: + """Request instance.""" + return self._request + + @abstractmethod + def __await__(self) -> Generator[Any, None, StreamResponse]: + """Execute the view handler.""" + + +class ResolveResult(TypedDict): + """Resolve result. + + This is the result returned from an AbstractResolver's + resolve method. + + :param hostname: The hostname that was provided. + :param host: The IP address that was resolved. + :param port: The port that was resolved. + :param family: The address family that was resolved. + :param proto: The protocol that was resolved. + :param flags: The flags that were resolved. + """ + + hostname: str + host: str + port: int + family: int + proto: int + flags: int + + +class AbstractResolver(ABC): + """Abstract DNS resolver.""" + + @abstractmethod + async def resolve( + self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET + ) -> List[ResolveResult]: + """Return IP address for given hostname""" + + @abstractmethod + async def close(self) -> None: + """Release resolver""" + + +if TYPE_CHECKING: + IterableBase = Iterable[Morsel[str]] +else: + IterableBase = Iterable + + +ClearCookiePredicate = Callable[["Morsel[str]"], bool] + + +class AbstractCookieJar(Sized, IterableBase): + """Abstract Cookie Jar.""" + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + self._loop = loop or asyncio.get_running_loop() + + @abstractmethod + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + """Clear all cookies if no predicate is passed.""" + + @abstractmethod + def clear_domain(self, domain: str) -> None: + """Clear all cookies for domain and all subdomains.""" + + @abstractmethod + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + + @abstractmethod + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + """Return the jar's cookies filtered by their attributes.""" + + +class AbstractStreamWriter(ABC): + """Abstract stream writer.""" + + buffer_size: int = 0 + output_size: int = 0 + length: Optional[int] = 0 + + @abstractmethod + async def write(self, chunk: bytes) -> None: + """Write chunk into stream.""" + + @abstractmethod + async def write_eof(self, chunk: bytes = b"") -> None: + """Write last chunk.""" + + @abstractmethod + async def drain(self) -> None: + """Flush the write buffer.""" + + @abstractmethod + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + """Enable HTTP body compression""" + + @abstractmethod + def enable_chunking(self) -> None: + """Enable HTTP chunked mode""" + + @abstractmethod + async def write_headers( + self, status_line: str, headers: "CIMultiDict[str]" + ) -> None: + """Write HTTP headers""" + + +class AbstractAccessLogger(ABC): + """Abstract writer to access log.""" + + __slots__ = ("logger", "log_format") + + def __init__(self, logger: logging.Logger, log_format: str) -> None: + self.logger = logger + self.log_format = log_format + + @abstractmethod + def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None: + """Emit log to logger.""" + + @property + def enabled(self) -> bool: + """Check if logger is enabled.""" + return True diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/base_protocol.py 
b/deepseek/lib/python3.10/site-packages/aiohttp/base_protocol.py new file mode 100644 index 0000000000000000000000000000000000000000..b0a67ed6ff68ca5bc48be9ac472ee755369b2720 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/base_protocol.py @@ -0,0 +1,100 @@ +import asyncio +from typing import Optional, cast + +from .client_exceptions import ClientConnectionResetError +from .helpers import set_exception +from .tcp_helpers import tcp_nodelay + + +class BaseProtocol(asyncio.Protocol): + __slots__ = ( + "_loop", + "_paused", + "_drain_waiter", + "_connection_lost", + "_reading_paused", + "transport", + ) + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop: asyncio.AbstractEventLoop = loop + self._paused = False + self._drain_waiter: Optional[asyncio.Future[None]] = None + self._reading_paused = False + + self.transport: Optional[asyncio.Transport] = None + + @property + def connected(self) -> bool: + """Return True if the connection is open.""" + return self.transport is not None + + @property + def writing_paused(self) -> bool: + return self._paused + + def pause_writing(self) -> None: + assert not self._paused + self._paused = True + + def resume_writing(self) -> None: + assert self._paused + self._paused = False + + waiter = self._drain_waiter + if waiter is not None: + self._drain_waiter = None + if not waiter.done(): + waiter.set_result(None) + + def pause_reading(self) -> None: + if not self._reading_paused and self.transport is not None: + try: + self.transport.pause_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = True + + def resume_reading(self) -> None: + if self._reading_paused and self.transport is not None: + try: + self.transport.resume_reading() + except (AttributeError, NotImplementedError, RuntimeError): + pass + self._reading_paused = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + tr = cast(asyncio.Transport, transport) + tcp_nodelay(tr, True) + self.transport = tr + + def connection_lost(self, exc: Optional[BaseException]) -> None: + # Wake up the writer if currently paused. 
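+        # Dropping the transport first makes `connected` report False while
+        # the drain waiter below is woken (with `exc` chained in, if any).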
+ self.transport = None + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + return + self._drain_waiter = None + if waiter.done(): + return + if exc is None: + waiter.set_result(None) + else: + set_exception( + waiter, + ConnectionError("Connection lost"), + exc, + ) + + async def _drain_helper(self) -> None: + if self.transport is None: + raise ClientConnectionResetError("Connection lost") + if not self._paused: + return + waiter = self._drain_waiter + if waiter is None: + waiter = self._loop.create_future() + self._drain_waiter = waiter + await asyncio.shield(waiter) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/client.py b/deepseek/lib/python3.10/site-packages/aiohttp/client.py new file mode 100644 index 0000000000000000000000000000000000000000..e04a6ff989a9a26b1bc1fe62f9cf884062583c85 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/client.py @@ -0,0 +1,1574 @@ +"""HTTP Client for asyncio.""" + +import asyncio +import base64 +import hashlib +import json +import os +import sys +import traceback +import warnings +from contextlib import suppress +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Coroutine, + Final, + FrozenSet, + Generator, + Generic, + Iterable, + List, + Mapping, + Optional, + Set, + Tuple, + Type, + TypedDict, + TypeVar, + Union, +) + +import attr +from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr +from yarl import URL + +from . import hdrs, http, payload +from ._websocket.reader import WebSocketDataQueue +from .abc import AbstractCookieJar +from .client_exceptions import ( + ClientConnectionError, + ClientConnectionResetError, + ClientConnectorCertificateError, + ClientConnectorDNSError, + ClientConnectorError, + ClientConnectorSSLError, + ClientError, + ClientHttpProxyError, + ClientOSError, + ClientPayloadError, + ClientProxyConnectionError, + ClientResponseError, + ClientSSLError, + ConnectionTimeoutError, + ContentTypeError, + InvalidURL, + InvalidUrlClientError, + InvalidUrlRedirectClientError, + NonHttpUrlClientError, + NonHttpUrlRedirectClientError, + RedirectClientError, + ServerConnectionError, + ServerDisconnectedError, + ServerFingerprintMismatch, + ServerTimeoutError, + SocketTimeoutError, + TooManyRedirects, + WSMessageTypeError, + WSServerHandshakeError, +) +from .client_reqrep import ( + ClientRequest as ClientRequest, + ClientResponse as ClientResponse, + Fingerprint as Fingerprint, + RequestInfo as RequestInfo, + _merge_ssl_params, +) +from .client_ws import ( + DEFAULT_WS_CLIENT_TIMEOUT, + ClientWebSocketResponse as ClientWebSocketResponse, + ClientWSTimeout as ClientWSTimeout, +) +from .connector import ( + HTTP_AND_EMPTY_SCHEMA_SET, + BaseConnector as BaseConnector, + NamedPipeConnector as NamedPipeConnector, + TCPConnector as TCPConnector, + UnixConnector as UnixConnector, +) +from .cookiejar import CookieJar +from .helpers import ( + _SENTINEL, + DEBUG, + EMPTY_BODY_METHODS, + BasicAuth, + TimeoutHandle, + get_env_proxy_for_url, + sentinel, + strip_auth_from_url, +) +from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter +from .http_websocket import WSHandshakeError, ws_ext_gen, ws_ext_parse +from .tracing import Trace, TraceConfig +from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, Query, StrOrURL + +__all__ = ( + # client_exceptions + "ClientConnectionError", + "ClientConnectionResetError", + "ClientConnectorCertificateError", + "ClientConnectorDNSError", + "ClientConnectorError", + 
"ClientConnectorSSLError", + "ClientError", + "ClientHttpProxyError", + "ClientOSError", + "ClientPayloadError", + "ClientProxyConnectionError", + "ClientResponseError", + "ClientSSLError", + "ConnectionTimeoutError", + "ContentTypeError", + "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", + "ServerConnectionError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ServerTimeoutError", + "SocketTimeoutError", + "TooManyRedirects", + "WSServerHandshakeError", + # client_reqrep + "ClientRequest", + "ClientResponse", + "Fingerprint", + "RequestInfo", + # connector + "BaseConnector", + "TCPConnector", + "UnixConnector", + "NamedPipeConnector", + # client_ws + "ClientWebSocketResponse", + # client + "ClientSession", + "ClientTimeout", + "ClientWSTimeout", + "request", + "WSMessageTypeError", +) + + +if TYPE_CHECKING: + from ssl import SSLContext +else: + SSLContext = None + +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + + +class _RequestOptions(TypedDict, total=False): + params: Query + data: Any + json: Any + cookies: Union[LooseCookies, None] + headers: Union[LooseHeaders, None] + skip_auto_headers: Union[Iterable[str], None] + auth: Union[BasicAuth, None] + allow_redirects: bool + max_redirects: int + compress: Union[str, bool, None] + chunked: Union[bool, None] + expect100: bool + raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] + read_until_eof: bool + proxy: Union[StrOrURL, None] + proxy_auth: Union[BasicAuth, None] + timeout: "Union[ClientTimeout, _SENTINEL, None]" + ssl: Union[SSLContext, bool, Fingerprint] + server_hostname: Union[str, None] + proxy_headers: Union[LooseHeaders, None] + trace_request_ctx: Union[Mapping[str, Any], None] + read_bufsize: Union[int, None] + auto_decompress: Union[bool, None] + max_line_size: Union[int, None] + max_field_size: Union[int, None] + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ClientTimeout: + total: Optional[float] = None + connect: Optional[float] = None + sock_read: Optional[float] = None + sock_connect: Optional[float] = None + ceil_threshold: float = 5 + + # pool_queue_timeout: Optional[float] = None + # dns_resolution_timeout: Optional[float] = None + # socket_connect_timeout: Optional[float] = None + # connection_acquiring_timeout: Optional[float] = None + # new_connection_timeout: Optional[float] = None + # http_header_timeout: Optional[float] = None + # response_body_timeout: Optional[float] = None + + # to create a timeout specific for a single request, either + # - create a completely new one to overwrite the default + # - or use http://www.attrs.org/en/stable/api.html#attr.evolve + # to overwrite the defaults + + +# 5 Minute default read timeout +DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60, sock_connect=30) + +# https://www.rfc-editor.org/rfc/rfc9110#section-9.2.2 +IDEMPOTENT_METHODS = frozenset({"GET", "HEAD", "OPTIONS", "TRACE", "PUT", "DELETE"}) + +_RetType = TypeVar("_RetType", ClientResponse, ClientWebSocketResponse) +_CharsetResolver = Callable[[ClientResponse, bytes], str] + + +class ClientSession: + """First-class interface for making HTTP requests.""" + + ATTRS = frozenset( + [ + "_base_url", + "_base_url_origin", + "_source_traceback", + "_connector", + "_loop", + "_cookie_jar", + "_connector_owner", + "_default_auth", + "_version", + "_json_serialize", + "_requote_redirect_url", + "_timeout", + 
"_raise_for_status", + "_auto_decompress", + "_trust_env", + "_default_headers", + "_skip_auto_headers", + "_request_class", + "_response_class", + "_ws_response_class", + "_trace_configs", + "_read_bufsize", + "_max_line_size", + "_max_field_size", + "_resolve_charset", + "_default_proxy", + "_default_proxy_auth", + "_retry_connection", + "requote_redirect_url", + ] + ) + + _source_traceback: Optional[traceback.StackSummary] = None + _connector: Optional[BaseConnector] = None + + def __init__( + self, + base_url: Optional[StrOrURL] = None, + *, + connector: Optional[BaseConnector] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + json_serialize: JSONEncoder = json.dumps, + request_class: Type[ClientRequest] = ClientRequest, + response_class: Type[ClientResponse] = ClientResponse, + ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, + version: HttpVersion = http.HttpVersion11, + cookie_jar: Optional[AbstractCookieJar] = None, + connector_owner: bool = True, + raise_for_status: Union[ + bool, Callable[[ClientResponse], Awaitable[None]] + ] = False, + read_timeout: Union[float, _SENTINEL] = sentinel, + conn_timeout: Optional[float] = None, + timeout: Union[object, ClientTimeout] = sentinel, + auto_decompress: bool = True, + trust_env: bool = False, + requote_redirect_url: bool = True, + trace_configs: Optional[List[TraceConfig]] = None, + read_bufsize: int = 2**16, + max_line_size: int = 8190, + max_field_size: int = 8190, + fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", + ) -> None: + # We initialise _connector to None immediately, as it's referenced in __del__() + # and could cause issues if an exception occurs during initialisation. 
+ self._connector: Optional[BaseConnector] = None + + if loop is None: + if connector is not None: + loop = connector._loop + + loop = loop or asyncio.get_running_loop() + + if base_url is None or isinstance(base_url, URL): + self._base_url: Optional[URL] = base_url + self._base_url_origin = None if base_url is None else base_url.origin() + else: + self._base_url = URL(base_url) + self._base_url_origin = self._base_url.origin() + assert self._base_url.absolute, "Only absolute URLs are supported" + if self._base_url is not None and not self._base_url.path.endswith("/"): + raise ValueError("base_url must have a trailing '/'") + + if timeout is sentinel or timeout is None: + self._timeout = DEFAULT_TIMEOUT + if read_timeout is not sentinel: + warnings.warn( + "read_timeout is deprecated, use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + self._timeout = attr.evolve(self._timeout, total=read_timeout) + if conn_timeout is not None: + self._timeout = attr.evolve(self._timeout, connect=conn_timeout) + warnings.warn( + "conn_timeout is deprecated, use timeout argument instead", + DeprecationWarning, + stacklevel=2, + ) + else: + if not isinstance(timeout, ClientTimeout): + raise ValueError( + f"timeout parameter cannot be of {type(timeout)} type, " + "please use 'timeout=ClientTimeout(...)'", + ) + self._timeout = timeout + if read_timeout is not sentinel: + raise ValueError( + "read_timeout and timeout parameters " + "conflict, please set up " + "timeout.read" + ) + if conn_timeout is not None: + raise ValueError( + "conn_timeout and timeout parameters " + "conflict, please set up " + "timeout.connect" + ) + + if connector is None: + connector = TCPConnector(loop=loop) + + if connector._loop is not loop: + raise RuntimeError("Session and connector have to use the same event loop") + + self._loop = loop + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + if cookie_jar is None: + cookie_jar = CookieJar(loop=loop) + self._cookie_jar = cookie_jar + + if cookies: + self._cookie_jar.update_cookies(cookies) + + self._connector = connector + self._connector_owner = connector_owner + self._default_auth = auth + self._version = version + self._json_serialize = json_serialize + self._raise_for_status = raise_for_status + self._auto_decompress = auto_decompress + self._trust_env = trust_env + self._requote_redirect_url = requote_redirect_url + self._read_bufsize = read_bufsize + self._max_line_size = max_line_size + self._max_field_size = max_field_size + + # Store the default headers as a CIMultiDict + if headers: + real_headers: CIMultiDict[str] = CIMultiDict(headers) + else: + real_headers = CIMultiDict() + self._default_headers: CIMultiDict[str] = real_headers + if skip_auto_headers is not None: + self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers) + else: + self._skip_auto_headers = frozenset() + + self._request_class = request_class + self._response_class = response_class + self._ws_response_class = ws_response_class + + self._trace_configs = trace_configs or [] + for trace_config in self._trace_configs: + trace_config.freeze() + + self._resolve_charset = fallback_charset_resolver + + self._default_proxy = proxy + self._default_proxy_auth = proxy_auth + self._retry_connection: bool = True + + def __init_subclass__(cls: Type["ClientSession"]) -> None: + warnings.warn( + "Inheriting class {} from ClientSession " + "is discouraged".format(cls.__name__), + DeprecationWarning, + stacklevel=2, + ) + + if DEBUG: + + def __setattr__(self, name: 
str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom ClientSession.{} attribute " + "is discouraged".format(name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def __del__(self, _warnings: Any = warnings) -> None: + if not self.closed: + kwargs = {"source": self} + _warnings.warn( + f"Unclosed client session {self!r}", ResourceWarning, **kwargs + ) + context = {"client_session": self, "message": "Unclosed client session"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def request( + self, + method: str, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + else: + + def request( + self, method: str, url: StrOrURL, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP request.""" + return _RequestContextManager(self._request(method, url, **kwargs)) + + def _build_url(self, str_or_url: StrOrURL) -> URL: + url = URL(str_or_url) + if self._base_url is None: + return url + else: + assert not url.absolute + return self._base_url.join(url) + + async def _request( + self, + method: str, + str_or_url: StrOrURL, + *, + params: Query = None, + data: Any = None, + json: Any = None, + cookies: Optional[LooseCookies] = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Union[str, bool, None] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Union[ + None, bool, Callable[[ClientResponse], Awaitable[None]] + ] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, _SENTINEL] = sentinel, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + server_hostname: Optional[str] = None, + proxy_headers: Optional[LooseHeaders] = None, + trace_request_ctx: Optional[Mapping[str, Any]] = None, + read_bufsize: Optional[int] = None, + auto_decompress: Optional[bool] = None, + max_line_size: Optional[int] = None, + max_field_size: Optional[int] = None, + ) -> ClientResponse: + + # NOTE: timeout clamps existing connect and read timeouts. We cannot + # set the default to None because we need to detect if the user wants + # to use the existing timeouts by setting timeout to None. 
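+        # (For example, `timeout=ClientTimeout(total=10)` overrides the
+        # session-wide default for this call only, while leaving `timeout`
+        # at the `sentinel` default keeps `self._timeout` as configured.)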
+ + if self.closed: + raise RuntimeError("Session is closed") + + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + if data is not None and json is not None: + raise ValueError( + "data and json parameters cannot be used at the same time" + ) + elif json is not None: + data = payload.JsonPayload(json, dumps=self._json_serialize) + + if not isinstance(chunked, bool) and chunked is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + + redirects = 0 + history: List[ClientResponse] = [] + version = self._version + params = params or {} + + # Merge with default headers and transform to CIMultiDict + headers = self._prepare_headers(headers) + + try: + url = self._build_url(str_or_url) + except ValueError as e: + raise InvalidUrlClientError(str_or_url) from e + + assert self._connector is not None + if url.scheme not in self._connector.allowed_protocol_schema_set: + raise NonHttpUrlClientError(url) + + skip_headers: Optional[Iterable[istr]] + if skip_auto_headers is not None: + skip_headers = { + istr(i) for i in skip_auto_headers + } | self._skip_auto_headers + elif self._skip_auto_headers: + skip_headers = self._skip_auto_headers + else: + skip_headers = None + + if proxy is None: + proxy = self._default_proxy + if proxy_auth is None: + proxy_auth = self._default_proxy_auth + + if proxy is None: + proxy_headers = None + else: + proxy_headers = self._prepare_headers(proxy_headers) + try: + proxy = URL(proxy) + except ValueError as e: + raise InvalidURL(proxy) from e + + if timeout is sentinel: + real_timeout: ClientTimeout = self._timeout + else: + if not isinstance(timeout, ClientTimeout): + real_timeout = ClientTimeout(total=timeout) + else: + real_timeout = timeout + # timeout is cumulative for all request operations + # (request, redirects, responses, data consumption) + tm = TimeoutHandle( + self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold + ) + handle = tm.start() + + if read_bufsize is None: + read_bufsize = self._read_bufsize + + if auto_decompress is None: + auto_decompress = self._auto_decompress + + if max_line_size is None: + max_line_size = self._max_line_size + + if max_field_size is None: + max_field_size = self._max_field_size + + traces = [ + Trace( + self, + trace_config, + trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx), + ) + for trace_config in self._trace_configs + ] + + for trace in traces: + await trace.send_request_start(method, url.update_query(params), headers) + + timer = tm.timer() + try: + with timer: + # https://www.rfc-editor.org/rfc/rfc9112.html#name-retrying-requests + retry_persistent_connection = ( + self._retry_connection and method in IDEMPOTENT_METHODS + ) + while True: + url, auth_from_url = strip_auth_from_url(url) + if not url.raw_host: + # NOTE: Bail out early, otherwise this causes `InvalidURL` + # NOTE: through `self._request_class()` below. 
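+                        # (`raw_host` is empty both for relative URLs and for
+                        # URLs without a host part, e.g. "http://".)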
+ err_exc_cls = ( + InvalidUrlRedirectClientError + if redirects + else InvalidUrlClientError + ) + raise err_exc_cls(url) + # If `auth` was passed for an already authenticated URL, + # disallow only if this is the initial URL; this is to avoid issues + # with sketchy redirects that are not the caller's responsibility + if not history and (auth and auth_from_url): + raise ValueError( + "Cannot combine AUTH argument with " + "credentials encoded in URL" + ) + + # Override the auth with the one from the URL only if we + # have no auth, or if we got an auth from a redirect URL + if auth is None or (history and auth_from_url is not None): + auth = auth_from_url + + if ( + auth is None + and self._default_auth + and ( + not self._base_url or self._base_url_origin == url.origin() + ) + ): + auth = self._default_auth + # It would be confusing if we support explicit + # Authorization header with auth argument + if ( + headers is not None + and auth is not None + and hdrs.AUTHORIZATION in headers + ): + raise ValueError( + "Cannot combine AUTHORIZATION header " + "with AUTH argument or credentials " + "encoded in URL" + ) + + all_cookies = self._cookie_jar.filter_cookies(url) + + if cookies is not None: + tmp_cookie_jar = CookieJar() + tmp_cookie_jar.update_cookies(cookies) + req_cookies = tmp_cookie_jar.filter_cookies(url) + if req_cookies: + all_cookies.load(req_cookies) + + if proxy is not None: + proxy = URL(proxy) + elif self._trust_env: + with suppress(LookupError): + proxy, proxy_auth = get_env_proxy_for_url(url) + + req = self._request_class( + method, + url, + params=params, + headers=headers, + skip_auto_headers=skip_headers, + data=data, + cookies=all_cookies, + auth=auth, + version=version, + compress=compress, + chunked=chunked, + expect100=expect100, + loop=self._loop, + response_class=self._response_class, + proxy=proxy, + proxy_auth=proxy_auth, + timer=timer, + session=self, + ssl=ssl if ssl is not None else True, + server_hostname=server_hostname, + proxy_headers=proxy_headers, + traces=traces, + trust_env=self.trust_env, + ) + + # connection timeout + try: + conn = await self._connector.connect( + req, traces=traces, timeout=real_timeout + ) + except asyncio.TimeoutError as exc: + raise ConnectionTimeoutError( + f"Connection timeout to host {url}" + ) from exc + + assert conn.transport is not None + + assert conn.protocol is not None + conn.protocol.set_response_params( + timer=timer, + skip_payload=method in EMPTY_BODY_METHODS, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + read_timeout=real_timeout.sock_read, + read_bufsize=read_bufsize, + timeout_ceil_threshold=self._connector._timeout_ceil_threshold, + max_line_size=max_line_size, + max_field_size=max_field_size, + ) + + try: + try: + resp = await req.send(conn) + try: + await resp.start(conn) + except BaseException: + resp.close() + raise + except BaseException: + conn.close() + raise + except (ClientOSError, ServerDisconnectedError): + if retry_persistent_connection: + retry_persistent_connection = False + continue + raise + except ClientError: + raise + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise ClientOSError(*exc.args) from exc + + if cookies := resp._cookies: + self._cookie_jar.update_cookies(cookies, resp.url) + + # redirects + if resp.status in (301, 302, 303, 307, 308) and allow_redirects: + + for trace in traces: + await trace.send_request_redirect( + method, url.update_query(params), headers, resp + ) + + redirects += 1 + 
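+                        # Each hop is recorded so the final response (or a
+                        # TooManyRedirects error) can expose the full chain
+                        # via `history`.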
history.append(resp) + if max_redirects and redirects >= max_redirects: + resp.close() + raise TooManyRedirects( + history[0].request_info, tuple(history) + ) + + # For 301 and 302, mimic IE, now changed in RFC + # https://github.com/kennethreitz/requests/pull/269 + if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or ( + resp.status in (301, 302) and resp.method == hdrs.METH_POST + ): + method = hdrs.METH_GET + data = None + if headers.get(hdrs.CONTENT_LENGTH): + headers.pop(hdrs.CONTENT_LENGTH) + + r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get( + hdrs.URI + ) + if r_url is None: + # see github.com/aio-libs/aiohttp/issues/2022 + break + else: + # reading from correct redirection + # response is forbidden + resp.release() + + try: + parsed_redirect_url = URL( + r_url, encoded=not self._requote_redirect_url + ) + except ValueError as e: + raise InvalidUrlRedirectClientError( + r_url, + "Server attempted redirecting to a location that does not look like a URL", + ) from e + + scheme = parsed_redirect_url.scheme + if scheme not in HTTP_AND_EMPTY_SCHEMA_SET: + resp.close() + raise NonHttpUrlRedirectClientError(r_url) + elif not scheme: + parsed_redirect_url = url.join(parsed_redirect_url) + + try: + redirect_origin = parsed_redirect_url.origin() + except ValueError as origin_val_err: + raise InvalidUrlRedirectClientError( + parsed_redirect_url, + "Invalid redirect URL origin", + ) from origin_val_err + + if url.origin() != redirect_origin: + auth = None + headers.pop(hdrs.AUTHORIZATION, None) + + url = parsed_redirect_url + params = {} + resp.release() + continue + + break + + # check response status + if raise_for_status is None: + raise_for_status = self._raise_for_status + + if raise_for_status is None: + pass + elif callable(raise_for_status): + await raise_for_status(resp) + elif raise_for_status: + resp.raise_for_status() + + # register connection + if handle is not None: + if resp.connection is not None: + resp.connection.add_callback(handle.cancel) + else: + handle.cancel() + + resp._history = tuple(history) + + for trace in traces: + await trace.send_request_end( + method, url.update_query(params), headers, resp + ) + return resp + + except BaseException as e: + # cleanup timer + tm.close() + if handle: + handle.cancel() + handle = None + + for trace in traces: + await trace.send_request_exception( + method, url.update_query(params), headers, e + ) + raise + + def ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Query = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + server_hostname: Optional[str] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> "_WSRequestContextManager": + """Initiate websocket connection.""" + return _WSRequestContextManager( + self._ws_connect( + url, + method=method, + protocols=protocols, + timeout=timeout, + receive_timeout=receive_timeout, + autoclose=autoclose, + autoping=autoping, + 
heartbeat=heartbeat, + auth=auth, + origin=origin, + params=params, + headers=headers, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + verify_ssl=verify_ssl, + fingerprint=fingerprint, + ssl_context=ssl_context, + server_hostname=server_hostname, + proxy_headers=proxy_headers, + compress=compress, + max_msg_size=max_msg_size, + ) + ) + + async def _ws_connect( + self, + url: StrOrURL, + *, + method: str = hdrs.METH_GET, + protocols: Iterable[str] = (), + timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, + receive_timeout: Optional[float] = None, + autoclose: bool = True, + autoping: bool = True, + heartbeat: Optional[float] = None, + auth: Optional[BasicAuth] = None, + origin: Optional[str] = None, + params: Query = None, + headers: Optional[LooseHeaders] = None, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + verify_ssl: Optional[bool] = None, + fingerprint: Optional[bytes] = None, + ssl_context: Optional[SSLContext] = None, + server_hostname: Optional[str] = None, + proxy_headers: Optional[LooseHeaders] = None, + compress: int = 0, + max_msg_size: int = 4 * 1024 * 1024, + ) -> ClientWebSocketResponse: + if timeout is not sentinel: + if isinstance(timeout, ClientWSTimeout): + ws_timeout = timeout + else: + warnings.warn( + "parameter 'timeout' of type 'float' " + "is deprecated, please use " + "'timeout=ClientWSTimeout(ws_close=...)'", + DeprecationWarning, + stacklevel=2, + ) + ws_timeout = ClientWSTimeout(ws_close=timeout) + else: + ws_timeout = DEFAULT_WS_CLIENT_TIMEOUT + if receive_timeout is not None: + warnings.warn( + "float parameter 'receive_timeout' " + "is deprecated, please use parameter " + "'timeout=ClientWSTimeout(ws_receive=...)'", + DeprecationWarning, + stacklevel=2, + ) + ws_timeout = attr.evolve(ws_timeout, ws_receive=receive_timeout) + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + else: + real_headers = CIMultiDict(headers) + + default_headers = { + hdrs.UPGRADE: "websocket", + hdrs.CONNECTION: "Upgrade", + hdrs.SEC_WEBSOCKET_VERSION: "13", + } + + for key, value in default_headers.items(): + real_headers.setdefault(key, value) + + sec_key = base64.b64encode(os.urandom(16)) + real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() + + if protocols: + real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols) + if origin is not None: + real_headers[hdrs.ORIGIN] = origin + if compress: + extstr = ws_ext_gen(compress=compress) + real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr + + # For the sake of backward compatibility, if user passes in None, convert it to True + if ssl is None: + warnings.warn( + "ssl=None is deprecated, please use ssl=True", + DeprecationWarning, + stacklevel=2, + ) + ssl = True + ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + + # send request + resp = await self.request( + method, + url, + params=params, + headers=real_headers, + read_until_eof=False, + auth=auth, + proxy=proxy, + proxy_auth=proxy_auth, + ssl=ssl, + server_hostname=server_hostname, + proxy_headers=proxy_headers, + ) + + try: + # check handshake + if resp.status != 101: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid response status", + status=resp.status, + headers=resp.headers, + ) + + if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid upgrade header", + status=resp.status, + 
headers=resp.headers, + ) + + if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade": + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid connection header", + status=resp.status, + headers=resp.headers, + ) + + # key calculation + r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "") + match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode() + if r_key != match: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message="Invalid challenge response", + status=resp.status, + headers=resp.headers, + ) + + # websocket protocol + protocol = None + if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: + resp_protocols = [ + proto.strip() + for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") + ] + + for proto in resp_protocols: + if proto in protocols: + protocol = proto + break + + # websocket compress + notakeover = False + if compress: + compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) + if compress_hdrs: + try: + compress, notakeover = ws_ext_parse(compress_hdrs) + except WSHandshakeError as exc: + raise WSServerHandshakeError( + resp.request_info, + resp.history, + message=exc.args[0], + status=resp.status, + headers=resp.headers, + ) from exc + else: + compress = 0 + notakeover = False + + conn = resp.connection + assert conn is not None + conn_proto = conn.protocol + assert conn_proto is not None + + # For WS connection the read_timeout must be either receive_timeout or greater + # None == no timeout, i.e. infinite timeout, so None is the max timeout possible + if ws_timeout.ws_receive is None: + # Reset regardless + conn_proto.read_timeout = None + elif conn_proto.read_timeout is not None: + conn_proto.read_timeout = max( + ws_timeout.ws_receive, conn_proto.read_timeout + ) + + transport = conn.transport + assert transport is not None + reader = WebSocketDataQueue(conn_proto, 2**16, loop=self._loop) + conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader) + writer = WebSocketWriter( + conn_proto, + transport, + use_mask=True, + compress=compress, + notakeover=notakeover, + ) + except BaseException: + resp.close() + raise + else: + return self._ws_response_class( + reader, + writer, + protocol, + resp, + ws_timeout, + autoclose, + autoping, + self._loop, + heartbeat=heartbeat, + compress=compress, + client_notakeover=notakeover, + ) + + def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": + """Add default headers and transform it to CIMultiDict""" + # Convert headers to MultiDict + result = CIMultiDict(self._default_headers) + if headers: + if not isinstance(headers, (MultiDictProxy, MultiDict)): + headers = CIMultiDict(headers) + added_names: Set[str] = set() + for key, value in headers.items(): + if key in added_names: + result.add(key, value) + else: + result[key] = value + added_names.add(key) + return result + + if sys.version_info >= (3, 11) and TYPE_CHECKING: + + def get( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def options( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def head( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def post( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def put( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... 
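# --- Editor's example (not part of the vendored diff): the Sec-WebSocket-Accept
# check performed in _ws_connect above, restated as a standalone sketch. WS_KEY is
# the fixed RFC 6455 GUID that aiohttp imports from .http; it is inlined here.
import base64
import hashlib
import os

WS_KEY = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"

client_key = base64.b64encode(os.urandom(16))  # sent as Sec-WebSocket-Key
expected_accept = base64.b64encode(hashlib.sha1(client_key + WS_KEY).digest()).decode()
# A conforming server must echo expected_accept back in Sec-WebSocket-Accept;
# any other value makes the handshake above raise WSServerHandshakeError.
# --- end editor's example ---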
+ + def patch( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + def delete( + self, + url: StrOrURL, + **kwargs: Unpack[_RequestOptions], + ) -> "_RequestContextManager": ... + + else: + + def get( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP GET request.""" + return _RequestContextManager( + self._request( + hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def options( + self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP OPTIONS request.""" + return _RequestContextManager( + self._request( + hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def head( + self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP HEAD request.""" + return _RequestContextManager( + self._request( + hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs + ) + ) + + def post( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP POST request.""" + return _RequestContextManager( + self._request(hdrs.METH_POST, url, data=data, **kwargs) + ) + + def put( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PUT request.""" + return _RequestContextManager( + self._request(hdrs.METH_PUT, url, data=data, **kwargs) + ) + + def patch( + self, url: StrOrURL, *, data: Any = None, **kwargs: Any + ) -> "_RequestContextManager": + """Perform HTTP PATCH request.""" + return _RequestContextManager( + self._request(hdrs.METH_PATCH, url, data=data, **kwargs) + ) + + def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager": + """Perform HTTP DELETE request.""" + return _RequestContextManager( + self._request(hdrs.METH_DELETE, url, **kwargs) + ) + + async def close(self) -> None: + """Close underlying connector. + + Release all acquired resources. + """ + if not self.closed: + if self._connector is not None and self._connector_owner: + await self._connector.close() + self._connector = None + + @property + def closed(self) -> bool: + """Is client session closed. + + A readonly property. 
+ """ + return self._connector is None or self._connector.closed + + @property + def connector(self) -> Optional[BaseConnector]: + """Connector instance used for the session.""" + return self._connector + + @property + def cookie_jar(self) -> AbstractCookieJar: + """The session cookies.""" + return self._cookie_jar + + @property + def version(self) -> Tuple[int, int]: + """The session HTTP protocol version.""" + return self._version + + @property + def requote_redirect_url(self) -> bool: + """Do URL requoting on redirection handling.""" + return self._requote_redirect_url + + @requote_redirect_url.setter + def requote_redirect_url(self, val: bool) -> None: + """Do URL requoting on redirection handling.""" + warnings.warn( + "session.requote_redirect_url modification is deprecated #2778", + DeprecationWarning, + stacklevel=2, + ) + self._requote_redirect_url = val + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Session's loop.""" + warnings.warn( + "client.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def timeout(self) -> ClientTimeout: + """Timeout for the session.""" + return self._timeout + + @property + def headers(self) -> "CIMultiDict[str]": + """The default headers of the client session.""" + return self._default_headers + + @property + def skip_auto_headers(self) -> FrozenSet[istr]: + """Headers for which autogeneration should be skipped""" + return self._skip_auto_headers + + @property + def auth(self) -> Optional[BasicAuth]: + """An object that represents HTTP Basic Authorization""" + return self._default_auth + + @property + def json_serialize(self) -> JSONEncoder: + """Json serializer callable""" + return self._json_serialize + + @property + def connector_owner(self) -> bool: + """Should connector be closed on session closing""" + return self._connector_owner + + @property + def raise_for_status( + self, + ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: + """Should `ClientResponse.raise_for_status()` be called for each response.""" + return self._raise_for_status + + @property + def auto_decompress(self) -> bool: + """Should the body response be automatically decompressed.""" + return self._auto_decompress + + @property + def trust_env(self) -> bool: + """ + Should proxies information from environment or netrc be trusted. + + Information is from HTTP_PROXY / HTTPS_PROXY environment variables + or ~/.netrc file if present. + """ + return self._trust_env + + @property + def trace_configs(self) -> List[TraceConfig]: + """A list of TraceConfig instances used for client tracing""" + return self._trace_configs + + def detach(self) -> None: + """Detach connector from session without closing the former. + + Session is switched to closed state anyway. 
+ """ + self._connector = None + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "ClientSession": + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]): + + __slots__ = ("_coro", "_resp") + + def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: + self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro + + def send(self, arg: None) -> "asyncio.Future[Any]": + return self._coro.send(arg) + + def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]": + return self._coro.throw(*args, **kwargs) + + def close(self) -> None: + return self._coro.close() + + def __await__(self) -> Generator[Any, None, _RetType]: + ret = self._coro.__await__() + return ret + + def __iter__(self) -> Generator[Any, None, _RetType]: + return self.__await__() + + async def __aenter__(self) -> _RetType: + self._resp: _RetType = await self._coro + return await self._resp.__aenter__() + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self._resp.__aexit__(exc_type, exc, tb) + + +_RequestContextManager = _BaseRequestContextManager[ClientResponse] +_WSRequestContextManager = _BaseRequestContextManager[ClientWebSocketResponse] + + +class _SessionRequestContextManager: + + __slots__ = ("_coro", "_resp", "_session") + + def __init__( + self, + coro: Coroutine["asyncio.Future[Any]", None, ClientResponse], + session: ClientSession, + ) -> None: + self._coro = coro + self._resp: Optional[ClientResponse] = None + self._session = session + + async def __aenter__(self) -> ClientResponse: + try: + self._resp = await self._coro + except BaseException: + await self._session.close() + raise + else: + return self._resp + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + assert self._resp is not None + self._resp.close() + await self._session.close() + + +def request( + method: str, + url: StrOrURL, + *, + params: Query = None, + data: Any = None, + json: Any = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + auth: Optional[BasicAuth] = None, + allow_redirects: bool = True, + max_redirects: int = 10, + compress: Optional[str] = None, + chunked: Optional[bool] = None, + expect100: bool = False, + raise_for_status: Optional[bool] = None, + read_until_eof: bool = True, + proxy: Optional[StrOrURL] = None, + proxy_auth: Optional[BasicAuth] = None, + timeout: Union[ClientTimeout, object] = sentinel, + cookies: Optional[LooseCookies] = None, + version: HttpVersion = http.HttpVersion11, + connector: Optional[BaseConnector] = None, + read_bufsize: Optional[int] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + max_line_size: int = 8190, + max_field_size: int = 8190, +) -> _SessionRequestContextManager: + """Constructs and sends a request. + + Returns response object. 
+ method - HTTP method + url - request url + params - (optional) Dictionary or bytes to be sent in the query + string of the new request + data - (optional) Dictionary, bytes, or file-like object to + send in the body of the request + json - (optional) Any json compatible python object + headers - (optional) Dictionary of HTTP Headers to send with + the request + cookies - (optional) Dict object to send with the request + auth - (optional) aiohttp.BasicAuth named tuple representing + HTTP Basic Auth + allow_redirects - (optional) If set to False, do not follow + redirects + version - Request HTTP version. + compress - Set to True if request has to be compressed + with deflate encoding. + chunked - Set to chunk size for chunked transfer encoding. + expect100 - Expect 100-continue response from server. + connector - BaseConnector sub-class instance to support + connection pooling. + read_until_eof - Read response until eof if response + does not have Content-Length header. + loop - Optional event loop. + timeout - Optional ClientTimeout settings structure, 5min + total timeout by default. + Usage:: + >>> import aiohttp + >>> resp = await aiohttp.request('GET', 'http://python.org/') + >>> resp + <ClientResponse(python.org/) [200]> + >>> data = await resp.read() + """ + connector_owner = False + if connector is None: + connector_owner = True + connector = TCPConnector(loop=loop, force_close=True) + + session = ClientSession( + loop=loop, + cookies=cookies, + version=version, + timeout=timeout, + connector=connector, + connector_owner=connector_owner, + ) + + return _SessionRequestContextManager( + session._request( + method, + url, + params=params, + data=data, + json=json, + headers=headers, + skip_auto_headers=skip_auto_headers, + auth=auth, + allow_redirects=allow_redirects, + max_redirects=max_redirects, + compress=compress, + chunked=chunked, + expect100=expect100, + raise_for_status=raise_for_status, + read_until_eof=read_until_eof, + proxy=proxy, + proxy_auth=proxy_auth, + read_bufsize=read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + ), + session, + ) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/client_exceptions.py b/deepseek/lib/python3.10/site-packages/aiohttp/client_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..667da8d5084481de204fc2ca8d4d6ce62095a7ae --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/client_exceptions.py @@ -0,0 +1,417 @@ +"""HTTP related errors.""" + +import asyncio +import warnings +from typing import TYPE_CHECKING, Optional, Tuple, Union + +from multidict import MultiMapping + +from .typedefs import StrOrURL + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = SSLContext = None # type: ignore[assignment] + + +if TYPE_CHECKING: + from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo + from .http_parser import RawResponseMessage +else: + RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None + +__all__ = ( + "ClientError", + "ClientConnectionError", + "ClientConnectionResetError", + "ClientOSError", + "ClientConnectorError", + "ClientProxyConnectionError", + "ClientSSLError", + "ClientConnectorDNSError", + "ClientConnectorSSLError", + "ClientConnectorCertificateError", + "ConnectionTimeoutError", + "SocketTimeoutError", + "ServerConnectionError", + "ServerTimeoutError", + "ServerDisconnectedError", + "ServerFingerprintMismatch", + "ClientResponseError", + "ClientHttpProxyError", +
"WSServerHandshakeError", + "ContentTypeError", + "ClientPayloadError", + "InvalidURL", + "InvalidUrlClientError", + "RedirectClientError", + "NonHttpUrlClientError", + "InvalidUrlRedirectClientError", + "NonHttpUrlRedirectClientError", + "WSMessageTypeError", +) + + +class ClientError(Exception): + """Base class for client connection errors.""" + + +class ClientResponseError(ClientError): + """Base class for exceptions that occur after getting a response. + + request_info: An instance of RequestInfo. + history: A sequence of responses, if redirects occurred. + status: HTTP status code. + message: Error message. + headers: Response headers. + """ + + def __init__( + self, + request_info: RequestInfo, + history: Tuple[ClientResponse, ...], + *, + code: Optional[int] = None, + status: Optional[int] = None, + message: str = "", + headers: Optional[MultiMapping[str]] = None, + ) -> None: + self.request_info = request_info + if code is not None: + if status is not None: + raise ValueError( + "Both code and status arguments are provided; " + "code is deprecated, use status instead" + ) + warnings.warn( + "code argument is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + if status is not None: + self.status = status + elif code is not None: + self.status = code + else: + self.status = 0 + self.message = message + self.headers = headers + self.history = history + self.args = (request_info, history) + + def __str__(self) -> str: + return "{}, message={!r}, url={!r}".format( + self.status, + self.message, + str(self.request_info.real_url), + ) + + def __repr__(self) -> str: + args = f"{self.request_info!r}, {self.history!r}" + if self.status != 0: + args += f", status={self.status!r}" + if self.message != "": + args += f", message={self.message!r}" + if self.headers is not None: + args += f", headers={self.headers!r}" + return f"{type(self).__name__}({args})" + + @property + def code(self) -> int: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + return self.status + + @code.setter + def code(self, value: int) -> None: + warnings.warn( + "code property is deprecated, use status instead", + DeprecationWarning, + stacklevel=2, + ) + self.status = value + + +class ContentTypeError(ClientResponseError): + """ContentType found is not valid.""" + + +class WSServerHandshakeError(ClientResponseError): + """websocket server handshake error.""" + + +class ClientHttpProxyError(ClientResponseError): + """HTTP proxy error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + proxy responds with status other than ``200 OK`` + on ``CONNECT`` request. + """ + + +class TooManyRedirects(ClientResponseError): + """Client was redirected too many times.""" + + +class ClientConnectionError(ClientError): + """Base class for client socket errors.""" + + +class ClientConnectionResetError(ClientConnectionError, ConnectionResetError): + """ConnectionResetError""" + + +class ClientOSError(ClientConnectionError, OSError): + """OSError error.""" + + +class ClientConnectorError(ClientOSError): + """Client connector error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + a connection can not be established. 
+ """ + + def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None: + self._conn_key = connection_key + self._os_error = os_error + super().__init__(os_error.errno, os_error.strerror) + self.args = (connection_key, os_error) + + @property + def os_error(self) -> OSError: + return self._os_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]: + return self._conn_key.ssl + + def __str__(self) -> str: + return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format( + self, "default" if self.ssl is True else self.ssl, self.strerror + ) + + # OSError.__reduce__ does too much black magick + __reduce__ = BaseException.__reduce__ + + +class ClientConnectorDNSError(ClientConnectorError): + """DNS resolution failed during client connection. + + Raised in :class:`aiohttp.connector.TCPConnector` if + DNS resolution fails. + """ + + +class ClientProxyConnectionError(ClientConnectorError): + """Proxy connection error. + + Raised in :class:`aiohttp.connector.TCPConnector` if + connection to proxy can not be established. + """ + + +class UnixClientConnectorError(ClientConnectorError): + """Unix connector error. + + Raised in :py:class:`aiohttp.connector.UnixConnector` + if connection to unix socket can not be established. + """ + + def __init__( + self, path: str, connection_key: ConnectionKey, os_error: OSError + ) -> None: + self._path = path + super().__init__(connection_key, os_error) + + @property + def path(self) -> str: + return self._path + + def __str__(self) -> str: + return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format( + self, "default" if self.ssl is True else self.ssl, self.strerror + ) + + +class ServerConnectionError(ClientConnectionError): + """Server connection errors.""" + + +class ServerDisconnectedError(ServerConnectionError): + """Server disconnected.""" + + def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: + if message is None: + message = "Server disconnected" + + self.args = (message,) + self.message = message + + +class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError): + """Server timeout error.""" + + +class ConnectionTimeoutError(ServerTimeoutError): + """Connection timeout error.""" + + +class SocketTimeoutError(ServerTimeoutError): + """Socket timeout error.""" + + +class ServerFingerprintMismatch(ServerConnectionError): + """SSL certificate does not match expected fingerprint.""" + + def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: + self.expected = expected + self.got = got + self.host = host + self.port = port + self.args = (expected, got, host, port) + + def __repr__(self) -> str: + return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( + self.__class__.__name__, self.expected, self.got, self.host, self.port + ) + + +class ClientPayloadError(ClientError): + """Response payload error.""" + + +class InvalidURL(ClientError, ValueError): + """Invalid URL. + + URL used for fetching is malformed, e.g. it doesn't contains host + part. 
+ """ + + # Derive from ValueError for backward compatibility + + def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: + # The type of url is not yarl.URL because the exception can be raised + # on URL(url) call + self._url = url + self._description = description + + if description: + super().__init__(url, description) + else: + super().__init__(url) + + @property + def url(self) -> StrOrURL: + return self._url + + @property + def description(self) -> "str | None": + return self._description + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} {self}>" + + def __str__(self) -> str: + if self._description: + return f"{self._url} - {self._description}" + return str(self._url) + + +class InvalidUrlClientError(InvalidURL): + """Invalid URL client error.""" + + +class RedirectClientError(ClientError): + """Client redirect error.""" + + +class NonHttpUrlClientError(ClientError): + """Non http URL client error.""" + + +class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError): + """Invalid URL redirect client error.""" + + +class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError): + """Non http URL redirect client error.""" + + +class ClientSSLError(ClientConnectorError): + """Base error for ssl.*Errors.""" + + +if ssl is not None: + cert_errors = (ssl.CertificateError,) + cert_errors_bases = ( + ClientSSLError, + ssl.CertificateError, + ) + + ssl_errors = (ssl.SSLError,) + ssl_error_bases = (ClientSSLError, ssl.SSLError) +else: # pragma: no cover + cert_errors = tuple() + cert_errors_bases = ( + ClientSSLError, + ValueError, + ) + + ssl_errors = tuple() + ssl_error_bases = (ClientSSLError,) + + +class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc] + """Response ssl error.""" + + +class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc] + """Response certificate error.""" + + def __init__( + self, connection_key: ConnectionKey, certificate_error: Exception + ) -> None: + self._conn_key = connection_key + self._certificate_error = certificate_error + self.args = (connection_key, certificate_error) + + @property + def certificate_error(self) -> Exception: + return self._certificate_error + + @property + def host(self) -> str: + return self._conn_key.host + + @property + def port(self) -> Optional[int]: + return self._conn_key.port + + @property + def ssl(self) -> bool: + return self._conn_key.is_ssl + + def __str__(self) -> str: + return ( + "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " + "[{0.certificate_error.__class__.__name__}: " + "{0.certificate_error.args}]".format(self) + ) + + +class WSMessageTypeError(TypeError): + """WebSocket message type is not valid.""" diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/client_proto.py b/deepseek/lib/python3.10/site-packages/aiohttp/client_proto.py new file mode 100644 index 0000000000000000000000000000000000000000..79f033e3e12fd64dc309e613ab726e26840ceeab --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/client_proto.py @@ -0,0 +1,307 @@ +import asyncio +from contextlib import suppress +from typing import Any, Optional, Tuple + +from .base_protocol import BaseProtocol +from .client_exceptions import ( + ClientOSError, + ClientPayloadError, + ServerDisconnectedError, + SocketTimeoutError, +) +from .helpers import ( + _EXC_SENTINEL, + EMPTY_BODY_STATUS_CODES, + BaseTimerContext, + set_exception, +) +from .http import HttpResponseParser, RawResponseMessage +from .http_exceptions import 
HttpProcessingError +from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader + + +class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): + """Helper class to adapt between Protocol and StreamReader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + BaseProtocol.__init__(self, loop=loop) + DataQueue.__init__(self, loop) + + self._should_close = False + + self._payload: Optional[StreamReader] = None + self._skip_payload = False + self._payload_parser = None + + self._timer = None + + self._tail = b"" + self._upgraded = False + self._parser: Optional[HttpResponseParser] = None + + self._read_timeout: Optional[float] = None + self._read_timeout_handle: Optional[asyncio.TimerHandle] = None + + self._timeout_ceil_threshold: Optional[float] = 5 + + @property + def upgraded(self) -> bool: + return self._upgraded + + @property + def should_close(self) -> bool: + return bool( + self._should_close + or (self._payload is not None and not self._payload.is_eof()) + or self._upgraded + or self._exception is not None + or self._payload_parser is not None + or self._buffer + or self._tail + ) + + def force_close(self) -> None: + self._should_close = True + + def close(self) -> None: + transport = self.transport + if transport is not None: + transport.close() + self.transport = None + self._payload = None + self._drop_timeout() + + def is_connected(self) -> bool: + return self.transport is not None and not self.transport.is_closing() + + def connection_lost(self, exc: Optional[BaseException]) -> None: + self._drop_timeout() + + original_connection_error = exc + reraised_exc = original_connection_error + + connection_closed_cleanly = original_connection_error is None + + if self._payload_parser is not None: + with suppress(Exception): # FIXME: log this somehow? + self._payload_parser.feed_eof() + + uncompleted = None + if self._parser is not None: + try: + uncompleted = self._parser.feed_eof() + except Exception as underlying_exc: + if self._payload is not None: + client_payload_exc_msg = ( + f"Response payload is not completed: {underlying_exc !r}" + ) + if not connection_closed_cleanly: + client_payload_exc_msg = ( + f"{client_payload_exc_msg !s}. 
" + f"{original_connection_error !r}" + ) + set_exception( + self._payload, + ClientPayloadError(client_payload_exc_msg), + underlying_exc, + ) + + if not self.is_eof(): + if isinstance(original_connection_error, OSError): + reraised_exc = ClientOSError(*original_connection_error.args) + if connection_closed_cleanly: + reraised_exc = ServerDisconnectedError(uncompleted) + # assigns self._should_close to True as side effect, + # we do it anyway below + underlying_non_eof_exc = ( + _EXC_SENTINEL + if connection_closed_cleanly + else original_connection_error + ) + assert underlying_non_eof_exc is not None + assert reraised_exc is not None + self.set_exception(reraised_exc, underlying_non_eof_exc) + + self._should_close = True + self._parser = None + self._payload = None + self._payload_parser = None + self._reading_paused = False + + super().connection_lost(reraised_exc) + + def eof_received(self) -> None: + # should call parser.feed_eof() most likely + self._drop_timeout() + + def pause_reading(self) -> None: + super().pause_reading() + self._drop_timeout() + + def resume_reading(self) -> None: + super().resume_reading() + self._reschedule_timeout() + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + self._should_close = True + self._drop_timeout() + super().set_exception(exc, exc_cause) + + def set_parser(self, parser: Any, payload: Any) -> None: + # TODO: actual types are: + # parser: WebSocketReader + # payload: WebSocketDataQueue + # but they are not generi enough + # Need an ABC for both types + self._payload = payload + self._payload_parser = parser + + self._drop_timeout() + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def set_response_params( + self, + *, + timer: Optional[BaseTimerContext] = None, + skip_payload: bool = False, + read_until_eof: bool = False, + auto_decompress: bool = True, + read_timeout: Optional[float] = None, + read_bufsize: int = 2**16, + timeout_ceil_threshold: float = 5, + max_line_size: int = 8190, + max_field_size: int = 8190, + ) -> None: + self._skip_payload = skip_payload + + self._read_timeout = read_timeout + + self._timeout_ceil_threshold = timeout_ceil_threshold + + self._parser = HttpResponseParser( + self, + self._loop, + read_bufsize, + timer=timer, + payload_exception=ClientPayloadError, + response_with_body=not skip_payload, + read_until_eof=read_until_eof, + auto_decompress=auto_decompress, + max_line_size=max_line_size, + max_field_size=max_field_size, + ) + + if self._tail: + data, self._tail = self._tail, b"" + self.data_received(data) + + def _drop_timeout(self) -> None: + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + self._read_timeout_handle = None + + def _reschedule_timeout(self) -> None: + timeout = self._read_timeout + if self._read_timeout_handle is not None: + self._read_timeout_handle.cancel() + + if timeout: + self._read_timeout_handle = self._loop.call_later( + timeout, self._on_read_timeout + ) + else: + self._read_timeout_handle = None + + def start_timeout(self) -> None: + self._reschedule_timeout() + + @property + def read_timeout(self) -> Optional[float]: + return self._read_timeout + + @read_timeout.setter + def read_timeout(self, read_timeout: Optional[float]) -> None: + self._read_timeout = read_timeout + + def _on_read_timeout(self) -> None: + exc = SocketTimeoutError("Timeout on reading data from socket") + self.set_exception(exc) + if self._payload is not None: + 
set_exception(self._payload, exc) + + def data_received(self, data: bytes) -> None: + self._reschedule_timeout() + + if not data: + return + + # custom payload parser - currently always WebSocketReader + if self._payload_parser is not None: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self._payload = None + self._payload_parser = None + + if tail: + self.data_received(tail) + return + + if self._upgraded or self._parser is None: + # i.e. websocket connection, websocket parser is not set yet + self._tail += data + return + + # parse http messages + try: + messages, upgraded, tail = self._parser.feed_data(data) + except BaseException as underlying_exc: + if self.transport is not None: + # connection.release() could be called BEFORE + # data_received(), the transport is already + # closed in this case + self.transport.close() + # should_close is True after the call + if isinstance(underlying_exc, HttpProcessingError): + exc = HttpProcessingError( + code=underlying_exc.code, + message=underlying_exc.message, + headers=underlying_exc.headers, + ) + else: + exc = HttpProcessingError() + self.set_exception(exc, underlying_exc) + return + + self._upgraded = upgraded + + payload: Optional[StreamReader] = None + for message, payload in messages: + if message.should_close: + self._should_close = True + + self._payload = payload + + if self._skip_payload or message.code in EMPTY_BODY_STATUS_CODES: + self.feed_data((message, EMPTY_PAYLOAD), 0) + else: + self.feed_data((message, payload), 0) + + if payload is not None: + # new message(s) was processed + # register timeout handler unsubscribing + # either on end-of-stream or immediately for + # EMPTY_PAYLOAD + if payload is not EMPTY_PAYLOAD: + payload.on_eof(self._drop_timeout) + else: + self._drop_timeout() + + if upgraded and tail: + self.data_received(tail) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/compression_utils.py b/deepseek/lib/python3.10/site-packages/aiohttp/compression_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ebe8857f487466049c8c6e9e825da7aea6c9d7b9 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/compression_utils.py @@ -0,0 +1,173 @@ +import asyncio +import zlib +from concurrent.futures import Executor +from typing import Optional, cast + +try: + try: + import brotlicffi as brotli + except ImportError: + import brotli + + HAS_BROTLI = True +except ImportError: # pragma: no cover + HAS_BROTLI = False + +MAX_SYNC_CHUNK_SIZE = 1024 + + +def encoding_to_mode( + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, +) -> int: + if encoding == "gzip": + return 16 + zlib.MAX_WBITS + + return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS + + +class ZlibBaseHandler: + def __init__( + self, + mode: int, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + self._mode = mode + self._executor = executor + self._max_sync_chunk_size = max_sync_chunk_size + + +class ZLibCompressor(ZlibBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, + level: Optional[int] = None, + wbits: Optional[int] = None, + strategy: int = zlib.Z_DEFAULT_STRATEGY, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + super().__init__( + mode=( + encoding_to_mode(encoding, suppress_deflate_header) + if wbits is None + else wbits + ), + executor=executor, + max_sync_chunk_size=max_sync_chunk_size, 
+ ) + if level is None: + self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy) + else: + self._compressor = zlib.compressobj( + wbits=self._mode, strategy=strategy, level=level + ) + self._compress_lock = asyncio.Lock() + + def compress_sync(self, data: bytes) -> bytes: + return self._compressor.compress(data) + + async def compress(self, data: bytes) -> bytes: + """Compress the data and return the compressed bytes. + + Note that flush() must be called after the last call to compress(). + + If the data size is larger than the max_sync_chunk_size, the compression + will be done in the executor. Otherwise, the compression will be done + in the event loop. + """ + async with self._compress_lock: + # To ensure the stream is consistent in the event + # there are multiple writers, we need to lock + # the compressor so that only one writer can + # compress at a time. + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._compressor.compress, data + ) + return self.compress_sync(data) + + def flush(self, mode: int = zlib.Z_FINISH) -> bytes: + return self._compressor.flush(mode) + + +class ZLibDecompressor(ZlibBaseHandler): + def __init__( + self, + encoding: Optional[str] = None, + suppress_deflate_header: bool = False, + executor: Optional[Executor] = None, + max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + ): + super().__init__( + mode=encoding_to_mode(encoding, suppress_deflate_header), + executor=executor, + max_sync_chunk_size=max_sync_chunk_size, + ) + self._decompressor = zlib.decompressobj(wbits=self._mode) + + def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes: + return self._decompressor.decompress(data, max_length) + + async def decompress(self, data: bytes, max_length: int = 0) -> bytes: + """Decompress the data and return the decompressed bytes. + + If the data size is larger than the max_sync_chunk_size, the decompression + will be done in the executor. Otherwise, the decompression will be done + in the event loop. + """ + if ( + self._max_sync_chunk_size is not None + and len(data) > self._max_sync_chunk_size + ): + return await asyncio.get_running_loop().run_in_executor( + self._executor, self._decompressor.decompress, data, max_length + ) + return self.decompress_sync(data, max_length) + + def flush(self, length: int = 0) -> bytes: + return ( + self._decompressor.flush(length) + if length > 0 + else self._decompressor.flush() + ) + + @property + def eof(self) -> bool: + return self._decompressor.eof + + @property + def unconsumed_tail(self) -> bytes: + return self._decompressor.unconsumed_tail + + @property + def unused_data(self) -> bytes: + return self._decompressor.unused_data + + +class BrotliDecompressor: + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self) -> None: + if not HAS_BROTLI: + raise RuntimeError( + "The brotli decompression is not available. 
" + "Please install `Brotli` module" + ) + self._obj = brotli.Decompressor() + + def decompress_sync(self, data: bytes) -> bytes: + if hasattr(self._obj, "decompress"): + return cast(bytes, self._obj.decompress(data)) + return cast(bytes, self._obj.process(data)) + + def flush(self) -> bytes: + if hasattr(self._obj, "flush"): + return cast(bytes, self._obj.flush()) + return b"" diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/connector.py b/deepseek/lib/python3.10/site-packages/aiohttp/connector.py new file mode 100644 index 0000000000000000000000000000000000000000..93bc2513b20889e9fbcbf298f1f176844531fcc9 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/connector.py @@ -0,0 +1,1646 @@ +import asyncio +import functools +import random +import socket +import sys +import traceback +import warnings +from collections import OrderedDict, defaultdict, deque +from contextlib import suppress +from http import HTTPStatus +from itertools import chain, cycle, islice +from time import monotonic +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + DefaultDict, + Deque, + Dict, + Iterator, + List, + Literal, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, + cast, +) + +import aiohappyeyeballs + +from . import hdrs, helpers +from .abc import AbstractResolver, ResolveResult +from .client_exceptions import ( + ClientConnectionError, + ClientConnectorCertificateError, + ClientConnectorDNSError, + ClientConnectorError, + ClientConnectorSSLError, + ClientHttpProxyError, + ClientProxyConnectionError, + ServerFingerprintMismatch, + UnixClientConnectorError, + cert_errors, + ssl_errors, +) +from .client_proto import ResponseHandler +from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params +from .helpers import ( + ceil_timeout, + is_ip_address, + noop, + sentinel, + set_exception, + set_result, +) +from .resolver import DefaultResolver + +try: + import ssl + + SSLContext = ssl.SSLContext +except ImportError: # pragma: no cover + ssl = None # type: ignore[assignment] + SSLContext = object # type: ignore[misc,assignment] + + +EMPTY_SCHEMA_SET = frozenset({""}) +HTTP_SCHEMA_SET = frozenset({"http", "https"}) +WS_SCHEMA_SET = frozenset({"ws", "wss"}) + +HTTP_AND_EMPTY_SCHEMA_SET = HTTP_SCHEMA_SET | EMPTY_SCHEMA_SET +HIGH_LEVEL_SCHEMA_SET = HTTP_AND_EMPTY_SCHEMA_SET | WS_SCHEMA_SET + +NEEDS_CLEANUP_CLOSED = (3, 13, 0) <= sys.version_info < ( + 3, + 13, + 1, +) or sys.version_info < (3, 12, 7) +# Cleanup closed is no longer needed after https://github.com/python/cpython/pull/118960 +# which first appeared in Python 3.12.7 and 3.13.1 + + +__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector") + + +if TYPE_CHECKING: + from .client import ClientTimeout + from .client_reqrep import ConnectionKey + from .tracing import Trace + + +class _DeprecationWaiter: + __slots__ = ("_awaitable", "_awaited") + + def __init__(self, awaitable: Awaitable[Any]) -> None: + self._awaitable = awaitable + self._awaited = False + + def __await__(self) -> Any: + self._awaited = True + return self._awaitable.__await__() + + def __del__(self) -> None: + if not self._awaited: + warnings.warn( + "Connector.close() is a coroutine, " + "please use await connector.close()", + DeprecationWarning, + ) + + +class Connection: + + _source_traceback = None + + def __init__( + self, + connector: "BaseConnector", + key: "ConnectionKey", + protocol: ResponseHandler, + loop: asyncio.AbstractEventLoop, + ) -> None: + self._key = key 
+ self._connector = connector + self._loop = loop + self._protocol: Optional[ResponseHandler] = protocol + self._callbacks: List[Callable[[], None]] = [] + + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + def __repr__(self) -> str: + return f"Connection<{self._key}>" + + def __del__(self, _warnings: Any = warnings) -> None: + if self._protocol is not None: + kwargs = {"source": self} + _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs) + if self._loop.is_closed(): + return + + self._connector._release(self._key, self._protocol, should_close=True) + + context = {"client_connection": self, "message": "Unclosed connection"} + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __bool__(self) -> Literal[True]: + """Force subclasses to not be falsy, to make checks simpler.""" + return True + + @property + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "connector.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def protocol(self) -> Optional[ResponseHandler]: + return self._protocol + + def add_callback(self, callback: Callable[[], None]) -> None: + if callback is not None: + self._callbacks.append(callback) + + def _notify_release(self) -> None: + callbacks, self._callbacks = self._callbacks[:], [] + + for cb in callbacks: + with suppress(Exception): + cb() + + def close(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release(self._key, self._protocol, should_close=True) + self._protocol = None + + def release(self) -> None: + self._notify_release() + + if self._protocol is not None: + self._connector._release(self._key, self._protocol) + self._protocol = None + + @property + def closed(self) -> bool: + return self._protocol is None or not self._protocol.is_connected() + + +class _TransportPlaceholder: + """placeholder for BaseConnector.connect function""" + + __slots__ = () + + def close(self) -> None: + """Close the placeholder transport.""" + + +class BaseConnector: + """Base connector class. + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + timeout_ceil_threshold - Trigger ceiling of timeout values when + it's above timeout_ceil_threshold. + loop - Optional event loop. 
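# --- Editor's example (not part of the vendored diff): a minimal sketch of the
# pooling knobs documented above, applied through TCPConnector; the URL and the
# numeric values are illustrative only.
import asyncio
import aiohttp

async def main() -> None:
    connector = aiohttp.TCPConnector(
        limit=50,             # total simultaneous connections
        limit_per_host=10,    # per (host, port, is_ssl) endpoint
        keepalive_timeout=30.0,
    )
    async with aiohttp.ClientSession(connector=connector) as session:
        async with session.get("https://example.org/") as resp:
            print(resp.status)

asyncio.run(main())
# --- end editor's example ---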
+ """ + + _closed = True # prevent AttributeError in __del__ if ctor was failed + _source_traceback = None + + # abort transport after 2 seconds (cleanup broken connections) + _cleanup_closed_period = 2.0 + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET + + def __init__( + self, + *, + keepalive_timeout: Union[object, None, float] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout_ceil_threshold: float = 5, + ) -> None: + + if force_close: + if keepalive_timeout is not None and keepalive_timeout is not sentinel: + raise ValueError( + "keepalive_timeout cannot be set if force_close is True" + ) + else: + if keepalive_timeout is sentinel: + keepalive_timeout = 15.0 + + loop = loop or asyncio.get_running_loop() + self._timeout_ceil_threshold = timeout_ceil_threshold + + self._closed = False + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) + + # Connection pool of reusable connections. + # We use a deque to store connections because it has O(1) popleft() + # and O(1) append() operations to implement a FIFO queue. + self._conns: DefaultDict[ + ConnectionKey, Deque[Tuple[ResponseHandler, float]] + ] = defaultdict(deque) + self._limit = limit + self._limit_per_host = limit_per_host + self._acquired: Set[ResponseHandler] = set() + self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = ( + defaultdict(set) + ) + self._keepalive_timeout = cast(float, keepalive_timeout) + self._force_close = force_close + + # {host_key: FIFO list of waiters} + # The FIFO is implemented with an OrderedDict with None keys because + # python does not have an ordered set. + self._waiters: DefaultDict[ + ConnectionKey, OrderedDict[asyncio.Future[None], None] + ] = defaultdict(OrderedDict) + + self._loop = loop + self._factory = functools.partial(ResponseHandler, loop=loop) + + # start keep-alive connection cleanup task + self._cleanup_handle: Optional[asyncio.TimerHandle] = None + + # start cleanup closed transports task + self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + + if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED: + warnings.warn( + "enable_cleanup_closed ignored because " + "https://github.com/python/cpython/pull/118960 is fixed " + f"in Python version {sys.version_info}", + DeprecationWarning, + stacklevel=2, + ) + enable_cleanup_closed = False + + self._cleanup_closed_disabled = not enable_cleanup_closed + self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] + self._cleanup_closed() + + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return + if not self._conns: + return + + conns = [repr(c) for c in self._conns.values()] + + self._close() + + kwargs = {"source": self} + _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs) + context = { + "connector": self, + "connections": conns, + "message": "Unclosed connector", + } + if self._source_traceback is not None: + context["source_traceback"] = self._source_traceback + self._loop.call_exception_handler(context) + + def __enter__(self) -> "BaseConnector": + warnings.warn( + '"with Connector():" is deprecated, ' + 'use "async with Connector():" instead', + DeprecationWarning, + ) + return self + + def __exit__(self, *exc: Any) -> None: + self._close() + + async def __aenter__(self) -> "BaseConnector": + return self + + async def __aexit__( + self, + exc_type: 
Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + exc_traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + @property + def force_close(self) -> bool: + """Ultimately close connection on releasing if True.""" + return self._force_close + + @property + def limit(self) -> int: + """The total number of simultaneous connections. + + If limit is 0, the connector has no limit. + The default limit size is 100. + """ + return self._limit + + @property + def limit_per_host(self) -> int: + """The limit for simultaneous connections to the same endpoint. + + Endpoints are the same if they have an equal + (host, port, is_ssl) triple. + """ + return self._limit_per_host + + def _cleanup(self) -> None: + """Cleanup unused transports.""" + if self._cleanup_handle: + self._cleanup_handle.cancel() + # _cleanup_handle should be unset, otherwise _release() will not + # recreate it ever! + self._cleanup_handle = None + + now = monotonic() + timeout = self._keepalive_timeout + + if self._conns: + connections = defaultdict(deque) + deadline = now - timeout + for key, conns in self._conns.items(): + alive: Deque[Tuple[ResponseHandler, float]] = deque() + for proto, use_time in conns: + if proto.is_connected() and use_time - deadline >= 0: + alive.append((proto, use_time)) + continue + transport = proto.transport + proto.close() + if not self._cleanup_closed_disabled and key.is_ssl: + self._cleanup_closed_transports.append(transport) + + if alive: + connections[key] = alive + + self._conns = connections + + if self._conns: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + def _cleanup_closed(self) -> None: + """Double confirmation for transport close. + + Some broken ssl servers may leave the socket open without a proper close. + """ + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + self._cleanup_closed_transports = [] + + if not self._cleanup_closed_disabled: + self._cleanup_closed_handle = helpers.weakref_handle( + self, + "_cleanup_closed", + self._cleanup_closed_period, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + def close(self) -> Awaitable[None]: + """Close all opened transports.""" + self._close() + return _DeprecationWaiter(noop()) + + def _close(self) -> None: + if self._closed: + return + + self._closed = True + + try: + if self._loop.is_closed(): + return + + # cancel cleanup task + if self._cleanup_handle: + self._cleanup_handle.cancel() + + # cancel cleanup close task + if self._cleanup_closed_handle: + self._cleanup_closed_handle.cancel() + + for data in self._conns.values(): + for proto, t0 in data: + proto.close() + + for proto in self._acquired: + proto.close() + + for transport in self._cleanup_closed_transports: + if transport is not None: + transport.abort() + + finally: + self._conns.clear() + self._acquired.clear() + for keyed_waiters in self._waiters.values(): + for keyed_waiter in keyed_waiters: + keyed_waiter.cancel() + self._waiters.clear() + self._cleanup_handle = None + self._cleanup_closed_transports.clear() + self._cleanup_closed_handle = None + + @property + def closed(self) -> bool: + """Is connector closed. + + A readonly property. 
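# --- Editor's example (not part of the vendored diff): close() above returns a
# _DeprecationWaiter, so a bare connector.close() warns; awaiting it does not.
import asyncio
import aiohttp

async def main() -> None:
    connector = aiohttp.TCPConnector()
    assert not connector.closed
    await connector.close()   # the supported spelling
    assert connector.closed

asyncio.run(main())
# --- end editor's example ---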
+ """ + return self._closed + + def _available_connections(self, key: "ConnectionKey") -> int: + """ + Return number of available connections. + + The limit, limit_per_host and the connection key are taken into account. + + If it returns less than 1 means that there are no connections + available. + """ + # check total available connections + # If there are no limits, this will always return 1 + total_remain = 1 + + if self._limit and (total_remain := self._limit - len(self._acquired)) <= 0: + return total_remain + + # check limit per host + if host_remain := self._limit_per_host: + if acquired := self._acquired_per_host.get(key): + host_remain -= len(acquired) + if total_remain > host_remain: + return host_remain + + return total_remain + + async def connect( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> Connection: + """Get from pool or create new connection.""" + key = req.connection_key + if (conn := await self._get(key, traces)) is not None: + # If we do not have to wait and we can get a connection from the pool + # we can avoid the timeout ceil logic and directly return the connection + return conn + + async with ceil_timeout(timeout.connect, timeout.ceil_threshold): + if self._available_connections(key) <= 0: + await self._wait_for_available_connection(key, traces) + if (conn := await self._get(key, traces)) is not None: + return conn + + placeholder = cast(ResponseHandler, _TransportPlaceholder()) + self._acquired.add(placeholder) + if self._limit_per_host: + self._acquired_per_host[key].add(placeholder) + + try: + # Traces are done inside the try block to ensure that the + # that the placeholder is still cleaned up if an exception + # is raised. + if traces: + for trace in traces: + await trace.send_connection_create_start() + proto = await self._create_connection(req, traces, timeout) + if traces: + for trace in traces: + await trace.send_connection_create_end() + except BaseException: + self._release_acquired(key, placeholder) + raise + else: + if self._closed: + proto.close() + raise ClientConnectionError("Connector is closed.") + + # The connection was successfully created, drop the placeholder + # and add the real connection to the acquired set. There should + # be no awaits after the proto is added to the acquired set + # to ensure that the connection is not left in the acquired set + # on cancellation. + self._acquired.remove(placeholder) + self._acquired.add(proto) + if self._limit_per_host: + acquired_per_host = self._acquired_per_host[key] + acquired_per_host.remove(placeholder) + acquired_per_host.add(proto) + return Connection(self, key, proto, self._loop) + + async def _wait_for_available_connection( + self, key: "ConnectionKey", traces: List["Trace"] + ) -> None: + """Wait for an available connection slot.""" + # We loop here because there is a race between + # the connection limit check and the connection + # being acquired. If the connection is acquired + # between the check and the await statement, we + # need to loop again to check if the connection + # slot is still available. + attempts = 0 + while True: + fut: asyncio.Future[None] = self._loop.create_future() + keyed_waiters = self._waiters[key] + keyed_waiters[fut] = None + if attempts: + # If we have waited before, we need to move the waiter + # to the front of the queue as otherwise we might get + # starved and hit the timeout. 
keyed_waiters.move_to_end(fut, last=False) + + try: + # Traces happen in the try block to ensure that + # the waiter is still cleaned up if an exception is raised. + if traces: + for trace in traces: + await trace.send_connection_queued_start() + await fut + if traces: + for trace in traces: + await trace.send_connection_queued_end() + finally: + # pop the waiter from the queue if it's still + # there and not already removed by _release_waiter + keyed_waiters.pop(fut, None) + if not self._waiters.get(key, True): + del self._waiters[key] + + if self._available_connections(key) > 0: + break + attempts += 1 + + async def _get( + self, key: "ConnectionKey", traces: List["Trace"] + ) -> Optional[Connection]: + """Get next reusable connection for the key or None. + + The connection will be marked as acquired. + """ + if (conns := self._conns.get(key)) is None: + return None + + t1 = monotonic() + while conns: + proto, t0 = conns.popleft() + # We will reuse the connection if it's connected and + # the keepalive timeout has not been exceeded + if proto.is_connected() and t1 - t0 <= self._keepalive_timeout: + if not conns: + # The very last connection was reclaimed: drop the key + del self._conns[key] + self._acquired.add(proto) + if self._limit_per_host: + self._acquired_per_host[key].add(proto) + if traces: + for trace in traces: + try: + await trace.send_connection_reuseconn() + except BaseException: + self._release_acquired(key, proto) + raise + return Connection(self, key, proto, self._loop) + + # Connection cannot be reused, close it + transport = proto.transport + proto.close() + # only for SSL transports + if not self._cleanup_closed_disabled and key.is_ssl: + self._cleanup_closed_transports.append(transport) + + # No more connections: drop the key + del self._conns[key] + return None + + def _release_waiter(self) -> None: + """ + Iterates over all waiters until one to be released is found. + + The one to be released is not finished and + belongs to a host that has available connections. + """ + if not self._waiters: + return + + # Shuffling the ordered dict keys avoids iterating + # in the same order on each call. 
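# --- Editor's example (not part of the vendored diff): the OrderedDict-as-FIFO
# pattern used for the waiter queues above (None values make it an ordered set).
from collections import OrderedDict

waiters: "OrderedDict[str, None]" = OrderedDict()
waiters["first"] = None
waiters["second"] = None
waiters.move_to_end("second", last=False)   # re-queue at the front, as in connect()
oldest, _ = waiters.popitem(last=False)     # FIFO pop, as in _release_waiter
assert oldest == "second"
# --- end editor's example ---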
+ queues = list(self._waiters) + random.shuffle(queues) + + for key in queues: + if self._available_connections(key) < 1: + continue + + waiters = self._waiters[key] + while waiters: + waiter, _ = waiters.popitem(last=False) + if not waiter.done(): + waiter.set_result(None) + return + + def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None: + """Release acquired connection.""" + if self._closed: + # acquired connection is already released on connector closing + return + + self._acquired.discard(proto) + if self._limit_per_host and (conns := self._acquired_per_host.get(key)): + conns.discard(proto) + if not conns: + del self._acquired_per_host[key] + self._release_waiter() + + def _release( + self, + key: "ConnectionKey", + protocol: ResponseHandler, + *, + should_close: bool = False, + ) -> None: + if self._closed: + # acquired connection is already released on connector closing + return + + self._release_acquired(key, protocol) + + if self._force_close or should_close or protocol.should_close: + transport = protocol.transport + protocol.close() + + if key.is_ssl and not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(transport) + return + + self._conns[key].append((protocol, monotonic())) + + if self._cleanup_handle is None: + self._cleanup_handle = helpers.weakref_handle( + self, + "_cleanup", + self._keepalive_timeout, + self._loop, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + raise NotImplementedError() + + +class _DNSCacheTable: + def __init__(self, ttl: Optional[float] = None) -> None: + self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {} + self._timestamps: Dict[Tuple[str, int], float] = {} + self._ttl = ttl + + def __contains__(self, host: object) -> bool: + return host in self._addrs_rr + + def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None: + self._addrs_rr[key] = (cycle(addrs), len(addrs)) + + if self._ttl is not None: + self._timestamps[key] = monotonic() + + def remove(self, key: Tuple[str, int]) -> None: + self._addrs_rr.pop(key, None) + + if self._ttl is not None: + self._timestamps.pop(key, None) + + def clear(self) -> None: + self._addrs_rr.clear() + self._timestamps.clear() + + def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]: + loop, length = self._addrs_rr[key] + addrs = list(islice(loop, length)) + # Consume one more element to shift internal state of `cycle` + next(loop) + return addrs + + def expired(self, key: Tuple[str, int]) -> bool: + if self._ttl is None: + return False + + return self._timestamps[key] + self._ttl < monotonic() + + +def _make_ssl_context(verified: bool) -> SSLContext: + """Create SSL context. + + This method is not async-friendly and should be called from a thread + because it will load certificates from disk and do other blocking I/O. 
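+
+    The ``verified=False`` branch below builds a context with
+    ``check_hostname = False`` and ``verify_mode = CERT_NONE``, i.e. it
+    deliberately skips certificate and hostname verification; it exists
+    for callers that explicitly opt out of verification.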
+ """ + if ssl is None: + # No ssl support + return None + if verified: + return ssl.create_default_context() + sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) + sslcontext.options |= ssl.OP_NO_SSLv2 + sslcontext.options |= ssl.OP_NO_SSLv3 + sslcontext.check_hostname = False + sslcontext.verify_mode = ssl.CERT_NONE + sslcontext.options |= ssl.OP_NO_COMPRESSION + sslcontext.set_default_verify_paths() + return sslcontext + + +# The default SSLContext objects are created at import time +# since they do blocking I/O to load certificates from disk, +# and imports should always be done before the event loop starts +# or in a thread. +_SSL_CONTEXT_VERIFIED = _make_ssl_context(True) +_SSL_CONTEXT_UNVERIFIED = _make_ssl_context(False) + + +class TCPConnector(BaseConnector): + """TCP connector. + + verify_ssl - Set to True to check ssl certifications. + fingerprint - Pass the binary sha256 + digest of the expected certificate in DER format to verify + that the certificate the server presents matches. See also + https://en.wikipedia.org/wiki/HTTP_Public_Key_Pinning + resolver - Enable DNS lookups and use this + resolver + use_dns_cache - Use memory cache for DNS lookups. + ttl_dns_cache - Max seconds having cached a DNS entry, None forever. + family - socket address family + local_addr - local tuple of (host, port) to bind socket to + + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + enable_cleanup_closed - Enables clean-up closed ssl transports. + Disabled by default. + happy_eyeballs_delay - This is the “Connection Attempt Delay” + as defined in RFC 8305. To disable + the happy eyeballs algorithm, set to None. + interleave - “First Address Family Count” as defined in RFC 8305 + loop - Optional event loop. 
+ """ + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"tcp"}) + + def __init__( + self, + *, + verify_ssl: bool = True, + fingerprint: Optional[bytes] = None, + use_dns_cache: bool = True, + ttl_dns_cache: Optional[int] = 10, + family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC, + ssl_context: Optional[SSLContext] = None, + ssl: Union[bool, Fingerprint, SSLContext] = True, + local_addr: Optional[Tuple[str, int]] = None, + resolver: Optional[AbstractResolver] = None, + keepalive_timeout: Union[None, float, object] = sentinel, + force_close: bool = False, + limit: int = 100, + limit_per_host: int = 0, + enable_cleanup_closed: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, + timeout_ceil_threshold: float = 5, + happy_eyeballs_delay: Optional[float] = 0.25, + interleave: Optional[int] = None, + ): + super().__init__( + keepalive_timeout=keepalive_timeout, + force_close=force_close, + limit=limit, + limit_per_host=limit_per_host, + enable_cleanup_closed=enable_cleanup_closed, + loop=loop, + timeout_ceil_threshold=timeout_ceil_threshold, + ) + + self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) + if resolver is None: + resolver = DefaultResolver(loop=self._loop) + self._resolver = resolver + + self._use_dns_cache = use_dns_cache + self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) + self._throttle_dns_futures: Dict[ + Tuple[str, int], Set["asyncio.Future[None]"] + ] = {} + self._family = family + self._local_addr_infos = aiohappyeyeballs.addr_to_addr_infos(local_addr) + self._happy_eyeballs_delay = happy_eyeballs_delay + self._interleave = interleave + self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() + + def close(self) -> Awaitable[None]: + """Close all ongoing DNS calls.""" + for fut in chain.from_iterable(self._throttle_dns_futures.values()): + fut.cancel() + + for t in self._resolve_host_tasks: + t.cancel() + + return super().close() + + @property + def family(self) -> int: + """Socket family like AF_INET.""" + return self._family + + @property + def use_dns_cache(self) -> bool: + """True if local DNS caching is enabled.""" + return self._use_dns_cache + + def clear_dns_cache( + self, host: Optional[str] = None, port: Optional[int] = None + ) -> None: + """Remove specified host/port or clear all dns local cache.""" + if host is not None and port is not None: + self._cached_hosts.remove((host, port)) + elif host is not None or port is not None: + raise ValueError("either both host and port or none of them are allowed") + else: + self._cached_hosts.clear() + + async def _resolve_host( + self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None + ) -> List[ResolveResult]: + """Resolve host and return list of addresses.""" + if is_ip_address(host): + return [ + { + "hostname": host, + "host": host, + "port": port, + "family": self._family, + "proto": 0, + "flags": 0, + } + ] + + if not self._use_dns_cache: + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_start(host) + + res = await self._resolver.resolve(host, port, family=self._family) + + if traces: + for trace in traces: + await trace.send_dns_resolvehost_end(host) + + return res + + key = (host, port) + if key in self._cached_hosts and not self._cached_hosts.expired(key): + # get result early, before any await (#4014) + result = self._cached_hosts.next_addrs(key) + + if traces: + for trace in traces: + await trace.send_dns_cache_hit(host) + return result + + futures: Set["asyncio.Future[None]"] + # + # 
If multiple connectors are resolving the same host, we wait
+        # for the first one to resolve and then use the result for all of them.
+        # We use a throttle to ensure that we only resolve the host once
+        # and then use the result for all the waiters.
+        #
+        if key in self._throttle_dns_futures:
+            # get futures early, before any await (#4014)
+            futures = self._throttle_dns_futures[key]
+            future: asyncio.Future[None] = self._loop.create_future()
+            futures.add(future)
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_cache_hit(host)
+            try:
+                await future
+            finally:
+                futures.discard(future)
+            return self._cached_hosts.next_addrs(key)
+
+        # update dict early, before any await (#4014)
+        self._throttle_dns_futures[key] = futures = set()
+        # In this case we need to create a task to ensure that we can shield
+        # the task from cancellation, as cancelling this lookup should not cancel
+        # the underlying lookup or else the cancel event will get broadcast to
+        # all the waiters across all connections.
+        #
+        coro = self._resolve_host_with_throttle(key, host, port, futures, traces)
+        loop = asyncio.get_running_loop()
+        if sys.version_info >= (3, 12):
+            # Optimization for Python 3.12, try to send immediately
+            resolved_host_task = asyncio.Task(coro, loop=loop, eager_start=True)
+        else:
+            resolved_host_task = loop.create_task(coro)
+
+        if not resolved_host_task.done():
+            self._resolve_host_tasks.add(resolved_host_task)
+            resolved_host_task.add_done_callback(self._resolve_host_tasks.discard)
+
+        try:
+            return await asyncio.shield(resolved_host_task)
+        except asyncio.CancelledError:
+
+            def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None:
+                with suppress(Exception, asyncio.CancelledError):
+                    fut.result()
+
+            resolved_host_task.add_done_callback(drop_exception)
+            raise
+
+    async def _resolve_host_with_throttle(
+        self,
+        key: Tuple[str, int],
+        host: str,
+        port: int,
+        futures: Set["asyncio.Future[None]"],
+        traces: Optional[Sequence["Trace"]],
+    ) -> List[ResolveResult]:
+        """Resolve host and set result for all waiters.
+
+        This method must be run in a task and shielded from cancellation
+        to avoid cancelling the underlying lookup.
+        """
+        if traces:
+            for trace in traces:
+                await trace.send_dns_cache_miss(host)
+        try:
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_resolvehost_start(host)
+
+            addrs = await self._resolver.resolve(host, port, family=self._family)
+            if traces:
+                for trace in traces:
+                    await trace.send_dns_resolvehost_end(host)
+
+            self._cached_hosts.add(key, addrs)
+            for fut in futures:
+                set_result(fut, None)
+        except BaseException as e:
+            # Any DNS exception is set for the waiters so that they raise the
+            # same exception. This coro is always run in a task that is shielded
+            # from cancellation, so we should never propagate cancellation here.
+            for fut in futures:
+                set_exception(fut, e)
+            raise
+        finally:
+            self._throttle_dns_futures.pop(key)
+
+        return self._cached_hosts.next_addrs(key)
+
+    async def _create_connection(
+        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+    ) -> ResponseHandler:
+        """Create connection.
+
+        Has same keyword arguments as BaseEventLoop.create_connection.
+        """
+        if req.proxy:
+            _, proto = await self._create_proxy_connection(req, traces, timeout)
+        else:
+            _, proto = await self._create_direct_connection(req, traces, timeout)
+
+        return proto
+
+    def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
+        """Logic to get the correct SSL context
+
+        0. if req.ssl is false, return None
+
+        1.
if ssl_context is specified in req, use it + 2. if _ssl_context is specified in self, use it + 3. otherwise: + 1. if verify_ssl is not specified in req, use self.ssl_context + (will generate a default context according to self.verify_ssl) + 2. if verify_ssl is True in req, generate a default SSL context + 3. if verify_ssl is False in req, generate a SSL context that + won't verify + """ + if not req.is_ssl(): + return None + + if ssl is None: # pragma: no cover + raise RuntimeError("SSL is not supported.") + sslcontext = req.ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return _SSL_CONTEXT_UNVERIFIED + sslcontext = self._ssl + if isinstance(sslcontext, ssl.SSLContext): + return sslcontext + if sslcontext is not True: + # not verified or fingerprinted + return _SSL_CONTEXT_UNVERIFIED + return _SSL_CONTEXT_VERIFIED + + def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: + ret = req.ssl + if isinstance(ret, Fingerprint): + return ret + ret = self._ssl + if isinstance(ret, Fingerprint): + return ret + return None + + async def _wrap_create_connection( + self, + *args: Any, + addr_infos: List[aiohappyeyeballs.AddrInfoType], + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + sock = await aiohappyeyeballs.start_connection( + addr_infos=addr_infos, + local_addr_infos=self._local_addr_infos, + happy_eyeballs_delay=self._happy_eyeballs_delay, + interleave=self._interleave, + loop=self._loop, + ) + return await self._loop.create_connection(*args, **kwargs, sock=sock) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + async def _wrap_existing_connection( + self, + *args: Any, + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + **kwargs: Any, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + return await self._loop.create_connection(*args, **kwargs) + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + + def _fail_on_no_start_tls(self, req: "ClientRequest") -> None: + """Raise a :py:exc:`RuntimeError` on missing ``start_tls()``. + + It is necessary for TLS-in-TLS so that it is possible to + send HTTPS queries through HTTPS proxies. + + This doesn't affect regular HTTP requests, though. 
+ """ + if not req.is_ssl(): + return + + proxy_url = req.proxy + assert proxy_url is not None + if proxy_url.scheme != "https": + return + + self._check_loop_for_start_tls() + + def _check_loop_for_start_tls(self) -> None: + try: + self._loop.start_tls + except AttributeError as attr_exc: + raise RuntimeError( + "An HTTPS request is being sent through an HTTPS proxy. " + "This needs support for TLS in TLS but it is not implemented " + "in your runtime for the stdlib asyncio.\n\n" + "Please upgrade to Python 3.11 or higher. For more details, " + "please see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n" + "* https://docs.aiohttp.org/en/stable/" + "client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + ) from attr_exc + + def _loop_supports_start_tls(self) -> bool: + try: + self._check_loop_for_start_tls() + except RuntimeError: + return False + else: + return True + + def _warn_about_tls_in_tls( + self, + underlying_transport: asyncio.Transport, + req: ClientRequest, + ) -> None: + """Issue a warning if the requested URL has HTTPS scheme.""" + if req.request_info.url.scheme != "https": + return + + asyncio_supports_tls_in_tls = getattr( + underlying_transport, + "_start_tls_compatible", + False, + ) + + if asyncio_supports_tls_in_tls: + return + + warnings.warn( + "An HTTPS request is being sent through an HTTPS proxy. " + "This support for TLS in TLS is known to be disabled " + "in the stdlib asyncio (Python <3.11). This is why you'll probably see " + "an error in the log below.\n\n" + "It is possible to enable it via monkeypatching. " + "For more details, see:\n" + "* https://bugs.python.org/issue37179\n" + "* https://github.com/python/cpython/pull/28073\n\n" + "You can temporarily patch this as follows:\n" + "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n" + "* https://github.com/aio-libs/aiohttp/discussions/6044\n", + RuntimeWarning, + source=self, + # Why `4`? At least 3 of the calls in the stack originate + # from the methods in this class. + stacklevel=3, + ) + + async def _start_tls_connection( + self, + underlying_transport: asyncio.Transport, + req: ClientRequest, + timeout: "ClientTimeout", + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + """Wrap the raw TCP transport with TLS.""" + tls_proto = self._factory() # Create a brand new proto for TLS + sslcontext = self._get_ssl_context(req) + if TYPE_CHECKING: + # _start_tls_connection is unreachable in the current code path + # if sslcontext is None. 
+ assert sslcontext is not None + + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + try: + tls_transport = await self._loop.start_tls( + underlying_transport, + tls_proto, + sslcontext, + server_hostname=req.server_hostname or req.host, + ssl_handshake_timeout=timeout.total, + ) + except BaseException: + # We need to close the underlying transport since + # `start_tls()` probably failed before it had a + # chance to do this: + underlying_transport.close() + raise + if isinstance(tls_transport, asyncio.Transport): + fingerprint = self._get_fingerprint(req) + if fingerprint: + try: + fingerprint.check(tls_transport) + except ServerFingerprintMismatch: + tls_transport.close() + if not self._cleanup_closed_disabled: + self._cleanup_closed_transports.append(tls_transport) + raise + except cert_errors as exc: + raise ClientConnectorCertificateError(req.connection_key, exc) from exc + except ssl_errors as exc: + raise ClientConnectorSSLError(req.connection_key, exc) from exc + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise client_error(req.connection_key, exc) from exc + except TypeError as type_err: + # Example cause looks like this: + # TypeError: transport is not supported by start_tls() + + raise ClientConnectionError( + "Cannot initialize a TLS-in-TLS connection to host " + f"{req.host!s}:{req.port:d} through an underlying connection " + f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} " + f"[{type_err!s}]" + ) from type_err + else: + if tls_transport is None: + msg = "Failed to start TLS (possibly caused by closing transport)" + raise client_error(req.connection_key, OSError(msg)) + tls_proto.connection_made( + tls_transport + ) # Kick the state machine of the new TLS protocol + + return tls_transport, tls_proto + + def _convert_hosts_to_addr_infos( + self, hosts: List[ResolveResult] + ) -> List[aiohappyeyeballs.AddrInfoType]: + """Converts the list of hosts to a list of addr_infos. + + The list of hosts is the result of a DNS lookup. The list of + addr_infos is the result of a call to `socket.getaddrinfo()`. + """ + addr_infos: List[aiohappyeyeballs.AddrInfoType] = [] + for hinfo in hosts: + host = hinfo["host"] + is_ipv6 = ":" in host + family = socket.AF_INET6 if is_ipv6 else socket.AF_INET + if self._family and self._family != family: + continue + addr = (host, hinfo["port"], 0, 0) if is_ipv6 else (host, hinfo["port"]) + addr_infos.append( + (family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr) + ) + return addr_infos + + async def _create_direct_connection( + self, + req: ClientRequest, + traces: List["Trace"], + timeout: "ClientTimeout", + *, + client_error: Type[Exception] = ClientConnectorError, + ) -> Tuple[asyncio.Transport, ResponseHandler]: + sslcontext = self._get_ssl_context(req) + fingerprint = self._get_fingerprint(req) + + host = req.url.raw_host + assert host is not None + # Replace multiple trailing dots with a single one. + # A trailing dot is only present for fully-qualified domain names. + # See https://github.com/aio-libs/aiohttp/pull/7364. + if host.endswith(".."): + host = host.rstrip(".") + "." + port = req.port + assert port is not None + try: + # Cancelling this lookup should not cancel the underlying lookup + # or else the cancel event will get broadcast to all the waiters + # across all connections. 
+            hosts = await self._resolve_host(host, port, traces=traces)
+        except OSError as exc:
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                raise
+            # In the case of a proxy, this is not a ClientProxyConnectionError:
+            # the problem is resolving the proxy's IP address itself.
+            raise ClientConnectorDNSError(req.connection_key, exc) from exc
+
+        last_exc: Optional[Exception] = None
+        addr_infos = self._convert_hosts_to_addr_infos(hosts)
+        while addr_infos:
+            # Strip trailing dots, certificates contain FQDN without dots.
+            # See https://github.com/aio-libs/aiohttp/issues/3636
+            server_hostname = (
+                (req.server_hostname or host).rstrip(".") if sslcontext else None
+            )
+
+            try:
+                transp, proto = await self._wrap_create_connection(
+                    self._factory,
+                    timeout=timeout,
+                    ssl=sslcontext,
+                    addr_infos=addr_infos,
+                    server_hostname=server_hostname,
+                    req=req,
+                    client_error=client_error,
+                )
+            except (ClientConnectorError, asyncio.TimeoutError) as exc:
+                last_exc = exc
+                aiohappyeyeballs.pop_addr_infos_interleave(addr_infos, self._interleave)
+                continue
+
+            if req.is_ssl() and fingerprint:
+                try:
+                    fingerprint.check(transp)
+                except ServerFingerprintMismatch as exc:
+                    transp.close()
+                    if not self._cleanup_closed_disabled:
+                        self._cleanup_closed_transports.append(transp)
+                    last_exc = exc
+                    # Remove the bad peer from the list of addr_infos
+                    sock: socket.socket = transp.get_extra_info("socket")
+                    bad_peer = sock.getpeername()
+                    aiohappyeyeballs.remove_addr_infos(addr_infos, bad_peer)
+                    continue
+
+            return transp, proto
+        else:
+            assert last_exc is not None
+            raise last_exc
+
+    async def _create_proxy_connection(
+        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+    ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+        self._fail_on_no_start_tls(req)
+        runtime_has_start_tls = self._loop_supports_start_tls()
+
+        headers: Dict[str, str] = {}
+        if req.proxy_headers is not None:
+            headers = req.proxy_headers  # type: ignore[assignment]
+        headers[hdrs.HOST] = req.headers[hdrs.HOST]
+
+        url = req.proxy
+        assert url is not None
+        proxy_req = ClientRequest(
+            hdrs.METH_GET,
+            url,
+            headers=headers,
+            auth=req.proxy_auth,
+            loop=self._loop,
+            ssl=req.ssl,
+        )
+
+        # create connection to proxy server
+        transport, proto = await self._create_direct_connection(
+            proxy_req, [], timeout, client_error=ClientProxyConnectionError
+        )
+
+        auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
+        if auth is not None:
+            if not req.is_ssl():
+                req.headers[hdrs.PROXY_AUTHORIZATION] = auth
+            else:
+                proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
+
+        if req.is_ssl():
+            if runtime_has_start_tls:
+                self._warn_about_tls_in_tls(transport, req)
+
+            # For HTTPS requests over an HTTP proxy we must notify the
+            # proxy to tunnel the connection, so we send a CONNECT command:
+            #     CONNECT www.python.org:443 HTTP/1.1
+            #     Host: www.python.org
+            #
+            # Next we must do the TLS handshake; to do this we wrap the
+            # raw socket into a secure one, which asyncio handles for us.
+            proxy_req.method = hdrs.METH_CONNECT
+            proxy_req.url = req.url
+            key = req.connection_key._replace(
+                proxy=None, proxy_auth=None, proxy_headers_hash=None
+            )
+            conn = Connection(self, key, proto, self._loop)
+            proxy_resp = await proxy_req.send(conn)
+            try:
+                protocol = conn._protocol
+                assert protocol is not None
+
+                # read_until_eof=True will ensure the connection isn't closed
+                # once the response is received and processed allowing
+                # START_TLS to work on the connection below.
+ protocol.set_response_params( + read_until_eof=runtime_has_start_tls, + timeout_ceil_threshold=self._timeout_ceil_threshold, + ) + resp = await proxy_resp.start(conn) + except BaseException: + proxy_resp.close() + conn.close() + raise + else: + conn._protocol = None + try: + if resp.status != 200: + message = resp.reason + if message is None: + message = HTTPStatus(resp.status).phrase + raise ClientHttpProxyError( + proxy_resp.request_info, + resp.history, + status=resp.status, + message=message, + headers=resp.headers, + ) + if not runtime_has_start_tls: + rawsock = transport.get_extra_info("socket", default=None) + if rawsock is None: + raise RuntimeError( + "Transport does not expose socket instance" + ) + # Duplicate the socket, so now we can close proxy transport + rawsock = rawsock.dup() + except BaseException: + # It shouldn't be closed in `finally` because it's fed to + # `loop.start_tls()` and the docs say not to touch it after + # passing there. + transport.close() + raise + finally: + if not runtime_has_start_tls: + transport.close() + + if not runtime_has_start_tls: + # HTTP proxy with support for upgrade to HTTPS + sslcontext = self._get_ssl_context(req) + return await self._wrap_existing_connection( + self._factory, + timeout=timeout, + ssl=sslcontext, + sock=rawsock, + server_hostname=req.host, + req=req, + ) + + return await self._start_tls_connection( + # Access the old transport for the last time before it's + # closed and forgotten forever: + transport, + req=req, + timeout=timeout, + ) + finally: + proxy_resp.close() + + return transport, proto + + +class UnixConnector(BaseConnector): + """Unix socket connector. + + path - Unix socket path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. + limit_per_host - Number of simultaneous connections to one host. + loop - Optional event loop. + """ + + allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"unix"}) + + def __init__( + self, + path: str, + force_close: bool = False, + keepalive_timeout: Union[object, float, None] = sentinel, + limit: int = 100, + limit_per_host: int = 0, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + super().__init__( + force_close=force_close, + keepalive_timeout=keepalive_timeout, + limit=limit, + limit_per_host=limit_per_host, + loop=loop, + ) + self._path = path + + @property + def path(self) -> str: + """Path to unix socket.""" + return self._path + + async def _create_connection( + self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + ) -> ResponseHandler: + try: + async with ceil_timeout( + timeout.sock_connect, ceil_threshold=timeout.ceil_threshold + ): + _, proto = await self._loop.create_unix_connection( + self._factory, self._path + ) + except OSError as exc: + if exc.errno is None and isinstance(exc, asyncio.TimeoutError): + raise + raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc + + return proto + + +class NamedPipeConnector(BaseConnector): + """Named pipe connector. + + Only supported by the proactor event loop. + See also: https://docs.python.org/3/library/asyncio-eventloop.html + + path - Windows named pipe path. + keepalive_timeout - (optional) Keep-alive timeout. + force_close - Set to True to force close and do reconnect + after each request (and between redirects). + limit - The total number of simultaneous connections. 
+    limit_per_host - Number of simultaneous connections to one host.
+    loop - Optional event loop.
+    """
+
+    allowed_protocol_schema_set = HIGH_LEVEL_SCHEMA_SET | frozenset({"npipe"})
+
+    def __init__(
+        self,
+        path: str,
+        force_close: bool = False,
+        keepalive_timeout: Union[object, float, None] = sentinel,
+        limit: int = 100,
+        limit_per_host: int = 0,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+    ) -> None:
+        super().__init__(
+            force_close=force_close,
+            keepalive_timeout=keepalive_timeout,
+            limit=limit,
+            limit_per_host=limit_per_host,
+            loop=loop,
+        )
+        if not isinstance(
+            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
+        ):
+            raise RuntimeError(
+                "Named Pipes only available in proactor loop under windows"
+            )
+        self._path = path
+
+    @property
+    def path(self) -> str:
+        """Path to the named pipe."""
+        return self._path
+
+    async def _create_connection(
+        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
+    ) -> ResponseHandler:
+        try:
+            async with ceil_timeout(
+                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
+            ):
+                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]
+                    self._factory, self._path
+                )
+                # The zero-delay sleep is required so that connection_made()
+                # is called and the transport is set; otherwise it would not
+                # be set before the `assert conn.transport is not None`
+                # in client.py's _request method.
+                await asyncio.sleep(0)
+                # The other option would be to set the transport manually,
+                # e.g. `proto.transport = trans`
+        except OSError as exc:
+            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
+                raise
+            raise ClientConnectorError(req.connection_key, exc) from exc
+
+        return cast(ResponseHandler, proto)
diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/cookiejar.py b/deepseek/lib/python3.10/site-packages/aiohttp/cookiejar.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef04bda5ad67bbcfb2d7b8983920ba9b1a80c938
--- /dev/null
+++ b/deepseek/lib/python3.10/site-packages/aiohttp/cookiejar.py
@@ -0,0 +1,487 @@
+import asyncio
+import calendar
+import contextlib
+import datetime
+import heapq
+import itertools
+import os  # noqa
+import pathlib
+import pickle
+import re
+import time
+import warnings
+from collections import defaultdict
+from http.cookies import BaseCookie, Morsel, SimpleCookie
+from typing import (
+    DefaultDict,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Set,
+    Tuple,
+    Union,
+    cast,
+)
+
+from yarl import URL
+
+from .abc import AbstractCookieJar, ClearCookiePredicate
+from .helpers import is_ip_address
+from .typedefs import LooseCookies, PathLike, StrOrURL
+
+__all__ = ("CookieJar", "DummyCookieJar")
+
+
+CookieItem = Union[str, "Morsel[str]"]
+
+# We cache these string methods here as their use is in performance critical code.
+_FORMAT_PATH = "{}/{}".format
+_FORMAT_DOMAIN_REVERSED = "{1}.{0}".format
+
+# The minimum number of scheduled cookie expirations before we start cleaning up
+# the expiration heap. This is a performance optimization to avoid cleaning up the
+# heap too often when there are only a few scheduled expirations.
+_MIN_SCHEDULED_COOKIE_EXPIRATION = 100
+
+
+class CookieJar(AbstractCookieJar):
+    """Implements cookie storage adhering to RFC 6265."""
+
+    DATE_TOKENS_RE = re.compile(
+        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
+        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
+    )
+
+    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
+
+    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
+
+    DATE_MONTH_RE = re.compile(
+        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|(aug)|(sep)|(oct)|(nov)|(dec)",
+        re.I,
+    )
+
+    DATE_YEAR_RE = re.compile(r"(\d{2,4})")
+
+    # calendar.timegm() fails for timestamps after datetime.datetime.max
+    # Minus one as a loss of precision occurs when timestamp() is called.
+    MAX_TIME = (
+        int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
+    )
+    try:
+        calendar.timegm(time.gmtime(MAX_TIME))
+    except (OSError, ValueError):
+        # Hit the maximum representable time on Windows
+        # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
+        # Throws ValueError on PyPy 3.9, OSError elsewhere
+        MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
+    except OverflowError:
+        # #4515: datetime.max may not be representable on 32-bit platforms
+        MAX_TIME = 2**31 - 1
+    # Avoid minuses in the future, 3x faster
+    SUB_MAX_TIME = MAX_TIME - 1
+
+    def __init__(
+        self,
+        *,
+        unsafe: bool = False,
+        quote_cookie: bool = True,
+        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+    ) -> None:
+        super().__init__(loop=loop)
+        self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
+            SimpleCookie
+        )
+        self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = (
+            defaultdict(dict)
+        )
+        self._host_only_cookies: Set[Tuple[str, str]] = set()
+        self._unsafe = unsafe
+        self._quote_cookie = quote_cookie
+        if treat_as_secure_origin is None:
+            treat_as_secure_origin = []
+        elif isinstance(treat_as_secure_origin, URL):
+            treat_as_secure_origin = [treat_as_secure_origin.origin()]
+        elif isinstance(treat_as_secure_origin, str):
+            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
+        else:
+            treat_as_secure_origin = [
+                URL(url).origin() if isinstance(url, str) else url.origin()
+                for url in treat_as_secure_origin
+            ]
+        self._treat_as_secure_origin = treat_as_secure_origin
+        self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = []
+        self._expirations: Dict[Tuple[str, str, str], float] = {}
+
+    def save(self, file_path: PathLike) -> None:
+        file_path = pathlib.Path(file_path)
+        with file_path.open(mode="wb") as f:
+            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)
+
+    def load(self, file_path: PathLike) -> None:
+        file_path = pathlib.Path(file_path)
+        with file_path.open(mode="rb") as f:
+            self._cookies = pickle.load(f)
+
+    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+        if predicate is None:
+            self._expire_heap.clear()
+            self._cookies.clear()
+            self._morsel_cache.clear()
+            self._host_only_cookies.clear()
+            self._expirations.clear()
+            return
+
+        now = time.time()
+        to_del = [
+            key
+            for (domain, path), cookie in self._cookies.items()
+            for name, morsel in cookie.items()
+            if (
+                (key := (domain, path, name)) in self._expirations
+                and self._expirations[key] <= now
+            )
+            or predicate(morsel)
+        ]
+        if to_del:
+            self._delete_cookies(to_del)
+
+    def clear_domain(self, domain: str) -> None:
+        self.clear(lambda x: self._is_domain_match(domain, x["domain"]))
+
+    def
__iter__(self) -> "Iterator[Morsel[str]]": + self._do_expiration() + for val in self._cookies.values(): + yield from val.values() + + def __len__(self) -> int: + """Return number of cookies. + + This function does not iterate self to avoid unnecessary expiration + checks. + """ + return sum(len(cookie.values()) for cookie in self._cookies.values()) + + def _do_expiration(self) -> None: + """Remove expired cookies.""" + if not (expire_heap_len := len(self._expire_heap)): + return + + # If the expiration heap grows larger than the number expirations + # times two, we clean it up to avoid keeping expired entries in + # the heap and consuming memory. We guard this with a minimum + # threshold to avoid cleaning up the heap too often when there are + # only a few scheduled expirations. + if ( + expire_heap_len > _MIN_SCHEDULED_COOKIE_EXPIRATION + and expire_heap_len > len(self._expirations) * 2 + ): + # Remove any expired entries from the expiration heap + # that do not match the expiration time in the expirations + # as it means the cookie has been re-added to the heap + # with a different expiration time. + self._expire_heap = [ + entry + for entry in self._expire_heap + if self._expirations.get(entry[1]) == entry[0] + ] + heapq.heapify(self._expire_heap) + + now = time.time() + to_del: List[Tuple[str, str, str]] = [] + # Find any expired cookies and add them to the to-delete list + while self._expire_heap: + when, cookie_key = self._expire_heap[0] + if when > now: + break + heapq.heappop(self._expire_heap) + # Check if the cookie hasn't been re-added to the heap + # with a different expiration time as it will be removed + # later when it reaches the top of the heap and its + # expiration time is met. + if self._expirations.get(cookie_key) == when: + to_del.append(cookie_key) + + if to_del: + self._delete_cookies(to_del) + + def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: + for domain, path, name in to_del: + self._host_only_cookies.discard((domain, name)) + self._cookies[(domain, path)].pop(name, None) + self._morsel_cache[(domain, path)].pop(name, None) + self._expirations.pop((domain, path, name), None) + + def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None: + cookie_key = (domain, path, name) + if self._expirations.get(cookie_key) == when: + # Avoid adding duplicates to the heap + return + heapq.heappush(self._expire_heap, (when, cookie_key)) + self._expirations[cookie_key] = when + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + """Update cookies.""" + hostname = response_url.raw_host + + if not self._unsafe and is_ip_address(hostname): + # Don't accept cookies from IPs + return + + if isinstance(cookies, Mapping): + cookies = cookies.items() + + for name, cookie in cookies: + if not isinstance(cookie, Morsel): + tmp = SimpleCookie() + tmp[name] = cookie # type: ignore[assignment] + cookie = tmp[name] + + domain = cookie["domain"] + + # ignore domains with trailing dots + if domain and domain[-1] == ".": + domain = "" + del cookie["domain"] + + if not domain and hostname is not None: + # Set the cookie's domain to the response hostname + # and set its host-only-flag + self._host_only_cookies.add((hostname, name)) + domain = cookie["domain"] = hostname + + if domain and domain[0] == ".": + # Remove leading dot + domain = domain[1:] + cookie["domain"] = domain + + if hostname and not self._is_domain_match(domain, hostname): + # Setting cookies for different domains is not allowed + continue + + path 
= cookie["path"] + if not path or path[0] != "/": + # Set the cookie's path to the response path + path = response_url.path + if not path.startswith("/"): + path = "/" + else: + # Cut everything from the last slash to the end + path = "/" + path[1 : path.rfind("/")] + cookie["path"] = path + path = path.rstrip("/") + + if max_age := cookie["max-age"]: + try: + delta_seconds = int(max_age) + max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME) + self._expire_cookie(max_age_expiration, domain, path, name) + except ValueError: + cookie["max-age"] = "" + + elif expires := cookie["expires"]: + if expire_time := self._parse_date(expires): + self._expire_cookie(expire_time, domain, path, name) + else: + cookie["expires"] = "" + + key = (domain, path) + if self._cookies[key].get(name) != cookie: + # Don't blow away the cache if the same + # cookie gets set again + self._cookies[key][name] = cookie + self._morsel_cache[key].pop(name, None) + + self._do_expiration() + + def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]": + """Returns this jar's cookies filtered by their attributes.""" + filtered: Union[SimpleCookie, "BaseCookie[str]"] = ( + SimpleCookie() if self._quote_cookie else BaseCookie() + ) + if not self._cookies: + # Skip do_expiration() if there are no cookies. + return filtered + self._do_expiration() + if not self._cookies: + # Skip rest of function if no non-expired cookies. + return filtered + if type(request_url) is not URL: + warnings.warn( + "filter_cookies expects yarl.URL instances only," + f"and will stop working in 4.x, got {type(request_url)}", + DeprecationWarning, + stacklevel=2, + ) + request_url = URL(request_url) + hostname = request_url.raw_host or "" + + is_not_secure = request_url.scheme not in ("https", "wss") + if is_not_secure and self._treat_as_secure_origin: + request_origin = URL() + with contextlib.suppress(ValueError): + request_origin = request_url.origin() + is_not_secure = request_origin not in self._treat_as_secure_origin + + # Send shared cookie + for c in self._cookies[("", "")].values(): + filtered[c.key] = c.value + + if is_ip_address(hostname): + if not self._unsafe: + return filtered + domains: Iterable[str] = (hostname,) + else: + # Get all the subdomains that might match a cookie (e.g. "foo.bar.com", "bar.com", "com") + domains = itertools.accumulate( + reversed(hostname.split(".")), _FORMAT_DOMAIN_REVERSED + ) + + # Get all the path prefixes that might match a cookie (e.g. "", "/foo", "/foo/bar") + paths = itertools.accumulate(request_url.path.split("/"), _FORMAT_PATH) + # Create every combination of (domain, path) pairs. + pairs = itertools.product(domains, paths) + + path_len = len(request_url.path) + # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4 + for p in pairs: + for name, cookie in self._cookies[p].items(): + domain = cookie["domain"] + + if (domain, name) in self._host_only_cookies and domain != hostname: + continue + + # Skip edge case when the cookie has a trailing slash but request doesn't. 
+ if len(cookie["path"]) > path_len: + continue + + if is_not_secure and cookie["secure"]: + continue + + # We already built the Morsel so reuse it here + if name in self._morsel_cache[p]: + filtered[name] = self._morsel_cache[p][name] + continue + + # It's critical we use the Morsel so the coded_value + # (based on cookie version) is preserved + mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel())) + mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) + self._morsel_cache[p][name] = mrsl_val + filtered[name] = mrsl_val + + return filtered + + @staticmethod + def _is_domain_match(domain: str, hostname: str) -> bool: + """Implements domain matching adhering to RFC 6265.""" + if hostname == domain: + return True + + if not hostname.endswith(domain): + return False + + non_matching = hostname[: -len(domain)] + + if not non_matching.endswith("."): + return False + + return not is_ip_address(hostname) + + @classmethod + def _parse_date(cls, date_str: str) -> Optional[int]: + """Implements date string parsing adhering to RFC 6265.""" + if not date_str: + return None + + found_time = False + found_day = False + found_month = False + found_year = False + + hour = minute = second = 0 + day = 0 + month = 0 + year = 0 + + for token_match in cls.DATE_TOKENS_RE.finditer(date_str): + + token = token_match.group("token") + + if not found_time: + time_match = cls.DATE_HMS_TIME_RE.match(token) + if time_match: + found_time = True + hour, minute, second = (int(s) for s in time_match.groups()) + continue + + if not found_day: + day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) + if day_match: + found_day = True + day = int(day_match.group()) + continue + + if not found_month: + month_match = cls.DATE_MONTH_RE.match(token) + if month_match: + found_month = True + assert month_match.lastindex is not None + month = month_match.lastindex + continue + + if not found_year: + year_match = cls.DATE_YEAR_RE.match(token) + if year_match: + found_year = True + year = int(year_match.group()) + + if 70 <= year <= 99: + year += 1900 + elif 0 <= year <= 69: + year += 2000 + + if False in (found_day, found_month, found_year, found_time): + return None + + if not 1 <= day <= 31: + return None + + if year < 1601 or hour > 23 or minute > 59 or second > 59: + return None + + return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1)) + + +class DummyCookieJar(AbstractCookieJar): + """Implements a dummy cookie storage. + + It can be used with the ClientSession when no cookie processing is needed. 
+ + """ + + def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + super().__init__(loop=loop) + + def __iter__(self) -> "Iterator[Morsel[str]]": + while False: + yield None + + def __len__(self) -> int: + return 0 + + def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + pass + + def clear_domain(self, domain: str) -> None: + pass + + def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None: + pass + + def filter_cookies(self, request_url: URL) -> "BaseCookie[str]": + return SimpleCookie() diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/formdata.py b/deepseek/lib/python3.10/site-packages/aiohttp/formdata.py new file mode 100644 index 0000000000000000000000000000000000000000..73056f4bc45f2ec140c00e7a3983e7e0e21c4343 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/formdata.py @@ -0,0 +1,182 @@ +import io +import warnings +from typing import Any, Iterable, List, Optional +from urllib.parse import urlencode + +from multidict import MultiDict, MultiDictProxy + +from . import hdrs, multipart, payload +from .helpers import guess_filename +from .payload import Payload + +__all__ = ("FormData",) + + +class FormData: + """Helper class for form body generation. + + Supports multipart/form-data and application/x-www-form-urlencoded. + """ + + def __init__( + self, + fields: Iterable[Any] = (), + quote_fields: bool = True, + charset: Optional[str] = None, + *, + default_to_multipart: bool = False, + ) -> None: + self._writer = multipart.MultipartWriter("form-data") + self._fields: List[Any] = [] + self._is_multipart = default_to_multipart + self._is_processed = False + self._quote_fields = quote_fields + self._charset = charset + + if isinstance(fields, dict): + fields = list(fields.items()) + elif not isinstance(fields, (list, tuple)): + fields = (fields,) + self.add_fields(*fields) + + @property + def is_multipart(self) -> bool: + return self._is_multipart + + def add_field( + self, + name: str, + value: Any, + *, + content_type: Optional[str] = None, + filename: Optional[str] = None, + content_transfer_encoding: Optional[str] = None, + ) -> None: + + if isinstance(value, io.IOBase): + self._is_multipart = True + elif isinstance(value, (bytes, bytearray, memoryview)): + msg = ( + "In v4, passing bytes will no longer create a file field. " + "Please explicitly use the filename parameter or pass a BytesIO object." + ) + if filename is None and content_transfer_encoding is None: + warnings.warn(msg, DeprecationWarning) + filename = name + + type_options: MultiDict[str] = MultiDict({"name": name}) + if filename is not None and not isinstance(filename, str): + raise TypeError("filename must be an instance of str. Got: %s" % filename) + if filename is None and isinstance(value, io.IOBase): + filename = guess_filename(value, name) + if filename is not None: + type_options["filename"] = filename + self._is_multipart = True + + headers = {} + if content_type is not None: + if not isinstance(content_type, str): + raise TypeError( + "content_type must be an instance of str. Got: %s" % content_type + ) + headers[hdrs.CONTENT_TYPE] = content_type + self._is_multipart = True + if content_transfer_encoding is not None: + if not isinstance(content_transfer_encoding, str): + raise TypeError( + "content_transfer_encoding must be an instance" + " of str. Got: %s" % content_transfer_encoding + ) + msg = ( + "content_transfer_encoding is deprecated. " + "To maintain compatibility with v4 please pass a BytesPayload." 
+            )
+            warnings.warn(msg, DeprecationWarning)
+            self._is_multipart = True
+
+        self._fields.append((type_options, headers, value))
+
+    def add_fields(self, *fields: Any) -> None:
+        to_add = list(fields)
+
+        while to_add:
+            rec = to_add.pop(0)
+
+            if isinstance(rec, io.IOBase):
+                k = guess_filename(rec, "unknown")
+                self.add_field(k, rec)  # type: ignore[arg-type]
+
+            elif isinstance(rec, (MultiDictProxy, MultiDict)):
+                to_add.extend(rec.items())
+
+            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
+                k, fp = rec
+                self.add_field(k, fp)  # type: ignore[arg-type]
+
+            else:
+                raise TypeError(
+                    "Only io.IOBase, multidict and (name, file) "
+                    "pairs allowed, use .add_field() for passing "
+                    "more complex parameters, got {!r}".format(rec)
+                )
+
+    def _gen_form_urlencoded(self) -> payload.BytesPayload:
+        # form data (x-www-form-urlencoded)
+        data = []
+        for type_options, _, value in self._fields:
+            data.append((type_options["name"], value))
+
+        charset = self._charset if self._charset is not None else "utf-8"
+
+        if charset == "utf-8":
+            content_type = "application/x-www-form-urlencoded"
+        else:
+            content_type = "application/x-www-form-urlencoded; charset=%s" % charset
+
+        return payload.BytesPayload(
+            urlencode(data, doseq=True, encoding=charset).encode(),
+            content_type=content_type,
+        )
+
+    def _gen_form_data(self) -> multipart.MultipartWriter:
+        """Encode a list of fields using the multipart/form-data MIME format"""
+        if self._is_processed:
+            raise RuntimeError("Form data has been processed already")
+        for dispparams, headers, value in self._fields:
+            try:
+                if hdrs.CONTENT_TYPE in headers:
+                    part = payload.get_payload(
+                        value,
+                        content_type=headers[hdrs.CONTENT_TYPE],
+                        headers=headers,
+                        encoding=self._charset,
+                    )
+                else:
+                    part = payload.get_payload(
+                        value, headers=headers, encoding=self._charset
+                    )
+            except Exception as exc:
+                raise TypeError(
+                    "Can not serialize value type: %r\n "
+                    "headers: %r\n value: %r" % (type(value), headers, value)
+                ) from exc
+
+            if dispparams:
+                part.set_content_disposition(
+                    "form-data", quote_fields=self._quote_fields, **dispparams
+                )
+                # FIXME cgi.FieldStorage doesn't like body parts with
+                # Content-Length which were sent via chunked transfer encoding
+                assert part.headers is not None
+                part.headers.popall(hdrs.CONTENT_LENGTH, None)
+
+            self._writer.append_payload(part)
+
+        self._is_processed = True
+        return self._writer
+
+    def __call__(self) -> Payload:
+        if self._is_multipart:
+            return self._gen_form_data()
+        else:
+            return self._gen_form_urlencoded()
diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/helpers.py b/deepseek/lib/python3.10/site-packages/aiohttp/helpers.py
new file mode 100644
index 0000000000000000000000000000000000000000..8038931ebec525c614ffd3539f38fd4e8214c2fb
--- /dev/null
+++ b/deepseek/lib/python3.10/site-packages/aiohttp/helpers.py
@@ -0,0 +1,944 @@
+"""Various helper functions"""
+
+import asyncio
+import base64
+import binascii
+import contextlib
+import datetime
+import enum
+import functools
+import inspect
+import netrc
+import os
+import platform
+import re
+import sys
+import time
+import weakref
+from collections import namedtuple
+from contextlib import suppress
+from email.parser import HeaderParser
+from email.utils import parsedate
+from math import ceil
+from pathlib import Path
+from types import TracebackType
+from typing import (
+    Any,
+    Callable,
+    ContextManager,
+    Dict,
+    Generator,
+    Generic,
+    Iterable,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Protocol,
+    Tuple,
+    Type,
+    TypeVar,
+
Union, + get_args, + overload, +) +from urllib.parse import quote +from urllib.request import getproxies, proxy_bypass + +import attr +from multidict import MultiDict, MultiDictProxy, MultiMapping +from propcache.api import under_cached_property as reify +from yarl import URL + +from . import hdrs +from .log import client_logger + +if sys.version_info >= (3, 11): + import asyncio as async_timeout +else: + import async_timeout + +__all__ = ("BasicAuth", "ChainMapProxy", "ETag", "reify") + +IS_MACOS = platform.system() == "Darwin" +IS_WINDOWS = platform.system() == "Windows" + +PY_310 = sys.version_info >= (3, 10) +PY_311 = sys.version_info >= (3, 11) + + +_T = TypeVar("_T") +_S = TypeVar("_S") + +_SENTINEL = enum.Enum("_SENTINEL", "sentinel") +sentinel = _SENTINEL.sentinel + +NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) + +# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 +EMPTY_BODY_STATUS_CODES = frozenset((204, 304, *range(100, 200))) +# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1 +# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2 +EMPTY_BODY_METHODS = hdrs.METH_HEAD_ALL + +DEBUG = sys.flags.dev_mode or ( + not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG")) +) + + +CHAR = {chr(i) for i in range(0, 128)} +CTL = {chr(i) for i in range(0, 32)} | { + chr(127), +} +SEPARATORS = { + "(", + ")", + "<", + ">", + "@", + ",", + ";", + ":", + "\\", + '"', + "/", + "[", + "]", + "?", + "=", + "{", + "}", + " ", + chr(9), +} +TOKEN = CHAR ^ CTL ^ SEPARATORS + + +class noop: + def __await__(self) -> Generator[None, None, None]: + yield + + +class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])): + """Http basic authentication helper.""" + + def __new__( + cls, login: str, password: str = "", encoding: str = "latin1" + ) -> "BasicAuth": + if login is None: + raise ValueError("None is not allowed as login value") + + if password is None: + raise ValueError("None is not allowed as password value") + + if ":" in login: + raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)') + + return super().__new__(cls, login, password, encoding) + + @classmethod + def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth": + """Create a BasicAuth object from an Authorization HTTP header.""" + try: + auth_type, encoded_credentials = auth_header.split(" ", 1) + except ValueError: + raise ValueError("Could not parse authorization header.") + + if auth_type.lower() != "basic": + raise ValueError("Unknown authorization method %s" % auth_type) + + try: + decoded = base64.b64decode( + encoded_credentials.encode("ascii"), validate=True + ).decode(encoding) + except binascii.Error: + raise ValueError("Invalid base64 encoding.") + + try: + # RFC 2617 HTTP Authentication + # https://www.ietf.org/rfc/rfc2617.txt + # the colon must be present, but the username and password may be + # otherwise blank. + username, password = decoded.split(":", 1) + except ValueError: + raise ValueError("Invalid credentials.") + + return cls(username, password, encoding=encoding) + + @classmethod + def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]: + """Create BasicAuth from url.""" + if not isinstance(url, URL): + raise TypeError("url should be yarl.URL instance") + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. 
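+        # Illustrative: URL("http://alice:secret@example.com") yields
+        # BasicAuth(login="alice", password="secret").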
+ if url.raw_user is None and url.raw_password is None: + return None + return cls(url.user or "", url.password or "", encoding=encoding) + + def encode(self) -> str: + """Encode credentials.""" + creds = (f"{self.login}:{self.password}").encode(self.encoding) + return "Basic %s" % base64.b64encode(creds).decode(self.encoding) + + +def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Remove user and password from URL if present and return BasicAuth object.""" + # Check raw_user and raw_password first as yarl is likely + # to already have these values parsed from the netloc in the cache. + if url.raw_user is None and url.raw_password is None: + return url, None + return url.with_user(None), BasicAuth(url.user or "", url.password or "") + + +def netrc_from_env() -> Optional[netrc.netrc]: + """Load netrc from file. + + Attempt to load it from the path specified by the env-var + NETRC or in the default location in the user's home directory. + + Returns None if it couldn't be found or fails to parse. + """ + netrc_env = os.environ.get("NETRC") + + if netrc_env is not None: + netrc_path = Path(netrc_env) + else: + try: + home_dir = Path.home() + except RuntimeError as e: # pragma: no cover + # if pathlib can't resolve home, it may raise a RuntimeError + client_logger.debug( + "Could not resolve home directory when " + "trying to look for .netrc file: %s", + e, + ) + return None + + netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc") + + try: + return netrc.netrc(str(netrc_path)) + except netrc.NetrcParseError as e: + client_logger.warning("Could not parse .netrc file: %s", e) + except OSError as e: + netrc_exists = False + with contextlib.suppress(OSError): + netrc_exists = netrc_path.is_file() + # we couldn't read the file (doesn't exist, permissions, etc.) + if netrc_env or netrc_exists: + # only warn if the environment wanted us to load it, + # or it appears like the default file does actually exist + client_logger.warning("Could not read .netrc file: %s", e) + + return None + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ProxyInfo: + proxy: URL + proxy_auth: Optional[BasicAuth] + + +def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth: + """ + Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``. + + :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no + entry is found for the ``host``. + """ + if netrc_obj is None: + raise LookupError("No .netrc file found") + auth_from_netrc = netrc_obj.authenticators(host) + + if auth_from_netrc is None: + raise LookupError(f"No entry for {host!s} found in the `.netrc` file.") + login, account, password = auth_from_netrc + + # TODO(PY311): username = login or account + # Up to python 3.10, account could be None if not specified, + # and login will be empty string if not specified. From 3.11, + # login and account will be empty string if not specified. 
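+    # Illustrative: a ``~/.netrc`` entry ``machine example.com login alice
+    # password s3cret`` resolves here to BasicAuth("alice", "s3cret").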
+ username = login if (login or account is None) else account + + # TODO(PY311): Remove this, as password will be empty string + # if not specified + if password is None: + password = "" + + return BasicAuth(username, password) + + +def proxies_from_env() -> Dict[str, ProxyInfo]: + proxy_urls = { + k: URL(v) + for k, v in getproxies().items() + if k in ("http", "https", "ws", "wss") + } + netrc_obj = netrc_from_env() + stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()} + ret = {} + for proto, val in stripped.items(): + proxy, auth = val + if proxy.scheme in ("https", "wss"): + client_logger.warning( + "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy + ) + continue + if netrc_obj and auth is None: + if proxy.host is not None: + try: + auth = basicauth_from_netrc(netrc_obj, proxy.host) + except LookupError: + auth = None + ret[proto] = ProxyInfo(proxy, auth) + return ret + + +def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: + """Get a permitted proxy for the given URL from the env.""" + if url.host is not None and proxy_bypass(url.host): + raise LookupError(f"Proxying is disallowed for `{url.host!r}`") + + proxies_in_env = proxies_from_env() + try: + proxy_info = proxies_in_env[url.scheme] + except KeyError: + raise LookupError(f"No proxies found for `{url!s}` in the env") + else: + return proxy_info.proxy, proxy_info.proxy_auth + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class MimeType: + type: str + subtype: str + suffix: str + parameters: "MultiDictProxy[str]" + + +@functools.lru_cache(maxsize=56) +def parse_mimetype(mimetype: str) -> MimeType: + """Parses a MIME type into its components. + + mimetype is a MIME type string. + + Returns a MimeType object. + + Example: + + >>> parse_mimetype('text/html; charset=utf-8') + MimeType(type='text', subtype='html', suffix='', + parameters={'charset': 'utf-8'}) + + """ + if not mimetype: + return MimeType( + type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict()) + ) + + parts = mimetype.split(";") + params: MultiDict[str] = MultiDict() + for item in parts[1:]: + if not item: + continue + key, _, value = item.partition("=") + params.add(key.lower().strip(), value.strip(' "')) + + fulltype = parts[0].strip().lower() + if fulltype == "*": + fulltype = "*/*" + + mtype, _, stype = fulltype.partition("/") + stype, _, suffix = stype.partition("+") + + return MimeType( + type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params) + ) + + +def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: + name = getattr(obj, "name", None) + if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": + return Path(name).name + return default + + +not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]") +QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"} + + +def quoted_string(content: str) -> str: + """Return 7-bit content as quoted-string. + + Format content into a quoted-string as defined in RFC5322 for + Internet Message Format. Notice that this is not the 8-bit HTTP + format, but the 7-bit email format. Content must be in usascii or + a ValueError is raised. + """ + if not (QCONTENT > set(content)): + raise ValueError(f"bad content for quoted-string {content!r}") + return not_qtext_re.sub(lambda x: "\\" + x.group(0), content) + + +def content_disposition_header( + disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str +) -> str: + """Sets ``Content-Disposition`` header for MIME. 
+
+    This is the MIME payload Content-Disposition header from RFC 2183
+    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
+    RFC 6266.
+
+    disptype is a disposition type: inline, attachment, form-data.
+    Should be a valid extension token (see RFC 2183).
+
+    quote_fields performs value quoting to 7-bit MIME headers
+    according to RFC 7578. Set quote_fields to False if the recipient
+    can take 8-bit file names and field values.
+
+    _charset specifies the charset to use when quote_fields is True.
+
+    params is a dict with disposition params.
+    """
+    if not disptype or not (TOKEN > set(disptype)):
+        raise ValueError(f"bad content disposition type {disptype!r}")
+
+    value = disptype
+    if params:
+        lparams = []
+        for key, val in params.items():
+            if not key or not (TOKEN > set(key)):
+                raise ValueError(f"bad content disposition parameter {key!r}={val!r}")
+            if quote_fields:
+                if key.lower() == "filename":
+                    qval = quote(val, "", encoding=_charset)
+                    lparams.append((key, '"%s"' % qval))
+                else:
+                    try:
+                        qval = quoted_string(val)
+                    except ValueError:
+                        qval = "".join(
+                            (_charset, "''", quote(val, "", encoding=_charset))
+                        )
+                        lparams.append((key + "*", qval))
+                    else:
+                        lparams.append((key, '"%s"' % qval))
+            else:
+                qval = val.replace("\\", "\\\\").replace('"', '\\"')
+                lparams.append((key, '"%s"' % qval))
+        sparams = "; ".join("=".join(pair) for pair in lparams)
+        value = "; ".join((value, sparams))
+    return value
+
+
+def is_ip_address(host: Optional[str]) -> bool:
+    """Check if host looks like an IP Address.
+
+    This check is only meant as a heuristic to ensure that
+    a host is not a domain name.
+    """
+    if not host:
+        return False
+    # For a host to be an ipv4 address, it must be all numeric.
+    # The host must contain a colon to be an IPv6 address.
+    return ":" in host or host.replace(".", "").isdigit()
+
+
+_cached_current_datetime: Optional[int] = None
+_cached_formatted_datetime = ""
+
+
+def rfc822_formatted_time() -> str:
+    global _cached_current_datetime
+    global _cached_formatted_datetime
+
+    now = int(time.time())
+    if now != _cached_current_datetime:
+        # Weekday and month names for HTTP date/time formatting;
+        # always English!
+        # Tuples are constants stored in codeobject!
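+        # For illustration, the format string built below yields
+        # IMF-fixdate strings such as "Thu, 01 Jan 1970 00:00:00 GMT"
+        # (time.gmtime(0)), per RFC 9110 section 5.6.7.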
+ _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun") + _monthname = ( + "", # Dummy so we can use 1-based month numbers + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", + ) + + year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now) + _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( + _weekdayname[wd], + day, + _monthname[month], + year, + hh, + mm, + ss, + ) + _cached_current_datetime = now + return _cached_formatted_datetime + + +def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: + ref, name = info + ob = ref() + if ob is not None: + with suppress(Exception): + getattr(ob, name)() + + +def weakref_handle( + ob: object, + name: str, + timeout: float, + loop: asyncio.AbstractEventLoop, + timeout_ceil_threshold: float = 5, +) -> Optional[asyncio.TimerHandle]: + if timeout is not None and timeout > 0: + when = loop.time() + timeout + if timeout >= timeout_ceil_threshold: + when = ceil(when) + + return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name)) + return None + + +def call_later( + cb: Callable[[], Any], + timeout: float, + loop: asyncio.AbstractEventLoop, + timeout_ceil_threshold: float = 5, +) -> Optional[asyncio.TimerHandle]: + if timeout is None or timeout <= 0: + return None + now = loop.time() + when = calculate_timeout_when(now, timeout, timeout_ceil_threshold) + return loop.call_at(when, cb) + + +def calculate_timeout_when( + loop_time: float, + timeout: float, + timeout_ceiling_threshold: float, +) -> float: + """Calculate when to execute a timeout.""" + when = loop_time + timeout + if timeout > timeout_ceiling_threshold: + return ceil(when) + return when + + +class TimeoutHandle: + """Timeout handle""" + + __slots__ = ("_timeout", "_loop", "_ceil_threshold", "_callbacks") + + def __init__( + self, + loop: asyncio.AbstractEventLoop, + timeout: Optional[float], + ceil_threshold: float = 5, + ) -> None: + self._timeout = timeout + self._loop = loop + self._ceil_threshold = ceil_threshold + self._callbacks: List[ + Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] + ] = [] + + def register( + self, callback: Callable[..., None], *args: Any, **kwargs: Any + ) -> None: + self._callbacks.append((callback, args, kwargs)) + + def close(self) -> None: + self._callbacks.clear() + + def start(self) -> Optional[asyncio.TimerHandle]: + timeout = self._timeout + if timeout is not None and timeout > 0: + when = self._loop.time() + timeout + if timeout >= self._ceil_threshold: + when = ceil(when) + return self._loop.call_at(when, self.__call__) + else: + return None + + def timer(self) -> "BaseTimerContext": + if self._timeout is not None and self._timeout > 0: + timer = TimerContext(self._loop) + self.register(timer.timeout) + return timer + else: + return TimerNoop() + + def __call__(self) -> None: + for cb, args, kwargs in self._callbacks: + with suppress(Exception): + cb(*args, **kwargs) + + self._callbacks.clear() + + +class BaseTimerContext(ContextManager["BaseTimerContext"]): + + __slots__ = () + + def assert_timeout(self) -> None: + """Raise TimeoutError if timeout has been exceeded.""" + + +class TimerNoop(BaseTimerContext): + + __slots__ = () + + def __enter__(self) -> BaseTimerContext: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + return + + +class TimerContext(BaseTimerContext): + """Low resolution timeout context manager""" 
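+    # Rough usage sketch (normally wired up via TimeoutHandle.timer()
+    # above; the names here are illustrative, not part of the API):
+    #
+    #     handle = TimeoutHandle(loop, timeout=5.0)
+    #     handle.start()        # schedules handle() via loop.call_at
+    #     with handle.timer():  # TimerContext registered for cancellation
+    #         await fetch()     # cancelled -> raises asyncio.TimeoutError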
+ + __slots__ = ("_loop", "_tasks", "_cancelled", "_cancelling") + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._tasks: List[asyncio.Task[Any]] = [] + self._cancelled = False + self._cancelling = 0 + + def assert_timeout(self) -> None: + """Raise TimeoutError if timer has already been cancelled.""" + if self._cancelled: + raise asyncio.TimeoutError from None + + def __enter__(self) -> BaseTimerContext: + task = asyncio.current_task(loop=self._loop) + if task is None: + raise RuntimeError("Timeout context manager should be used inside a task") + + if sys.version_info >= (3, 11): + # Remember if the task was already cancelling + # so when we __exit__ we can decide if we should + # raise asyncio.TimeoutError or let the cancellation propagate + self._cancelling = task.cancelling() + + if self._cancelled: + raise asyncio.TimeoutError from None + + self._tasks.append(task) + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Optional[bool]: + enter_task: Optional[asyncio.Task[Any]] = None + if self._tasks: + enter_task = self._tasks.pop() + + if exc_type is asyncio.CancelledError and self._cancelled: + assert enter_task is not None + # The timeout was hit, and the task was cancelled + # so we need to uncancel the last task that entered the context manager + # since the cancellation should not leak out of the context manager + if sys.version_info >= (3, 11): + # If the task was already cancelling don't raise + # asyncio.TimeoutError and instead return None + # to allow the cancellation to propagate + if enter_task.uncancel() > self._cancelling: + return None + raise asyncio.TimeoutError from exc_val + return None + + def timeout(self) -> None: + if not self._cancelled: + for task in set(self._tasks): + task.cancel() + + self._cancelled = True + + +def ceil_timeout( + delay: Optional[float], ceil_threshold: float = 5 +) -> async_timeout.Timeout: + if delay is None or delay <= 0: + return async_timeout.timeout(None) + + loop = asyncio.get_running_loop() + now = loop.time() + when = now + delay + if delay > ceil_threshold: + when = ceil(when) + return async_timeout.timeout_at(when) + + +class HeadersMixin: + """Mixin for handling headers.""" + + ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) + + _headers: MultiMapping[str] + _content_type: Optional[str] = None + _content_dict: Optional[Dict[str, str]] = None + _stored_content_type: Union[str, None, _SENTINEL] = sentinel + + def _parse_content_type(self, raw: Optional[str]) -> None: + self._stored_content_type = raw + if raw is None: + # default value according to RFC 2616 + self._content_type = "application/octet-stream" + self._content_dict = {} + else: + msg = HeaderParser().parsestr("Content-Type: " + raw) + self._content_type = msg.get_content_type() + params = msg.get_params(()) + self._content_dict = dict(params[1:]) # First element is content type again + + @property + def content_type(self) -> str: + """The value of content part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) + if self._stored_content_type != raw: + self._parse_content_type(raw) + assert self._content_type is not None + return self._content_type + + @property + def charset(self) -> Optional[str]: + """The value of charset part for Content-Type HTTP header.""" + raw = self._headers.get(hdrs.CONTENT_TYPE) + if self._stored_content_type != raw: + 
+            self._parse_content_type(raw)
+        assert self._content_dict is not None
+        return self._content_dict.get("charset")
+
+    @property
+    def content_length(self) -> Optional[int]:
+        """The value of Content-Length HTTP header."""
+        content_length = self._headers.get(hdrs.CONTENT_LENGTH)
+        return None if content_length is None else int(content_length)
+
+
+def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
+    if not fut.done():
+        fut.set_result(result)
+
+
+_EXC_SENTINEL = BaseException()
+
+
+class ErrorableProtocol(Protocol):
+    def set_exception(
+        self,
+        exc: BaseException,
+        exc_cause: BaseException = ...,
+    ) -> None: ...  # pragma: no cover
+
+
+def set_exception(
+    fut: "asyncio.Future[_T] | ErrorableProtocol",
+    exc: BaseException,
+    exc_cause: BaseException = _EXC_SENTINEL,
+) -> None:
+    """Set future exception.
+
+    If the future is marked as complete, this function is a no-op.
+
+    :param exc_cause: An exception that is a direct cause of ``exc``.
+                      Only set if provided.
+    """
+    if asyncio.isfuture(fut) and fut.done():
+        return
+
+    exc_is_sentinel = exc_cause is _EXC_SENTINEL
+    exc_causes_itself = exc is exc_cause
+    if not exc_is_sentinel and not exc_causes_itself:
+        exc.__cause__ = exc_cause
+
+    fut.set_exception(exc)
+
+
+@functools.total_ordering
+class AppKey(Generic[_T]):
+    """Keys for static typing support in Application."""
+
+    __slots__ = ("_name", "_t", "__orig_class__")
+
+    # This may be set by Python when instantiating with a generic type. We need to
+    # support this, in order to support types that are not concrete classes,
+    # like Iterable, which can't be passed as the second parameter to __init__.
+    __orig_class__: Type[object]
+
+    def __init__(self, name: str, t: Optional[Type[_T]] = None):
+        # Prefix with module name to help deduplicate key names.
+        frame = inspect.currentframe()
+        while frame:
+            if frame.f_code.co_name == "<module>":
+                module: str = frame.f_globals["__name__"]
+                break
+            frame = frame.f_back
+
+        self._name = module + "." + name
+        self._t = t
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, AppKey):
+            return self._name < other._name
+        return True  # Order AppKey above other types.
+
+    def __repr__(self) -> str:
+        t = self._t
+        if t is None:
+            with suppress(AttributeError):
+                # Set to type arg.
+                t = get_args(self.__orig_class__)[0]
+
+        if t is None:
+            t_repr = "<<Unknown>>"
+        elif isinstance(t, type):
+            if t.__module__ == "builtins":
+                t_repr = t.__qualname__
+            else:
+                t_repr = f"{t.__module__}.{t.__qualname__}"
+        else:
+            t_repr = repr(t)
+        return f"<AppKey({self._name}, type={t_repr})>"
+
+
+class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
+    __slots__ = ("_maps",)
+
+    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
+        self._maps = tuple(maps)
+
+    def __init_subclass__(cls) -> None:
+        raise TypeError(
+            "Inheritance class {} from ChainMapProxy "
+            "is forbidden".format(cls.__name__)
+        )
+
+    @overload  # type: ignore[override]
+    def __getitem__(self, key: AppKey[_T]) -> _T: ...
+
+    @overload
+    def __getitem__(self, key: str) -> Any: ...
+
+    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
+        for mapping in self._maps:
+            try:
+                return mapping[key]
+            except KeyError:
+                pass
+        raise KeyError(key)
+
+    @overload  # type: ignore[override]
+    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ...
+
+    @overload
+    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ...
+
+    @overload
+    def get(self, key: str, default: Any = ...) -> Any: ...
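+    # Illustrative lookup order (first mapping wins, as with
+    # collections.ChainMap):
+    #
+    #     >>> cmp = ChainMapProxy([{"a": 1}, {"a": 2, "b": 3}])
+    #     >>> cmp["a"], cmp.get("b"), cmp.get("c", 0)
+    #     (1, 3, 0)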
+ + def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + try: + return self[key] + except KeyError: + return default + + def __len__(self) -> int: + # reuses stored hash values if possible + return len(set().union(*self._maps)) + + def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: + d: Dict[Union[str, AppKey[Any]], Any] = {} + for mapping in reversed(self._maps): + # reuses stored hash values if possible + d.update(mapping) + return iter(d) + + def __contains__(self, key: object) -> bool: + return any(key in m for m in self._maps) + + def __bool__(self) -> bool: + return any(self._maps) + + def __repr__(self) -> str: + content = ", ".join(map(repr, self._maps)) + return f"ChainMapProxy({content})" + + +# https://tools.ietf.org/html/rfc7232#section-2.3 +_ETAGC = r"[!\x23-\x7E\x80-\xff]+" +_ETAGC_RE = re.compile(_ETAGC) +_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"' +QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG) +LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)") + +ETAG_ANY = "*" + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class ETag: + value: str + is_weak: bool = False + + +def validate_etag_value(value: str) -> None: + if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value): + raise ValueError( + f"Value {value!r} is not a valid etag. Maybe it contains '\"'?" + ) + + +def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: + """Process a date string, return a datetime object""" + if date_str is not None: + timetuple = parsedate(date_str) + if timetuple is not None: + with suppress(ValueError): + return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) + return None + + +@functools.lru_cache +def must_be_empty_body(method: str, code: int) -> bool: + """Check if a request must return an empty body.""" + return ( + code in EMPTY_BODY_STATUS_CODES + or method in EMPTY_BODY_METHODS + or (200 <= code < 300 and method in hdrs.METH_CONNECT_ALL) + ) + + +def should_remove_content_length(method: str, code: int) -> bool: + """Check if a Content-Length header should be removed. + + This should always be a subset of must_be_empty_body + """ + # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8 + # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4 + return code in EMPTY_BODY_STATUS_CODES or ( + 200 <= code < 300 and method in hdrs.METH_CONNECT_ALL + ) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/http.py b/deepseek/lib/python3.10/site-packages/aiohttp/http.py new file mode 100644 index 0000000000000000000000000000000000000000..a1feae2d9b8fe631d539a15dbf8e5ea2914d70d5 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/http.py @@ -0,0 +1,72 @@ +import sys +from http import HTTPStatus +from typing import Mapping, Tuple + +from . 
import __version__ +from .http_exceptions import HttpProcessingError as HttpProcessingError +from .http_parser import ( + HeadersParser as HeadersParser, + HttpParser as HttpParser, + HttpRequestParser as HttpRequestParser, + HttpResponseParser as HttpResponseParser, + RawRequestMessage as RawRequestMessage, + RawResponseMessage as RawResponseMessage, +) +from .http_websocket import ( + WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE, + WS_KEY as WS_KEY, + WebSocketError as WebSocketError, + WebSocketReader as WebSocketReader, + WebSocketWriter as WebSocketWriter, + WSCloseCode as WSCloseCode, + WSMessage as WSMessage, + WSMsgType as WSMsgType, + ws_ext_gen as ws_ext_gen, + ws_ext_parse as ws_ext_parse, +) +from .http_writer import ( + HttpVersion as HttpVersion, + HttpVersion10 as HttpVersion10, + HttpVersion11 as HttpVersion11, + StreamWriter as StreamWriter, +) + +__all__ = ( + "HttpProcessingError", + "RESPONSES", + "SERVER_SOFTWARE", + # .http_writer + "StreamWriter", + "HttpVersion", + "HttpVersion10", + "HttpVersion11", + # .http_parser + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", + # .http_websocket + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "ws_ext_gen", + "ws_ext_parse", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", +) + + +SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( + sys.version_info, __version__ +) + +RESPONSES: Mapping[int, Tuple[str, str]] = { + v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() +} diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/http_exceptions.py b/deepseek/lib/python3.10/site-packages/aiohttp/http_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..b8dda999acf599c1dbb616c20037c255ba27aa5b --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/http_exceptions.py @@ -0,0 +1,112 @@ +"""Low-level http related exceptions.""" + +from textwrap import indent +from typing import Optional, Union + +from .typedefs import _CIMultiDict + +__all__ = ("HttpProcessingError",) + + +class HttpProcessingError(Exception): + """HTTP error. + + Shortcut for raising HTTP errors with custom code, message and headers. + + code: HTTP Error code. + message: (optional) Error message. 
+ headers: (optional) Headers to be sent in response, a list of pairs + """ + + code = 0 + message = "" + headers = None + + def __init__( + self, + *, + code: Optional[int] = None, + message: str = "", + headers: Optional[_CIMultiDict] = None, + ) -> None: + if code is not None: + self.code = code + self.headers = headers + self.message = message + + def __str__(self) -> str: + msg = indent(self.message, " ") + return f"{self.code}, message:\n{msg}" + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>" + + +class BadHttpMessage(HttpProcessingError): + + code = 400 + message = "Bad Request" + + def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: + super().__init__(message=message, headers=headers) + self.args = (message,) + + +class HttpBadRequest(BadHttpMessage): + + code = 400 + message = "Bad Request" + + +class PayloadEncodingError(BadHttpMessage): + """Base class for payload errors""" + + +class ContentEncodingError(PayloadEncodingError): + """Content encoding error.""" + + +class TransferEncodingError(PayloadEncodingError): + """transfer encoding error.""" + + +class ContentLengthError(PayloadEncodingError): + """Not enough data for satisfy content length header.""" + + +class LineTooLong(BadHttpMessage): + def __init__( + self, line: str, limit: str = "Unknown", actual_size: str = "Unknown" + ) -> None: + super().__init__( + f"Got more than {limit} bytes ({actual_size}) when reading {line}." + ) + self.args = (line, limit, actual_size) + + +class InvalidHeader(BadHttpMessage): + def __init__(self, hdr: Union[bytes, str]) -> None: + hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr + super().__init__(f"Invalid HTTP header: {hdr!r}") + self.hdr = hdr_s + self.args = (hdr,) + + +class BadStatusLine(BadHttpMessage): + def __init__(self, line: str = "", error: Optional[str] = None) -> None: + if not isinstance(line, str): + line = repr(line) + super().__init__(error or f"Bad status line {line!r}") + self.args = (line,) + self.line = line + + +class BadHttpMethod(BadStatusLine): + """Invalid HTTP method in status line.""" + + def __init__(self, line: str = "", error: Optional[str] = None) -> None: + super().__init__(line, error or f"Bad HTTP method in status line {line!r}") + + +class InvalidURLError(BadHttpMessage): + pass diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/http_parser.py b/deepseek/lib/python3.10/site-packages/aiohttp/http_parser.py new file mode 100644 index 0000000000000000000000000000000000000000..1b8b5b4d49e1cc14a9d9659da7f5533d77a44cc7 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/http_parser.py @@ -0,0 +1,1046 @@ +import abc +import asyncio +import re +import string +from contextlib import suppress +from enum import IntEnum +from typing import ( + Any, + ClassVar, + Final, + Generic, + List, + Literal, + NamedTuple, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, istr +from yarl import URL + +from . 
import hdrs +from .base_protocol import BaseProtocol +from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor +from .helpers import ( + _EXC_SENTINEL, + DEBUG, + EMPTY_BODY_METHODS, + EMPTY_BODY_STATUS_CODES, + NO_EXTENSIONS, + BaseTimerContext, + set_exception, +) +from .http_exceptions import ( + BadHttpMessage, + BadHttpMethod, + BadStatusLine, + ContentEncodingError, + ContentLengthError, + InvalidHeader, + InvalidURLError, + LineTooLong, + TransferEncodingError, +) +from .http_writer import HttpVersion, HttpVersion10 +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import RawHeaders + +__all__ = ( + "HeadersParser", + "HttpParser", + "HttpRequestParser", + "HttpResponseParser", + "RawRequestMessage", + "RawResponseMessage", +) + +_SEP = Literal[b"\r\n", b"\n"] + +ASCIISET: Final[Set[str]] = set(string.printable) + +# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview +# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens +# +# method = token +# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." / +# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA +# token = 1*tchar +_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~") +TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+") +VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII) +DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII) +HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+") + + +class RawRequestMessage(NamedTuple): + method: str + path: str + version: HttpVersion + headers: "CIMultiDictProxy[str]" + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + url: URL + + +class RawResponseMessage(NamedTuple): + version: HttpVersion + code: int + reason: str + headers: CIMultiDictProxy[str] + raw_headers: RawHeaders + should_close: bool + compression: Optional[str] + upgrade: bool + chunked: bool + + +_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage) + + +class ParseState(IntEnum): + + PARSE_NONE = 0 + PARSE_LENGTH = 1 + PARSE_CHUNKED = 2 + PARSE_UNTIL_EOF = 3 + + +class ChunkState(IntEnum): + PARSE_CHUNKED_SIZE = 0 + PARSE_CHUNKED_CHUNK = 1 + PARSE_CHUNKED_CHUNK_EOF = 2 + PARSE_MAYBE_TRAILERS = 3 + PARSE_TRAILERS = 4 + + +class HeadersParser: + def __init__( + self, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lax: bool = False, + ) -> None: + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self._lax = lax + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: + headers: CIMultiDict[str] = CIMultiDict() + # note: "raw" does not mean inclusion of OWS before/after the field value + raw_headers = [] + + lines_idx = 1 + line = lines[1] + line_count = len(lines) + + while line: + # Parse initial header name : value pair. 
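+            # e.g. b"Content-Type: text/html" splits into
+            # bname=b"Content-Type" and bvalue=b" text/html" (the OWS is
+            # stripped below); a line without b":" raises InvalidHeader.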
+ try: + bname, bvalue = line.split(b":", 1) + except ValueError: + raise InvalidHeader(line) from None + + if len(bname) == 0: + raise InvalidHeader(bname) + + # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2 + if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"} + raise InvalidHeader(line) + + bvalue = bvalue.lstrip(b" \t") + if len(bname) > self.max_field_size: + raise LineTooLong( + "request header name {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(len(bname)), + ) + name = bname.decode("utf-8", "surrogateescape") + if not TOKENRE.fullmatch(name): + raise InvalidHeader(bname) + + header_length = len(bvalue) + + # next line + lines_idx += 1 + line = lines[lines_idx] + + # consume continuation lines + continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t') + + # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding + if continuation: + bvalue_lst = [bvalue] + while continuation: + header_length += len(line) + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(header_length), + ) + bvalue_lst.append(line) + + # next line + lines_idx += 1 + if lines_idx < line_count: + line = lines[lines_idx] + if line: + continuation = line[0] in (32, 9) # (' ', '\t') + else: + line = b"" + break + bvalue = b"".join(bvalue_lst) + else: + if header_length > self.max_field_size: + raise LineTooLong( + "request header field {}".format( + bname.decode("utf8", "backslashreplace") + ), + str(self.max_field_size), + str(header_length), + ) + + bvalue = bvalue.strip(b" \t") + value = bvalue.decode("utf-8", "surrogateescape") + + # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5 + if "\n" in value or "\r" in value or "\x00" in value: + raise InvalidHeader(bvalue) + + headers.add(name, value) + raw_headers.append((bname, bvalue)) + + return (CIMultiDictProxy(headers), tuple(raw_headers)) + + +def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool: + """Check if the upgrade header is supported.""" + return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"} + + +class HttpParser(abc.ABC, Generic[_MsgT]): + lax: ClassVar[bool] = False + + def __init__( + self, + protocol: Optional[BaseProtocol] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + limit: int = 2**16, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + timer: Optional[BaseTimerContext] = None, + code: Optional[int] = None, + method: Optional[str] = None, + payload_exception: Optional[Type[BaseException]] = None, + response_with_body: bool = True, + read_until_eof: bool = False, + auto_decompress: bool = True, + ) -> None: + self.protocol = protocol + self.loop = loop + self.max_line_size = max_line_size + self.max_headers = max_headers + self.max_field_size = max_field_size + self.timer = timer + self.code = code + self.method = method + self.payload_exception = payload_exception + self.response_with_body = response_with_body + self.read_until_eof = read_until_eof + + self._lines: List[bytes] = [] + self._tail = b"" + self._upgraded = False + self._payload = None + self._payload_parser: Optional[HttpPayloadParser] = None + self._auto_decompress = auto_decompress + self._limit = limit + self._headers_parser = HeadersParser( + max_line_size, max_headers, max_field_size, self.lax + ) + + @abc.abstractmethod + def parse_message(self, lines: List[bytes]) -> 
_MsgT: ... + + @abc.abstractmethod + def _is_chunked_te(self, te: str) -> bool: ... + + def feed_eof(self) -> Optional[_MsgT]: + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + else: + # try to extract partial message + if self._tail: + self._lines.append(self._tail) + + if self._lines: + if self._lines[-1] != "\r\n": + self._lines.append(b"") + with suppress(Exception): + return self.parse_message(self._lines) + return None + + def feed_data( + self, + data: bytes, + SEP: _SEP = b"\r\n", + EMPTY: bytes = b"", + CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, + METH_CONNECT: str = hdrs.METH_CONNECT, + SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, + ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: + + messages = [] + + if self._tail: + data, self._tail = self._tail + data, b"" + + data_len = len(data) + start_pos = 0 + loop = self.loop + + should_close = False + while start_pos < data_len: + + # read HTTP message (request/response line + headers), \r\n\r\n + # and split by lines + if self._payload_parser is None and not self._upgraded: + pos = data.find(SEP, start_pos) + # consume \r\n + if pos == start_pos and not self._lines: + start_pos = pos + len(SEP) + continue + + if pos >= start_pos: + if should_close: + raise BadHttpMessage("Data after `Connection: close`") + + # line found + line = data[start_pos:pos] + if SEP == b"\n": # For lax response parsing + line = line.rstrip(b"\r") + self._lines.append(line) + start_pos = pos + len(SEP) + + # \r\n\r\n found + if self._lines[-1] == EMPTY: + try: + msg: _MsgT = self.parse_message(self._lines) + finally: + self._lines.clear() + + def get_content_length() -> Optional[int]: + # payload length + length_hdr = msg.headers.get(CONTENT_LENGTH) + if length_hdr is None: + return None + + # Shouldn't allow +/- or other number formats. 
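+                        # e.g. "42" is accepted, while "+42", "-1",
+                        # "4_2" or "0x2a" all fail DIGITS.fullmatch and
+                        # raise InvalidHeader(CONTENT_LENGTH).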
+ # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2 + # msg.headers is already stripped of leading/trailing wsp + if not DIGITS.fullmatch(length_hdr): + raise InvalidHeader(CONTENT_LENGTH) + + return int(length_hdr) + + length = get_content_length() + # do not support old websocket spec + if SEC_WEBSOCKET_KEY1 in msg.headers: + raise InvalidHeader(SEC_WEBSOCKET_KEY1) + + self._upgraded = msg.upgrade and _is_supported_upgrade( + msg.headers + ) + + method = getattr(msg, "method", self.method) + # code is only present on responses + code = getattr(msg, "code", 0) + + assert self.protocol is not None + # calculate payload + empty_body = code in EMPTY_BODY_STATUS_CODES or bool( + method and method in EMPTY_BODY_METHODS + ) + if not empty_body and ( + ((length is not None and length > 0) or msg.chunked) + and not self._upgraded + ): + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + elif method == METH_CONNECT: + assert isinstance(msg, RawRequestMessage) + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + self._upgraded = True + self._payload_parser = HttpPayloadParser( + payload, + method=msg.method, + compression=msg.compression, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + elif not empty_body and length is None and self.read_until_eof: + payload = StreamReader( + self.protocol, + timer=self.timer, + loop=loop, + limit=self._limit, + ) + payload_parser = HttpPayloadParser( + payload, + length=length, + chunked=msg.chunked, + method=method, + compression=msg.compression, + code=self.code, + response_with_body=self.response_with_body, + auto_decompress=self._auto_decompress, + lax=self.lax, + ) + if not payload_parser.done: + self._payload_parser = payload_parser + else: + payload = EMPTY_PAYLOAD + + messages.append((msg, payload)) + should_close = msg.should_close + else: + self._tail = data[start_pos:] + data = EMPTY + break + + # no parser, just store + elif self._payload_parser is None and self._upgraded: + assert not self._lines + break + + # feed payload + elif data and start_pos < data_len: + assert not self._lines + assert self._payload_parser is not None + try: + eof, data = self._payload_parser.feed_data(data[start_pos:], SEP) + except BaseException as underlying_exc: + reraised_exc = underlying_exc + if self.payload_exception is not None: + reraised_exc = self.payload_exception(str(underlying_exc)) + + set_exception( + self._payload_parser.payload, + reraised_exc, + underlying_exc, + ) + + eof = True + data = b"" + + if eof: + start_pos = 0 + data_len = len(data) + self._payload_parser = None + continue + else: + break + + if data and start_pos < data_len: + data = data[start_pos:] + else: + data = EMPTY + + return messages, self._upgraded, data + + def parse_headers( + self, lines: List[bytes] + ) -> Tuple[ + "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool + ]: + """Parses RFC 5322 headers from a stream. + + Line continuations are supported. Returns list of header name + and value pairs. Header name is in upper case. 
+ """ + headers, raw_headers = self._headers_parser.parse_headers(lines) + close_conn = None + encoding = None + upgrade = False + chunked = False + + # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6 + # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf + singletons = ( + hdrs.CONTENT_LENGTH, + hdrs.CONTENT_LOCATION, + hdrs.CONTENT_RANGE, + hdrs.CONTENT_TYPE, + hdrs.ETAG, + hdrs.HOST, + hdrs.MAX_FORWARDS, + hdrs.SERVER, + hdrs.TRANSFER_ENCODING, + hdrs.USER_AGENT, + ) + bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None) + if bad_hdr is not None: + raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.") + + # keep-alive + conn = headers.get(hdrs.CONNECTION) + if conn: + v = conn.lower() + if v == "close": + close_conn = True + elif v == "keep-alive": + close_conn = False + # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols + elif v == "upgrade" and headers.get(hdrs.UPGRADE): + upgrade = True + + # encoding + enc = headers.get(hdrs.CONTENT_ENCODING) + if enc: + enc = enc.lower() + if enc in ("gzip", "deflate", "br"): + encoding = enc + + # chunking + te = headers.get(hdrs.TRANSFER_ENCODING) + if te is not None: + if self._is_chunked_te(te): + chunked = True + + if hdrs.CONTENT_LENGTH in headers: + raise BadHttpMessage( + "Transfer-Encoding can't be present with Content-Length", + ) + + return (headers, raw_headers, close_conn, encoding, upgrade, chunked) + + def set_upgraded(self, val: bool) -> None: + """Set connection upgraded (to websocket) mode. + + :param bool val: new state. + """ + self._upgraded = val + + +class HttpRequestParser(HttpParser[RawRequestMessage]): + """Read request status line. + + Exception .http_exceptions.BadStatusLine + could be raised in case of any errors in status line. + Returns RawRequestMessage. 
+ """ + + def parse_message(self, lines: List[bytes]) -> RawRequestMessage: + # request line + line = lines[0].decode("utf-8", "surrogateescape") + try: + method, path, version = line.split(" ", maxsplit=2) + except ValueError: + raise BadHttpMethod(line) from None + + if len(path) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(path)) + ) + + # method + if not TOKENRE.fullmatch(method): + raise BadHttpMethod(method) + + # version + match = VERSRE.fullmatch(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + if method == "CONNECT": + # authority-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3 + url = URL.build(authority=path, encoded=True) + elif path.startswith("/"): + # origin-form, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1 + path_part, _hash_separator, url_fragment = path.partition("#") + path_part, _question_mark_separator, qs_part = path_part.partition("?") + + # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based + # NOTE: parser does, otherwise it results into the same + # NOTE: HTTP Request-Line input producing different + # NOTE: `yarl.URL()` objects + url = URL.build( + path=path_part, + query_string=qs_part, + fragment=url_fragment, + encoded=True, + ) + elif path == "*" and method == "OPTIONS": + # asterisk-form, + url = URL(path, encoded=True) + else: + # absolute-form for proxy maybe, + # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2 + url = URL(path, encoded=True) + if url.scheme == "": + # not absolute-form + raise InvalidURLError( + path.encode(errors="surrogateescape").decode("latin1") + ) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: # then the headers weren't set in the request + if version_o <= HttpVersion10: # HTTP 1.0 must asks to not close + close = True + else: # HTTP 1.1 must ask to close. + close = False + + return RawRequestMessage( + method, + path, + version_o, + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + url, + ) + + def _is_chunked_te(self, te: str) -> bool: + if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked": + return True + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3 + raise BadHttpMessage("Request has invalid `Transfer-Encoding`") + + +class HttpResponseParser(HttpParser[RawResponseMessage]): + """Read response status line and headers. + + BadStatusLine could be raised in case of any errors in status line. + Returns RawResponseMessage. + """ + + # Lax mode should only be enabled on response parser. 
+ lax = not DEBUG + + def feed_data( + self, + data: bytes, + SEP: Optional[_SEP] = None, + *args: Any, + **kwargs: Any, + ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]: + if SEP is None: + SEP = b"\r\n" if DEBUG else b"\n" + return super().feed_data(data, SEP, *args, **kwargs) + + def parse_message(self, lines: List[bytes]) -> RawResponseMessage: + line = lines[0].decode("utf-8", "surrogateescape") + try: + version, status = line.split(maxsplit=1) + except ValueError: + raise BadStatusLine(line) from None + + try: + status, reason = status.split(maxsplit=1) + except ValueError: + status = status.strip() + reason = "" + + if len(reason) > self.max_line_size: + raise LineTooLong( + "Status line is too long", str(self.max_line_size), str(len(reason)) + ) + + # version + match = VERSRE.fullmatch(version) + if match is None: + raise BadStatusLine(line) + version_o = HttpVersion(int(match.group(1)), int(match.group(2))) + + # The status code is a three-digit ASCII number, no padding + if len(status) != 3 or not DIGITS.fullmatch(status): + raise BadStatusLine(line) + status_i = int(status) + + # read headers + ( + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) = self.parse_headers(lines) + + if close is None: + if version_o <= HttpVersion10: + close = True + # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length + elif 100 <= status_i < 200 or status_i in {204, 304}: + close = False + elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers: + close = False + else: + # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8 + close = True + + return RawResponseMessage( + version_o, + status_i, + reason.strip(), + headers, + raw_headers, + close, + compression, + upgrade, + chunked, + ) + + def _is_chunked_te(self, te: str) -> bool: + # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2 + return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked" + + +class HttpPayloadParser: + def __init__( + self, + payload: StreamReader, + length: Optional[int] = None, + chunked: bool = False, + compression: Optional[str] = None, + code: Optional[int] = None, + method: Optional[str] = None, + response_with_body: bool = True, + auto_decompress: bool = True, + lax: bool = False, + ) -> None: + self._length = 0 + self._type = ParseState.PARSE_UNTIL_EOF + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + self._chunk_size = 0 + self._chunk_tail = b"" + self._auto_decompress = auto_decompress + self._lax = lax + self.done = False + + # payload decompression wrapper + if response_with_body and compression and self._auto_decompress: + real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( + payload, compression + ) + else: + real_payload = payload + + # payload parser + if not response_with_body: + # don't parse payload if it's not expected to be received + self._type = ParseState.PARSE_NONE + real_payload.feed_eof() + self.done = True + elif chunked: + self._type = ParseState.PARSE_CHUNKED + elif length is not None: + self._type = ParseState.PARSE_LENGTH + self._length = length + if self._length == 0: + real_payload.feed_eof() + self.done = True + + self.payload = real_payload + + def feed_eof(self) -> None: + if self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_eof() + elif self._type == ParseState.PARSE_LENGTH: + raise ContentLengthError( + "Not enough data for satisfy content length header." 
+ ) + elif self._type == ParseState.PARSE_CHUNKED: + raise TransferEncodingError( + "Not enough data for satisfy transfer length header." + ) + + def feed_data( + self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";" + ) -> Tuple[bool, bytes]: + # Read specified amount of bytes + if self._type == ParseState.PARSE_LENGTH: + required = self._length + chunk_len = len(chunk) + + if required >= chunk_len: + self._length = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + if self._length == 0: + self.payload.feed_eof() + return True, b"" + else: + self._length = 0 + self.payload.feed_data(chunk[:required], required) + self.payload.feed_eof() + return True, chunk[required:] + + # Chunked transfer encoding parser + elif self._type == ParseState.PARSE_CHUNKED: + if self._chunk_tail: + chunk = self._chunk_tail + chunk + self._chunk_tail = b"" + + while chunk: + + # read next chunk size + if self._chunk == ChunkState.PARSE_CHUNKED_SIZE: + pos = chunk.find(SEP) + if pos >= 0: + i = chunk.find(CHUNK_EXT, 0, pos) + if i >= 0: + size_b = chunk[:i] # strip chunk-extensions + # Verify no LF in the chunk-extension + if b"\n" in (ext := chunk[i:pos]): + exc = BadHttpMessage( + f"Unexpected LF in chunk-extension: {ext!r}" + ) + set_exception(self.payload, exc) + raise exc + else: + size_b = chunk[:pos] + + if self._lax: # Allow whitespace in lax mode. + size_b = size_b.strip() + + if not re.fullmatch(HEXDIGITS, size_b): + exc = TransferEncodingError( + chunk[:pos].decode("ascii", "surrogateescape") + ) + set_exception(self.payload, exc) + raise exc + size = int(bytes(size_b), 16) + + chunk = chunk[pos + len(SEP) :] + if size == 0: # eof marker + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] + else: + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK + self._chunk_size = size + self.payload.begin_http_chunk_receiving() + else: + self._chunk_tail = chunk + return False, b"" + + # read chunk and feed buffer + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK: + required = self._chunk_size + chunk_len = len(chunk) + + if required > chunk_len: + self._chunk_size = required - chunk_len + self.payload.feed_data(chunk, chunk_len) + return False, b"" + else: + self._chunk_size = 0 + self.payload.feed_data(chunk[:required], required) + chunk = chunk[required:] + self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF + self.payload.end_http_chunk_receiving() + + # toss the CRLF at the end of the chunk + if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF: + if self._lax and chunk.startswith(b"\r"): + chunk = chunk[1:] + if chunk[: len(SEP)] == SEP: + chunk = chunk[len(SEP) :] + self._chunk = ChunkState.PARSE_CHUNKED_SIZE + else: + self._chunk_tail = chunk + return False, b"" + + # if stream does not contain trailer, after 0\r\n + # we should get another \r\n otherwise + # trailers needs to be skipped until \r\n\r\n + if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS: + head = chunk[: len(SEP)] + if head == SEP: + # end of stream + self.payload.feed_eof() + return True, chunk[len(SEP) :] + # Both CR and LF, or only LF may not be received yet. It is + # expected that CRLF or LF will be shown at the very first + # byte next time, otherwise trailers should come. The last + # CRLF which marks the end of response might not be + # contained in the same TCP segment which delivered the + # size indicator. 
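+                # Illustrative inputs at this point (with SEP == b"\r\n"):
+                #   b"\r\n"                 -> end of stream, no trailers
+                #   b"\r"                   -> incomplete, buffer and wait
+                #   b"X-Trailer: 1\r\n\r\n" -> PARSE_TRAILERS consumes the
+                #                              line, then loops back here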
+ if not head: + return False, b"" + if head == SEP[:1]: + self._chunk_tail = head + return False, b"" + self._chunk = ChunkState.PARSE_TRAILERS + + # read and discard trailer up to the CRLF terminator + if self._chunk == ChunkState.PARSE_TRAILERS: + pos = chunk.find(SEP) + if pos >= 0: + chunk = chunk[pos + len(SEP) :] + self._chunk = ChunkState.PARSE_MAYBE_TRAILERS + else: + self._chunk_tail = chunk + return False, b"" + + # Read all bytes until eof + elif self._type == ParseState.PARSE_UNTIL_EOF: + self.payload.feed_data(chunk, len(chunk)) + + return False, b"" + + +class DeflateBuffer: + """DeflateStream decompress stream and feed data into specified stream.""" + + decompressor: Any + + def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + self.out = out + self.size = 0 + self.encoding = encoding + self._started_decoding = False + + self.decompressor: Union[BrotliDecompressor, ZLibDecompressor] + if encoding == "br": + if not HAS_BROTLI: # pragma: no cover + raise ContentEncodingError( + "Can not decode content-encoding: brotli (br). " + "Please install `Brotli`" + ) + self.decompressor = BrotliDecompressor() + else: + self.decompressor = ZLibDecompressor(encoding=encoding) + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + set_exception(self.out, exc, exc_cause) + + def feed_data(self, chunk: bytes, size: int) -> None: + if not size: + return + + self.size += size + + # RFC1950 + # bits 0..3 = CM = 0b1000 = 8 = "deflate" + # bits 4..7 = CINFO = 1..7 = windows size. + if ( + not self._started_decoding + and self.encoding == "deflate" + and chunk[0] & 0xF != 8 + ): + # Change the decoder to decompress incorrectly compressed data + # Actually we should issue a warning about non-RFC-compliant data. 
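+            # e.g. a conforming zlib (RFC 1950) stream usually starts
+            # 0x78 0x9c (0x78 & 0xF == 8), while a bare-deflate first
+            # byte typically has a different low nibble and lands here.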
+ self.decompressor = ZLibDecompressor( + encoding=self.encoding, suppress_deflate_header=True + ) + + try: + chunk = self.decompressor.decompress_sync(chunk) + except Exception: + raise ContentEncodingError( + "Can not decode content-encoding: %s" % self.encoding + ) + + self._started_decoding = True + + if chunk: + self.out.feed_data(chunk, len(chunk)) + + def feed_eof(self) -> None: + chunk = self.decompressor.flush() + + if chunk or self.size > 0: + self.out.feed_data(chunk, len(chunk)) + if self.encoding == "deflate" and not self.decompressor.eof: + raise ContentEncodingError("deflate") + + self.out.feed_eof() + + def begin_http_chunk_receiving(self) -> None: + self.out.begin_http_chunk_receiving() + + def end_http_chunk_receiving(self) -> None: + self.out.end_http_chunk_receiving() + + +HttpRequestParserPy = HttpRequestParser +HttpResponseParserPy = HttpResponseParser +RawRequestMessagePy = RawRequestMessage +RawResponseMessagePy = RawResponseMessage + +try: + if not NO_EXTENSIONS: + from ._http_parser import ( # type: ignore[import-not-found,no-redef] + HttpRequestParser, + HttpResponseParser, + RawRequestMessage, + RawResponseMessage, + ) + + HttpRequestParserC = HttpRequestParser + HttpResponseParserC = HttpResponseParser + RawRequestMessageC = RawRequestMessage + RawResponseMessageC = RawResponseMessage +except ImportError: # pragma: no cover + pass diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/http_websocket.py b/deepseek/lib/python3.10/site-packages/aiohttp/http_websocket.py new file mode 100644 index 0000000000000000000000000000000000000000..6b4b30e02b247e30e0c84d3eb118b749bbe52079 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/http_websocket.py @@ -0,0 +1,36 @@ +"""WebSocket protocol versions 13 and 8.""" + +from ._websocket.helpers import WS_KEY, ws_ext_gen, ws_ext_parse +from ._websocket.models import ( + WS_CLOSED_MESSAGE, + WS_CLOSING_MESSAGE, + WebSocketError, + WSCloseCode, + WSHandshakeError, + WSMessage, + WSMsgType, +) +from ._websocket.reader import WebSocketReader +from ._websocket.writer import WebSocketWriter + +# Messages that the WebSocketResponse.receive needs to handle internally +_INTERNAL_RECEIVE_TYPES = frozenset( + (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.PING, WSMsgType.PONG) +) + + +__all__ = ( + "WS_CLOSED_MESSAGE", + "WS_CLOSING_MESSAGE", + "WS_KEY", + "WebSocketReader", + "WebSocketWriter", + "WSMessage", + "WebSocketError", + "WSMsgType", + "WSCloseCode", + "ws_ext_gen", + "ws_ext_parse", + "WSHandshakeError", + "WSMessage", +) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/multipart.py b/deepseek/lib/python3.10/site-packages/aiohttp/multipart.py new file mode 100644 index 0000000000000000000000000000000000000000..e0bcce0744983e41d449a7712b6407d839777649 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/multipart.py @@ -0,0 +1,1071 @@ +import base64 +import binascii +import json +import re +import sys +import uuid +import warnings +import zlib +from collections import deque +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Deque, + Dict, + Iterator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) +from urllib.parse import parse_qsl, unquote, urlencode + +from multidict import CIMultiDict, CIMultiDictProxy + +from .compression_utils import ZLibCompressor, ZLibDecompressor +from .hdrs import ( + CONTENT_DISPOSITION, + CONTENT_ENCODING, + CONTENT_LENGTH, + CONTENT_TRANSFER_ENCODING, + CONTENT_TYPE, +) +from .helpers import 
CHAR, TOKEN, parse_mimetype, reify +from .http import HeadersParser +from .payload import ( + JsonPayload, + LookupError, + Order, + Payload, + StringPayload, + get_payload, + payload_type, +) +from .streams import StreamReader + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing import TypeVar + + Self = TypeVar("Self", bound="BodyPartReader") + +__all__ = ( + "MultipartReader", + "MultipartWriter", + "BodyPartReader", + "BadContentDispositionHeader", + "BadContentDispositionParam", + "parse_content_disposition", + "content_disposition_filename", +) + + +if TYPE_CHECKING: + from .client_reqrep import ClientResponse + + +class BadContentDispositionHeader(RuntimeWarning): + pass + + +class BadContentDispositionParam(RuntimeWarning): + pass + + +def parse_content_disposition( + header: Optional[str], +) -> Tuple[Optional[str], Dict[str, str]]: + def is_token(string: str) -> bool: + return bool(string) and TOKEN >= set(string) + + def is_quoted(string: str) -> bool: + return string[0] == string[-1] == '"' + + def is_rfc5987(string: str) -> bool: + return is_token(string) and string.count("'") == 2 + + def is_extended_param(string: str) -> bool: + return string.endswith("*") + + def is_continuous_param(string: str) -> bool: + pos = string.find("*") + 1 + if not pos: + return False + substring = string[pos:-1] if string.endswith("*") else string[pos:] + return substring.isdigit() + + def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: + return re.sub(f"\\\\([{chars}])", "\\1", text) + + if not header: + return None, {} + + disptype, *parts = header.split(";") + if not is_token(disptype): + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params: Dict[str, str] = {} + while parts: + item = parts.pop(0) + + if "=" not in item: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + key, value = item.split("=", 1) + key = key.lower().strip() + value = value.lstrip() + + if key in params: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + if not is_token(key): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_continuous_param(key): + if is_quoted(value): + value = unescape(value[1:-1]) + elif not is_token(value): + warnings.warn(BadContentDispositionParam(item)) + continue + + elif is_extended_param(key): + if is_rfc5987(value): + encoding, _, value = value.split("'", 2) + encoding = encoding or "utf-8" + else: + warnings.warn(BadContentDispositionParam(item)) + continue + + try: + value = unquote(value, encoding, "strict") + except UnicodeDecodeError: # pragma: nocover + warnings.warn(BadContentDispositionParam(item)) + continue + + else: + failed = True + if is_quoted(value): + failed = False + value = unescape(value[1:-1].lstrip("\\/")) + elif is_token(value): + failed = False + elif parts: + # maybe just ; in filename, in any case this is just + # one case fix, for proper fix we need to redesign parser + _value = f"{value};{parts[0]}" + if is_quoted(_value): + parts.pop(0) + value = unescape(_value[1:-1].lstrip("\\/")) + failed = False + + if failed: + warnings.warn(BadContentDispositionHeader(header)) + return None, {} + + params[key] = value + + return disptype.lower(), params + + +def content_disposition_filename( + params: Mapping[str, str], name: str = "filename" +) -> Optional[str]: + name_suf = "%s*" % name + if not params: + return None + elif name_suf in params: + return params[name_suf] + elif name in params: + return params[name] + else: + 
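+        # Reassemble RFC 2231/5987 continuations; e.g. (illustrative)
+        # {"filename*0*": "utf-8''foo%20", "filename*1*": "bar.txt"}
+        # joins in numeric order to "utf-8''foo%20bar.txt" and decodes
+        # to "foo bar.txt".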
+        parts = []
+        fnparams = sorted(
+            (key, value) for key, value in params.items() if key.startswith(name_suf)
+        )
+        for num, (key, value) in enumerate(fnparams):
+            _, tail = key.split("*", 1)
+            if tail.endswith("*"):
+                tail = tail[:-1]
+            if tail == str(num):
+                parts.append(value)
+            else:
+                break
+        if not parts:
+            return None
+        value = "".join(parts)
+        if "'" in value:
+            encoding, _, value = value.split("'", 2)
+            encoding = encoding or "utf-8"
+            return unquote(value, encoding, "strict")
+        return value
+
+
+class MultipartResponseWrapper:
+    """Wrapper around the MultipartReader.
+
+    It takes care of the underlying connection
+    and closes it when needed.
+    """
+
+    def __init__(
+        self,
+        resp: "ClientResponse",
+        stream: "MultipartReader",
+    ) -> None:
+        self.resp = resp
+        self.stream = stream
+
+    def __aiter__(self) -> "MultipartResponseWrapper":
+        return self
+
+    async def __anext__(
+        self,
+    ) -> Union["MultipartReader", "BodyPartReader"]:
+        part = await self.next()
+        if part is None:
+            raise StopAsyncIteration
+        return part
+
+    def at_eof(self) -> bool:
+        """Returns True when all response data has been read."""
+        return self.resp.content.at_eof()
+
+    async def next(
+        self,
+    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
+        """Emits the next multipart reader object."""
+        item = await self.stream.next()
+        if self.stream.at_eof():
+            await self.release()
+        return item
+
+    async def release(self) -> None:
+        """Release the connection gracefully.
+
+        All remaining content is read to the void.
+        """
+        await self.resp.release()
+
+
+class BodyPartReader:
+    """Multipart reader for a single body part."""
+
+    chunk_size = 8192
+
+    def __init__(
+        self,
+        boundary: bytes,
+        headers: "CIMultiDictProxy[str]",
+        content: StreamReader,
+        *,
+        subtype: str = "mixed",
+        default_charset: Optional[str] = None,
+    ) -> None:
+        self.headers = headers
+        self._boundary = boundary
+        self._boundary_len = len(boundary) + 2  # Boundary + \r\n
+        self._content = content
+        self._default_charset = default_charset
+        self._at_eof = False
+        self._is_form_data = subtype == "form-data"
+        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+        length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None)
+        self._length = int(length) if length is not None else None
+        self._read_bytes = 0
+        self._unread: Deque[bytes] = deque()
+        self._prev_chunk: Optional[bytes] = None
+        self._content_eof = 0
+        self._cache: Dict[str, Any] = {}
+
+    def __aiter__(self: Self) -> Self:
+        return self
+
+    async def __anext__(self) -> bytes:
+        part = await self.next()
+        if part is None:
+            raise StopAsyncIteration
+        return part
+
+    async def next(self) -> Optional[bytes]:
+        item = await self.read()
+        if not item:
+            return None
+        return item
+
+    async def read(self, *, decode: bool = False) -> bytes:
+        """Reads body part data.
+
+        decode: Decodes data following the encoding method from the
+                Content-Encoding header. If the header is missing, the
+                data remains untouched.
+        """
+        if self._at_eof:
+            return b""
+        data = bytearray()
+        while not self._at_eof:
+            data.extend(await self.read_chunk(self.chunk_size))
+        if decode:
+            return self.decode(data)
+        return data
+
+    async def read_chunk(self, size: int = chunk_size) -> bytes:
+        """Reads a body part content chunk of the specified size.
+
+        size: chunk size
+        """
+        if self._at_eof:
+            return b""
+        if self._length:
+            chunk = await self._read_chunk_from_length(size)
+        else:
+            chunk = await self._read_chunk_from_stream(size)
+
+        # For base64-encoded data we must read fragments whose length, ignoring
+        # the symbols \n and \r, is a multiple of 4, so that each fragment can
+        # be decoded on its own.
+        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING)
+        if encoding and encoding.lower() == "base64":
+            stripped_chunk = b"".join(chunk.split())
+            remainder = len(stripped_chunk) % 4
+
+            while remainder != 0 and not self.at_eof():
+                over_chunk_size = 4 - remainder
+                over_chunk = b""
+
+                if self._prev_chunk:
+                    over_chunk = self._prev_chunk[:over_chunk_size]
+                    self._prev_chunk = self._prev_chunk[len(over_chunk) :]
+
+                if len(over_chunk) != over_chunk_size:
+                    over_chunk += await self._content.read(4 - len(over_chunk))
+
+                if not over_chunk:
+                    self._at_eof = True
+
+                stripped_chunk += b"".join(over_chunk.split())
+                chunk += over_chunk
+                remainder = len(stripped_chunk) % 4
+
+        self._read_bytes += len(chunk)
+        if self._read_bytes == self._length:
+            self._at_eof = True
+        if self._at_eof:
+            crlf = await self._content.readline()
+            assert (
+                b"\r\n" == crlf
+            ), "reader did not read all the data or it is malformed"
+        return chunk
+
+    async def _read_chunk_from_length(self, size: int) -> bytes:
+        # Reads a body part content chunk of the specified size.
+        # The body part must have a Content-Length header with a proper value.
+        assert self._length is not None, "Content-Length required for chunked read"
+        chunk_size = min(size, self._length - self._read_bytes)
+        chunk = await self._content.read(chunk_size)
+        if self._content.at_eof():
+            self._at_eof = True
+        return chunk
+
+    async def _read_chunk_from_stream(self, size: int) -> bytes:
+        # Reads a content chunk of a body part with unknown length.
+        # The Content-Length header for the body part is not necessary.
+        assert (
+            size >= self._boundary_len
+        ), "Chunk size must be greater than or equal to boundary length + 2"
+        first_chunk = self._prev_chunk is None
+        if first_chunk:
+            self._prev_chunk = await self._content.read(size)
+
+        chunk = b""
+        # content.read() may return less than size, so we need to loop to ensure
+        # we have enough data to detect the boundary.
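
The base64 branch in read_chunk above exists because four encoded bytes decode to three raw bytes, so a fragment can only be decoded on its own when its whitespace-stripped length is a multiple of 4. A quick illustration of that invariant:

    import base64

    data = base64.b64encode(b"hello world!")  # b'aGVsbG8gd29ybGQh' (16 bytes)
    # Splitting on a 4-byte boundary keeps both pieces independently decodable:
    assert base64.b64decode(data[:8]) + base64.b64decode(data[8:]) == b"hello world!"
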
+        while len(chunk) < self._boundary_len:
+            chunk += await self._content.read(size)
+            self._content_eof += int(self._content.at_eof())
+            assert self._content_eof < 3, "Reading after EOF"
+            if self._content_eof:
+                break
+        if len(chunk) > size:
+            self._content.unread_data(chunk[size:])
+            chunk = chunk[:size]
+
+        assert self._prev_chunk is not None
+        window = self._prev_chunk + chunk
+        sub = b"\r\n" + self._boundary
+        if first_chunk:
+            idx = window.find(sub)
+        else:
+            idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
+        if idx >= 0:
+            # pushing boundary back to content
+            with warnings.catch_warnings():
+                warnings.filterwarnings("ignore", category=DeprecationWarning)
+                self._content.unread_data(window[idx:])
+            if size > idx:
+                self._prev_chunk = self._prev_chunk[:idx]
+            chunk = window[len(self._prev_chunk) : idx]
+            if not chunk:
+                self._at_eof = True
+        result = self._prev_chunk
+        self._prev_chunk = chunk
+        return result
+
+    async def readline(self) -> bytes:
+        """Reads the body part line by line."""
+        if self._at_eof:
+            return b""
+
+        if self._unread:
+            line = self._unread.popleft()
+        else:
+            line = await self._content.readline()
+
+        if line.startswith(self._boundary):
+            # the very last boundary may not come with \r\n,
+            # so apply a single rule for all boundary lines
+            sline = line.rstrip(b"\r\n")
+            boundary = self._boundary
+            last_boundary = self._boundary + b"--"
+            # ensure that we read exactly the boundary, not something alike
+            if sline == boundary or sline == last_boundary:
+                self._at_eof = True
+                self._unread.append(line)
+                return b""
+        else:
+            next_line = await self._content.readline()
+            if next_line.startswith(self._boundary):
+                line = line[:-2]  # strip CRLF but only once
+            self._unread.append(next_line)
+
+        return line
+
+    async def release(self) -> None:
+        """Like read(), but reads all the data to the void."""
+        if self._at_eof:
+            return
+        while not self._at_eof:
+            await self.read_chunk(self.chunk_size)
+
+    async def text(self, *, encoding: Optional[str] = None) -> str:
+        """Like read(), but assumes that the body part contains text data."""
+        data = await self.read(decode=True)
+        # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm
+        # and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send
+        encoding = encoding or self.get_charset(default="utf-8")
+        return data.decode(encoding)
+
+    async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
+        """Like read(), but assumes that the body part contains JSON data."""
+        data = await self.read(decode=True)
+        if not data:
+            return None
+        encoding = encoding or self.get_charset(default="utf-8")
+        return cast(Dict[str, Any], json.loads(data.decode(encoding)))
+
+    async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
+        """Like read(), but assumes that the body part contains form-urlencoded data."""
+        data = await self.read(decode=True)
+        if not data:
+            return []
+        if encoding is not None:
+            real_encoding = encoding
+        else:
+            real_encoding = self.get_charset(default="utf-8")
+        try:
+            decoded_data = data.rstrip().decode(real_encoding)
+        except UnicodeDecodeError:
+            raise ValueError("data cannot be decoded with %s encoding" % real_encoding)
+
+        return parse_qsl(
+            decoded_data,
+            keep_blank_values=True,
+            encoding=real_encoding,
+        )
+
+    def at_eof(self) -> bool:
+        """Returns True if the boundary was reached or False otherwise."""
+        return self._at_eof
+
+    def decode(self, data: bytes) -> bytes:
+        """Decodes data.
+
+        Decoding is done according to the specified Content-Encoding
+        or Content-Transfer-Encoding header value.
+        """
+        if CONTENT_TRANSFER_ENCODING in self.headers:
+            data = self._decode_content_transfer(data)
+        # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8
+        if not self._is_form_data and CONTENT_ENCODING in self.headers:
+            return self._decode_content(data)
+        return data
+
+    def _decode_content(self, data: bytes) -> bytes:
+        encoding = self.headers.get(CONTENT_ENCODING, "").lower()
+        if encoding == "identity":
+            return data
+        if encoding in {"deflate", "gzip"}:
+            return ZLibDecompressor(
+                encoding=encoding,
+                suppress_deflate_header=True,
+            ).decompress_sync(data)
+
+        raise RuntimeError(f"unknown content encoding: {encoding}")
+
+    def _decode_content_transfer(self, data: bytes) -> bytes:
+        encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
+
+        if encoding == "base64":
+            return base64.b64decode(data)
+        elif encoding == "quoted-printable":
+            return binascii.a2b_qp(data)
+        elif encoding in ("binary", "8bit", "7bit"):
+            return data
+        else:
+            raise RuntimeError(f"unknown content transfer encoding: {encoding}")
+
+    def get_charset(self, default: str) -> str:
+        """Returns the charset parameter from the Content-Type header or default."""
+        ctype = self.headers.get(CONTENT_TYPE, "")
+        mimetype = parse_mimetype(ctype)
+        return mimetype.parameters.get("charset", self._default_charset or default)
+
+    @reify
+    def name(self) -> Optional[str]:
+        """Returns name specified in Content-Disposition header.
+
+        If the header is missing or malformed, returns None.
+        """
+        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
+        return content_disposition_filename(params, "name")
+
+    @reify
+    def filename(self) -> Optional[str]:
+        """Returns filename specified in Content-Disposition header.
+
+        Returns None if the header is missing or malformed.
+        """
+        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
+        return content_disposition_filename(params, "filename")
+
+
+@payload_type(BodyPartReader, order=Order.try_first)
+class BodyPartReaderPayload(Payload):
+    _value: BodyPartReader
+
+    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
+        super().__init__(value, *args, **kwargs)
+
+        params: Dict[str, str] = {}
+        if value.name is not None:
+            params["name"] = value.name
+        if value.filename is not None:
+            params["filename"] = value.filename
+
+        if params:
+            self.set_content_disposition("attachment", True, **params)
+
+    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+        raise TypeError("Unable to decode.")
+
+    async def write(self, writer: Any) -> None:
+        field = self._value
+        chunk = await field.read_chunk(size=2**16)
+        while chunk:
+            await writer.write(field.decode(chunk))
+            chunk = await field.read_chunk(size=2**16)
+
+
+class MultipartReader:
+    """Multipart body reader."""
+
+    #: Response wrapper, used when a multipart reader is constructed from a response.
+    response_wrapper_cls = MultipartResponseWrapper
+    #: Multipart reader class, used to handle multipart/* body parts.
+    #: None points to type(self)
+    multipart_reader_cls: Optional[Type["MultipartReader"]] = None
+    #: Body part reader class for non multipart/* content types.
+ part_reader_cls = BodyPartReader + + def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: + self._mimetype = parse_mimetype(headers[CONTENT_TYPE]) + assert self._mimetype.type == "multipart", "multipart/* content type expected" + if "boundary" not in self._mimetype.parameters: + raise ValueError( + "boundary missed for Content-Type: %s" % headers[CONTENT_TYPE] + ) + + self.headers = headers + self._boundary = ("--" + self._get_boundary()).encode() + self._content = content + self._default_charset: Optional[str] = None + self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None + self._at_eof = False + self._at_bof = True + self._unread: List[bytes] = [] + + def __aiter__(self: Self) -> Self: + return self + + async def __anext__( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + part = await self.next() + if part is None: + raise StopAsyncIteration + return part + + @classmethod + def from_response( + cls, + response: "ClientResponse", + ) -> MultipartResponseWrapper: + """Constructs reader instance from HTTP response. + + :param response: :class:`~aiohttp.client.ClientResponse` instance + """ + obj = cls.response_wrapper_cls( + response, cls(response.headers, response.content) + ) + return obj + + def at_eof(self) -> bool: + """Returns True if the final boundary was reached, false otherwise.""" + return self._at_eof + + async def next( + self, + ) -> Optional[Union["MultipartReader", BodyPartReader]]: + """Emits the next multipart body part.""" + # So, if we're at BOF, we need to skip till the boundary. + if self._at_eof: + return None + await self._maybe_release_last_part() + if self._at_bof: + await self._read_until_first_boundary() + self._at_bof = False + else: + await self._read_boundary() + if self._at_eof: # we just read the last boundary, nothing to do there + return None + + part = await self.fetch_next_part() + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.6 + if ( + self._last_part is None + and self._mimetype.subtype == "form-data" + and isinstance(part, BodyPartReader) + ): + _, params = parse_content_disposition(part.headers.get(CONTENT_DISPOSITION)) + if params.get("name") == "_charset_": + # Longest encoding in https://encoding.spec.whatwg.org/encodings.json + # is 19 characters, so 32 should be more than enough for any valid encoding. + charset = await part.read_chunk(32) + if len(charset) > 31: + raise RuntimeError("Invalid default charset") + self._default_charset = charset.strip().decode() + part = await self.fetch_next_part() + self._last_part = part + return self._last_part + + async def release(self) -> None: + """Reads all the body parts to the void till the final boundary.""" + while not self._at_eof: + item = await self.next() + if item is None: + break + await item.release() + + async def fetch_next_part( + self, + ) -> Union["MultipartReader", BodyPartReader]: + """Returns the next body part reader.""" + headers = await self._read_headers() + return self._get_part_reader(headers) + + def _get_part_reader( + self, + headers: "CIMultiDictProxy[str]", + ) -> Union["MultipartReader", BodyPartReader]: + """Dispatches the response by the `Content-Type` header. + + Returns a suitable reader instance. 
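
A sketch of consuming a multipart body with the reader classes above, assuming resp is an aiohttp.ClientResponse whose Content-Type is multipart/*:

    async def handle(resp):
        reader = MultipartReader.from_response(resp)
        async for part in reader:
            if part.filename:  # a file upload
                body = await part.read(decode=True)
            else:  # a plain field
                text = await part.text()
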
+ + :param dict headers: Response headers + """ + ctype = headers.get(CONTENT_TYPE, "") + mimetype = parse_mimetype(ctype) + + if mimetype.type == "multipart": + if self.multipart_reader_cls is None: + return type(self)(headers, self._content) + return self.multipart_reader_cls(headers, self._content) + else: + return self.part_reader_cls( + self._boundary, + headers, + self._content, + subtype=self._mimetype.subtype, + default_charset=self._default_charset, + ) + + def _get_boundary(self) -> str: + boundary = self._mimetype.parameters["boundary"] + if len(boundary) > 70: + raise ValueError("boundary %r is too long (70 chars max)" % boundary) + + return boundary + + async def _readline(self) -> bytes: + if self._unread: + return self._unread.pop() + return await self._content.readline() + + async def _read_until_first_boundary(self) -> None: + while True: + chunk = await self._readline() + if chunk == b"": + raise ValueError( + "Could not find starting boundary %r" % (self._boundary) + ) + chunk = chunk.rstrip() + if chunk == self._boundary: + return + elif chunk == self._boundary + b"--": + self._at_eof = True + return + + async def _read_boundary(self) -> None: + chunk = (await self._readline()).rstrip() + if chunk == self._boundary: + pass + elif chunk == self._boundary + b"--": + self._at_eof = True + epilogue = await self._readline() + next_line = await self._readline() + + # the epilogue is expected and then either the end of input or the + # parent multipart boundary, if the parent boundary is found then + # it should be marked as unread and handed to the parent for + # processing + if next_line[:2] == b"--": + self._unread.append(next_line) + # otherwise the request is likely missing an epilogue and both + # lines should be passed to the parent for processing + # (this handles the old behavior gracefully) + else: + self._unread.extend([next_line, epilogue]) + else: + raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}") + + async def _read_headers(self) -> "CIMultiDictProxy[str]": + lines = [b""] + while True: + chunk = await self._content.readline() + chunk = chunk.strip() + lines.append(chunk) + if not chunk: + break + parser = HeadersParser() + headers, raw_headers = parser.parse_headers(lines) + return headers + + async def _maybe_release_last_part(self) -> None: + """Ensures that the last read body part is read completely.""" + if self._last_part is not None: + if not self._last_part.at_eof(): + await self._last_part.release() + self._unread.extend(self._last_part._unread) + self._last_part = None + + +_Part = Tuple[Payload, str, str] + + +class MultipartWriter(Payload): + """Multipart body writer.""" + + _value: None + + def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: + boundary = boundary if boundary is not None else uuid.uuid4().hex + # The underlying Payload API demands a str (utf-8), not bytes, + # so we need to ensure we don't lose anything during conversion. + # As a result, require the boundary to be ASCII only. + # In both situations. 
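
For instance, under the quoting rules implemented in _boundary_value below, a token-safe boundary is emitted bare while anything else gets quoted:

    MultipartWriter(boundary="simple-boundary").content_type
    # 'multipart/mixed; boundary=simple-boundary'
    MultipartWriter(boundary="with spaces").content_type
    # 'multipart/mixed; boundary="with spaces"'
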
+ + try: + self._boundary = boundary.encode("ascii") + except UnicodeEncodeError: + raise ValueError("boundary should contain ASCII only chars") from None + ctype = f"multipart/{subtype}; boundary={self._boundary_value}" + + super().__init__(None, content_type=ctype) + + self._parts: List[_Part] = [] + self._is_form_data = subtype == "form-data" + + def __enter__(self) -> "MultipartWriter": + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> None: + pass + + def __iter__(self) -> Iterator[_Part]: + return iter(self._parts) + + def __len__(self) -> int: + return len(self._parts) + + def __bool__(self) -> bool: + return True + + _valid_tchar_regex = re.compile(rb"\A[!#$%&'*+\-.^_`|~\w]+\Z") + _invalid_qdtext_char_regex = re.compile(rb"[\x00-\x08\x0A-\x1F\x7F]") + + @property + def _boundary_value(self) -> str: + """Wrap boundary parameter value in quotes, if necessary. + + Reads self.boundary and returns a unicode string. + """ + # Refer to RFCs 7231, 7230, 5234. + # + # parameter = token "=" ( token / quoted-string ) + # token = 1*tchar + # quoted-string = DQUOTE *( qdtext / quoted-pair ) DQUOTE + # qdtext = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text + # obs-text = %x80-FF + # quoted-pair = "\" ( HTAB / SP / VCHAR / obs-text ) + # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" + # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" + # / DIGIT / ALPHA + # ; any VCHAR, except delimiters + # VCHAR = %x21-7E + value = self._boundary + if re.match(self._valid_tchar_regex, value): + return value.decode("ascii") # cannot fail + + if re.search(self._invalid_qdtext_char_regex, value): + raise ValueError("boundary value contains invalid characters") + + # escape %x5C and %x22 + quoted_value_content = value.replace(b"\\", b"\\\\") + quoted_value_content = quoted_value_content.replace(b'"', b'\\"') + + return '"' + quoted_value_content.decode("ascii") + '"' + + @property + def boundary(self) -> str: + return self._boundary.decode("ascii") + + def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Payload): + obj.headers.update(headers) + return self.append_payload(obj) + else: + try: + payload = get_payload(obj, headers=headers) + except LookupError: + raise TypeError("Cannot create payload from %r" % obj) + else: + return self.append_payload(payload) + + def append_payload(self, payload: Payload) -> Payload: + """Adds a new body part to multipart writer.""" + encoding: Optional[str] = None + te_encoding: Optional[str] = None + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 + assert ( + not {CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TRANSFER_ENCODING} + & payload.headers.keys() + ) + # Set default Content-Disposition in case user doesn't create one + if CONTENT_DISPOSITION not in payload.headers: + name = f"section-{len(self._parts)}" + payload.set_content_disposition("form-data", name=name) + else: + # compression + encoding = payload.headers.get(CONTENT_ENCODING, "").lower() + if encoding and encoding not in ("deflate", "gzip", "identity"): + raise RuntimeError(f"unknown content encoding: {encoding}") + if encoding == "identity": + encoding = None + + # te encoding + te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, "").lower() + if te_encoding not in ("", "base64", 
"quoted-printable", "binary"): + raise RuntimeError(f"unknown content transfer encoding: {te_encoding}") + if te_encoding == "binary": + te_encoding = None + + # size + size = payload.size + if size is not None and not (encoding or te_encoding): + payload.headers[CONTENT_LENGTH] = str(size) + + self._parts.append((payload, encoding, te_encoding)) # type: ignore[arg-type] + return payload + + def append_json( + self, obj: Any, headers: Optional[Mapping[str, str]] = None + ) -> Payload: + """Helper to append JSON part.""" + if headers is None: + headers = CIMultiDict() + + return self.append_payload(JsonPayload(obj, headers=headers)) + + def append_form( + self, + obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], + headers: Optional[Mapping[str, str]] = None, + ) -> Payload: + """Helper to append form urlencoded part.""" + assert isinstance(obj, (Sequence, Mapping)) + + if headers is None: + headers = CIMultiDict() + + if isinstance(obj, Mapping): + obj = list(obj.items()) + data = urlencode(obj, doseq=True) + + return self.append_payload( + StringPayload( + data, headers=headers, content_type="application/x-www-form-urlencoded" + ) + ) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + total = 0 + for part, encoding, te_encoding in self._parts: + if encoding or te_encoding or part.size is None: + return None + + total += int( + 2 + + len(self._boundary) + + 2 + + part.size # b'--'+self._boundary+b'\r\n' + + len(part._binary_headers) + + 2 # b'\r\n' + ) + + total += 2 + len(self._boundary) + 4 # b'--'+self._boundary+b'--\r\n' + return total + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join( + "--" + + self.boundary + + "\n" + + part._binary_headers.decode(encoding, errors) + + part.decode() + for part, _e, _te in self._parts + ) + + async def write(self, writer: Any, close_boundary: bool = True) -> None: + """Write body.""" + for part, encoding, te_encoding in self._parts: + if self._is_form_data: + # https://datatracker.ietf.org/doc/html/rfc7578#section-4.2 + assert CONTENT_DISPOSITION in part.headers + assert "name=" in part.headers[CONTENT_DISPOSITION] + + await writer.write(b"--" + self._boundary + b"\r\n") + await writer.write(part._binary_headers) + + if encoding or te_encoding: + w = MultipartPayloadWriter(writer) + if encoding: + w.enable_compression(encoding) + if te_encoding: + w.enable_encoding(te_encoding) + await part.write(w) # type: ignore[arg-type] + await w.write_eof() + else: + await part.write(writer) + + await writer.write(b"\r\n") + + if close_boundary: + await writer.write(b"--" + self._boundary + b"--\r\n") + + +class MultipartPayloadWriter: + def __init__(self, writer: Any) -> None: + self._writer = writer + self._encoding: Optional[str] = None + self._compress: Optional[ZLibCompressor] = None + self._encoding_buffer: Optional[bytearray] = None + + def enable_encoding(self, encoding: str) -> None: + if encoding == "base64": + self._encoding = encoding + self._encoding_buffer = bytearray() + elif encoding == "quoted-printable": + self._encoding = "quoted-printable" + + def enable_compression( + self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY + ) -> None: + self._compress = ZLibCompressor( + encoding=encoding, + suppress_deflate_header=True, + strategy=strategy, + ) + + async def write_eof(self) -> None: + if self._compress is not None: + chunk = self._compress.flush() + if chunk: + self._compress = None + await self.write(chunk) + + if self._encoding == "base64": 
+ if self._encoding_buffer: + await self._writer.write(base64.b64encode(self._encoding_buffer)) + + async def write(self, chunk: bytes) -> None: + if self._compress is not None: + if chunk: + chunk = await self._compress.compress(chunk) + if not chunk: + return + + if self._encoding == "base64": + buf = self._encoding_buffer + assert buf is not None + buf.extend(chunk) + + if buf: + div, mod = divmod(len(buf), 3) + enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :]) + if enc_chunk: + b64chunk = base64.b64encode(enc_chunk) + await self._writer.write(b64chunk) + elif self._encoding == "quoted-printable": + await self._writer.write(binascii.b2a_qp(chunk)) + else: + await self._writer.write(chunk) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/payload.py b/deepseek/lib/python3.10/site-packages/aiohttp/payload.py new file mode 100644 index 0000000000000000000000000000000000000000..3f6d3672db29a7f4f580c8a7a5e1fb1ac29ebf2d --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/payload.py @@ -0,0 +1,519 @@ +import asyncio +import enum +import io +import json +import mimetypes +import os +import sys +import warnings +from abc import ABC, abstractmethod +from itertools import chain +from typing import ( + IO, + TYPE_CHECKING, + Any, + Dict, + Final, + Iterable, + Optional, + TextIO, + Tuple, + Type, + Union, +) + +from multidict import CIMultiDict + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + _SENTINEL, + content_disposition_header, + guess_filename, + parse_mimetype, + sentinel, +) +from .streams import StreamReader +from .typedefs import JSONEncoder, _CIMultiDict + +__all__ = ( + "PAYLOAD_REGISTRY", + "get_payload", + "payload_type", + "Payload", + "BytesPayload", + "StringPayload", + "IOBasePayload", + "BytesIOPayload", + "BufferedReaderPayload", + "TextIOPayload", + "StringIOPayload", + "JsonPayload", + "AsyncIterablePayload", +) + +TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB + +if TYPE_CHECKING: + from typing import List + + +class LookupError(Exception): + pass + + +class Order(str, enum.Enum): + normal = "normal" + try_first = "try_first" + try_last = "try_last" + + +def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": + return PAYLOAD_REGISTRY.get(data, *args, **kwargs) + + +def register_payload( + factory: Type["Payload"], type: Any, *, order: Order = Order.normal +) -> None: + PAYLOAD_REGISTRY.register(factory, type, order=order) + + +class payload_type: + def __init__(self, type: Any, *, order: Order = Order.normal) -> None: + self.type = type + self.order = order + + def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: + register_payload(factory, self.type, order=self.order) + return factory + + +PayloadType = Type["Payload"] +_PayloadRegistryItem = Tuple[PayloadType, Any] + + +class PayloadRegistry: + """Payload registry. 
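
The effect of the registry defined below is visible through get_payload, which picks a Payload subclass from the type of its argument:

    import io

    get_payload(b"raw bytes")      # -> BytesPayload (fast type lookup)
    get_payload("some text")       # -> StringPayload
    get_payload(io.BytesIO(b"x"))  # -> BytesIOPayload
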
+ + note: we need zope.interface for more efficient adapter search + """ + + __slots__ = ("_first", "_normal", "_last", "_normal_lookup") + + def __init__(self) -> None: + self._first: List[_PayloadRegistryItem] = [] + self._normal: List[_PayloadRegistryItem] = [] + self._last: List[_PayloadRegistryItem] = [] + self._normal_lookup: Dict[Any, PayloadType] = {} + + def get( + self, + data: Any, + *args: Any, + _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + **kwargs: Any, + ) -> "Payload": + if self._first: + for factory, type_ in self._first: + if isinstance(data, type_): + return factory(data, *args, **kwargs) + # Try the fast lookup first + if lookup_factory := self._normal_lookup.get(type(data)): + return lookup_factory(data, *args, **kwargs) + # Bail early if its already a Payload + if isinstance(data, Payload): + return data + # Fallback to the slower linear search + for factory, type_ in _CHAIN(self._normal, self._last): + if isinstance(data, type_): + return factory(data, *args, **kwargs) + raise LookupError() + + def register( + self, factory: PayloadType, type: Any, *, order: Order = Order.normal + ) -> None: + if order is Order.try_first: + self._first.append((factory, type)) + elif order is Order.normal: + self._normal.append((factory, type)) + if isinstance(type, Iterable): + for t in type: + self._normal_lookup[t] = factory + else: + self._normal_lookup[type] = factory + elif order is Order.try_last: + self._last.append((factory, type)) + else: + raise ValueError(f"Unsupported order {order!r}") + + +class Payload(ABC): + + _default_content_type: str = "application/octet-stream" + _size: Optional[int] = None + + def __init__( + self, + value: Any, + headers: Optional[ + Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] + ] = None, + content_type: Union[str, None, _SENTINEL] = sentinel, + filename: Optional[str] = None, + encoding: Optional[str] = None, + **kwargs: Any, + ) -> None: + self._encoding = encoding + self._filename = filename + self._headers: _CIMultiDict = CIMultiDict() + self._value = value + if content_type is not sentinel and content_type is not None: + self._headers[hdrs.CONTENT_TYPE] = content_type + elif self._filename is not None: + if sys.version_info >= (3, 13): + guesser = mimetypes.guess_file_type + else: + guesser = mimetypes.guess_type + content_type = guesser(self._filename)[0] + if content_type is None: + content_type = self._default_content_type + self._headers[hdrs.CONTENT_TYPE] = content_type + else: + self._headers[hdrs.CONTENT_TYPE] = self._default_content_type + if headers: + self._headers.update(headers) + + @property + def size(self) -> Optional[int]: + """Size of the payload.""" + return self._size + + @property + def filename(self) -> Optional[str]: + """Filename of the payload.""" + return self._filename + + @property + def headers(self) -> _CIMultiDict: + """Custom item headers""" + return self._headers + + @property + def _binary_headers(self) -> bytes: + return ( + "".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode( + "utf-8" + ) + + b"\r\n" + ) + + @property + def encoding(self) -> Optional[str]: + """Payload encoding""" + return self._encoding + + @property + def content_type(self) -> str: + """Content type""" + return self._headers[hdrs.CONTENT_TYPE] + + def set_content_disposition( + self, + disptype: str, + quote_fields: bool = True, + _charset: str = "utf-8", + **params: Any, + ) -> None: + """Sets ``Content-Disposition`` header.""" + self._headers[hdrs.CONTENT_DISPOSITION] = 
content_disposition_header( + disptype, quote_fields=quote_fields, _charset=_charset, **params + ) + + @abstractmethod + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + """Return string representation of the value. + + This is named decode() to allow compatibility with bytes objects. + """ + + @abstractmethod + async def write(self, writer: AbstractStreamWriter) -> None: + """Write payload. + + writer is an AbstractStreamWriter instance: + """ + + +class BytesPayload(Payload): + _value: bytes + + def __init__( + self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any + ) -> None: + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + if isinstance(value, memoryview): + self._size = value.nbytes + elif isinstance(value, (bytes, bytearray)): + self._size = len(value) + else: + raise TypeError(f"value argument must be byte-ish, not {type(value)!r}") + + if self._size > TOO_LARGE_BYTES_BODY: + kwargs = {"source": self} + warnings.warn( + "Sending a large body directly with raw bytes might" + " lock the event loop. You should probably pass an " + "io.BytesIO object instead", + ResourceWarning, + **kwargs, + ) + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.decode(encoding, errors) + + async def write(self, writer: AbstractStreamWriter) -> None: + await writer.write(self._value) + + +class StringPayload(BytesPayload): + def __init__( + self, + value: str, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if content_type is None: + real_encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + real_encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + real_encoding = encoding + + super().__init__( + value.encode(real_encoding), + encoding=real_encoding, + content_type=content_type, + *args, + **kwargs, + ) + + +class StringIOPayload(StringPayload): + def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: + super().__init__(value.read(), *args, **kwargs) + + +class IOBasePayload(Payload): + _value: io.IOBase + + def __init__( + self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any + ) -> None: + if "filename" not in kwargs: + kwargs["filename"] = guess_filename(value) + + super().__init__(value, *args, **kwargs) + + if self._filename is not None and disposition is not None: + if hdrs.CONTENT_DISPOSITION not in self.headers: + self.set_content_disposition(disposition, filename=self._filename) + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + await writer.write(chunk) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return "".join(r.decode(encoding, errors) for r in self._value.readlines()) + + +class TextIOPayload(IOBasePayload): + _value: io.TextIOBase + + def __init__( + self, + value: TextIO, + *args: Any, + encoding: Optional[str] = None, + content_type: Optional[str] = None, + **kwargs: Any, + ) -> None: + + if encoding is None: + if 
content_type is None: + encoding = "utf-8" + content_type = "text/plain; charset=utf-8" + else: + mimetype = parse_mimetype(content_type) + encoding = mimetype.parameters.get("charset", "utf-8") + else: + if content_type is None: + content_type = "text/plain; charset=%s" % encoding + + super().__init__( + value, + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except OSError: + return None + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read() + + async def write(self, writer: AbstractStreamWriter) -> None: + loop = asyncio.get_event_loop() + try: + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + while chunk: + data = ( + chunk.encode(encoding=self._encoding) + if self._encoding + else chunk.encode() + ) + await writer.write(data) + chunk = await loop.run_in_executor(None, self._value.read, 2**16) + finally: + await loop.run_in_executor(None, self._value.close) + + +class BytesIOPayload(IOBasePayload): + _value: io.BytesIO + + @property + def size(self) -> int: + position = self._value.tell() + end = self._value.seek(0, os.SEEK_END) + self._value.seek(position) + return end - position + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + + +class BufferedReaderPayload(IOBasePayload): + _value: io.BufferedIOBase + + @property + def size(self) -> Optional[int]: + try: + return os.fstat(self._value.fileno()).st_size - self._value.tell() + except (OSError, AttributeError): + # data.fileno() is not supported, e.g. + # io.BufferedReader(io.BytesIO(b'data')) + # For some file-like objects (e.g. tarfile), the fileno() attribute may + # not exist at all, and will instead raise an AttributeError. 
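
As a small illustration of the size and charset bookkeeping in these payload classes:

    import io

    sp = StringPayload("héllo", encoding="latin-1")
    sp.content_type                             # 'text/plain; charset=latin-1'
    sp.size                                     # 5 (bytes after encoding)
    BytesIOPayload(io.BytesIO(b"abcdef")).size  # 6
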
+ return None + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + return self._value.read().decode(encoding, errors) + + +class JsonPayload(BytesPayload): + def __init__( + self, + value: Any, + encoding: str = "utf-8", + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, + *args: Any, + **kwargs: Any, + ) -> None: + + super().__init__( + dumps(value).encode(encoding), + content_type=content_type, + encoding=encoding, + *args, + **kwargs, + ) + + +if TYPE_CHECKING: + from typing import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator[bytes] + _AsyncIterable = AsyncIterable[bytes] +else: + from collections.abc import AsyncIterable, AsyncIterator + + _AsyncIterator = AsyncIterator + _AsyncIterable = AsyncIterable + + +class AsyncIterablePayload(Payload): + + _iter: Optional[_AsyncIterator] = None + _value: _AsyncIterable + + def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: + if not isinstance(value, AsyncIterable): + raise TypeError( + "value argument must support " + "collections.abc.AsyncIterable interface, " + "got {!r}".format(type(value)) + ) + + if "content_type" not in kwargs: + kwargs["content_type"] = "application/octet-stream" + + super().__init__(value, *args, **kwargs) + + self._iter = value.__aiter__() + + async def write(self, writer: AbstractStreamWriter) -> None: + if self._iter: + try: + # iter is not None check prevents rare cases + # when the case iterable is used twice + while True: + chunk = await self._iter.__anext__() + await writer.write(chunk) + except StopAsyncIteration: + self._iter = None + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + + +class StreamReaderPayload(AsyncIterablePayload): + def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None: + super().__init__(value.iter_any(), *args, **kwargs) + + +PAYLOAD_REGISTRY = PayloadRegistry() +PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview)) +PAYLOAD_REGISTRY.register(StringPayload, str) +PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO) +PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase) +PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO) +PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom)) +PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase) +PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader) +# try_last for giving a chance to more specialized async interables like +# multidict.BodyPartReaderPayload override the default +PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/payload_streamer.py b/deepseek/lib/python3.10/site-packages/aiohttp/payload_streamer.py new file mode 100644 index 0000000000000000000000000000000000000000..831fdc0a77f302acaf9a000be408fe7c9a9035aa --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/payload_streamer.py @@ -0,0 +1,78 @@ +""" +Payload implementation for coroutines as data provider. 
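
Because AsyncIterablePayload is registered with Order.try_last above, any async iterable of bytes can be posted directly; a sketch of the async-generator style that replaces the deprecated @streamer decorator documented next (session and url are assumed to exist):

    async def file_sender(path):
        with open(path, "rb") as f:
            while chunk := f.read(2**16):
                yield chunk

    # await session.post(url, data=file_sender("huge_file"))
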
+ +As a simple case, you can upload data from file:: + + @aiohttp.streamer + async def file_sender(writer, file_name=None): + with open(file_name, 'rb') as f: + chunk = f.read(2**16) + while chunk: + await writer.write(chunk) + + chunk = f.read(2**16) + +Then you can use `file_sender` like this: + + async with session.post('http://httpbin.org/post', + data=file_sender(file_name='huge_file')) as resp: + print(await resp.text()) + +..note:: Coroutine must accept `writer` as first argument + +""" + +import types +import warnings +from typing import Any, Awaitable, Callable, Dict, Tuple + +from .abc import AbstractStreamWriter +from .payload import Payload, payload_type + +__all__ = ("streamer",) + + +class _stream_wrapper: + def __init__( + self, + coro: Callable[..., Awaitable[None]], + args: Tuple[Any, ...], + kwargs: Dict[str, Any], + ) -> None: + self.coro = types.coroutine(coro) + self.args = args + self.kwargs = kwargs + + async def __call__(self, writer: AbstractStreamWriter) -> None: + await self.coro(writer, *self.args, **self.kwargs) + + +class streamer: + def __init__(self, coro: Callable[..., Awaitable[None]]) -> None: + warnings.warn( + "@streamer is deprecated, use async generators instead", + DeprecationWarning, + stacklevel=2, + ) + self.coro = coro + + def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper: + return _stream_wrapper(self.coro, args, kwargs) + + +@payload_type(_stream_wrapper) +class StreamWrapperPayload(Payload): + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) + + def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str: + raise TypeError("Unable to decode.") + + +@payload_type(streamer) +class StreamPayload(StreamWrapperPayload): + def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None: + super().__init__(value(), *args, **kwargs) + + async def write(self, writer: AbstractStreamWriter) -> None: + await self._value(writer) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/py.typed b/deepseek/lib/python3.10/site-packages/aiohttp/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..f5642f79f21d872f010979dcf6f0c4a415acc19d --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/py.typed @@ -0,0 +1 @@ +Marker diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/pytest_plugin.py b/deepseek/lib/python3.10/site-packages/aiohttp/pytest_plugin.py new file mode 100644 index 0000000000000000000000000000000000000000..7ce60faa4a4f628a75fe68e94b7f6de6acccc9af --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/pytest_plugin.py @@ -0,0 +1,436 @@ +import asyncio +import contextlib +import inspect +import warnings +from typing import ( + Any, + Awaitable, + Callable, + Dict, + Iterator, + Optional, + Protocol, + Type, + Union, + overload, +) + +import pytest + +from .test_utils import ( + BaseTestServer, + RawTestServer, + TestClient, + TestServer, + loop_context, + setup_test_loop, + teardown_test_loop, + unused_port as _unused_port, +) +from .web import Application, BaseRequest, Request +from .web_protocol import _RequestHandler + +try: + import uvloop +except ImportError: # pragma: no cover + uvloop = None # type: ignore[assignment] + + +class AiohttpClient(Protocol): + @overload + async def __call__( + self, + __param: Application, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> TestClient[Request, Application]: ... 
+ @overload + async def __call__( + self, + __param: BaseTestServer, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> TestClient[BaseRequest, None]: ... + + +class AiohttpServer(Protocol): + def __call__( + self, app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[TestServer]: ... + + +class AiohttpRawServer(Protocol): + def __call__( + self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> Awaitable[RawTestServer]: ... + + +def pytest_addoption(parser): # type: ignore[no-untyped-def] + parser.addoption( + "--aiohttp-fast", + action="store_true", + default=False, + help="run tests faster by disabling extra checks", + ) + parser.addoption( + "--aiohttp-loop", + action="store", + default="pyloop", + help="run tests with specific loop: pyloop, uvloop or all", + ) + parser.addoption( + "--aiohttp-enable-loop-debug", + action="store_true", + default=False, + help="enable event loop debug mode", + ) + + +def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def] + """Set up pytest fixture. + + Allow fixtures to be coroutines. Run coroutine fixtures in an event loop. + """ + func = fixturedef.func + + if inspect.isasyncgenfunction(func): + # async generator fixture + is_async_gen = True + elif asyncio.iscoroutinefunction(func): + # regular async fixture + is_async_gen = False + else: + # not an async fixture, nothing to do + return + + strip_request = False + if "request" not in fixturedef.argnames: + fixturedef.argnames += ("request",) + strip_request = True + + def wrapper(*args, **kwargs): # type: ignore[no-untyped-def] + request = kwargs["request"] + if strip_request: + del kwargs["request"] + + # if neither the fixture nor the test use the 'loop' fixture, + # 'getfixturevalue' will fail because the test is not parameterized + # (this can be removed someday if 'loop' is no longer parameterized) + if "loop" not in request.fixturenames: + raise Exception( + "Asynchronous fixtures must depend on the 'loop' fixture or " + "be used in tests depending from it." + ) + + _loop = request.getfixturevalue("loop") + + if is_async_gen: + # for async generators, we need to advance the generator once, + # then advance it again in a finalizer + gen = func(*args, **kwargs) + + def finalizer(): # type: ignore[no-untyped-def] + try: + return _loop.run_until_complete(gen.__anext__()) + except StopAsyncIteration: + pass + + request.addfinalizer(finalizer) + return _loop.run_until_complete(gen.__anext__()) + else: + return _loop.run_until_complete(func(*args, **kwargs)) + + fixturedef.func = wrapper + + +@pytest.fixture +def fast(request): # type: ignore[no-untyped-def] + """--fast config option""" + return request.config.getoption("--aiohttp-fast") + + +@pytest.fixture +def loop_debug(request): # type: ignore[no-untyped-def] + """--enable-loop-debug config option""" + return request.config.getoption("--aiohttp-enable-loop-debug") + + +@contextlib.contextmanager +def _runtime_warning_context(): # type: ignore[no-untyped-def] + """Context manager which checks for RuntimeWarnings. + + This exists specifically to + avoid "coroutine 'X' was never awaited" warnings being missed. + + If RuntimeWarnings occur in the context a RuntimeError is raised. 
+ """ + with warnings.catch_warnings(record=True) as _warnings: + yield + rw = [ + "{w.filename}:{w.lineno}:{w.message}".format(w=w) + for w in _warnings + if w.category == RuntimeWarning + ] + if rw: + raise RuntimeError( + "{} Runtime Warning{},\n{}".format( + len(rw), "" if len(rw) == 1 else "s", "\n".join(rw) + ) + ) + + +@contextlib.contextmanager +def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def] + """Passthrough loop context. + + Sets up and tears down a loop unless one is passed in via the loop + argument when it's passed straight through. + """ + if loop: + # loop already exists, pass it straight through + yield loop + else: + # this shadows loop_context's standard behavior + loop = setup_test_loop() + yield loop + teardown_test_loop(loop, fast=fast) + + +def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def] + """Fix pytest collecting for coroutines.""" + if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj): + return list(collector._genfunctions(name, obj)) + + +def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def] + """Run coroutines in an event loop instead of a normal function call.""" + fast = pyfuncitem.config.getoption("--aiohttp-fast") + if asyncio.iscoroutinefunction(pyfuncitem.function): + existing_loop = pyfuncitem.funcargs.get( + "proactor_loop" + ) or pyfuncitem.funcargs.get("loop", None) + with _runtime_warning_context(): + with _passthrough_loop_context(existing_loop, fast=fast) as _loop: + testargs = { + arg: pyfuncitem.funcargs[arg] + for arg in pyfuncitem._fixtureinfo.argnames + } + _loop.run_until_complete(pyfuncitem.obj(**testargs)) + + return True + + +def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def] + if "loop_factory" not in metafunc.fixturenames: + return + + loops = metafunc.config.option.aiohttp_loop + avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]] + avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy} + + if uvloop is not None: # pragma: no cover + avail_factories["uvloop"] = uvloop.EventLoopPolicy + + if loops == "all": + loops = "pyloop,uvloop?" 
+ + factories = {} # type: ignore[var-annotated] + for name in loops.split(","): + required = not name.endswith("?") + name = name.strip(" ?") + if name not in avail_factories: # pragma: no cover + if required: + raise ValueError( + "Unknown loop '%s', available loops: %s" + % (name, list(factories.keys())) + ) + else: + continue + factories[name] = avail_factories[name] + metafunc.parametrize( + "loop_factory", list(factories.values()), ids=list(factories.keys()) + ) + + +@pytest.fixture +def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def] + """Return an instance of the event loop.""" + policy = loop_factory() + asyncio.set_event_loop_policy(policy) + with loop_context(fast=fast) as _loop: + if loop_debug: + _loop.set_debug(True) # pragma: no cover + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def proactor_loop(): # type: ignore[no-untyped-def] + policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined] + asyncio.set_event_loop_policy(policy) + + with loop_context(policy.new_event_loop) as _loop: + asyncio.set_event_loop(_loop) + yield _loop + + +@pytest.fixture +def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]: + warnings.warn( + "Deprecated, use aiohttp_unused_port fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_unused_port + + +@pytest.fixture +def aiohttp_unused_port() -> Callable[[], int]: + """Return a port that is unused on the current host.""" + return _unused_port + + +@pytest.fixture +def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]: + """Factory to create a TestServer instance, given an app. + + aiohttp_server(app, **kwargs) + """ + servers = [] + + async def go( + app: Application, *, port: Optional[int] = None, **kwargs: Any + ) -> TestServer: + server = TestServer(app, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_server + + +@pytest.fixture +def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]: + """Factory to create a RawTestServer instance, given a web handler. + + aiohttp_raw_server(handler, **kwargs) + """ + servers = [] + + async def go( + handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + ) -> RawTestServer: + server = RawTestServer(handler, port=port) + await server.start_server(loop=loop, **kwargs) + servers.append(server) + return server + + yield go + + async def finalize() -> None: + while servers: + await servers.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover + aiohttp_raw_server, +): + warnings.warn( + "Deprecated, use aiohttp_raw_server fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_raw_server + + +@pytest.fixture +def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]: + """Factory to create a TestClient instance. 
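
A typical test written against this fixture might look like the following sketch (handler and route are illustrative):

    from aiohttp import web

    async def test_hello(aiohttp_client):
        async def hello(request):
            return web.Response(text="ok")

        app = web.Application()
        app.router.add_get("/", hello)
        client = await aiohttp_client(app)
        resp = await client.get("/")
        assert resp.status == 200
        assert await resp.text() == "ok"
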
+ + aiohttp_client(app, **kwargs) + aiohttp_client(server, **kwargs) + aiohttp_client(raw_server, **kwargs) + """ + clients = [] + + @overload + async def go( + __param: Application, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> TestClient[Request, Application]: ... + + @overload + async def go( + __param: BaseTestServer, + *, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> TestClient[BaseRequest, None]: ... + + async def go( + __param: Union[Application, BaseTestServer], + *args: Any, + server_kwargs: Optional[Dict[str, Any]] = None, + **kwargs: Any, + ) -> TestClient[Any, Any]: + if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] + __param, (Application, BaseTestServer) + ): + __param = __param(loop, *args, **kwargs) + kwargs = {} + else: + assert not args, "args should be empty" + + if isinstance(__param, Application): + server_kwargs = server_kwargs or {} + server = TestServer(__param, loop=loop, **server_kwargs) + client = TestClient(server, loop=loop, **kwargs) + elif isinstance(__param, BaseTestServer): + client = TestClient(__param, loop=loop, **kwargs) + else: + raise ValueError("Unknown argument type: %r" % type(__param)) + + await client.start_server() + clients.append(client) + return client + + yield go + + async def finalize() -> None: + while clients: + await clients.pop().close() + + loop.run_until_complete(finalize()) + + +@pytest.fixture +def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover + warnings.warn( + "Deprecated, use aiohttp_client fixture instead", + DeprecationWarning, + stacklevel=2, + ) + return aiohttp_client diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/streams.py b/deepseek/lib/python3.10/site-packages/aiohttp/streams.py new file mode 100644 index 0000000000000000000000000000000000000000..029d577b88cba2d64b445de0b7ddd33856f5079b --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/streams.py @@ -0,0 +1,723 @@ +import asyncio +import collections +import warnings +from typing import ( + Awaitable, + Callable, + Deque, + Final, + Generic, + List, + Optional, + Tuple, + TypeVar, +) + +from .base_protocol import BaseProtocol +from .helpers import ( + _EXC_SENTINEL, + BaseTimerContext, + TimerNoop, + set_exception, + set_result, +) +from .log import internal_logger + +__all__ = ( + "EMPTY_PAYLOAD", + "EofStream", + "StreamReader", + "DataQueue", +) + +_T = TypeVar("_T") + + +class EofStream(Exception): + """eof stream indication.""" + + +class AsyncStreamIterator(Generic[_T]): + + __slots__ = ("read_func",) + + def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None: + self.read_func = read_func + + def __aiter__(self) -> "AsyncStreamIterator[_T]": + return self + + async def __anext__(self) -> _T: + try: + rv = await self.read_func() + except EofStream: + raise StopAsyncIteration + if rv == b"": + raise StopAsyncIteration + return rv + + +class ChunkTupleAsyncStreamIterator: + + __slots__ = ("_stream",) + + def __init__(self, stream: "StreamReader") -> None: + self._stream = stream + + def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": + return self + + async def __anext__(self) -> Tuple[bytes, bool]: + rv = await self._stream.readchunk() + if rv == (b"", False): + raise StopAsyncIteration + return rv + + +class AsyncStreamReaderMixin: + + __slots__ = () + + def __aiter__(self) -> AsyncStreamIterator[bytes]: + return AsyncStreamIterator(self.readline) # type: ignore[attr-defined] + + def iter_chunked(self, 
n: int) -> AsyncStreamIterator[bytes]: + """Returns an asynchronous iterator that yields chunks of size n.""" + return AsyncStreamIterator(lambda: self.read(n)) # type: ignore[attr-defined] + + def iter_any(self) -> AsyncStreamIterator[bytes]: + """Yield all available data as soon as it is received.""" + return AsyncStreamIterator(self.readany) # type: ignore[attr-defined] + + def iter_chunks(self) -> ChunkTupleAsyncStreamIterator: + """Yield chunks of data as they are received by the server. + + The yielded objects are tuples + of (bytes, bool) as returned by the StreamReader.readchunk method. + """ + return ChunkTupleAsyncStreamIterator(self) # type: ignore[arg-type] + + +class StreamReader(AsyncStreamReaderMixin): + """An enhancement of asyncio.StreamReader. + + Supports asynchronous iteration by line, chunk or as available:: + + async for line in reader: + ... + async for chunk in reader.iter_chunked(1024): + ... + async for slice in reader.iter_any(): + ... + + """ + + __slots__ = ( + "_protocol", + "_low_water", + "_high_water", + "_loop", + "_size", + "_cursor", + "_http_chunk_splits", + "_buffer", + "_buffer_offset", + "_eof", + "_waiter", + "_eof_waiter", + "_exception", + "_timer", + "_eof_callbacks", + "_eof_counter", + "total_bytes", + ) + + def __init__( + self, + protocol: BaseProtocol, + limit: int, + *, + timer: Optional[BaseTimerContext] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + ) -> None: + self._protocol = protocol + self._low_water = limit + self._high_water = limit * 2 + if loop is None: + loop = asyncio.get_event_loop() + self._loop = loop + self._size = 0 + self._cursor = 0 + self._http_chunk_splits: Optional[List[int]] = None + self._buffer: Deque[bytes] = collections.deque() + self._buffer_offset = 0 + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._eof_waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._timer = TimerNoop() if timer is None else timer + self._eof_callbacks: List[Callable[[], None]] = [] + self._eof_counter = 0 + self.total_bytes = 0 + + def __repr__(self) -> str: + info = [self.__class__.__name__] + if self._size: + info.append("%d bytes" % self._size) + if self._eof: + info.append("eof") + if self._low_water != 2**16: # default limit + info.append("low=%d high=%d" % (self._low_water, self._high_water)) + if self._waiter: + info.append("w=%r" % self._waiter) + if self._exception: + info.append("e=%r" % self._exception) + return "<%s>" % " ".join(info) + + def get_read_buffer_limits(self) -> Tuple[int, int]: + return (self._low_water, self._high_water) + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + self._exception = exc + self._eof_callbacks.clear() + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_exception(waiter, exc, exc_cause) + + waiter = self._eof_waiter + if waiter is not None: + self._eof_waiter = None + set_exception(waiter, exc, exc_cause) + + def on_eof(self, callback: Callable[[], None]) -> None: + if self._eof: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + else: + self._eof_callbacks.append(callback) + + def feed_eof(self) -> None: + self._eof = True + + waiter = self._waiter + if waiter is not None: + self._waiter = None + set_result(waiter, None) + + waiter = self._eof_waiter + if waiter is not None: + 
            self._eof_waiter = None
+            set_result(waiter, None)
+
+        for cb in self._eof_callbacks:
+            try:
+                cb()
+            except Exception:
+                internal_logger.exception("Exception in eof callback")
+
+        self._eof_callbacks.clear()
+
+    def is_eof(self) -> bool:
+        """Return True if 'feed_eof' was called."""
+        return self._eof
+
+    def at_eof(self) -> bool:
+        """Return True if the buffer is empty and 'feed_eof' was called."""
+        return self._eof and not self._buffer
+
+    async def wait_eof(self) -> None:
+        if self._eof:
+            return
+
+        assert self._eof_waiter is None
+        self._eof_waiter = self._loop.create_future()
+        try:
+            await self._eof_waiter
+        finally:
+            self._eof_waiter = None
+
+    def unread_data(self, data: bytes) -> None:
+        """Rollback reading some data from the stream, inserting it at the buffer head."""
+        warnings.warn(
+            "unread_data() is deprecated "
+            "and will be removed in future releases (#3260)",
+            DeprecationWarning,
+            stacklevel=2,
+        )
+        if not data:
+            return
+
+        if self._buffer_offset:
+            self._buffer[0] = self._buffer[0][self._buffer_offset :]
+            self._buffer_offset = 0
+        self._size += len(data)
+        self._cursor -= len(data)
+        self._buffer.appendleft(data)
+        self._eof_counter = 0
+
+    # TODO: size is ignored, remove the param later
+    def feed_data(self, data: bytes, size: int = 0) -> None:
+        assert not self._eof, "feed_data after feed_eof"
+
+        if not data:
+            return
+
+        data_len = len(data)
+        self._size += data_len
+        self._buffer.append(data)
+        self.total_bytes += data_len
+
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, None)
+
+        if self._size > self._high_water and not self._protocol._reading_paused:
+            self._protocol.pause_reading()
+
+    def begin_http_chunk_receiving(self) -> None:
+        if self._http_chunk_splits is None:
+            if self.total_bytes:
+                raise RuntimeError(
+                    "Called begin_http_chunk_receiving when some data was already fed"
+                )
+            self._http_chunk_splits = []
+
+    def end_http_chunk_receiving(self) -> None:
+        if self._http_chunk_splits is None:
+            raise RuntimeError(
+                "Called end_chunk_receiving without calling "
+                "begin_chunk_receiving first"
+            )
+
+        # self._http_chunk_splits contains logical byte offsets from start of
+        # the body transfer. Each offset is the offset of the end of a chunk.
+        # "Logical" means bytes, accessible for a user.
+        # If no chunks containing logical data were received, current position
+        # is definitely zero.
+        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0
+
+        if self.total_bytes == pos:
+            # We should not add empty chunks here. So we check for that.
+            # Note, when chunked + gzip is used, we can receive a chunk
+            # of compressed data, but that data may not be enough for gzip FSM
+            # to yield any uncompressed data. That's why current position may
+            # not change after receiving a chunk.
+            return
+
+        self._http_chunk_splits.append(self.total_bytes)
+
+        # wake up readchunk when end of http chunk received
+        waiter = self._waiter
+        if waiter is not None:
+            self._waiter = None
+            set_result(waiter, None)
+
+    async def _wait(self, func_name: str) -> None:
+        if not self._protocol.connected:
+            raise RuntimeError("Connection closed.")
+
+        # StreamReader uses a future to link the protocol feed_data() method
+        # to a read coroutine. Running two read coroutines at the same time
+        # would have an unexpected behaviour. It would not be possible to know
+        # which coroutine would get the next data.
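+        # Editor's illustration (hypothetical call, not part of the original
+        # source): starting two reads on the same reader concurrently, e.g.
+        #
+        #     await asyncio.gather(reader.read(1), reader.readline())
+        #
+        # makes whichever coroutine arrives second hit the RuntimeError below.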
+ if self._waiter is not None: + raise RuntimeError( + "%s() called while another coroutine is " + "already waiting for incoming data" % func_name + ) + + waiter = self._waiter = self._loop.create_future() + try: + with self._timer: + await waiter + finally: + self._waiter = None + + async def readline(self) -> bytes: + return await self.readuntil() + + async def readuntil(self, separator: bytes = b"\n") -> bytes: + seplen = len(separator) + if seplen == 0: + raise ValueError("Separator should be at least one-byte string") + + if self._exception is not None: + raise self._exception + + chunk = b"" + chunk_size = 0 + not_enough = True + + while not_enough: + while self._buffer and not_enough: + offset = self._buffer_offset + ichar = self._buffer[0].find(separator, offset) + 1 + # Read from current offset to found separator or to the end. + data = self._read_nowait_chunk( + ichar - offset + seplen - 1 if ichar else -1 + ) + chunk += data + chunk_size += len(data) + if ichar: + not_enough = False + + if chunk_size > self._high_water: + raise ValueError("Chunk too big") + + if self._eof: + break + + if not_enough: + await self._wait("readuntil") + + return chunk + + async def read(self, n: int = -1) -> bytes: + if self._exception is not None: + raise self._exception + + # migration problem; with DataQueue you have to catch + # EofStream exception, so common way is to run payload.read() inside + # infinite loop. what can cause real infinite loop with StreamReader + # lets keep this code one major release. + if __debug__: + if self._eof and not self._buffer: + self._eof_counter = getattr(self, "_eof_counter", 0) + 1 + if self._eof_counter > 5: + internal_logger.warning( + "Multiple access to StreamReader in eof state, " + "might be infinite loop.", + stack_info=True, + ) + + if not n: + return b"" + + if n < 0: + # This used to just loop creating a new waiter hoping to + # collect everything in self._buffer, but that would + # deadlock if the subprocess sends more than self.limit + # bytes. So just call self.readany() until EOF. + blocks = [] + while True: + block = await self.readany() + if not block: + break + blocks.append(block) + return b"".join(blocks) + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("read") + + return self._read_nowait(n) + + async def readany(self) -> bytes: + if self._exception is not None: + raise self._exception + + # TODO: should be `if` instead of `while` + # because waiter maybe triggered on chunk end, + # without feeding any data + while not self._buffer and not self._eof: + await self._wait("readany") + + return self._read_nowait(-1) + + async def readchunk(self) -> Tuple[bytes, bool]: + """Returns a tuple of (data, end_of_http_chunk). + + When chunked transfer + encoding is used, end_of_http_chunk is a boolean indicating if the end + of the data corresponds to the end of a HTTP chunk , otherwise it is + always False. 
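+
+        Example (editor's sketch; ``reader`` is any StreamReader fed with
+        chunked-encoded data)::
+
+            data, end_of_http_chunk = await reader.readchunk()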
+ """ + while True: + if self._exception is not None: + raise self._exception + + while self._http_chunk_splits: + pos = self._http_chunk_splits.pop(0) + if pos == self._cursor: + return (b"", True) + if pos > self._cursor: + return (self._read_nowait(pos - self._cursor), True) + internal_logger.warning( + "Skipping HTTP chunk end due to data " + "consumption beyond chunk boundary" + ) + + if self._buffer: + return (self._read_nowait_chunk(-1), False) + # return (self._read_nowait(-1), False) + + if self._eof: + # Special case for signifying EOF. + # (b'', True) is not a final return value actually. + return (b"", False) + + await self._wait("readchunk") + + async def readexactly(self, n: int) -> bytes: + if self._exception is not None: + raise self._exception + + blocks: List[bytes] = [] + while n > 0: + block = await self.read(n) + if not block: + partial = b"".join(blocks) + raise asyncio.IncompleteReadError(partial, len(partial) + n) + blocks.append(block) + n -= len(block) + + return b"".join(blocks) + + def read_nowait(self, n: int = -1) -> bytes: + # default was changed to be consistent with .read(-1) + # + # I believe the most users don't know about the method and + # they are not affected. + if self._exception is not None: + raise self._exception + + if self._waiter and not self._waiter.done(): + raise RuntimeError( + "Called while some coroutine is waiting for incoming data." + ) + + return self._read_nowait(n) + + def _read_nowait_chunk(self, n: int) -> bytes: + first_buffer = self._buffer[0] + offset = self._buffer_offset + if n != -1 and len(first_buffer) - offset > n: + data = first_buffer[offset : offset + n] + self._buffer_offset += n + + elif offset: + self._buffer.popleft() + data = first_buffer[offset:] + self._buffer_offset = 0 + + else: + data = self._buffer.popleft() + + data_len = len(data) + self._size -= data_len + self._cursor += data_len + + chunk_splits = self._http_chunk_splits + # Prevent memory leak: drop useless chunk splits + while chunk_splits and chunk_splits[0] < self._cursor: + chunk_splits.pop(0) + + if self._size < self._low_water and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + + def _read_nowait(self, n: int) -> bytes: + """Read not more than n bytes, or whole buffer if n == -1""" + self._timer.assert_timeout() + + chunks = [] + while self._buffer: + chunk = self._read_nowait_chunk(n) + chunks.append(chunk) + if n != -1: + n -= len(chunk) + if n == 0: + break + + return b"".join(chunks) if chunks else b"" + + +class EmptyStreamReader(StreamReader): # lgtm [py/missing-call-to-init] + + __slots__ = ("_read_eof_chunk",) + + def __init__(self) -> None: + self._read_eof_chunk = False + + def __repr__(self) -> str: + return "<%s>" % self.__class__.__name__ + + def exception(self) -> Optional[BaseException]: + return None + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + pass + + def on_eof(self, callback: Callable[[], None]) -> None: + try: + callback() + except Exception: + internal_logger.exception("Exception in eof callback") + + def feed_eof(self) -> None: + pass + + def is_eof(self) -> bool: + return True + + def at_eof(self) -> bool: + return True + + async def wait_eof(self) -> None: + return + + def feed_data(self, data: bytes, n: int = 0) -> None: + pass + + async def readline(self) -> bytes: + return b"" + + async def read(self, n: int = -1) -> bytes: + return b"" + + # TODO add async def readuntil + + async def readany(self) -> bytes: + return b"" 
+ + async def readchunk(self) -> Tuple[bytes, bool]: + if not self._read_eof_chunk: + self._read_eof_chunk = True + return (b"", False) + + return (b"", True) + + async def readexactly(self, n: int) -> bytes: + raise asyncio.IncompleteReadError(b"", n) + + def read_nowait(self, n: int = -1) -> bytes: + return b"" + + +EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader() + + +class DataQueue(Generic[_T]): + """DataQueue is a general-purpose blocking queue with one reader.""" + + def __init__(self, loop: asyncio.AbstractEventLoop) -> None: + self._loop = loop + self._eof = False + self._waiter: Optional[asyncio.Future[None]] = None + self._exception: Optional[BaseException] = None + self._buffer: Deque[Tuple[_T, int]] = collections.deque() + + def __len__(self) -> int: + return len(self._buffer) + + def is_eof(self) -> bool: + return self._eof + + def at_eof(self) -> bool: + return self._eof and not self._buffer + + def exception(self) -> Optional[BaseException]: + return self._exception + + def set_exception( + self, + exc: BaseException, + exc_cause: BaseException = _EXC_SENTINEL, + ) -> None: + self._eof = True + self._exception = exc + if (waiter := self._waiter) is not None: + self._waiter = None + set_exception(waiter, exc, exc_cause) + + def feed_data(self, data: _T, size: int = 0) -> None: + self._buffer.append((data, size)) + if (waiter := self._waiter) is not None: + self._waiter = None + set_result(waiter, None) + + def feed_eof(self) -> None: + self._eof = True + if (waiter := self._waiter) is not None: + self._waiter = None + set_result(waiter, None) + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + if self._buffer: + data, _ = self._buffer.popleft() + return data + if self._exception is not None: + raise self._exception + raise EofStream + + def __aiter__(self) -> AsyncStreamIterator[_T]: + return AsyncStreamIterator(self.read) + + +class FlowControlDataQueue(DataQueue[_T]): + """FlowControlDataQueue resumes and pauses an underlying stream. + + It is a destination for parsed data. + + This class is deprecated and will be removed in version 4.0. 
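+
+    For example (editor's note), constructing it with ``limit=2**16`` sets the
+    internal pause threshold to ``limit * 2`` (128 KiB): once more than that
+    much parsed data is buffered, the transport is paused, and reading resumes
+    after the consumer drains back below the threshold.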
+ """ + + def __init__( + self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop + ) -> None: + super().__init__(loop=loop) + self._size = 0 + self._protocol = protocol + self._limit = limit * 2 + + def feed_data(self, data: _T, size: int = 0) -> None: + super().feed_data(data, size) + self._size += size + + if self._size > self._limit and not self._protocol._reading_paused: + self._protocol.pause_reading() + + async def read(self) -> _T: + if not self._buffer and not self._eof: + assert not self._waiter + self._waiter = self._loop.create_future() + try: + await self._waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._waiter = None + raise + if self._buffer: + data, size = self._buffer.popleft() + self._size -= size + if self._size < self._limit and self._protocol._reading_paused: + self._protocol.resume_reading() + return data + if self._exception is not None: + raise self._exception + raise EofStream diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/test_utils.py b/deepseek/lib/python3.10/site-packages/aiohttp/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..be6e9b3353eecdd4ad359bb1dc1fc378a4f9a236 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/test_utils.py @@ -0,0 +1,770 @@ +"""Utilities shared by tests.""" + +import asyncio +import contextlib +import gc +import inspect +import ipaddress +import os +import socket +import sys +import warnings +from abc import ABC, abstractmethod +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Generic, + Iterator, + List, + Optional, + Type, + TypeVar, + cast, + overload, +) +from unittest import IsolatedAsyncioTestCase, mock + +from aiosignal import Signal +from multidict import CIMultiDict, CIMultiDictProxy +from yarl import URL + +import aiohttp +from aiohttp.client import ( + _RequestContextManager, + _RequestOptions, + _WSRequestContextManager, +) + +from . import ClientSession, hdrs +from .abc import AbstractCookieJar +from .client_reqrep import ClientResponse +from .client_ws import ClientWebSocketResponse +from .helpers import sentinel +from .http import HttpVersion, RawRequestMessage +from .streams import EMPTY_PAYLOAD, StreamReader +from .typedefs import StrOrURL +from .web import ( + Application, + AppRunner, + BaseRequest, + BaseRunner, + Request, + Server, + ServerRunner, + SockSite, + UrlMappingMatchInfo, +) +from .web_protocol import _RequestHandler + +if TYPE_CHECKING: + from ssl import SSLContext +else: + SSLContext = None + +if sys.version_info >= (3, 11) and TYPE_CHECKING: + from typing import Unpack + +if sys.version_info >= (3, 11): + from typing import Self +else: + Self = Any + +_ApplicationNone = TypeVar("_ApplicationNone", Application, None) +_Request = TypeVar("_Request", bound=BaseRequest) + +REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin" + + +def get_unused_port_socket( + host: str, family: socket.AddressFamily = socket.AF_INET +) -> socket.socket: + return get_port_socket(host, 0, family) + + +def get_port_socket( + host: str, port: int, family: socket.AddressFamily +) -> socket.socket: + s = socket.socket(family, socket.SOCK_STREAM) + if REUSE_ADDRESS: + # Windows has different semantics for SO_REUSEADDR, + # so don't set it. 
Ref: + # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + s.bind((host, port)) + return s + + +def unused_port() -> int: + """Return a port that is unused on the current host.""" + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return cast(int, s.getsockname()[1]) + + +class BaseTestServer(ABC): + __test__ = False + + def __init__( + self, + *, + scheme: str = "", + loop: Optional[asyncio.AbstractEventLoop] = None, + host: str = "127.0.0.1", + port: Optional[int] = None, + skip_url_asserts: bool = False, + socket_factory: Callable[ + [str, int, socket.AddressFamily], socket.socket + ] = get_port_socket, + **kwargs: Any, + ) -> None: + self._loop = loop + self.runner: Optional[BaseRunner] = None + self._root: Optional[URL] = None + self.host = host + self.port = port + self._closed = False + self.scheme = scheme + self.skip_url_asserts = skip_url_asserts + self.socket_factory = socket_factory + + async def start_server( + self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any + ) -> None: + if self.runner: + return + self._loop = loop + self._ssl = kwargs.pop("ssl", None) + self.runner = await self._make_runner(handler_cancellation=True, **kwargs) + await self.runner.setup() + if not self.port: + self.port = 0 + absolute_host = self.host + try: + version = ipaddress.ip_address(self.host).version + except ValueError: + version = 4 + if version == 6: + absolute_host = f"[{self.host}]" + family = socket.AF_INET6 if version == 6 else socket.AF_INET + _sock = self.socket_factory(self.host, self.port, family) + self.host, self.port = _sock.getsockname()[:2] + site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl) + await site.start() + server = site._server + assert server is not None + sockets = server.sockets # type: ignore[attr-defined] + assert sockets is not None + self.port = sockets[0].getsockname()[1] + if not self.scheme: + self.scheme = "https" if self._ssl else "http" + self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}") + + @abstractmethod # pragma: no cover + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + pass + + def make_url(self, path: StrOrURL) -> URL: + assert self._root is not None + url = URL(path) + if not self.skip_url_asserts: + assert not url.absolute + return self._root.join(url) + else: + return URL(str(self._root) + str(path)) + + @property + def started(self) -> bool: + return self.runner is not None + + @property + def closed(self) -> bool: + return self._closed + + @property + def handler(self) -> Server: + # for backward compatibility + # web.Server instance + runner = self.runner + assert runner is not None + assert runner.server is not None + return runner.server + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run when the object is garbage collected, and on + exit when used as a context manager. 
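+
+        Example (editor's sketch; ``app`` is any prepared Application)::
+
+            async with TestServer(app) as server:
+                url = server.make_url("/")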
+ + """ + if self.started and not self.closed: + assert self.runner is not None + await self.runner.cleanup() + self._root = None + self.port = None + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> "BaseTestServer": + await self.start_server(loop=self._loop) + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> None: + await self.close() + + +class TestServer(BaseTestServer): + def __init__( + self, + app: Application, + *, + scheme: str = "", + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ): + self.app = app + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, **kwargs: Any) -> BaseRunner: + return AppRunner(self.app, **kwargs) + + +class RawTestServer(BaseTestServer): + def __init__( + self, + handler: _RequestHandler, + *, + scheme: str = "", + host: str = "127.0.0.1", + port: Optional[int] = None, + **kwargs: Any, + ) -> None: + self._handler = handler + super().__init__(scheme=scheme, host=host, port=port, **kwargs) + + async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner: + srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs) + return ServerRunner(srv, debug=debug, **kwargs) + + +class TestClient(Generic[_Request, _ApplicationNone]): + """ + A test client implementation. + + To write functional tests for aiohttp based servers. + + """ + + __test__ = False + + @overload + def __init__( + self: "TestClient[Request, Application]", + server: TestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + **kwargs: Any, + ) -> None: ... + @overload + def __init__( + self: "TestClient[_Request, None]", + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + **kwargs: Any, + ) -> None: ... + def __init__( + self, + server: BaseTestServer, + *, + cookie_jar: Optional[AbstractCookieJar] = None, + loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, + ) -> None: + if not isinstance(server, BaseTestServer): + raise TypeError( + "server must be TestServer instance, found type: %r" % type(server) + ) + self._server = server + self._loop = loop + if cookie_jar is None: + cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop) + self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) + self._session._retry_connection = False + self._closed = False + self._responses: List[ClientResponse] = [] + self._websockets: List[ClientWebSocketResponse] = [] + + async def start_server(self) -> None: + await self._server.start_server(loop=self._loop) + + @property + def host(self) -> str: + return self._server.host + + @property + def port(self) -> Optional[int]: + return self._server.port + + @property + def server(self) -> BaseTestServer: + return self._server + + @property + def app(self) -> _ApplicationNone: + return getattr(self._server, "app", None) # type: ignore[return-value] + + @property + def session(self) -> ClientSession: + """An internal aiohttp.ClientSession. 
+
+        Unlike the methods on the TestClient, client session requests
+        do not automatically include the host in the url queried, and
+        will require an absolute path to the resource.
+
+        """
+        return self._session
+
+    def make_url(self, path: StrOrURL) -> URL:
+        return self._server.make_url(path)
+
+    async def _request(
+        self, method: str, path: StrOrURL, **kwargs: Any
+    ) -> ClientResponse:
+        resp = await self._session.request(method, self.make_url(path), **kwargs)
+        # save it to close later
+        self._responses.append(resp)
+        return resp
+
+    if sys.version_info >= (3, 11) and TYPE_CHECKING:
+
+        def request(
+            self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
+        ) -> _RequestContextManager: ...
+
+        def get(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def options(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def head(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def post(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def put(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def patch(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+        def delete(
+            self,
+            path: StrOrURL,
+            **kwargs: Unpack[_RequestOptions],
+        ) -> _RequestContextManager: ...
+
+    else:
+
+        def request(
+            self, method: str, path: StrOrURL, **kwargs: Any
+        ) -> _RequestContextManager:
+            """Routes a request to the tested HTTP server.
+
+            The interface is identical to aiohttp.ClientSession.request,
+            except the loop kwarg is overridden by the instance used by the
+            test server.
+
+            """
+            return _RequestContextManager(self._request(method, path, **kwargs))
+
+        def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP GET request."""
+            return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
+
+        def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP POST request."""
+            return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
+
+        def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP OPTIONS request."""
+            return _RequestContextManager(
+                self._request(hdrs.METH_OPTIONS, path, **kwargs)
+            )
+
+        def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP HEAD request."""
+            return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
+
+        def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP PUT request."""
+            return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
+
+        def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP PATCH request."""
+            return _RequestContextManager(
+                self._request(hdrs.METH_PATCH, path, **kwargs)
+            )
+
+        def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
+            """Perform an HTTP DELETE request."""
+            return _RequestContextManager(
+                self._request(hdrs.METH_DELETE, path, **kwargs)
+            )
+
+    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
+        """Initiate websocket connection.
+
+        The API corresponds to aiohttp.ClientSession.ws_connect.
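+
+        Example (editor's sketch; assumes the app routes "/ws" to a
+        WebSocket handler)::
+
+            ws = await client.ws_connect("/ws")
+            await ws.send_str("hello")
+            msg = await ws.receive()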
+ + """ + return _WSRequestContextManager(self._ws_connect(path, **kwargs)) + + async def _ws_connect( + self, path: StrOrURL, **kwargs: Any + ) -> ClientWebSocketResponse: + ws = await self._session.ws_connect(self.make_url(path), **kwargs) + self._websockets.append(ws) + return ws + + async def close(self) -> None: + """Close all fixtures created by the test client. + + After that point, the TestClient is no longer usable. + + This is an idempotent function: running close multiple times + will not have any additional effects. + + close is also run on exit when used as a(n) (asynchronous) + context manager. + + """ + if not self._closed: + for resp in self._responses: + resp.close() + for ws in self._websockets: + await ws.close() + await self._session.close() + await self._server.close() + self._closed = True + + def __enter__(self) -> None: + raise TypeError("Use async with instead") + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + # __exit__ should exist in pair with __enter__ but never executed + pass # pragma: no cover + + async def __aenter__(self) -> Self: + await self.start_server() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc: Optional[BaseException], + tb: Optional[TracebackType], + ) -> None: + await self.close() + + +class AioHTTPTestCase(IsolatedAsyncioTestCase): + """A base class to allow for unittest web applications using aiohttp. + + Provides the following: + + * self.client (aiohttp.test_utils.TestClient): an aiohttp test client. + * self.loop (asyncio.BaseEventLoop): the event loop in which the + application and server are running. + * self.app (aiohttp.web.Application): the application returned by + self.get_application() + + Note that the TestClient's methods are asynchronous: you have to + execute function on the test client using asynchronous methods. + """ + + async def get_application(self) -> Application: + """Get application. + + This method should be overridden + to return the aiohttp.web.Application + object to test. + """ + return self.get_app() + + def get_app(self) -> Application: + """Obsolete method used to constructing web application. + + Use .get_application() coroutine instead. + """ + raise RuntimeError("Did you forget to define get_application()?") + + async def asyncSetUp(self) -> None: + self.loop = asyncio.get_running_loop() + return await self.setUpAsync() + + async def setUpAsync(self) -> None: + self.app = await self.get_application() + self.server = await self.get_server(self.app) + self.client = await self.get_client(self.server) + + await self.client.start_server() + + async def asyncTearDown(self) -> None: + return await self.tearDownAsync() + + async def tearDownAsync(self) -> None: + await self.client.close() + + async def get_server(self, app: Application) -> TestServer: + """Return a TestServer instance.""" + return TestServer(app, loop=self.loop) + + async def get_client(self, server: TestServer) -> TestClient[Request, Application]: + """Return a TestClient instance.""" + return TestClient(server, loop=self.loop) + + +def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any: + """ + A decorator dedicated to use with asynchronous AioHTTPTestCase test methods. + + In 3.8+, this does nothing. 
+ """ + warnings.warn( + "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+", + DeprecationWarning, + stacklevel=2, + ) + return func + + +_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop] + + +@contextlib.contextmanager +def loop_context( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False +) -> Iterator[asyncio.AbstractEventLoop]: + """A contextmanager that creates an event_loop, for test purposes. + + Handles the creation and cleanup of a test loop. + """ + loop = setup_test_loop(loop_factory) + yield loop + teardown_test_loop(loop, fast=fast) + + +def setup_test_loop( + loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, +) -> asyncio.AbstractEventLoop: + """Create and return an asyncio.BaseEventLoop instance. + + The caller should also call teardown_test_loop, + once they are done with the loop. + """ + loop = loop_factory() + asyncio.set_event_loop(loop) + return loop + + +def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None: + """Teardown and cleanup an event_loop created by setup_test_loop.""" + closed = loop.is_closed() + if not closed: + loop.call_soon(loop.stop) + loop.run_forever() + loop.close() + + if not fast: + gc.collect() + + asyncio.set_event_loop(None) + + +def _create_app_mock() -> mock.MagicMock: + def get_dict(app: Any, key: str) -> Any: + return app.__app_dict[key] + + def set_dict(app: Any, key: str, value: Any) -> None: + app.__app_dict[key] = value + + app = mock.MagicMock(spec=Application) + app.__app_dict = {} + app.__getitem__ = get_dict + app.__setitem__ = set_dict + + app._debug = False + app.on_response_prepare = Signal(app) + app.on_response_prepare.freeze() + return app + + +def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: + transport = mock.Mock() + + def get_extra_info(key: str) -> Optional[SSLContext]: + if key == "sslcontext": + return sslcontext + else: + return None + + transport.get_extra_info.side_effect = get_extra_info + return transport + + +def make_mocked_request( + method: str, + path: str, + headers: Any = None, + *, + match_info: Any = sentinel, + version: HttpVersion = HttpVersion(1, 1), + closing: bool = False, + app: Any = None, + writer: Any = sentinel, + protocol: Any = sentinel, + transport: Any = sentinel, + payload: StreamReader = EMPTY_PAYLOAD, + sslcontext: Optional[SSLContext] = None, + client_max_size: int = 1024**2, + loop: Any = ..., +) -> Request: + """Creates mocked web.Request testing purposes. + + Useful in unit tests, when spinning full web server is overkill or + specific conditions and errors are hard to trigger. 
+ """ + task = mock.Mock() + if loop is ...: + # no loop passed, try to get the current one if + # its is running as we need a real loop to create + # executor jobs to be able to do testing + # with a real executor + try: + loop = asyncio.get_running_loop() + except RuntimeError: + loop = mock.Mock() + loop.create_future.return_value = () + + if version < HttpVersion(1, 1): + closing = True + + if headers: + headers = CIMultiDictProxy(CIMultiDict(headers)) + raw_hdrs = tuple( + (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items() + ) + else: + headers = CIMultiDictProxy(CIMultiDict()) + raw_hdrs = () + + chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower() + + message = RawRequestMessage( + method, + path, + version, + headers, + raw_hdrs, + closing, + None, + False, + chunked, + URL(path), + ) + if app is None: + app = _create_app_mock() + + if transport is sentinel: + transport = _create_transport(sslcontext) + + if protocol is sentinel: + protocol = mock.Mock() + protocol.transport = transport + + if writer is sentinel: + writer = mock.Mock() + writer.write_headers = make_mocked_coro(None) + writer.write = make_mocked_coro(None) + writer.write_eof = make_mocked_coro(None) + writer.drain = make_mocked_coro(None) + writer.transport = transport + + protocol.transport = transport + protocol.writer = writer + + req = Request( + message, payload, protocol, writer, task, loop, client_max_size=client_max_size + ) + + match_info = UrlMappingMatchInfo( + {} if match_info is sentinel else match_info, mock.Mock() + ) + match_info.add_app(app) + req._match_info = match_info + + return req + + +def make_mocked_coro( + return_value: Any = sentinel, raise_exception: Any = sentinel +) -> Any: + """Creates a coroutine mock.""" + + async def mock_coro(*args: Any, **kwargs: Any) -> Any: + if raise_exception is not sentinel: + raise raise_exception + if not inspect.isawaitable(return_value): + return return_value + await return_value + + return mock.Mock(wraps=mock_coro) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/typedefs.py b/deepseek/lib/python3.10/site-packages/aiohttp/typedefs.py new file mode 100644 index 0000000000000000000000000000000000000000..cc8c0825b4e522f7d1b6cf0058564f322fb5a905 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/typedefs.py @@ -0,0 +1,69 @@ +import json +import os +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Iterable, + Mapping, + Protocol, + Tuple, + Union, +) + +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr +from yarl import URL, Query as _Query + +Query = _Query + +DEFAULT_JSON_ENCODER = json.dumps +DEFAULT_JSON_DECODER = json.loads + +if TYPE_CHECKING: + _CIMultiDict = CIMultiDict[str] + _CIMultiDictProxy = CIMultiDictProxy[str] + _MultiDict = MultiDict[str] + _MultiDictProxy = MultiDictProxy[str] + from http.cookies import BaseCookie, Morsel + + from .web import Request, StreamResponse +else: + _CIMultiDict = CIMultiDict + _CIMultiDictProxy = CIMultiDictProxy + _MultiDict = MultiDict + _MultiDictProxy = MultiDictProxy + +Byteish = Union[bytes, bytearray, memoryview] +JSONEncoder = Callable[[Any], str] +JSONDecoder = Callable[[str], Any] +LooseHeaders = Union[ + Mapping[str, str], + Mapping[istr, str], + _CIMultiDict, + _CIMultiDictProxy, + Iterable[Tuple[Union[str, istr], str]], +] +RawHeaders = Tuple[Tuple[bytes, bytes], ...] 
+StrOrURL = Union[str, URL] + +LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +LooseCookiesIterables = Iterable[ + Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] +] +LooseCookies = Union[ + LooseCookiesMappings, + LooseCookiesIterables, + "BaseCookie[str]", +] + +Handler = Callable[["Request"], Awaitable["StreamResponse"]] + + +class Middleware(Protocol): + def __call__( + self, request: "Request", handler: Handler + ) -> Awaitable["StreamResponse"]: ... + + +PathLike = Union[str, "os.PathLike[str]"] diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web.py b/deepseek/lib/python3.10/site-packages/aiohttp/web.py new file mode 100644 index 0000000000000000000000000000000000000000..f975b66533174ffe6b947d78412f638de5359561 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web.py @@ -0,0 +1,601 @@ +import asyncio +import logging +import os +import socket +import sys +import warnings +from argparse import ArgumentParser +from collections.abc import Iterable +from contextlib import suppress +from importlib import import_module +from typing import ( + Any, + Awaitable, + Callable, + Iterable as TypingIterable, + List, + Optional, + Set, + Type, + Union, + cast, +) + +from .abc import AbstractAccessLogger +from .helpers import AppKey as AppKey +from .log import access_logger +from .typedefs import PathLike +from .web_app import Application as Application, CleanupError as CleanupError +from .web_exceptions import ( + HTTPAccepted as HTTPAccepted, + HTTPBadGateway as HTTPBadGateway, + HTTPBadRequest as HTTPBadRequest, + HTTPClientError as HTTPClientError, + HTTPConflict as HTTPConflict, + HTTPCreated as HTTPCreated, + HTTPError as HTTPError, + HTTPException as HTTPException, + HTTPExpectationFailed as HTTPExpectationFailed, + HTTPFailedDependency as HTTPFailedDependency, + HTTPForbidden as HTTPForbidden, + HTTPFound as HTTPFound, + HTTPGatewayTimeout as HTTPGatewayTimeout, + HTTPGone as HTTPGone, + HTTPInsufficientStorage as HTTPInsufficientStorage, + HTTPInternalServerError as HTTPInternalServerError, + HTTPLengthRequired as HTTPLengthRequired, + HTTPMethodNotAllowed as HTTPMethodNotAllowed, + HTTPMisdirectedRequest as HTTPMisdirectedRequest, + HTTPMove as HTTPMove, + HTTPMovedPermanently as HTTPMovedPermanently, + HTTPMultipleChoices as HTTPMultipleChoices, + HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired, + HTTPNoContent as HTTPNoContent, + HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation, + HTTPNotAcceptable as HTTPNotAcceptable, + HTTPNotExtended as HTTPNotExtended, + HTTPNotFound as HTTPNotFound, + HTTPNotImplemented as HTTPNotImplemented, + HTTPNotModified as HTTPNotModified, + HTTPOk as HTTPOk, + HTTPPartialContent as HTTPPartialContent, + HTTPPaymentRequired as HTTPPaymentRequired, + HTTPPermanentRedirect as HTTPPermanentRedirect, + HTTPPreconditionFailed as HTTPPreconditionFailed, + HTTPPreconditionRequired as HTTPPreconditionRequired, + HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired, + HTTPRedirection as HTTPRedirection, + HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge, + HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge, + HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable, + HTTPRequestTimeout as HTTPRequestTimeout, + HTTPRequestURITooLong as HTTPRequestURITooLong, + HTTPResetContent as HTTPResetContent, + HTTPSeeOther as HTTPSeeOther, + HTTPServerError as HTTPServerError, + HTTPServiceUnavailable as 
HTTPServiceUnavailable, + HTTPSuccessful as HTTPSuccessful, + HTTPTemporaryRedirect as HTTPTemporaryRedirect, + HTTPTooManyRequests as HTTPTooManyRequests, + HTTPUnauthorized as HTTPUnauthorized, + HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons, + HTTPUnprocessableEntity as HTTPUnprocessableEntity, + HTTPUnsupportedMediaType as HTTPUnsupportedMediaType, + HTTPUpgradeRequired as HTTPUpgradeRequired, + HTTPUseProxy as HTTPUseProxy, + HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates, + HTTPVersionNotSupported as HTTPVersionNotSupported, + NotAppKeyWarning as NotAppKeyWarning, +) +from .web_fileresponse import FileResponse as FileResponse +from .web_log import AccessLogger +from .web_middlewares import ( + middleware as middleware, + normalize_path_middleware as normalize_path_middleware, +) +from .web_protocol import ( + PayloadAccessError as PayloadAccessError, + RequestHandler as RequestHandler, + RequestPayloadError as RequestPayloadError, +) +from .web_request import ( + BaseRequest as BaseRequest, + FileField as FileField, + Request as Request, +) +from .web_response import ( + ContentCoding as ContentCoding, + Response as Response, + StreamResponse as StreamResponse, + json_response as json_response, +) +from .web_routedef import ( + AbstractRouteDef as AbstractRouteDef, + RouteDef as RouteDef, + RouteTableDef as RouteTableDef, + StaticDef as StaticDef, + delete as delete, + get as get, + head as head, + options as options, + patch as patch, + post as post, + put as put, + route as route, + static as static, + view as view, +) +from .web_runner import ( + AppRunner as AppRunner, + BaseRunner as BaseRunner, + BaseSite as BaseSite, + GracefulExit as GracefulExit, + NamedPipeSite as NamedPipeSite, + ServerRunner as ServerRunner, + SockSite as SockSite, + TCPSite as TCPSite, + UnixSite as UnixSite, +) +from .web_server import Server as Server +from .web_urldispatcher import ( + AbstractResource as AbstractResource, + AbstractRoute as AbstractRoute, + DynamicResource as DynamicResource, + PlainResource as PlainResource, + PrefixedSubAppResource as PrefixedSubAppResource, + Resource as Resource, + ResourceRoute as ResourceRoute, + StaticResource as StaticResource, + UrlDispatcher as UrlDispatcher, + UrlMappingMatchInfo as UrlMappingMatchInfo, + View as View, +) +from .web_ws import ( + WebSocketReady as WebSocketReady, + WebSocketResponse as WebSocketResponse, + WSMsgType as WSMsgType, +) + +__all__ = ( + # web_app + "AppKey", + "Application", + "CleanupError", + # web_exceptions + "NotAppKeyWarning", + "HTTPAccepted", + "HTTPBadGateway", + "HTTPBadRequest", + "HTTPClientError", + "HTTPConflict", + "HTTPCreated", + "HTTPError", + "HTTPException", + "HTTPExpectationFailed", + "HTTPFailedDependency", + "HTTPForbidden", + "HTTPFound", + "HTTPGatewayTimeout", + "HTTPGone", + "HTTPInsufficientStorage", + "HTTPInternalServerError", + "HTTPLengthRequired", + "HTTPMethodNotAllowed", + "HTTPMisdirectedRequest", + "HTTPMove", + "HTTPMovedPermanently", + "HTTPMultipleChoices", + "HTTPNetworkAuthenticationRequired", + "HTTPNoContent", + "HTTPNonAuthoritativeInformation", + "HTTPNotAcceptable", + "HTTPNotExtended", + "HTTPNotFound", + "HTTPNotImplemented", + "HTTPNotModified", + "HTTPOk", + "HTTPPartialContent", + "HTTPPaymentRequired", + "HTTPPermanentRedirect", + "HTTPPreconditionFailed", + "HTTPPreconditionRequired", + "HTTPProxyAuthenticationRequired", + "HTTPRedirection", + "HTTPRequestEntityTooLarge", + "HTTPRequestHeaderFieldsTooLarge", + "HTTPRequestRangeNotSatisfiable", + 
"HTTPRequestTimeout", + "HTTPRequestURITooLong", + "HTTPResetContent", + "HTTPSeeOther", + "HTTPServerError", + "HTTPServiceUnavailable", + "HTTPSuccessful", + "HTTPTemporaryRedirect", + "HTTPTooManyRequests", + "HTTPUnauthorized", + "HTTPUnavailableForLegalReasons", + "HTTPUnprocessableEntity", + "HTTPUnsupportedMediaType", + "HTTPUpgradeRequired", + "HTTPUseProxy", + "HTTPVariantAlsoNegotiates", + "HTTPVersionNotSupported", + # web_fileresponse + "FileResponse", + # web_middlewares + "middleware", + "normalize_path_middleware", + # web_protocol + "PayloadAccessError", + "RequestHandler", + "RequestPayloadError", + # web_request + "BaseRequest", + "FileField", + "Request", + # web_response + "ContentCoding", + "Response", + "StreamResponse", + "json_response", + # web_routedef + "AbstractRouteDef", + "RouteDef", + "RouteTableDef", + "StaticDef", + "delete", + "get", + "head", + "options", + "patch", + "post", + "put", + "route", + "static", + "view", + # web_runner + "AppRunner", + "BaseRunner", + "BaseSite", + "GracefulExit", + "ServerRunner", + "SockSite", + "TCPSite", + "UnixSite", + "NamedPipeSite", + # web_server + "Server", + # web_urldispatcher + "AbstractResource", + "AbstractRoute", + "DynamicResource", + "PlainResource", + "PrefixedSubAppResource", + "Resource", + "ResourceRoute", + "StaticResource", + "UrlDispatcher", + "UrlMappingMatchInfo", + "View", + # web_ws + "WebSocketReady", + "WebSocketResponse", + "WSMsgType", + # web + "run_app", +) + + +try: + from ssl import SSLContext +except ImportError: # pragma: no cover + SSLContext = Any # type: ignore[misc,assignment] + +# Only display warning when using -Wdefault, -We, -X dev or similar. +warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True) + +HostSequence = TypingIterable[str] + + +async def _run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Union[PathLike, TypingIterable[PathLike], None] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Optional[Callable[..., None]] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + handler_cancellation: bool = False, +) -> None: + # An internal function to actually do all dirty job for application running + if asyncio.iscoroutine(app): + app = await app + + app = cast(Application, app) + + runner = AppRunner( + app, + handle_signals=handle_signals, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + keepalive_timeout=keepalive_timeout, + shutdown_timeout=shutdown_timeout, + handler_cancellation=handler_cancellation, + ) + + await runner.setup() + + sites: List[BaseSite] = [] + + try: + if host is not None: + if isinstance(host, (str, bytes, bytearray, memoryview)): + sites.append( + TCPSite( + runner, + host, + port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + else: + for h in host: + sites.append( + TCPSite( + runner, + h, + port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + 
) + elif path is None and sock is None or port is not None: + sites.append( + TCPSite( + runner, + port=port, + ssl_context=ssl_context, + backlog=backlog, + reuse_address=reuse_address, + reuse_port=reuse_port, + ) + ) + + if path is not None: + if isinstance(path, (str, os.PathLike)): + sites.append( + UnixSite( + runner, + path, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for p in path: + sites.append( + UnixSite( + runner, + p, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + + if sock is not None: + if not isinstance(sock, Iterable): + sites.append( + SockSite( + runner, + sock, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + else: + for s in sock: + sites.append( + SockSite( + runner, + s, + ssl_context=ssl_context, + backlog=backlog, + ) + ) + for site in sites: + await site.start() + + if print: # pragma: no branch + names = sorted(str(s.name) for s in runner.sites) + print( + "======== Running on {} ========\n" + "(Press CTRL+C to quit)".format(", ".join(names)) + ) + + # sleep forever by 1 hour intervals, + while True: + await asyncio.sleep(3600) + finally: + await runner.cleanup() + + +def _cancel_tasks( + to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop +) -> None: + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + +def run_app( + app: Union[Application, Awaitable[Application]], + *, + host: Optional[Union[str, HostSequence]] = None, + port: Optional[int] = None, + path: Union[PathLike, TypingIterable[PathLike], None] = None, + sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + shutdown_timeout: float = 60.0, + keepalive_timeout: float = 75.0, + ssl_context: Optional[SSLContext] = None, + print: Optional[Callable[..., None]] = print, + backlog: int = 128, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_format: str = AccessLogger.LOG_FORMAT, + access_log: Optional[logging.Logger] = access_logger, + handle_signals: bool = True, + reuse_address: Optional[bool] = None, + reuse_port: Optional[bool] = None, + handler_cancellation: bool = False, + loop: Optional[asyncio.AbstractEventLoop] = None, +) -> None: + """Run an app locally""" + if loop is None: + loop = asyncio.new_event_loop() + + # Configure if and only if in debugging mode and using the default logger + if loop.get_debug() and access_log and access_log.name == "aiohttp.access": + if access_log.level == logging.NOTSET: + access_log.setLevel(logging.DEBUG) + if not access_log.hasHandlers(): + access_log.addHandler(logging.StreamHandler()) + + main_task = loop.create_task( + _run_app( + app, + host=host, + port=port, + path=path, + sock=sock, + shutdown_timeout=shutdown_timeout, + keepalive_timeout=keepalive_timeout, + ssl_context=ssl_context, + print=print, + backlog=backlog, + access_log_class=access_log_class, + access_log_format=access_log_format, + access_log=access_log, + handle_signals=handle_signals, + reuse_address=reuse_address, + reuse_port=reuse_port, + handler_cancellation=handler_cancellation, + ) + ) + + try: + asyncio.set_event_loop(loop) + loop.run_until_complete(main_task) + except (GracefulExit, KeyboardInterrupt): # pragma: no cover + pass + 
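+    # Editor's note: the main task is cancelled first so that _run_app's own
+    # ``finally: await runner.cleanup()`` can run; only then are any remaining
+    # tasks (e.g. in-flight handlers) swept by _cancel_tasks below.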
finally: + try: + main_task.cancel() + with suppress(asyncio.CancelledError): + loop.run_until_complete(main_task) + finally: + _cancel_tasks(asyncio.all_tasks(loop), loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + loop.close() + + +def main(argv: List[str]) -> None: + arg_parser = ArgumentParser( + description="aiohttp.web Application server", prog="aiohttp.web" + ) + arg_parser.add_argument( + "entry_func", + help=( + "Callable returning the `aiohttp.web.Application` instance to " + "run. Should be specified in the 'module:function' syntax." + ), + metavar="entry-func", + ) + arg_parser.add_argument( + "-H", + "--hostname", + help="TCP/IP hostname to serve on (default: localhost)", + default=None, + ) + arg_parser.add_argument( + "-P", + "--port", + help="TCP/IP port to serve on (default: %(default)r)", + type=int, + default=8080, + ) + arg_parser.add_argument( + "-U", + "--path", + help="Unix file system path to serve on. Can be combined with hostname " + "to serve on both Unix and TCP.", + ) + args, extra_argv = arg_parser.parse_known_args(argv) + + # Import logic + mod_str, _, func_str = args.entry_func.partition(":") + if not func_str or not mod_str: + arg_parser.error("'entry-func' not in 'module:function' syntax") + if mod_str.startswith("."): + arg_parser.error("relative module names not supported") + try: + module = import_module(mod_str) + except ImportError as ex: + arg_parser.error(f"unable to import {mod_str}: {ex}") + try: + func = getattr(module, func_str) + except AttributeError: + arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}") + + # Compatibility logic + if args.path is not None and not hasattr(socket, "AF_UNIX"): + arg_parser.error( + "file system paths not supported by your operating environment" + ) + + logging.basicConfig(level=logging.DEBUG) + + if args.path and args.hostname is None: + host = port = None + else: + host = args.hostname or "localhost" + port = args.port + + app = func(extra_argv) + run_app(app, host=host, port=port, path=args.path) + arg_parser.exit(message="Stopped\n") + + +if __name__ == "__main__": # pragma: no branch + main(sys.argv[1:]) # pragma: no cover diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web_fileresponse.py b/deepseek/lib/python3.10/site-packages/aiohttp/web_fileresponse.py new file mode 100644 index 0000000000000000000000000000000000000000..be9cf87e0691b8bd2e69da4e79cc9467e85e0062 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web_fileresponse.py @@ -0,0 +1,418 @@ +import asyncio +import io +import os +import pathlib +import sys +from contextlib import suppress +from enum import Enum, auto +from mimetypes import MimeTypes +from stat import S_ISREG +from types import MappingProxyType +from typing import ( # noqa + IO, + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Final, + Iterator, + List, + Optional, + Set, + Tuple, + Union, + cast, +) + +from . 
import hdrs +from .abc import AbstractStreamWriter +from .helpers import ETAG_ANY, ETag, must_be_empty_body +from .typedefs import LooseHeaders, PathLike +from .web_exceptions import ( + HTTPForbidden, + HTTPNotFound, + HTTPNotModified, + HTTPPartialContent, + HTTPPreconditionFailed, + HTTPRequestRangeNotSatisfiable, +) +from .web_response import StreamResponse + +__all__ = ("FileResponse",) + +if TYPE_CHECKING: + from .web_request import BaseRequest + + +_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]] + + +NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE")) + +CONTENT_TYPES: Final[MimeTypes] = MimeTypes() + +# File extension to IANA encodings map that will be checked in the order defined. +ENCODING_EXTENSIONS = MappingProxyType( + {ext: CONTENT_TYPES.encodings_map[ext] for ext in (".br", ".gz")} +) + +FALLBACK_CONTENT_TYPE = "application/octet-stream" + +# Provide additional MIME type/extension pairs to be recognized. +# https://en.wikipedia.org/wiki/List_of_archive_formats#Compression_only +ADDITIONAL_CONTENT_TYPES = MappingProxyType( + { + "application/gzip": ".gz", + "application/x-brotli": ".br", + "application/x-bzip2": ".bz2", + "application/x-compress": ".Z", + "application/x-xz": ".xz", + } +) + + +class _FileResponseResult(Enum): + """The result of the file response.""" + + SEND_FILE = auto() # Ie a regular file to send + NOT_ACCEPTABLE = auto() # Ie a socket, or non-regular file + PRE_CONDITION_FAILED = auto() # Ie If-Match or If-None-Match failed + NOT_MODIFIED = auto() # 304 Not Modified + + +# Add custom pairs and clear the encodings map so guess_type ignores them. +CONTENT_TYPES.encodings_map.clear() +for content_type, extension in ADDITIONAL_CONTENT_TYPES.items(): + CONTENT_TYPES.add_type(content_type, extension) # type: ignore[attr-defined] + + +_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() + + +class FileResponse(StreamResponse): + """A response object can be used to send files.""" + + def __init__( + self, + path: PathLike, + chunk_size: int = 256 * 1024, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + ) -> None: + super().__init__(status=status, reason=reason, headers=headers) + + self._path = pathlib.Path(path) + self._chunk_size = chunk_size + + def _seek_and_read(self, fobj: IO[Any], offset: int, chunk_size: int) -> bytes: + fobj.seek(offset) + return fobj.read(chunk_size) # type: ignore[no-any-return] + + async def _sendfile_fallback( + self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + # To keep memory usage low,fobj is transferred in chunks + # controlled by the constructor's chunk_size argument. 
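+        # For example (editor's note), a 1 MiB count with the default 256 KiB
+        # chunk_size takes four executor reads and four writes, followed by a
+        # single drain once the loop finishes.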
+ + chunk_size = self._chunk_size + loop = asyncio.get_event_loop() + chunk = await loop.run_in_executor( + None, self._seek_and_read, fobj, offset, chunk_size + ) + while chunk: + await writer.write(chunk) + count = count - chunk_size + if count <= 0: + break + chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count)) + + await writer.drain() + return writer + + async def _sendfile( + self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int + ) -> AbstractStreamWriter: + writer = await super().prepare(request) + assert writer is not None + + if NOSENDFILE or self.compression: + return await self._sendfile_fallback(writer, fobj, offset, count) + + loop = request._loop + transport = request.transport + assert transport is not None + + try: + await loop.sendfile(transport, fobj, offset, count) + except NotImplementedError: + return await self._sendfile_fallback(writer, fobj, offset, count) + + await super().write_eof() + return writer + + @staticmethod + def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool: + if len(etags) == 1 and etags[0].value == ETAG_ANY: + return True + return any( + etag.value == etag_value for etag in etags if weak or not etag.is_weak + ) + + async def _not_modified( + self, request: "BaseRequest", etag_value: str, last_modified: float + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPNotModified.status_code) + self._length_check = False + self.etag = etag_value # type: ignore[assignment] + self.last_modified = last_modified # type: ignore[assignment] + # Delete any Content-Length headers provided by user. HTTP 304 + # should always have empty response body + return await super().prepare(request) + + async def _precondition_failed( + self, request: "BaseRequest" + ) -> Optional[AbstractStreamWriter]: + self.set_status(HTTPPreconditionFailed.status_code) + self.content_length = 0 + return await super().prepare(request) + + def _make_response( + self, request: "BaseRequest", accept_encoding: str + ) -> Tuple[ + _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str] + ]: + """Return the response result, io object, stat result, and encoding. + + If an uncompressed file is returned, the encoding is set to + :py:data:`None`. + + This method should be called from a thread executor + since it calls os.stat which may block. 
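+
+        For a cache-miss hit on a regular, uncompressed file the tuple looks,
+        illustratively, like::
+
+            (_FileResponseResult.SEND_FILE, <io.BufferedReader>, <os.stat_result>, None)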
+ """ + file_path, st, file_encoding = self._get_file_path_stat_encoding( + accept_encoding + ) + if not file_path: + return _FileResponseResult.NOT_ACCEPTABLE, None, st, None + + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.1-2 + if (ifmatch := request.if_match) is not None and not self._etag_match( + etag_value, ifmatch, weak=False + ): + return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding + + if ( + (unmodsince := request.if_unmodified_since) is not None + and ifmatch is None + and st.st_mtime > unmodsince.timestamp() + ): + return _FileResponseResult.PRE_CONDITION_FAILED, None, st, file_encoding + + # https://www.rfc-editor.org/rfc/rfc9110#section-13.1.2-2 + if (ifnonematch := request.if_none_match) is not None and self._etag_match( + etag_value, ifnonematch, weak=True + ): + return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding + + if ( + (modsince := request.if_modified_since) is not None + and ifnonematch is None + and st.st_mtime <= modsince.timestamp() + ): + return _FileResponseResult.NOT_MODIFIED, None, st, file_encoding + + fobj = file_path.open("rb") + with suppress(OSError): + # fstat() may not be available on all platforms + # Once we open the file, we want the fstat() to ensure + # the file has not changed between the first stat() + # and the open(). + st = os.stat(fobj.fileno()) + return _FileResponseResult.SEND_FILE, fobj, st, file_encoding + + def _get_file_path_stat_encoding( + self, accept_encoding: str + ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]: + file_path = self._path + for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): + if file_encoding not in accept_encoding: + continue + + compressed_path = file_path.with_suffix(file_path.suffix + file_extension) + with suppress(OSError): + # Do not follow symlinks and ignore any non-regular files. + st = compressed_path.lstat() + if S_ISREG(st.st_mode): + return compressed_path, st, file_encoding + + # Fallback to the uncompressed file + st = file_path.stat() + return file_path if S_ISREG(st.st_mode) else None, st, None + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + loop = asyncio.get_running_loop() + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + try: + response_result, fobj, st, file_encoding = await loop.run_in_executor( + None, self._make_response, request, accept_encoding + ) + except PermissionError: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + except OSError: + # Most likely to be FileNotFoundError or OSError for circular + # symlinks in python >= 3.13, so respond with 404. + self.set_status(HTTPNotFound.status_code) + return await super().prepare(request) + + # Forbid special files like sockets, pipes, devices, etc. 
+ if response_result is _FileResponseResult.NOT_ACCEPTABLE: + self.set_status(HTTPForbidden.status_code) + return await super().prepare(request) + + if response_result is _FileResponseResult.PRE_CONDITION_FAILED: + return await self._precondition_failed(request) + + if response_result is _FileResponseResult.NOT_MODIFIED: + etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}" + last_modified = st.st_mtime + return await self._not_modified(request, etag_value, last_modified) + + assert fobj is not None + try: + return await self._prepare_open_file(request, fobj, st, file_encoding) + finally: + # We do not await here because we do not want to wait + # for the executor to finish before returning the response + # so the connection can begin servicing another request + # as soon as possible. + close_future = loop.run_in_executor(None, fobj.close) + # Hold a strong reference to the future to prevent it from being + # garbage collected before it completes. + _CLOSE_FUTURES.add(close_future) + close_future.add_done_callback(_CLOSE_FUTURES.remove) + + async def _prepare_open_file( + self, + request: "BaseRequest", + fobj: io.BufferedReader, + st: os.stat_result, + file_encoding: Optional[str], + ) -> Optional[AbstractStreamWriter]: + status = self._status + file_size: int = st.st_size + file_mtime: float = st.st_mtime + count: int = file_size + start: Optional[int] = None + + if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp(): + # If-Range header check: + # condition = cached date >= last modification date + # return 206 if True else 200. + # if False: + # Range header would not be processed, return 200 + # if True but Range header missing + # return 200 + try: + rng = request.http_range + start = rng.start + end: Optional[int] = rng.stop + except ValueError: + # https://tools.ietf.org/html/rfc7233: + # A server generating a 416 (Range Not Satisfiable) response to + # a byte-range request SHOULD send a Content-Range header field + # with an unsatisfied-range value. + # The complete-length in a 416 response indicates the current + # length of the selected representation. + # + # Will do the same below. Many servers ignore this and do not + # send a Content-Range header with HTTP 416 + self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + # If a range request has been made, convert start, end slice + # notation into file pointer offset and count + if start is not None: + if start < 0 and end is None: # return tail of file + start += file_size + if start < 0: + # if Range:bytes=-1000 in request header but file size + # is only 200, there would be trouble without this + start = 0 + count = file_size - start + else: + # rfc7233:If the last-byte-pos value is + # absent, or if the value is greater than or equal to + # the current length of the representation data, + # the byte range is interpreted as the remainder + # of the representation (i.e., the server replaces the + # value of last-byte-pos with a value that is one less than + # the current length of the selected representation). + count = ( + min(end if end is not None else file_size, file_size) - start + ) + + if start >= file_size: + # HTTP 416 should be returned in this case. 
+ # + # According to https://tools.ietf.org/html/rfc7233: + # If a valid byte-range-set includes at least one + # byte-range-spec with a first-byte-pos that is less than + # the current length of the representation, or at least one + # suffix-byte-range-spec with a non-zero suffix-length, + # then the byte-range-set is satisfiable. Otherwise, the + # byte-range-set is unsatisfiable. + self._headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}" + self.set_status(HTTPRequestRangeNotSatisfiable.status_code) + return await super().prepare(request) + + status = HTTPPartialContent.status_code + # Even though you are sending the whole file, you should still + # return a HTTP 206 for a Range request. + self.set_status(status) + + # If the Content-Type header is not already set, guess it based on the + # extension of the request path. The encoding returned by guess_type + # can be ignored since the map was cleared above. + if hdrs.CONTENT_TYPE not in self._headers: + if sys.version_info >= (3, 13): + guesser = CONTENT_TYPES.guess_file_type + else: + guesser = CONTENT_TYPES.guess_type + self.content_type = guesser(self._path)[0] or FALLBACK_CONTENT_TYPE + + if file_encoding: + self._headers[hdrs.CONTENT_ENCODING] = file_encoding + self._headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING + # Disable compression if we are already sending + # a compressed file since we don't want to double + # compress. + self._compression = False + + self.etag = f"{st.st_mtime_ns:x}-{st.st_size:x}" # type: ignore[assignment] + self.last_modified = file_mtime # type: ignore[assignment] + self.content_length = count + + self._headers[hdrs.ACCEPT_RANGES] = "bytes" + + if status == HTTPPartialContent.status_code: + real_start = start + assert real_start is not None + self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( + real_start, real_start + count - 1, file_size + ) + + # If we are sending 0 bytes calling sendfile() will throw a ValueError + if count == 0 or must_be_empty_body(request.method, status): + return await super().prepare(request) + + # be aware that start could be None or int=0 here. 
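Before the code resumes with the computed offset, the range arithmetic above deserves a worked example. The sketch below restates the start/end-to-(offset, count) conversion as a pure function (to_offset_count is a hypothetical name; start/end follow the slice semantics produced by BaseRequest.http_range):

def to_offset_count(start, end, file_size):
    if start < 0 and end is None:   # suffix range, e.g. bytes=-1000
        start += file_size
        start = max(start, 0)       # suffix longer than the file -> whole file
        return start, file_size - start
    # general case: start >= 0; an absent or oversized end means "to EOF"
    # (a start at or beyond EOF is answered with 416 by the code above)
    count = min(end if end is not None else file_size, file_size) - start
    return start, count


assert to_offset_count(-1000, None, 200) == (0, 200)         # bytes=-1000, 200-byte file
assert to_offset_count(0, 500, 10_000) == (0, 500)           # bytes=0-499
assert to_offset_count(9_500, None, 10_000) == (9_500, 500)  # bytes=9500-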
+ offset = start or 0 + + return await self._sendfile(request, fobj, offset, count) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web_middlewares.py b/deepseek/lib/python3.10/site-packages/aiohttp/web_middlewares.py new file mode 100644 index 0000000000000000000000000000000000000000..2f1f5f58e6e38845d4d2d4ffdd2748fc519fa5bf --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web_middlewares.py @@ -0,0 +1,121 @@ +import re +from typing import TYPE_CHECKING, Tuple, Type, TypeVar + +from .typedefs import Handler, Middleware +from .web_exceptions import HTTPMove, HTTPPermanentRedirect +from .web_request import Request +from .web_response import StreamResponse +from .web_urldispatcher import SystemRoute + +__all__ = ( + "middleware", + "normalize_path_middleware", +) + +if TYPE_CHECKING: + from .web_app import Application + +_Func = TypeVar("_Func") + + +async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: + alt_request = request.clone(rel_url=path) + + match_info = await request.app.router.resolve(alt_request) + alt_request._match_info = match_info + + if match_info.http_exception is None: + return True, alt_request + + return False, request + + +def middleware(f: _Func) -> _Func: + f.__middleware_version__ = 1 # type: ignore[attr-defined] + return f + + +def normalize_path_middleware( + *, + append_slash: bool = True, + remove_slash: bool = False, + merge_slashes: bool = True, + redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, +) -> Middleware: + """Factory for producing a middleware that normalizes the path of a request. + + Normalizing means: + - Add or remove a trailing slash to the path. + - Double slashes are replaced by one. + + The middleware returns as soon as it finds a path that resolves + correctly. If both merge and append/remove are enabled, the order is: + 1) merge slashes + 2) append/remove slash + 3) both merge slashes and append/remove slash. + If the path resolves with at least one of those conditions, it will + redirect to the new path. + + Only one of `append_slash` and `remove_slash` can be enabled. If both + are `True`, the factory will raise an assertion error. + + If `append_slash` is `True`, the middleware will append a slash when + needed. If a resource is defined with a trailing slash and the request + comes without it, it will append it automatically. + + If `remove_slash` is `True`, `append_slash` must be `False`. When enabled, + the middleware will remove trailing slashes and redirect if the resource + is defined. + + If `merge_slashes` is `True`, merge multiple consecutive slashes in the + path into one. + """ + correct_configuration = not (append_slash and remove_slash) + assert correct_configuration, "Cannot both remove and append slash" + + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + if isinstance(request.match_info.route, SystemRoute): + paths_to_check = [] + if "?" in request.raw_path: + path, query = request.raw_path.split("?", 1) + query = "?"
+ query + else: + query = "" + path = request.raw_path + + if merge_slashes: + paths_to_check.append(re.sub("//+", "/", path)) + if append_slash and not request.path.endswith("/"): + paths_to_check.append(path + "/") + if remove_slash and request.path.endswith("/"): + paths_to_check.append(path[:-1]) + if merge_slashes and append_slash: + paths_to_check.append(re.sub("//+", "/", path + "/")) + if merge_slashes and remove_slash: + merged_slashes = re.sub("//+", "/", path) + paths_to_check.append(merged_slashes[:-1]) + + for path in paths_to_check: + path = re.sub("^//+", "/", path) # SECURITY: GHSA-v6wp-4m6f-gcjg + resolves, request = await _check_request_resolves(request, path) + if resolves: + raise redirect_class(request.raw_path + query) + + return await handler(request) + + return impl + + +def _fix_request_current_app(app: "Application") -> Middleware: + @middleware + async def impl(request: Request, handler: Handler) -> StreamResponse: + match_info = request.match_info + prev = match_info.current_app + match_info.current_app = app + try: + return await handler(request) + finally: + match_info.current_app = prev + + return impl diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web_protocol.py b/deepseek/lib/python3.10/site-packages/aiohttp/web_protocol.py new file mode 100644 index 0000000000000000000000000000000000000000..e8bb41abf9754203c0b22c9f1e76beef6c5feb59 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web_protocol.py @@ -0,0 +1,746 @@ +import asyncio +import asyncio.streams +import sys +import traceback +import warnings +from collections import deque +from contextlib import suppress +from html import escape as html_escape +from http import HTTPStatus +from logging import Logger +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Deque, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import attr +import yarl + +from .abc import AbstractAccessLogger, AbstractStreamWriter +from .base_protocol import BaseProtocol +from .helpers import ceil_timeout +from .http import ( + HttpProcessingError, + HttpRequestParser, + HttpVersion10, + RawRequestMessage, + StreamWriter, +) +from .http_exceptions import BadHttpMethod +from .log import access_logger, server_logger +from .streams import EMPTY_PAYLOAD, StreamReader +from .tcp_helpers import tcp_keepalive +from .web_exceptions import HTTPException, HTTPInternalServerError +from .web_log import AccessLogger +from .web_request import BaseRequest +from .web_response import Response, StreamResponse + +__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError") + +if TYPE_CHECKING: + from .web_server import Server + + +_RequestFactory = Callable[ + [ + RawRequestMessage, + StreamReader, + "RequestHandler", + AbstractStreamWriter, + "asyncio.Task[None]", + ], + BaseRequest, +] + +_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]] + +ERROR = RawRequestMessage( + "UNKNOWN", + "/", + HttpVersion10, + {}, # type: ignore[arg-type] + {}, # type: ignore[arg-type] + True, + None, + False, + False, + yarl.URL("/"), +) + + +class RequestPayloadError(Exception): + """Payload parsing error.""" + + +class PayloadAccessError(Exception): + """Payload was accessed after response was sent.""" + + +_PAYLOAD_ACCESS_ERROR = PayloadAccessError() + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class _ErrInfo: + status: int + exc: BaseException + message: str + + +_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] + + +class 
RequestHandler(BaseProtocol): + """HTTP protocol implementation. + + RequestHandler handles incoming HTTP request. It reads request line, + request headers and request payload and calls handle_request() method. + By default it always returns with 404 response. + + RequestHandler handles errors in incoming request, like bad + status line, bad headers or incomplete payload. If any error occurs, + connection gets closed. + + keepalive_timeout -- number of seconds before closing + keep-alive connection + + tcp_keepalive -- TCP keep-alive is on, default is on + + debug -- enable debug mode + + logger -- custom logger object + + access_log_class -- custom class for access_logger + + access_log -- custom logging object + + access_log_format -- access log format string + + loop -- Optional event loop + + max_line_size -- Optional maximum header line size + + max_field_size -- Optional maximum header field size + + max_headers -- Optional maximum header size + + timeout_ceil_threshold -- Optional value to specify + threshold to ceil() timeout + values + + """ + + __slots__ = ( + "_request_count", + "_keepalive", + "_manager", + "_request_handler", + "_request_factory", + "_tcp_keepalive", + "_next_keepalive_close_time", + "_keepalive_handle", + "_keepalive_timeout", + "_lingering_time", + "_messages", + "_message_tail", + "_handler_waiter", + "_waiter", + "_task_handler", + "_upgrade", + "_payload_parser", + "_request_parser", + "_reading_paused", + "logger", + "debug", + "access_log", + "access_logger", + "_close", + "_force_close", + "_current_request", + "_timeout_ceil_threshold", + "_request_in_progress", + ) + + def __init__( + self, + manager: "Server", + *, + loop: asyncio.AbstractEventLoop, + # Default should be high enough that it's likely longer than a reverse proxy. + keepalive_timeout: float = 3630, + tcp_keepalive: bool = True, + logger: Logger = server_logger, + access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log: Logger = access_logger, + access_log_format: str = AccessLogger.LOG_FORMAT, + debug: bool = False, + max_line_size: int = 8190, + max_headers: int = 32768, + max_field_size: int = 8190, + lingering_time: float = 10.0, + read_bufsize: int = 2**16, + auto_decompress: bool = True, + timeout_ceil_threshold: float = 5, + ): + super().__init__(loop) + + # _request_count is the number of requests processed with the same connection. 
+ self._request_count = 0 + self._keepalive = False + self._current_request: Optional[BaseRequest] = None + self._manager: Optional[Server] = manager + self._request_handler: Optional[_RequestHandler] = manager.request_handler + self._request_factory: Optional[_RequestFactory] = manager.request_factory + + self._tcp_keepalive = tcp_keepalive + # placeholder to be replaced on keepalive timeout setup + self._next_keepalive_close_time = 0.0 + self._keepalive_handle: Optional[asyncio.Handle] = None + self._keepalive_timeout = keepalive_timeout + self._lingering_time = float(lingering_time) + + self._messages: Deque[_MsgType] = deque() + self._message_tail = b"" + + self._waiter: Optional[asyncio.Future[None]] = None + self._handler_waiter: Optional[asyncio.Future[None]] = None + self._task_handler: Optional[asyncio.Task[None]] = None + + self._upgrade = False + self._payload_parser: Any = None + self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( + self, + loop, + read_bufsize, + max_line_size=max_line_size, + max_field_size=max_field_size, + max_headers=max_headers, + payload_exception=RequestPayloadError, + auto_decompress=auto_decompress, + ) + + self._timeout_ceil_threshold: float = 5 + try: + self._timeout_ceil_threshold = float(timeout_ceil_threshold) + except (TypeError, ValueError): + pass + + self.logger = logger + self.debug = debug + self.access_log = access_log + if access_log: + self.access_logger: Optional[AbstractAccessLogger] = access_log_class( + access_log, access_log_format + ) + else: + self.access_logger = None + + self._close = False + self._force_close = False + self._request_in_progress = False + + def __repr__(self) -> str: + return "<{} {}>".format( + self.__class__.__name__, + "connected" if self.transport is not None else "disconnected", + ) + + @property + def keepalive_timeout(self) -> float: + return self._keepalive_timeout + + async def shutdown(self, timeout: Optional[float] = 15.0) -> None: + """Do worker process exit preparations. + + We need to clean up everything and stop accepting requests. + It is especially important for keep-alive connections. + """ + self._force_close = True + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + # Wait for graceful handler completion + if self._request_in_progress: + # The future is only created when we are shutting + # down while the handler is still processing a request + # to avoid creating a future for every request. 
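The lazily created handler waiter set up just below is a reusable pattern: allocate the completion future only when a shutdown is actually waiting, so the per-request hot path never pays for one. A generic sketch under that assumption (Worker, handle, and shutdown are illustrative names, not aiohttp's):

import asyncio


class Worker:
    def __init__(self) -> None:
        self._busy = False
        self._done_waiter: asyncio.Future[None] | None = None

    async def handle(self) -> None:
        self._busy = True
        try:
            await asyncio.sleep(0.1)  # stand-in for real request handling
        finally:
            self._busy = False
            # Resolve the waiter only if a shutdown created one.
            if self._done_waiter is not None:
                self._done_waiter.set_result(None)

    async def shutdown(self, timeout: float) -> None:
        if self._busy:  # create the future only when needed
            self._done_waiter = asyncio.get_running_loop().create_future()
            try:
                await asyncio.wait_for(self._done_waiter, timeout)
            except asyncio.TimeoutError:
                pass  # fall through to forceful cancellation


async def main() -> None:
    w = Worker()
    task = asyncio.create_task(w.handle())
    await w.shutdown(1.0)
    await task


asyncio.run(main())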
+ self._handler_waiter = self._loop.create_future() + try: + async with ceil_timeout(timeout): + await self._handler_waiter + except (asyncio.CancelledError, asyncio.TimeoutError): + self._handler_waiter = None + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + # Then cancel handler and wait + try: + async with ceil_timeout(timeout): + if self._current_request is not None: + self._current_request._cancel(asyncio.CancelledError()) + + if self._task_handler is not None and not self._task_handler.done(): + await asyncio.shield(self._task_handler) + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + + # force-close non-idle handler + if self._task_handler is not None: + self._task_handler.cancel() + + self.force_close() + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + super().connection_made(transport) + + real_transport = cast(asyncio.Transport, transport) + if self._tcp_keepalive: + tcp_keepalive(real_transport) + + assert self._manager is not None + self._manager.connection_made(self, real_transport) + + loop = self._loop + if sys.version_info >= (3, 12): + task = asyncio.Task(self.start(), loop=loop, eager_start=True) + else: + task = loop.create_task(self.start()) + self._task_handler = task + + def connection_lost(self, exc: Optional[BaseException]) -> None: + if self._manager is None: + return + self._manager.connection_lost(self, exc) + + # Grab value before setting _manager to None. + handler_cancellation = self._manager.handler_cancellation + + self.force_close() + super().connection_lost(exc) + self._manager = None + self._request_factory = None + self._request_handler = None + self._request_parser = None + + if self._keepalive_handle is not None: + self._keepalive_handle.cancel() + + if self._current_request is not None: + if exc is None: + exc = ConnectionResetError("Connection lost") + self._current_request._cancel(exc) + + if handler_cancellation and self._task_handler is not None: + self._task_handler.cancel() + + self._task_handler = None + + if self._payload_parser is not None: + self._payload_parser.feed_eof() + self._payload_parser = None + + def set_parser(self, parser: Any) -> None: + # Actual type is WebReader + assert self._payload_parser is None + + self._payload_parser = parser + + if self._message_tail: + self._payload_parser.feed_data(self._message_tail) + self._message_tail = b"" + + def eof_received(self) -> None: + pass + + def data_received(self, data: bytes) -> None: + if self._force_close or self._close: + return + # parse http messages + messages: Sequence[_MsgType] + if self._payload_parser is None and not self._upgrade: + assert self._request_parser is not None + try: + messages, upgraded, tail = self._request_parser.feed_data(data) + except HttpProcessingError as exc: + messages = [ + (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD) + ] + upgraded = False + tail = b"" + + for msg, payload in messages or (): + self._request_count += 1 + self._messages.append((msg, payload)) + + waiter = self._waiter + if messages and waiter is not None and not waiter.done(): + # don't set result twice + waiter.set_result(None) + + self._upgrade = upgraded + if upgraded and tail: + self._message_tail = tail + + # no parser, just store + elif self._payload_parser is None and self._upgrade and data: + self._message_tail += data + + # feed payload + elif 
data: + eof, tail = self._payload_parser.feed_data(data) + if eof: + self.close() + + def keep_alive(self, val: bool) -> None: + """Set keep-alive connection mode. + + :param bool val: new state. + """ + self._keepalive = val + if self._keepalive_handle: + self._keepalive_handle.cancel() + self._keepalive_handle = None + + def close(self) -> None: + """Close connection. + + Stop accepting new pipelining messages and close + connection when handlers done processing messages. + """ + self._close = True + if self._waiter: + self._waiter.cancel() + + def force_close(self) -> None: + """Forcefully close connection.""" + self._force_close = True + if self._waiter: + self._waiter.cancel() + if self.transport is not None: + self.transport.close() + self.transport = None + + def log_access( + self, request: BaseRequest, response: StreamResponse, time: float + ) -> None: + if self.access_logger is not None and self.access_logger.enabled: + self.access_logger.log(request, response, self._loop.time() - time) + + def log_debug(self, *args: Any, **kw: Any) -> None: + if self.debug: + self.logger.debug(*args, **kw) + + def log_exception(self, *args: Any, **kw: Any) -> None: + self.logger.exception(*args, **kw) + + def _process_keepalive(self) -> None: + self._keepalive_handle = None + if self._force_close or not self._keepalive: + return + + loop = self._loop + now = loop.time() + close_time = self._next_keepalive_close_time + if now <= close_time: + # Keep alive close check fired too early, reschedule + self._keepalive_handle = loop.call_at(close_time, self._process_keepalive) + return + + # handler in idle state + if self._waiter and not self._waiter.done(): + self.force_close() + + async def _handle_request( + self, + request: BaseRequest, + start_time: float, + request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], + ) -> Tuple[StreamResponse, bool]: + self._request_in_progress = True + try: + try: + self._current_request = request + resp = await request_handler(request) + finally: + self._current_request = None + except HTTPException as exc: + resp = exc + resp, reset = await self.finish_response(request, resp, start_time) + except asyncio.CancelledError: + raise + except asyncio.TimeoutError as exc: + self.log_debug("Request handler timed out.", exc_info=exc) + resp = self.handle_error(request, 504) + resp, reset = await self.finish_response(request, resp, start_time) + except Exception as exc: + resp = self.handle_error(request, 500, exc) + resp, reset = await self.finish_response(request, resp, start_time) + else: + # Deprecation warning (See #2415) + if getattr(resp, "__http_exception__", False): + warnings.warn( + "returning HTTPException object is deprecated " + "(#2415) and will be removed, " + "please raise the exception instead", + DeprecationWarning, + ) + + resp, reset = await self.finish_response(request, resp, start_time) + finally: + self._request_in_progress = False + if self._handler_waiter is not None: + self._handler_waiter.set_result(None) + + return resp, reset + + async def start(self) -> None: + """Process incoming request. + + It reads request line, request headers and request payload, then + calls handle_request() method. Subclass has to override + handle_request(). start() handles various exceptions in request + or response handling. Connection is being closed always unless + keep_alive(True) specified. 
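_process_keepalive above re-arms itself with loop.call_at() when it fires before the deadline, rather than cancelling and recreating a timer handle on every request. A rough stand-alone sketch of that design (IdleCloser is a hypothetical class; the close action is a print stand-in):

import asyncio


class IdleCloser:
    def __init__(self, loop: asyncio.AbstractEventLoop, timeout: float) -> None:
        self._loop = loop
        self._timeout = timeout
        self._deadline = loop.time() + timeout
        loop.call_at(self._deadline, self._check)

    def touch(self) -> None:
        # Activity just moves the deadline; the pending timer is left alone.
        self._deadline = self._loop.time() + self._timeout

    def _check(self) -> None:
        if self._loop.time() <= self._deadline:
            # Fired early because touch() pushed the deadline out: re-arm.
            self._loop.call_at(self._deadline, self._check)
            return
        print("idle past deadline: close the connection here")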
+ """ + loop = self._loop + handler = asyncio.current_task(loop) + assert handler is not None + manager = self._manager + assert manager is not None + keepalive_timeout = self._keepalive_timeout + resp = None + assert self._request_factory is not None + assert self._request_handler is not None + + while not self._force_close: + if not self._messages: + try: + # wait for next request + self._waiter = loop.create_future() + await self._waiter + finally: + self._waiter = None + + message, payload = self._messages.popleft() + + start = loop.time() + + manager.requests_count += 1 + writer = StreamWriter(self, loop) + if isinstance(message, _ErrInfo): + # make request_factory work + request_handler = self._make_error_handler(message) + message = ERROR + else: + request_handler = self._request_handler + + request = self._request_factory(message, payload, self, writer, handler) + try: + # a new task is used for copy context vars (#3406) + coro = self._handle_request(request, start, request_handler) + if sys.version_info >= (3, 12): + task = asyncio.Task(coro, loop=loop, eager_start=True) + else: + task = loop.create_task(coro) + try: + resp, reset = await task + except ConnectionError: + self.log_debug("Ignored premature client disconnection") + break + + # Drop the processed task from asyncio.Task.all_tasks() early + del task + if reset: + self.log_debug("Ignored premature client disconnection 2") + break + + # notify server about keep-alive + self._keepalive = bool(resp.keep_alive) + + # check payload + if not payload.is_eof(): + lingering_time = self._lingering_time + if not self._force_close and lingering_time: + self.log_debug( + "Start lingering close timer for %s sec.", lingering_time + ) + + now = loop.time() + end_t = now + lingering_time + + try: + while not payload.is_eof() and now < end_t: + async with ceil_timeout(end_t - now): + # read and ignore + await payload.readany() + now = loop.time() + except (asyncio.CancelledError, asyncio.TimeoutError): + if ( + sys.version_info >= (3, 11) + and (t := asyncio.current_task()) + and t.cancelling() + ): + raise + + # if payload still uncompleted + if not payload.is_eof() and not self._force_close: + self.log_debug("Uncompleted request.") + self.close() + + payload.set_exception(_PAYLOAD_ACCESS_ERROR) + + except asyncio.CancelledError: + self.log_debug("Ignored premature client disconnection") + raise + except Exception as exc: + self.log_exception("Unhandled exception", exc_info=exc) + self.force_close() + finally: + if self.transport is None and resp is not None: + self.log_debug("Ignored premature client disconnection.") + elif not self._force_close: + if self._keepalive and not self._close: + # start keep-alive timer + if keepalive_timeout is not None: + now = loop.time() + close_time = now + keepalive_timeout + self._next_keepalive_close_time = close_time + if self._keepalive_handle is None: + self._keepalive_handle = loop.call_at( + close_time, self._process_keepalive + ) + else: + break + + # remove handler, close transport if no handlers left + if not self._force_close: + self._task_handler = None + if self.transport is not None: + self.transport.close() + + async def finish_response( + self, request: BaseRequest, resp: StreamResponse, start_time: float + ) -> Tuple[StreamResponse, bool]: + """Prepare the response and write_eof, then log access. + + This has to + be called within the context of any exception so the access logger + can get exception information. Returns True if the client disconnects + prematurely. 
+ """ + request._finish() + if self._request_parser is not None: + self._request_parser.set_upgraded(False) + self._upgrade = False + if self._message_tail: + self._request_parser.feed_data(self._message_tail) + self._message_tail = b"" + try: + prepare_meth = resp.prepare + except AttributeError: + if resp is None: + self.log_exception("Missing return statement on request handler") + else: + self.log_exception( + "Web-handler should return a response instance, " + "got {!r}".format(resp) + ) + exc = HTTPInternalServerError() + resp = Response( + status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers + ) + prepare_meth = resp.prepare + try: + await prepare_meth(request) + await resp.write_eof() + except ConnectionError: + self.log_access(request, resp, start_time) + return resp, True + + self.log_access(request, resp, start_time) + return resp, False + + def handle_error( + self, + request: BaseRequest, + status: int = 500, + exc: Optional[BaseException] = None, + message: Optional[str] = None, + ) -> StreamResponse: + """Handle errors. + + Returns HTTP response with specific status code. Logs additional + information. It always closes current connection. + """ + if self._request_count == 1 and isinstance(exc, BadHttpMethod): + # BadHttpMethod is common when a client sends non-HTTP + # or encrypted traffic to an HTTP port. This is expected + # to happen when connected to the public internet so we log + # it at the debug level as to not fill logs with noise. + self.logger.debug("Error handling request", exc_info=exc) + else: + self.log_exception("Error handling request", exc_info=exc) + + # some data already got sent, connection is broken + if request.writer.output_size > 0: + raise ConnectionError( + "Response is sent already, cannot send another response " + "with the error message" + ) + + ct = "text/plain" + if status == HTTPStatus.INTERNAL_SERVER_ERROR: + title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) + msg = HTTPStatus.INTERNAL_SERVER_ERROR.description + tb = None + if self.debug: + with suppress(Exception): + tb = traceback.format_exc() + + if "text/html" in request.headers.get("Accept", ""): + if tb: + tb = html_escape(tb) + msg = f"

Traceback:

\n
{tb}
" + message = ( + "" + "{title}" + "\n

{title}

" + "\n{msg}\n\n" + ).format(title=title, msg=msg) + ct = "text/html" + else: + if tb: + msg = tb + message = title + "\n\n" + msg + + resp = Response(status=status, text=message, content_type=ct) + resp.force_close() + + return resp + + def _make_error_handler( + self, err_info: _ErrInfo + ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]: + async def handler(request: BaseRequest) -> StreamResponse: + return self.handle_error( + request, err_info.status, err_info.exc, err_info.message + ) + + return handler diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web_request.py b/deepseek/lib/python3.10/site-packages/aiohttp/web_request.py new file mode 100644 index 0000000000000000000000000000000000000000..f11d49020a08daefb93ebc683c40542c7d556e5d --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web_request.py @@ -0,0 +1,916 @@ +import asyncio +import datetime +import io +import re +import socket +import string +import tempfile +import types +import warnings +from http.cookies import SimpleCookie +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Final, + Iterator, + Mapping, + MutableMapping, + Optional, + Pattern, + Tuple, + Union, + cast, +) +from urllib.parse import parse_qsl + +import attr +from multidict import ( + CIMultiDict, + CIMultiDictProxy, + MultiDict, + MultiDictProxy, + MultiMapping, +) +from yarl import URL + +from . import hdrs +from .abc import AbstractStreamWriter +from .helpers import ( + _SENTINEL, + DEBUG, + ETAG_ANY, + LIST_QUOTED_ETAG_RE, + ChainMapProxy, + ETag, + HeadersMixin, + parse_http_date, + reify, + sentinel, + set_exception, +) +from .http_parser import RawRequestMessage +from .http_writer import HttpVersion +from .multipart import BodyPartReader, MultipartReader +from .streams import EmptyStreamReader, StreamReader +from .typedefs import ( + DEFAULT_JSON_DECODER, + JSONDecoder, + LooseHeaders, + RawHeaders, + StrOrURL, +) +from .web_exceptions import HTTPRequestEntityTooLarge +from .web_response import StreamResponse + +__all__ = ("BaseRequest", "FileField", "Request") + + +if TYPE_CHECKING: + from .web_app import Application + from .web_protocol import RequestHandler + from .web_urldispatcher import UrlMappingMatchInfo + + +@attr.s(auto_attribs=True, frozen=True, slots=True) +class FileField: + name: str + filename: str + file: io.BufferedReader + content_type: str + headers: CIMultiDictProxy[str] + + +_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-" +# '-' at the end to prevent interpretation as range in a char class + +_TOKEN: Final[str] = rf"[{_TCHAR}]+" + +_QDTEXT: Final[str] = r"[{}]".format( + r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F))) +) +# qdtext includes 0x5C to escape 0x5D ('\]') +# qdtext excludes obs-text (because obsoleted, and encoding not specified) + +_QUOTED_PAIR: Final[str] = r"\\[\t !-~]" + +_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( + qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR +) + +_FORWARDED_PAIR: Final[str] = ( + r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( + token=_TOKEN, quoted_string=_QUOTED_STRING + ) +) + +_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") +# same pattern as _QUOTED_PAIR but contains a capture group + +_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR) + +############################################################ +# HTTP Request +############################################################ + + +class 
BaseRequest(MutableMapping[str, Any], HeadersMixin): + + POST_METHODS = { + hdrs.METH_PATCH, + hdrs.METH_POST, + hdrs.METH_PUT, + hdrs.METH_TRACE, + hdrs.METH_DELETE, + } + + ATTRS = HeadersMixin.ATTRS | frozenset( + [ + "_message", + "_protocol", + "_payload_writer", + "_payload", + "_headers", + "_method", + "_version", + "_rel_url", + "_post", + "_read_bytes", + "_state", + "_cache", + "_task", + "_client_max_size", + "_loop", + "_transport_sslcontext", + "_transport_peername", + ] + ) + _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None + _read_bytes: Optional[bytes] = None + + def __init__( + self, + message: RawRequestMessage, + payload: StreamReader, + protocol: "RequestHandler", + payload_writer: AbstractStreamWriter, + task: "asyncio.Task[None]", + loop: asyncio.AbstractEventLoop, + *, + client_max_size: int = 1024**2, + state: Optional[Dict[str, Any]] = None, + scheme: Optional[str] = None, + host: Optional[str] = None, + remote: Optional[str] = None, + ) -> None: + self._message = message + self._protocol = protocol + self._payload_writer = payload_writer + + self._payload = payload + self._headers: CIMultiDictProxy[str] = message.headers + self._method = message.method + self._version = message.version + self._cache: Dict[str, Any] = {} + url = message.url + if url.absolute: + if scheme is not None: + url = url.with_scheme(scheme) + if host is not None: + url = url.with_host(host) + # absolute URL is given, + # override auto-calculating url, host, and scheme + # all other properties should be good + self._cache["url"] = url + self._cache["host"] = url.host + self._cache["scheme"] = url.scheme + self._rel_url = url.relative() + else: + self._rel_url = url + if scheme is not None: + self._cache["scheme"] = scheme + if host is not None: + self._cache["host"] = host + + self._state = {} if state is None else state + self._task = task + self._client_max_size = client_max_size + self._loop = loop + + transport = protocol.transport + assert transport is not None + self._transport_sslcontext = transport.get_extra_info("sslcontext") + self._transport_peername = transport.get_extra_info("peername") + + if remote is not None: + self._cache["remote"] = remote + + def clone( + self, + *, + method: Union[str, _SENTINEL] = sentinel, + rel_url: Union[StrOrURL, _SENTINEL] = sentinel, + headers: Union[LooseHeaders, _SENTINEL] = sentinel, + scheme: Union[str, _SENTINEL] = sentinel, + host: Union[str, _SENTINEL] = sentinel, + remote: Union[str, _SENTINEL] = sentinel, + client_max_size: Union[int, _SENTINEL] = sentinel, + ) -> "BaseRequest": + """Clone itself with replacement some attributes. + + Creates and returns a new instance of Request object. If no parameters + are given, an exact copy is returned. If a parameter is not passed, it + will reuse the one from the current request object. 
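A clone never mutates the original request, which is what makes the speculative re-routing in web_middlewares.py (earlier in this diff) safe. A condensed sketch of that use, with resolves_elsewhere as an illustrative name:

from aiohttp import web


async def resolves_elsewhere(request: web.Request, path: str) -> bool:
    # Clone with a rewritten relative URL, then ask the router whether the
    # new path matches a real route (mirrors _check_request_resolves).
    alt_request = request.clone(rel_url=path)
    match_info = await request.app.router.resolve(alt_request)
    return match_info.http_exception is None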
+ """ + if self._read_bytes: + raise RuntimeError("Cannot clone request after reading its content") + + dct: Dict[str, Any] = {} + if method is not sentinel: + dct["method"] = method + if rel_url is not sentinel: + new_url: URL = URL(rel_url) + dct["url"] = new_url + dct["path"] = str(new_url) + if headers is not sentinel: + # a copy semantic + dct["headers"] = CIMultiDictProxy(CIMultiDict(headers)) + dct["raw_headers"] = tuple( + (k.encode("utf-8"), v.encode("utf-8")) + for k, v in dct["headers"].items() + ) + + message = self._message._replace(**dct) + + kwargs = {} + if scheme is not sentinel: + kwargs["scheme"] = scheme + if host is not sentinel: + kwargs["host"] = host + if remote is not sentinel: + kwargs["remote"] = remote + if client_max_size is sentinel: + client_max_size = self._client_max_size + + return self.__class__( + message, + self._payload, + self._protocol, + self._payload_writer, + self._task, + self._loop, + client_max_size=client_max_size, + state=self._state.copy(), + **kwargs, + ) + + @property + def task(self) -> "asyncio.Task[None]": + return self._task + + @property + def protocol(self) -> "RequestHandler": + return self._protocol + + @property + def transport(self) -> Optional[asyncio.Transport]: + if self._protocol is None: + return None + return self._protocol.transport + + @property + def writer(self) -> AbstractStreamWriter: + return self._payload_writer + + @property + def client_max_size(self) -> int: + return self._client_max_size + + @reify + def message(self) -> RawRequestMessage: + warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3) + return self._message + + @reify + def rel_url(self) -> URL: + return self._rel_url + + @reify + def loop(self) -> asyncio.AbstractEventLoop: + warnings.warn( + "request.loop property is deprecated", DeprecationWarning, stacklevel=2 + ) + return self._loop + + # MutableMapping API + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + ######## + + @reify + def secure(self) -> bool: + """A bool indicating if the request is handled with SSL.""" + return self.scheme == "https" + + @reify + def forwarded(self) -> Tuple[Mapping[str, str], ...]: + """A tuple containing all parsed Forwarded header(s). + + Makes an effort to parse Forwarded headers as specified by RFC 7239: + + - It adds one (immutable) dictionary per Forwarded 'field-value', ie + per proxy. The element corresponds to the data in the Forwarded + field-value added by the first proxy encountered by the client. Each + subsequent item corresponds to those added by later proxies. + - It checks that every value has valid syntax in general as specified + in section 4: either a 'token' or a 'quoted-string'. + - It un-escapes found escape sequences. + - It does NOT validate 'by' and 'for' contents as specified in section + 6. + - It does NOT validate 'host' contents (Host ABNF). + - It does NOT validate 'proto' contents for valid URI scheme names. 
+ + Returns a tuple containing one or more immutable dicts + """ + elems = [] + for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): + length = len(field_value) + pos = 0 + need_separator = False + elem: Dict[str, str] = {} + elems.append(types.MappingProxyType(elem)) + while 0 <= pos < length: + match = _FORWARDED_PAIR_RE.match(field_value, pos) + if match is not None: # got a valid forwarded-pair + if need_separator: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + else: + name, value, port = match.groups() + if value[0] == '"': + # quoted string: remove quotes and unescape + value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1]) + if port: + value += port + elem[name.lower()] = value + pos += len(match.group(0)) + need_separator = True + elif field_value[pos] == ",": # next forwarded-element + need_separator = False + elem = {} + elems.append(types.MappingProxyType(elem)) + pos += 1 + elif field_value[pos] == ";": # next forwarded-pair + need_separator = False + pos += 1 + elif field_value[pos] in " \t": + # Allow whitespace even between forwarded-pairs, though + # RFC 7239 doesn't. This simplifies code and is in line + # with Postel's law. + pos += 1 + else: + # bad syntax here, skip to next comma + pos = field_value.find(",", pos) + return tuple(elems) + + @reify + def scheme(self) -> str: + """A string representing the scheme of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(scheme=new_scheme) call. + - type of connection to peer: HTTPS if socket is SSL, HTTP otherwise. + + 'http' or 'https'. + """ + if self._transport_sslcontext: + return "https" + else: + return "http" + + @reify + def method(self) -> str: + """Read only property for getting HTTP method. + + The value is upper-cased str like 'GET', 'POST', 'PUT' etc. + """ + return self._method + + @reify + def version(self) -> HttpVersion: + """Read only property for getting HTTP version of request. + + Returns aiohttp.protocol.HttpVersion instance. + """ + return self._version + + @reify + def host(self) -> str: + """Hostname of the request. + + Hostname is resolved in this order: + + - overridden value by .clone(host=new_host) call. + - HOST HTTP header + - socket.getfqdn() value + + For example, 'example.com' or 'localhost:8080'. + + For historical reasons, the port number may be included. + """ + host = self._message.headers.get(hdrs.HOST) + if host is not None: + return host + return socket.getfqdn() + + @reify + def remote(self) -> Optional[str]: + """Remote IP of client initiated HTTP request. + + The IP is resolved in this order: + + - overridden value by .clone(remote=new_remote) call. + - peername of opened socket + """ + if self._transport_peername is None: + return None + if isinstance(self._transport_peername, (list, tuple)): + return str(self._transport_peername[0]) + return str(self._transport_peername) + + @reify + def url(self) -> URL: + """The full URL of the request.""" + # authority is used here because it may include the port number + # and we want yarl to parse it correctly + return URL.build(scheme=self.scheme, authority=self.host).join(self._rel_url) + + @reify + def path(self) -> str: + """The URL including *PATH INFO* without the host or scheme. + + E.g., ``/app/blog`` + """ + return self._rel_url.path + + @reify + def path_qs(self) -> str: + """The URL including PATH_INFO and the query string. 
+ + E.g, /app/blog?id=10 + """ + return str(self._rel_url) + + @reify + def raw_path(self) -> str: + """The URL including raw *PATH INFO* without the host or scheme. + + Warning, the path is unquoted and may contains non valid URL characters + + E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters`` + """ + return self._message.path + + @reify + def query(self) -> "MultiMapping[str]": + """A multidict with all the variables in the query string.""" + return self._rel_url.query + + @reify + def query_string(self) -> str: + """The query string in the URL. + + E.g., id=10 + """ + return self._rel_url.query_string + + @reify + def headers(self) -> CIMultiDictProxy[str]: + """A case-insensitive multidict proxy with all headers.""" + return self._headers + + @reify + def raw_headers(self) -> RawHeaders: + """A sequence of pairs for all headers.""" + return self._message.raw_headers + + @reify + def if_modified_since(self) -> Optional[datetime.datetime]: + """The value of If-Modified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) + + @reify + def if_unmodified_since(self) -> Optional[datetime.datetime]: + """The value of If-Unmodified-Since HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE)) + + @staticmethod + def _etag_values(etag_header: str) -> Iterator[ETag]: + """Extract `ETag` objects from raw header.""" + if etag_header == ETAG_ANY: + yield ETag( + is_weak=False, + value=ETAG_ANY, + ) + else: + for match in LIST_QUOTED_ETAG_RE.finditer(etag_header): + is_weak, value, garbage = match.group(2, 3, 4) + # Any symbol captured by 4th group means + # that the following sequence is invalid. + if garbage: + break + + yield ETag( + is_weak=bool(is_weak), + value=value, + ) + + @classmethod + def _if_match_or_none_impl( + cls, header_value: Optional[str] + ) -> Optional[Tuple[ETag, ...]]: + if not header_value: + return None + + return tuple(cls._etag_values(header_value)) + + @reify + def if_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) + + @reify + def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + """The value of If-None-Match HTTP header, or None. + + This header is represented as a `tuple` of `ETag` objects. + """ + return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) + + @reify + def if_range(self) -> Optional[datetime.datetime]: + """The value of If-Range HTTP header, or None. + + This header is represented as a `datetime` object. + """ + return parse_http_date(self.headers.get(hdrs.IF_RANGE)) + + @reify + def keep_alive(self) -> bool: + """Is keepalive enabled by client?""" + return not self._message.should_close + + @reify + def cookies(self) -> Mapping[str, str]: + """Return request cookies. + + A read-only dictionary-like object. + """ + raw = self.headers.get(hdrs.COOKIE, "") + parsed = SimpleCookie(raw) + return MappingProxyType({key: val.value for key, val in parsed.items()}) + + @reify + def http_range(self) -> slice: + """The content of Range HTTP header. + + Return a slice instance. 
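By example, the header-to-slice mapping implemented below (note that the slice stop is exclusive, while the Range header's last-byte-pos is inclusive):

# Range: bytes=0-499  -> slice(0, 500, 1)
# Range: bytes=500-   -> slice(500, None, 1)
# Range: bytes=-500   -> slice(-500, None, 1)   (the last 500 bytes)
# Malformed headers such as "bytes=500-100" raise ValueError.
body = b"x" * 1000
assert body[slice(-500, None, 1)] == body[-500:]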
+ + """ + rng = self._headers.get(hdrs.RANGE) + start, end = None, None + if rng is not None: + try: + pattern = r"^bytes=(\d*)-(\d*)$" + start, end = re.findall(pattern, rng)[0] + except IndexError: # pattern was not found in header + raise ValueError("range not in acceptable format") + + end = int(end) if end else None + start = int(start) if start else None + + if start is None and end is not None: + # end with no start is to return tail of content + start = -end + end = None + + if start is not None and end is not None: + # end is inclusive in range header, exclusive for slice + end += 1 + + if start >= end: + raise ValueError("start cannot be after end") + + if start is end is None: # No valid range supplied + raise ValueError("No start or end of range specified") + + return slice(start, end, 1) + + @reify + def content(self) -> StreamReader: + """Return raw payload stream.""" + return self._payload + + @property + def has_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + warnings.warn( + "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2 + ) + return not self._payload.at_eof() + + @property + def can_read_body(self) -> bool: + """Return True if request's HTTP BODY can be read, False otherwise.""" + return not self._payload.at_eof() + + @reify + def body_exists(self) -> bool: + """Return True if request has HTTP BODY, False otherwise.""" + return type(self._payload) is not EmptyStreamReader + + async def release(self) -> None: + """Release request. + + Eat unread part of HTTP BODY if present. + """ + while not self._payload.at_eof(): + await self._payload.readany() + + async def read(self) -> bytes: + """Read request body if present. + + Returns bytes object with full request content. 
+ """ + if self._read_bytes is None: + body = bytearray() + while True: + chunk = await self._payload.readany() + body.extend(chunk) + if self._client_max_size: + body_size = len(body) + if body_size >= self._client_max_size: + raise HTTPRequestEntityTooLarge( + max_size=self._client_max_size, actual_size=body_size + ) + if not chunk: + break + self._read_bytes = bytes(body) + return self._read_bytes + + async def text(self) -> str: + """Return BODY as text using encoding from .charset.""" + bytes_body = await self.read() + encoding = self.charset or "utf-8" + return bytes_body.decode(encoding) + + async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any: + """Return BODY as JSON.""" + body = await self.text() + return loads(body) + + async def multipart(self) -> MultipartReader: + """Return async iterator to process BODY as multipart.""" + return MultipartReader(self._headers, self._payload) + + async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": + """Return POST parameters.""" + if self._post is not None: + return self._post + if self._method not in self.POST_METHODS: + self._post = MultiDictProxy(MultiDict()) + return self._post + + content_type = self.content_type + if content_type not in ( + "", + "application/x-www-form-urlencoded", + "multipart/form-data", + ): + self._post = MultiDictProxy(MultiDict()) + return self._post + + out: MultiDict[Union[str, bytes, FileField]] = MultiDict() + + if content_type == "multipart/form-data": + multipart = await self.multipart() + max_size = self._client_max_size + + field = await multipart.next() + while field is not None: + size = 0 + field_ct = field.headers.get(hdrs.CONTENT_TYPE) + + if isinstance(field, BodyPartReader): + assert field.name is not None + + # Note that according to RFC 7578, the Content-Type header + # is optional, even for files, so we can't assume it's + # present. 
+ # https://tools.ietf.org/html/rfc7578#section-4.4 + if field.filename: + # store file in temp file + tmp = await self._loop.run_in_executor( + None, tempfile.TemporaryFile + ) + chunk = await field.read_chunk(size=2**16) + while chunk: + chunk = field.decode(chunk) + await self._loop.run_in_executor(None, tmp.write, chunk) + size += len(chunk) + if 0 < max_size < size: + await self._loop.run_in_executor(None, tmp.close) + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + chunk = await field.read_chunk(size=2**16) + await self._loop.run_in_executor(None, tmp.seek, 0) + + if field_ct is None: + field_ct = "application/octet-stream" + + ff = FileField( + field.name, + field.filename, + cast(io.BufferedReader, tmp), + field_ct, + field.headers, + ) + out.add(field.name, ff) + else: + # deal with ordinary data + value = await field.read(decode=True) + if field_ct is None or field_ct.startswith("text/"): + charset = field.get_charset(default="utf-8") + out.add(field.name, value.decode(charset)) + else: + out.add(field.name, value) + size += len(value) + if 0 < max_size < size: + raise HTTPRequestEntityTooLarge( + max_size=max_size, actual_size=size + ) + else: + raise ValueError( + "To decode nested multipart you need to use custom reader", + ) + + field = await multipart.next() + else: + data = await self.read() + if data: + charset = self.charset or "utf-8" + out.extend( + parse_qsl( + data.rstrip().decode(charset), + keep_blank_values=True, + encoding=charset, + ) + ) + + self._post = MultiDictProxy(out) + return self._post + + def get_extra_info(self, name: str, default: Any = None) -> Any: + """Extra info from protocol transport""" + protocol = self._protocol + if protocol is None: + return default + + transport = protocol.transport + if transport is None: + return default + + return transport.get_extra_info(name, default) + + def __repr__(self) -> str: + ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( + "ascii" + ) + return "<{} {} {} >".format( + self.__class__.__name__, self._method, ascii_encodable_path + ) + + def __eq__(self, other: object) -> bool: + return id(self) == id(other) + + def __bool__(self) -> bool: + return True + + async def _prepare_hook(self, response: StreamResponse) -> None: + return + + def _cancel(self, exc: BaseException) -> None: + set_exception(self._payload, exc) + + def _finish(self) -> None: + if self._post is None or self.content_type != "multipart/form-data": + return + + # NOTE: Release file descriptors for the + # NOTE: `tempfile.Temporaryfile`-created `_io.BufferedRandom` + # NOTE: instances of files sent within multipart request body + # NOTE: via HTTP POST request. 
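From the handler's side, the temp-file buffering above surfaces as FileField values in the mapping returned by post(). A hedged usage sketch (handle_form is an illustrative name; the temp files are closed automatically when the request finishes, see _finish below):

from aiohttp import web


async def handle_form(request: web.Request) -> web.Response:
    data = await request.post()  # files -> FileField, other fields -> str/bytes
    lines = []
    for name, value in data.items():
        if isinstance(value, web.FileField):
            # value.file is the temp file created above; read it here if
            # needed -- it is closed for you when the request completes.
            lines.append(f"{name}: file {value.filename} ({value.content_type})")
        else:
            lines.append(f"{name}: {value!r}")
    return web.Response(text="\n".join(lines))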
+ for file_name, file_field_object in self._post.items(): + if isinstance(file_field_object, FileField): + file_field_object.file.close() + + +class Request(BaseRequest): + + ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"]) + + _match_info: Optional["UrlMappingMatchInfo"] = None + + if DEBUG: + + def __setattr__(self, name: str, val: Any) -> None: + if name not in self.ATTRS: + warnings.warn( + "Setting custom {}.{} attribute " + "is discouraged".format(self.__class__.__name__, name), + DeprecationWarning, + stacklevel=2, + ) + super().__setattr__(name, val) + + def clone( + self, + *, + method: Union[str, _SENTINEL] = sentinel, + rel_url: Union[StrOrURL, _SENTINEL] = sentinel, + headers: Union[LooseHeaders, _SENTINEL] = sentinel, + scheme: Union[str, _SENTINEL] = sentinel, + host: Union[str, _SENTINEL] = sentinel, + remote: Union[str, _SENTINEL] = sentinel, + client_max_size: Union[int, _SENTINEL] = sentinel, + ) -> "Request": + ret = super().clone( + method=method, + rel_url=rel_url, + headers=headers, + scheme=scheme, + host=host, + remote=remote, + client_max_size=client_max_size, + ) + new_ret = cast(Request, ret) + new_ret._match_info = self._match_info + return new_ret + + @reify + def match_info(self) -> "UrlMappingMatchInfo": + """Result of route resolving.""" + match_info = self._match_info + assert match_info is not None + return match_info + + @property + def app(self) -> "Application": + """Application instance.""" + match_info = self._match_info + assert match_info is not None + return match_info.current_app + + @property + def config_dict(self) -> ChainMapProxy: + match_info = self._match_info + assert match_info is not None + lst = match_info.apps + app = self.app + idx = lst.index(app) + sublist = list(reversed(lst[: idx + 1])) + return ChainMapProxy(sublist) + + async def _prepare_hook(self, response: StreamResponse) -> None: + match_info = self._match_info + if match_info is None: + return + for app in match_info._apps: + if on_response_prepare := app.on_response_prepare: + await on_response_prepare.send(self, response) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web_response.py b/deepseek/lib/python3.10/site-packages/aiohttp/web_response.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2be24f1a35318bbe918fcfa56186b9f2af070c --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web_response.py @@ -0,0 +1,840 @@ +import asyncio +import collections.abc +import datetime +import enum +import json +import math +import time +import warnings +import zlib +from concurrent.futures import Executor +from http import HTTPStatus +from http.cookies import SimpleCookie +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Iterator, + MutableMapping, + Optional, + Union, + cast, +) + +from multidict import CIMultiDict, istr + +from . 
import hdrs, payload +from .abc import AbstractStreamWriter +from .compression_utils import ZLibCompressor +from .helpers import ( + ETAG_ANY, + QUOTED_ETAG_RE, + ETag, + HeadersMixin, + must_be_empty_body, + parse_http_date, + rfc822_formatted_time, + sentinel, + should_remove_content_length, + validate_etag_value, +) +from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11 +from .payload import Payload +from .typedefs import JSONEncoder, LooseHeaders + +REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus} +LARGE_BODY_SIZE = 1024**2 + +__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response") + + +if TYPE_CHECKING: + from .web_request import BaseRequest + + BaseClass = MutableMapping[str, Any] +else: + BaseClass = collections.abc.MutableMapping + + +# TODO(py311): Convert to StrEnum for wider use +class ContentCoding(enum.Enum): + # The content codings that we have support for. + # + # Additional registered codings are listed at: + # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding + deflate = "deflate" + gzip = "gzip" + identity = "identity" + + +CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding} + +############################################################ +# HTTP Response classes +############################################################ + + +class StreamResponse(BaseClass, HeadersMixin): + + _body: Union[None, bytes, bytearray, Payload] + _length_check = True + _body = None + _keep_alive: Optional[bool] = None + _chunked: bool = False + _compression: bool = False + _compression_strategy: int = zlib.Z_DEFAULT_STRATEGY + _compression_force: Optional[ContentCoding] = None + _req: Optional["BaseRequest"] = None + _payload_writer: Optional[AbstractStreamWriter] = None + _eof_sent: bool = False + _must_be_empty_body: Optional[bool] = None + _body_length = 0 + _cookies: Optional[SimpleCookie] = None + + def __init__( + self, + *, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + _real_headers: Optional[CIMultiDict[str]] = None, + ) -> None: + """Initialize a new stream response object. + + _real_headers is an internal parameter used to pass a pre-populated + headers object. It is used by the `Response` class to avoid copying + the headers when creating a new response object. It is not intended + to be used by external code. 
+ """ + self._state: Dict[str, Any] = {} + + if _real_headers is not None: + self._headers = _real_headers + elif headers is not None: + self._headers: CIMultiDict[str] = CIMultiDict(headers) + else: + self._headers = CIMultiDict() + + self._set_status(status, reason) + + @property + def prepared(self) -> bool: + return self._eof_sent or self._payload_writer is not None + + @property + def task(self) -> "Optional[asyncio.Task[None]]": + if self._req: + return self._req.task + else: + return None + + @property + def status(self) -> int: + return self._status + + @property + def chunked(self) -> bool: + return self._chunked + + @property + def compression(self) -> bool: + return self._compression + + @property + def reason(self) -> str: + return self._reason + + def set_status( + self, + status: int, + reason: Optional[str] = None, + ) -> None: + assert ( + not self.prepared + ), "Cannot change the response status code after the headers have been sent" + self._set_status(status, reason) + + def _set_status(self, status: int, reason: Optional[str]) -> None: + self._status = int(status) + if reason is None: + reason = REASON_PHRASES.get(self._status, "") + elif "\n" in reason: + raise ValueError("Reason cannot contain \\n") + self._reason = reason + + @property + def keep_alive(self) -> Optional[bool]: + return self._keep_alive + + def force_close(self) -> None: + self._keep_alive = False + + @property + def body_length(self) -> int: + return self._body_length + + @property + def output_length(self) -> int: + warnings.warn("output_length is deprecated", DeprecationWarning) + assert self._payload_writer + return self._payload_writer.buffer_size + + def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: + """Enables automatic chunked transfer encoding.""" + if hdrs.CONTENT_LENGTH in self._headers: + raise RuntimeError( + "You can't enable chunked encoding when a content length is set" + ) + if chunk_size is not None: + warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) + self._chunked = True + + def enable_compression( + self, + force: Optional[Union[bool, ContentCoding]] = None, + strategy: int = zlib.Z_DEFAULT_STRATEGY, + ) -> None: + """Enables response compression encoding.""" + # Backwards compatibility for when force was a bool <0.17. + if isinstance(force, bool): + force = ContentCoding.deflate if force else ContentCoding.identity + warnings.warn( + "Using boolean for force is deprecated #3318", DeprecationWarning + ) + elif force is not None: + assert isinstance( + force, ContentCoding + ), "force should one of None, bool or ContentEncoding" + + self._compression = True + self._compression_force = force + self._compression_strategy = strategy + + @property + def headers(self) -> "CIMultiDict[str]": + return self._headers + + @property + def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() + return self._cookies + + def set_cookie( + self, + name: str, + value: str, + *, + expires: Optional[str] = None, + domain: Optional[str] = None, + max_age: Optional[Union[int, str]] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + version: Optional[str] = None, + samesite: Optional[str] = None, + ) -> None: + """Set or update response cookie. + + Sets new cookie or updates existent with new value. + Also updates only those params which are not None. 
+ """ + if self._cookies is None: + self._cookies = SimpleCookie() + + self._cookies[name] = value + c = self._cookies[name] + + if expires is not None: + c["expires"] = expires + elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT": + del c["expires"] + + if domain is not None: + c["domain"] = domain + + if max_age is not None: + c["max-age"] = str(max_age) + elif "max-age" in c: + del c["max-age"] + + c["path"] = path + + if secure is not None: + c["secure"] = secure + if httponly is not None: + c["httponly"] = httponly + if version is not None: + c["version"] = version + if samesite is not None: + c["samesite"] = samesite + + def del_cookie( + self, + name: str, + *, + domain: Optional[str] = None, + path: str = "/", + secure: Optional[bool] = None, + httponly: Optional[bool] = None, + samesite: Optional[str] = None, + ) -> None: + """Delete cookie. + + Creates new empty expired cookie. + """ + # TODO: do we need domain/path here? + if self._cookies is not None: + self._cookies.pop(name, None) + self.set_cookie( + name, + "", + max_age=0, + expires="Thu, 01 Jan 1970 00:00:00 GMT", + domain=domain, + path=path, + secure=secure, + httponly=httponly, + samesite=samesite, + ) + + @property + def content_length(self) -> Optional[int]: + # Just a placeholder for adding setter + return super().content_length + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + if value is not None: + value = int(value) + if self._chunked: + raise RuntimeError( + "You can't set content length when chunked encoding is enable" + ) + self._headers[hdrs.CONTENT_LENGTH] = str(value) + else: + self._headers.pop(hdrs.CONTENT_LENGTH, None) + + @property + def content_type(self) -> str: + # Just a placeholder for adding setter + return super().content_type + + @content_type.setter + def content_type(self, value: str) -> None: + self.content_type # read header values if needed + self._content_type = str(value) + self._generate_content_type_header() + + @property + def charset(self) -> Optional[str]: + # Just a placeholder for adding setter + return super().charset + + @charset.setter + def charset(self, value: Optional[str]) -> None: + ctype = self.content_type # read header values if needed + if ctype == "application/octet-stream": + raise RuntimeError( + "Setting charset for application/octet-stream " + "doesn't make sense, setup content_type first" + ) + assert self._content_dict is not None + if value is None: + self._content_dict.pop("charset", None) + else: + self._content_dict["charset"] = str(value).lower() + self._generate_content_type_header() + + @property + def last_modified(self) -> Optional[datetime.datetime]: + """The value of Last-Modified HTTP header, or None. + + This header is represented as a `datetime` object. 
+ """ + return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED)) + + @last_modified.setter + def last_modified( + self, value: Optional[Union[int, float, datetime.datetime, str]] + ) -> None: + if value is None: + self._headers.pop(hdrs.LAST_MODIFIED, None) + elif isinstance(value, (int, float)): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value)) + ) + elif isinstance(value, datetime.datetime): + self._headers[hdrs.LAST_MODIFIED] = time.strftime( + "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple() + ) + elif isinstance(value, str): + self._headers[hdrs.LAST_MODIFIED] = value + + @property + def etag(self) -> Optional[ETag]: + quoted_value = self._headers.get(hdrs.ETAG) + if not quoted_value: + return None + elif quoted_value == ETAG_ANY: + return ETag(value=ETAG_ANY) + match = QUOTED_ETAG_RE.fullmatch(quoted_value) + if not match: + return None + is_weak, value = match.group(1, 2) + return ETag( + is_weak=bool(is_weak), + value=value, + ) + + @etag.setter + def etag(self, value: Optional[Union[ETag, str]]) -> None: + if value is None: + self._headers.pop(hdrs.ETAG, None) + elif (isinstance(value, str) and value == ETAG_ANY) or ( + isinstance(value, ETag) and value.value == ETAG_ANY + ): + self._headers[hdrs.ETAG] = ETAG_ANY + elif isinstance(value, str): + validate_etag_value(value) + self._headers[hdrs.ETAG] = f'"{value}"' + elif isinstance(value, ETag) and isinstance(value.value, str): + validate_etag_value(value.value) + hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"' + self._headers[hdrs.ETAG] = hdr_value + else: + raise ValueError( + f"Unsupported etag type: {type(value)}. " + f"etag must be str, ETag or None" + ) + + def _generate_content_type_header( + self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE + ) -> None: + assert self._content_dict is not None + assert self._content_type is not None + params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items()) + if params: + ctype = self._content_type + "; " + params + else: + ctype = self._content_type + self._headers[CONTENT_TYPE] = ctype + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if coding is ContentCoding.identity: + return + assert self._payload_writer is not None + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._payload_writer.enable_compression( + coding.value, self._compression_strategy + ) + # Compressed payload may have different content length, + # remove the header + self._headers.popall(hdrs.CONTENT_LENGTH, None) + + async def _start_compression(self, request: "BaseRequest") -> None: + if self._compression_force: + await self._do_start_compression(self._compression_force) + return + # Encoding comparisons should be case-insensitive + # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 + accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower() + for value, coding in CONTENT_CODINGS.items(): + if value in accept_encoding: + await self._do_start_compression(coding) + return + + async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + if self._eof_sent: + return None + if self._payload_writer is not None: + return self._payload_writer + self._must_be_empty_body = must_be_empty_body(request.method, self.status) + return await self._start(request) + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + self._req = request + writer = self._payload_writer = request._payload_writer + + await self._prepare_headers() + await 
request._prepare_hook(self) + await self._write_headers() + + return writer + + async def _prepare_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + keep_alive = self._keep_alive + if keep_alive is None: + keep_alive = request.keep_alive + self._keep_alive = keep_alive + + version = request.version + + headers = self._headers + if self._cookies: + for cookie in self._cookies.values(): + value = cookie.output(header="")[1:] + headers.add(hdrs.SET_COOKIE, value) + + if self._compression: + await self._start_compression(request) + + if self._chunked: + if version != HttpVersion11: + raise RuntimeError( + "Using chunked encoding is forbidden " + "for HTTP/{0.major}.{0.minor}".format(request.version) + ) + if not self._must_be_empty_body: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + elif self._length_check: # Disabled for WebSockets + writer.length = self.content_length + if writer.length is None: + if version >= HttpVersion11: + if not self._must_be_empty_body: + writer.enable_chunking() + headers[hdrs.TRANSFER_ENCODING] = "chunked" + elif not self._must_be_empty_body: + keep_alive = False + + # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2 + # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4 + if self._must_be_empty_body: + if hdrs.CONTENT_LENGTH in headers and should_remove_content_length( + request.method, self.status + ): + del headers[hdrs.CONTENT_LENGTH] + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10 + # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13 + if hdrs.TRANSFER_ENCODING in headers: + del headers[hdrs.TRANSFER_ENCODING] + elif (writer.length if self._length_check else self.content_length) != 0: + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 + headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream") + headers.setdefault(hdrs.DATE, rfc822_formatted_time()) + headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE) + + # connection header + if hdrs.CONNECTION not in headers: + if keep_alive: + if version == HttpVersion10: + headers[hdrs.CONNECTION] = "keep-alive" + elif version == HttpVersion11: + headers[hdrs.CONNECTION] = "close" + + async def _write_headers(self) -> None: + request = self._req + assert request is not None + writer = self._payload_writer + assert writer is not None + # status line + version = request.version + status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}" + await writer.write_headers(status_line, self._headers) + + async def write(self, data: bytes) -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + raise RuntimeError("Cannot call write() after write_eof()") + if self._payload_writer is None: + raise RuntimeError("Cannot call write() before prepare()") + + await self._payload_writer.write(data) + + async def drain(self) -> None: + assert not self._eof_sent, "EOF has already been sent" + assert self._payload_writer is not None, "Response has not been started" + warnings.warn( + "drain method is deprecated, use await resp.write()", + DeprecationWarning, + stacklevel=2, + ) + await self._payload_writer.drain() + + async def write_eof(self, data: bytes = b"") -> None: + assert isinstance( + data, (bytes, bytearray, memoryview) + ), "data argument must be byte-ish (%r)" % type(data) + + if self._eof_sent: + return + + assert self._payload_writer is not None, 
"Response has not been started" + + await self._payload_writer.write_eof(data) + self._eof_sent = True + self._req = None + self._body_length = self._payload_writer.output_size + self._payload_writer = None + + def __repr__(self) -> str: + if self._eof_sent: + info = "eof" + elif self.prepared: + assert self._req is not None + info = f"{self._req.method} {self._req.path} " + else: + info = "not prepared" + return f"<{self.__class__.__name__} {self.reason} {info}>" + + def __getitem__(self, key: str) -> Any: + return self._state[key] + + def __setitem__(self, key: str, value: Any) -> None: + self._state[key] = value + + def __delitem__(self, key: str) -> None: + del self._state[key] + + def __len__(self) -> int: + return len(self._state) + + def __iter__(self) -> Iterator[str]: + return iter(self._state) + + def __hash__(self) -> int: + return hash(id(self)) + + def __eq__(self, other: object) -> bool: + return self is other + + +class Response(StreamResponse): + + _compressed_body: Optional[bytes] = None + + def __init__( + self, + *, + body: Any = None, + status: int = 200, + reason: Optional[str] = None, + text: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: Optional[str] = None, + charset: Optional[str] = None, + zlib_executor_size: Optional[int] = None, + zlib_executor: Optional[Executor] = None, + ) -> None: + if body is not None and text is not None: + raise ValueError("body and text are not allowed together") + + if headers is None: + real_headers: CIMultiDict[str] = CIMultiDict() + elif not isinstance(headers, CIMultiDict): + real_headers = CIMultiDict(headers) + else: + real_headers = headers # = cast('CIMultiDict[str]', headers) + + if content_type is not None and "charset" in content_type: + raise ValueError("charset must not be in content_type argument") + + if text is not None: + if hdrs.CONTENT_TYPE in real_headers: + if content_type or charset: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + else: + # fast path for filling headers + if not isinstance(text, str): + raise TypeError("text argument must be str (%r)" % type(text)) + if content_type is None: + content_type = "text/plain" + if charset is None: + charset = "utf-8" + real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset + body = text.encode(charset) + text = None + elif hdrs.CONTENT_TYPE in real_headers: + if content_type is not None or charset is not None: + raise ValueError( + "passing both Content-Type header and " + "content_type or charset params " + "is forbidden" + ) + elif content_type is not None: + if charset is not None: + content_type += "; charset=" + charset + real_headers[hdrs.CONTENT_TYPE] = content_type + + super().__init__(status=status, reason=reason, _real_headers=real_headers) + + if text is not None: + self.text = text + else: + self.body = body + + self._zlib_executor_size = zlib_executor_size + self._zlib_executor = zlib_executor + + @property + def body(self) -> Optional[Union[bytes, Payload]]: + return self._body + + @body.setter + def body(self, body: Any) -> None: + if body is None: + self._body = None + elif isinstance(body, (bytes, bytearray)): + self._body = body + else: + try: + self._body = body = payload.PAYLOAD_REGISTRY.get(body) + except payload.LookupError: + raise ValueError("Unsupported body type %r" % type(body)) + + headers = self._headers + + # set content-type + if hdrs.CONTENT_TYPE not in headers: + headers[hdrs.CONTENT_TYPE] = body.content_type + + # 
copy payload headers + if body.headers: + for key, value in body.headers.items(): + if key not in headers: + headers[key] = value + + self._compressed_body = None + + @property + def text(self) -> Optional[str]: + if self._body is None: + return None + return self._body.decode(self.charset or "utf-8") + + @text.setter + def text(self, text: str) -> None: + assert text is None or isinstance( + text, str + ), "text argument must be str (%r)" % type(text) + + if self.content_type == "application/octet-stream": + self.content_type = "text/plain" + if self.charset is None: + self.charset = "utf-8" + + self._body = text.encode(self.charset) + self._compressed_body = None + + @property + def content_length(self) -> Optional[int]: + if self._chunked: + return None + + if hdrs.CONTENT_LENGTH in self._headers: + return int(self._headers[hdrs.CONTENT_LENGTH]) + + if self._compressed_body is not None: + # Return length of the compressed body + return len(self._compressed_body) + elif isinstance(self._body, Payload): + # A payload without content length, or a compressed payload + return None + elif self._body is not None: + return len(self._body) + else: + return 0 + + @content_length.setter + def content_length(self, value: Optional[int]) -> None: + raise RuntimeError("Content length is set automatically") + + async def write_eof(self, data: bytes = b"") -> None: + if self._eof_sent: + return + if self._compressed_body is None: + body: Optional[Union[bytes, Payload]] = self._body + else: + body = self._compressed_body + assert not data, f"data arg is not supported, got {data!r}" + assert self._req is not None + assert self._payload_writer is not None + if body is None or self._must_be_empty_body: + await super().write_eof() + elif isinstance(self._body, Payload): + await self._body.write(self._payload_writer) + await super().write_eof() + else: + await super().write_eof(cast(bytes, body)) + + async def _start(self, request: "BaseRequest") -> AbstractStreamWriter: + if hdrs.CONTENT_LENGTH in self._headers: + if should_remove_content_length(request.method, self.status): + del self._headers[hdrs.CONTENT_LENGTH] + elif not self._chunked: + if isinstance(self._body, Payload): + if self._body.size is not None: + self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size) + else: + body_len = len(self._body) if self._body else "0" + # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7 + if body_len != "0" or ( + self.status != 304 and request.method not in hdrs.METH_HEAD_ALL + ): + self._headers[hdrs.CONTENT_LENGTH] = str(body_len) + + return await super()._start(request) + + async def _do_start_compression(self, coding: ContentCoding) -> None: + if self._chunked or isinstance(self._body, Payload): + return await super()._do_start_compression(coding) + if coding is ContentCoding.identity: + return + # Instead of using _payload_writer.enable_compression, + # compress the whole body + compressor = ZLibCompressor( + encoding=coding.value, + max_sync_chunk_size=self._zlib_executor_size, + executor=self._zlib_executor, + ) + assert self._body is not None + if self._zlib_executor_size is None and len(self._body) > LARGE_BODY_SIZE: + warnings.warn( + "Synchronous compression of large response bodies " + f"({len(self._body)} bytes) might block the async event loop. " + "Consider providing a custom value to zlib_executor_size/" + "zlib_executor response properties or disabling compression on it." 
+ ) + self._compressed_body = ( + await compressor.compress(self._body) + compressor.flush() + ) + self._headers[hdrs.CONTENT_ENCODING] = coding.value + self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body)) + + +def json_response( + data: Any = sentinel, + *, + text: Optional[str] = None, + body: Optional[bytes] = None, + status: int = 200, + reason: Optional[str] = None, + headers: Optional[LooseHeaders] = None, + content_type: str = "application/json", + dumps: JSONEncoder = json.dumps, +) -> Response: + if data is not sentinel: + if text or body: + raise ValueError("only one of data, text, or body should be specified") + else: + text = dumps(data) + return Response( + text=text, + body=body, + status=status, + reason=reason, + headers=headers, + content_type=content_type, + ) diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py b/deepseek/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py new file mode 100644 index 0000000000000000000000000000000000000000..6443c500a3339f9c67b086bcb860970280a9ef3d --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py @@ -0,0 +1,1301 @@ +import abc +import asyncio +import base64 +import functools +import hashlib +import html +import inspect +import keyword +import os +import re +import sys +import warnings +from functools import wraps +from pathlib import Path +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Awaitable, + Callable, + Container, + Dict, + Final, + Generator, + Iterable, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Pattern, + Set, + Sized, + Tuple, + Type, + TypedDict, + Union, + cast, +) + +from yarl import URL, __version__ as yarl_version + +from . import hdrs +from .abc import AbstractMatchInfo, AbstractRouter, AbstractView +from .helpers import DEBUG +from .http import HttpVersion11 +from .typedefs import Handler, PathLike +from .web_exceptions import ( + HTTPException, + HTTPExpectationFailed, + HTTPForbidden, + HTTPMethodNotAllowed, + HTTPNotFound, +) +from .web_fileresponse import FileResponse +from .web_request import Request +from .web_response import Response, StreamResponse +from .web_routedef import AbstractRouteDef + +__all__ = ( + "UrlDispatcher", + "UrlMappingMatchInfo", + "AbstractResource", + "Resource", + "PlainResource", + "DynamicResource", + "AbstractRoute", + "ResourceRoute", + "StaticResource", + "View", +) + + +if TYPE_CHECKING: + from .web_app import Application + + BaseDict = Dict[str, str] +else: + BaseDict = dict + +CIRCULAR_SYMLINK_ERROR = ( + (OSError,) + if sys.version_info < (3, 10) and sys.platform.startswith("win32") + else (RuntimeError,) if sys.version_info < (3, 13) else () +) + +YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) + +HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( + r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" +) +ROUTE_RE: Final[Pattern[str]] = re.compile( + r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})" +) +PATH_SEP: Final[str] = re.escape("/") + + +_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] +_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] + +html_escape = functools.partial(html.escape, quote=True) + + +class _InfoDict(TypedDict, total=False): + path: str + + formatter: str + pattern: Pattern[str] + + directory: Path + prefix: str + routes: Mapping[str, "AbstractRoute"] + + app: "Application" + + domain: str + + rule: "AbstractRuleMatching" + + http_exception: HTTPException + + +class 
AbstractResource(Sized, Iterable["AbstractRoute"]): + def __init__(self, *, name: Optional[str] = None) -> None: + self._name = name + + @property + def name(self) -> Optional[str]: + return self._name + + @property + @abc.abstractmethod + def canonical(self) -> str: + """Exposes the resource's canonical path. + + For example '/foo/bar/{name}' + + """ + + @abc.abstractmethod # pragma: no branch + def url_for(self, **kwargs: str) -> URL: + """Construct url for resource with additional params.""" + + @abc.abstractmethod # pragma: no branch + async def resolve(self, request: Request) -> _Resolve: + """Resolve resource. + + Return (UrlMappingMatchInfo, allowed_methods) pair. + """ + + @abc.abstractmethod + def add_prefix(self, prefix: str) -> None: + """Add a prefix to processed URLs. + + Required for subapplications support. + """ + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + def freeze(self) -> None: + pass + + @abc.abstractmethod + def raw_match(self, path: str) -> bool: + """Perform a raw match against path""" + + +class AbstractRoute(abc.ABC): + def __init__( + self, + method: str, + handler: Union[Handler, Type[AbstractView]], + *, + expect_handler: Optional[_ExpectHandler] = None, + resource: Optional[AbstractResource] = None, + ) -> None: + + if expect_handler is None: + expect_handler = _default_expect_handler + + assert asyncio.iscoroutinefunction( + expect_handler + ), f"Coroutine is expected, got {expect_handler!r}" + + method = method.upper() + if not HTTP_METHOD_RE.match(method): + raise ValueError(f"{method} is not allowed HTTP method") + + assert callable(handler), handler + if asyncio.iscoroutinefunction(handler): + pass + elif inspect.isgeneratorfunction(handler): + warnings.warn( + "Bare generators are deprecated, use @coroutine wrapper", + DeprecationWarning, + ) + elif isinstance(handler, type) and issubclass(handler, AbstractView): + pass + else: + warnings.warn( + "Bare functions are deprecated, use async ones", DeprecationWarning + ) + + @wraps(handler) + async def handler_wrapper(request: Request) -> StreamResponse: + result = old_handler(request) # type: ignore[call-arg] + if asyncio.iscoroutine(result): + result = await result + assert isinstance(result, StreamResponse) + return result + + old_handler = handler + handler = handler_wrapper + + self._method = method + self._handler = handler + self._expect_handler = expect_handler + self._resource = resource + + @property + def method(self) -> str: + return self._method + + @property + def handler(self) -> Handler: + return self._handler + + @property + @abc.abstractmethod + def name(self) -> Optional[str]: + """Optional route's name, always equals to resource's name.""" + + @property + def resource(self) -> Optional[AbstractResource]: + return self._resource + + @abc.abstractmethod + def get_info(self) -> _InfoDict: + """Return a dict with additional info useful for introspection""" + + @abc.abstractmethod # pragma: no branch + def url_for(self, *args: str, **kwargs: str) -> URL: + """Construct url for route with additional params.""" + + async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]: + return await self._expect_handler(request) + + +class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo): + + __slots__ = ("_route", "_apps", "_current_app", "_frozen") + + def __init__(self, match_dict: Dict[str, str], route: AbstractRoute) -> None: + super().__init__(match_dict) + self._route = route + self._apps: 
List[Application] = []
+        self._current_app: Optional[Application] = None
+        self._frozen = False
+
+    @property
+    def handler(self) -> Handler:
+        return self._route.handler
+
+    @property
+    def route(self) -> AbstractRoute:
+        return self._route
+
+    @property
+    def expect_handler(self) -> _ExpectHandler:
+        return self._route.handle_expect_header
+
+    @property
+    def http_exception(self) -> Optional[HTTPException]:
+        return None
+
+    def get_info(self) -> _InfoDict:  # type: ignore[override]
+        return self._route.get_info()
+
+    @property
+    def apps(self) -> Tuple["Application", ...]:
+        return tuple(self._apps)
+
+    def add_app(self, app: "Application") -> None:
+        if self._frozen:
+            raise RuntimeError("Cannot change apps stack after .freeze() call")
+        if self._current_app is None:
+            self._current_app = app
+        self._apps.insert(0, app)
+
+    @property
+    def current_app(self) -> "Application":
+        app = self._current_app
+        assert app is not None
+        return app
+
+    @current_app.setter
+    def current_app(self, app: "Application") -> None:
+        if DEBUG:  # pragma: no cover
+            if app not in self._apps:
+                raise RuntimeError(
+                    "Expected one of the following apps {!r}, got {!r}".format(
+                        self._apps, app
+                    )
+                )
+        self._current_app = app
+
+    def freeze(self) -> None:
+        self._frozen = True
+
+    def __repr__(self) -> str:
+        return f"<MatchInfo {super().__repr__()}: {self._route}>"
+
+
+class MatchInfoError(UrlMappingMatchInfo):
+
+    __slots__ = ("_exception",)
+
+    def __init__(self, http_exception: HTTPException) -> None:
+        self._exception = http_exception
+        super().__init__({}, SystemRoute(self._exception))
+
+    @property
+    def http_exception(self) -> HTTPException:
+        return self._exception
+
+    def __repr__(self) -> str:
+        return "<MatchInfoError {}: {}>".format(
+            self._exception.status, self._exception.reason
+        )
+
+
+async def _default_expect_handler(request: Request) -> None:
+    """Default handler for Expect header.
+
+    Just send "100 Continue" to client.
+    raise HTTPExpectationFailed if value of header is not "100-continue"
+    """
+    expect = request.headers.get(hdrs.EXPECT, "")
+    if request.version == HttpVersion11:
+        if expect.lower() == "100-continue":
+            await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+            # Reset output_size as we haven't started the main body yet.
+            request.writer.output_size = 0
+        else:
+            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
+
+
+class Resource(AbstractResource):
+    def __init__(self, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        self._routes: Dict[str, ResourceRoute] = {}
+        self._any_route: Optional[ResourceRoute] = None
+        self._allowed_methods: Set[str] = set()
+
+    def add_route(
+        self,
+        method: str,
+        handler: Union[Type[AbstractView], Handler],
+        *,
+        expect_handler: Optional[_ExpectHandler] = None,
+    ) -> "ResourceRoute":
+        if route := self._routes.get(method, self._any_route):
+            raise RuntimeError(
+                "Added route will never be executed, "
+                f"method {route.method} is already "
+                "registered"
+            )
+
+        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
+        self.register_route(route_obj)
+        return route_obj
+
+    def register_route(self, route: "ResourceRoute") -> None:
+        assert isinstance(
+            route, ResourceRoute
+        ), f"Instance of Route class is required, got {route!r}"
+        if route.method == hdrs.METH_ANY:
+            self._any_route = route
+        self._allowed_methods.add(route.method)
+        self._routes[route.method] = route
+
+    async def resolve(self, request: Request) -> _Resolve:
+        if (match_dict := self._match(request.rel_url.path_safe)) is None:
+            return None, set()
+        if route := self._routes.get(request.method, self._any_route):
+            return UrlMappingMatchInfo(match_dict, route), self._allowed_methods
+        return None, self._allowed_methods
+
+    @abc.abstractmethod
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        pass  # pragma: no cover
+
+    def __len__(self) -> int:
+        return len(self._routes)
+
+    def __iter__(self) -> Iterator["ResourceRoute"]:
+        return iter(self._routes.values())
+
+    # TODO: implement all abstract methods
+
+
+class PlainResource(Resource):
+    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        assert not path or path.startswith("/")
+        self._path = path
+
+    @property
+    def canonical(self) -> str:
+        return self._path
+
+    def freeze(self) -> None:
+        if not self._path:
+            self._path = "/"
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._path = prefix + self._path
+
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        # string comparison is about 10 times faster than regexp matching
+        if self._path == path:
+            return {}
+        return None
+
+    def raw_match(self, path: str) -> bool:
+        return self._path == path
+
+    def get_info(self) -> _InfoDict:
+        return {"path": self._path}
+
+    def url_for(self) -> URL:  # type: ignore[override]
+        return URL.build(path=self._path, encoded=True)
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return f"<PlainResource {name} {self._path}>"
+
+
+class DynamicResource(Resource):
+
+    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
+    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
+    GOOD = r"[^{}/]+"
+
+    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+        super().__init__(name=name)
+        self._orig_path = path
+        pattern = ""
+        formatter = ""
+        for part in ROUTE_RE.split(path):
+            match = self.DYN.fullmatch(part)
+            if match:
+                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
+                formatter += "{" + match.group("var") + "}"
+                continue
+
+            match = self.DYN_WITH_RE.fullmatch(part)
+            if match:
+                pattern += "(?P<{var}>{re})".format(**match.groupdict())
+                formatter += "{" + match.group("var") + "}"
+                continue
+
+            if "{" in part or "}" in part:
+                raise ValueError(f"Invalid path '{path}'['{part}']")
+
+            part = _requote_path(part)
+            formatter += part
+            pattern += re.escape(part)
+
+        try:
+            compiled = re.compile(pattern)
+        except re.error as exc:
+            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
+        assert compiled.pattern.startswith(PATH_SEP)
+        assert formatter.startswith("/")
+        self._pattern = compiled
+        self._formatter = formatter
+
+    @property
+    def canonical(self) -> str:
+        return self._formatter
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
+        self._formatter = prefix + self._formatter
+
+    def _match(self, path: str) -> Optional[Dict[str, str]]:
+        match = self._pattern.fullmatch(path)
+        if match is None:
+            return None
+        return {
+            key: _unquote_path_safe(value) for key, value in match.groupdict().items()
+        }
+
+    def raw_match(self, path: str) -> bool:
+        return self._orig_path == path
+
+    def get_info(self) -> _InfoDict:
+        return {"formatter": self._formatter, "pattern": self._pattern}
+
+    def url_for(self, **parts: str) -> URL:
+        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
+        return URL.build(path=url, encoded=True)
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "' " if self.name is not None else ""
+        return "<DynamicResource {name} {formatter}>".format(
+            name=name, formatter=self._formatter
+        )
+
+
+class PrefixResource(AbstractResource):
+    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
+        assert not prefix or prefix.startswith("/"), prefix
+        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
+        super().__init__(name=name)
+        self._prefix = _requote_path(prefix)
+        self._prefix2 = self._prefix + "/"
+
+    @property
+    def canonical(self) -> str:
+        return self._prefix
+
+    def add_prefix(self, prefix: str) -> None:
+        assert prefix.startswith("/")
+        assert not prefix.endswith("/")
+        assert len(prefix) > 1
+        self._prefix = prefix + self._prefix
+        self._prefix2 = self._prefix + "/"
+
+    def raw_match(self, prefix: str) -> bool:
+        return False
+
+    # TODO: impl missing abstract methods
+
+
+class StaticResource(PrefixResource):
+    VERSION_KEY = "v"
+
+    def __init__(
+        self,
+        prefix: str,
+        directory: PathLike,
+        *,
+        name: Optional[str] = None,
+        expect_handler: Optional[_ExpectHandler] = None,
+        chunk_size: int = 256 * 1024,
+        show_index: bool = False,
+        follow_symlinks: bool = False,
+        append_version: bool = False,
+    ) -> None:
+        super().__init__(prefix, name=name)
+        try:
+            directory = Path(directory).expanduser().resolve(strict=True)
+        except FileNotFoundError as error:
+            raise ValueError(f"'{directory}' does not exist") from error
+        if not directory.is_dir():
+            raise ValueError(f"'{directory}' is not a directory")
+        self._directory = directory
+        self._show_index = show_index
+        self._chunk_size = chunk_size
+        self._follow_symlinks = follow_symlinks
+        self._expect_handler = expect_handler
+        self._append_version = append_version
+
+        self._routes = {
+            "GET": ResourceRoute(
+                "GET", self._handle, self, expect_handler=expect_handler
+            ),
+            "HEAD": ResourceRoute(
+                "HEAD", self._handle, self, expect_handler=expect_handler
+            ),
+        }
+        self._allowed_methods = set(self._routes)
+
+    def url_for(  # type: ignore[override]
+        self,
+        *,
+        filename: PathLike,
+        append_version: Optional[bool] = None,
+    ) -> URL:
+        if append_version is None:
+            append_version = self._append_version
+        filename = 
str(filename).lstrip("/") + + url = URL.build(path=self._prefix, encoded=True) + # filename is not encoded + if YARL_VERSION < (1, 6): + url = url / filename.replace("%", "%25") + else: + url = url / filename + + if append_version: + unresolved_path = self._directory.joinpath(filename) + try: + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + filepath = normalized_path.resolve() + else: + filepath = unresolved_path.resolve() + filepath.relative_to(self._directory) + except (ValueError, FileNotFoundError): + # ValueError for case when path point to symlink + # with follow_symlinks is False + return url # relatively safe + if filepath.is_file(): + # TODO cache file content + # with file watcher for cache invalidation + with filepath.open("rb") as f: + file_bytes = f.read() + h = self._get_file_hash(file_bytes) + url = url.with_query({self.VERSION_KEY: h}) + return url + return url + + @staticmethod + def _get_file_hash(byte_array: bytes) -> str: + m = hashlib.sha256() # todo sha256 can be configurable param + m.update(byte_array) + b64 = base64.urlsafe_b64encode(m.digest()) + return b64.decode("ascii") + + def get_info(self) -> _InfoDict: + return { + "directory": self._directory, + "prefix": self._prefix, + "routes": self._routes, + } + + def set_options_route(self, handler: Handler) -> None: + if "OPTIONS" in self._routes: + raise RuntimeError("OPTIONS route was set already") + self._routes["OPTIONS"] = ResourceRoute( + "OPTIONS", handler, self, expect_handler=self._expect_handler + ) + self._allowed_methods.add("OPTIONS") + + async def resolve(self, request: Request) -> _Resolve: + path = request.rel_url.path_safe + method = request.method + if not path.startswith(self._prefix2) and path != self._prefix: + return None, set() + + allowed_methods = self._allowed_methods + if method not in allowed_methods: + return None, allowed_methods + + match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])} + return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + return iter(self._routes.values()) + + async def _handle(self, request: Request) -> StreamResponse: + rel_url = request.match_info["filename"] + filename = Path(rel_url) + if filename.anchor: + # rel_url is an absolute name like + # /static/\\machine_name\c$ or /static/D:\path + # where the static dir is totally different + raise HTTPForbidden() + + unresolved_path = self._directory.joinpath(filename) + loop = asyncio.get_running_loop() + return await loop.run_in_executor( + None, self._resolve_path_to_response, unresolved_path + ) + + def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse: + """Take the unresolved path and query the file system to form a response.""" + # Check for access outside the root directory. For follow symlinks, URI + # cannot traverse out, but symlinks can. Otherwise, no access outside + # root is permitted. + try: + if self._follow_symlinks: + normalized_path = Path(os.path.normpath(unresolved_path)) + normalized_path.relative_to(self._directory) + file_path = normalized_path.resolve() + else: + file_path = unresolved_path.resolve() + file_path.relative_to(self._directory) + except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error: + # ValueError is raised for the relative check. Circular symlinks + # raise here on resolving for python < 3.13. 
+            raise HTTPNotFound() from error
+
+        # if path is a directory, return the contents if permitted. Note the
+        # directory check will raise if a segment is not readable.
+        try:
+            if file_path.is_dir():
+                if self._show_index:
+                    return Response(
+                        text=self._directory_as_html(file_path),
+                        content_type="text/html",
+                    )
+                else:
+                    raise HTTPForbidden()
+        except PermissionError as error:
+            raise HTTPForbidden() from error
+
+        # Return the file response, which handles all other checks.
+        return FileResponse(file_path, chunk_size=self._chunk_size)
+
+    def _directory_as_html(self, dir_path: Path) -> str:
+        """returns directory's index as html."""
+        assert dir_path.is_dir()
+
+        relative_path_to_dir = dir_path.relative_to(self._directory).as_posix()
+        index_of = f"Index of /{html_escape(relative_path_to_dir)}"
+        h1 = f"<h1>{index_of}</h1>"
+
+        index_list = []
+        dir_index = dir_path.iterdir()
+        for _file in sorted(dir_index):
+            # show file url as relative to static path
+            rel_path = _file.relative_to(self._directory).as_posix()
+            quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")
+
+            # if file is a directory, add '/' to the end of the name
+            if _file.is_dir():
+                file_name = f"{_file.name}/"
+            else:
+                file_name = _file.name
+
+            index_list.append(
+                f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'
+            )
+        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
+        body = f"<body>\n{h1}\n{ul}\n</body>"
+
+        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
+        html = f"<html>\n{head_str}\n{body}\n</html>"
+
+        return html
+
+    def __repr__(self) -> str:
+        name = "'" + self.name + "'" if self.name is not None else ""
+        return "<StaticResource {name} {path} -> {directory!r}>".format(
+            name=name, path=self._prefix, directory=self._directory
+        )
+
+
+class PrefixedSubAppResource(PrefixResource):
+    def __init__(self, prefix: str, app: "Application") -> None:
+        super().__init__(prefix)
+        self._app = app
+        self._add_prefix_to_resources(prefix)
+
+    def add_prefix(self, prefix: str) -> None:
+        super().add_prefix(prefix)
+        self._add_prefix_to_resources(prefix)
+
+    def _add_prefix_to_resources(self, prefix: str) -> None:
+        router = self._app.router
+        for resource in router.resources():
+            # Since the canonical path of a resource is about
+            # to change, we need to unindex it and then reindex
+            router.unindex_resource(resource)
+            resource.add_prefix(prefix)
+            router.index_resource(resource)
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not supported by sub-application root")
+
+    def get_info(self) -> _InfoDict:
+        return {"app": self._app, "prefix": self._prefix}
+
+    async def resolve(self, request: Request) -> _Resolve:
+        match_info = await self._app.router.resolve(request)
+        match_info.add_app(self._app)
+        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+            methods = match_info.http_exception.allowed_methods
+        else:
+            methods = set()
+        return match_info, methods
+
+    def __len__(self) -> int:
+        return len(self._app.router.routes())
+
+    def __iter__(self) -> Iterator[AbstractRoute]:
+        return iter(self._app.router.routes())
+
+    def __repr__(self) -> str:
+        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
+            prefix=self._prefix, app=self._app
+        )
+
+
+class AbstractRuleMatching(abc.ABC):
+    @abc.abstractmethod  # pragma: no branch
+    async def match(self, request: Request) -> bool:
+        """Return bool if the request satisfies the criteria"""
+
+    @abc.abstractmethod  # pragma: no branch
+    def get_info(self) -> _InfoDict:
+        """Return a dict with additional info useful for introspection"""
+
+    @property
+    @abc.abstractmethod  # pragma: no branch
+    def canonical(self) -> str:
+        """Return a str"""
+
+
+class Domain(AbstractRuleMatching):
+    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")
+
+    def __init__(self, domain: str) -> None:
+        super().__init__()
+        self._domain = self.validation(domain)
+
+    @property
+    def canonical(self) -> str:
+        return self._domain
+
+    def validation(self, domain: str) -> str:
+        if not isinstance(domain, str):
+            raise TypeError("Domain must be str")
+        domain = domain.rstrip(".").lower()
+        if not domain:
+            raise ValueError("Domain cannot be empty")
+        elif "://" in domain:
+            raise ValueError("Scheme not supported")
+        url = URL("http://" + domain)
+        assert url.raw_host is not None
+        if not all(self.re_part.fullmatch(x) for x in url.raw_host.split(".")):
+            raise ValueError("Domain not valid")
+        if url.port == 80:
+            return url.raw_host
+        return f"{url.raw_host}:{url.port}"
+
+    async def match(self, request: Request) -> bool:
+        host = request.headers.get(hdrs.HOST)
+        if not host:
+            return False
+        return self.match_domain(host)
+
+    def match_domain(self, host: str) -> bool:
+        return host.lower() == self._domain
+
+    def get_info(self) -> _InfoDict:
+        return {"domain": self._domain}
+
+
+class MaskDomain(Domain):
+    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")
+
+    def __init__(self, domain: str) -> None:
+        super().__init__(domain)
+        mask = self._domain.replace(".", r"\.").replace("*", ".*")
+        self._mask = re.compile(mask)
+
+    @property
+    def canonical(self) -> str:
+        return self._mask.pattern
+
+    def match_domain(self, host: str) -> bool:
+        return self._mask.fullmatch(host) is not None
+
+
+class MatchedSubAppResource(PrefixedSubAppResource):
+    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
+        AbstractResource.__init__(self)
+        self._prefix = ""
+        self._app = app
+        self._rule = rule
+
+    @property
+    def canonical(self) -> str:
+        return self._rule.canonical
+
+    def get_info(self) -> _InfoDict:
+        return {"app": self._app, "rule": self._rule}
+
+    async def resolve(self, request: Request) -> _Resolve:
+        if not await self._rule.match(request):
+            return None, set()
+        match_info = await self._app.router.resolve(request)
+        match_info.add_app(self._app)
+        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
+            methods = match_info.http_exception.allowed_methods
+        else:
+            methods = set()
+        return match_info, methods
+
+    def __repr__(self) -> str:
+        return f"<MatchedSubAppResource -> {self._app!r}>"
+
+
+class ResourceRoute(AbstractRoute):
+    """A route with resource"""
+
+    def __init__(
+        self,
+        method: str,
+        handler: Union[Handler, Type[AbstractView]],
+        resource: AbstractResource,
+        *,
+        expect_handler: Optional[_ExpectHandler] = None,
+    ) -> None:
+        super().__init__(
+            method, handler, expect_handler=expect_handler, resource=resource
+        )
+
+    def __repr__(self) -> str:
+        return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(
+            method=self.method, resource=self._resource, handler=self.handler
+        )
+
+    @property
+    def name(self) -> Optional[str]:
+        if self._resource is None:
+            return None
+        return self._resource.name
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        """Construct url for route with additional params."""
+        assert self._resource is not None
+        return self._resource.url_for(*args, **kwargs)
+
+    def get_info(self) -> _InfoDict:
+        assert self._resource is not None
+        return self._resource.get_info()
+
+
+class SystemRoute(AbstractRoute):
+    def __init__(self, http_exception: HTTPException) -> None:
+        super().__init__(hdrs.METH_ANY, self._handle)
+        self._http_exception = http_exception
+
+    def url_for(self, *args: str, **kwargs: str) -> URL:
+        raise RuntimeError(".url_for() is not allowed for SystemRoute")
+
+    @property
+    def name(self) -> Optional[str]:
+        return None
+
+    def get_info(self) -> _InfoDict:
+        return {"http_exception": self._http_exception}
+
+    async def _handle(self, request: Request) -> StreamResponse:
+        raise self._http_exception
+
+    @property
+    def status(self) -> int:
+        return self._http_exception.status
+
+    @property
+    def reason(self) -> str:
+        return self._http_exception.reason
+
+    def __repr__(self) -> str:
+        return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
+
+
+class View(AbstractView):
+    async def _iter(self) -> StreamResponse:
+        if self.request.method not in hdrs.METH_ALL:
+            self._raise_allowed_methods()
+        method: Optional[Callable[[], Awaitable[StreamResponse]]]
+        method = getattr(self, self.request.method.lower(), None)
+        if method is None:
+            self._raise_allowed_methods()
+        ret = await method()
+        assert isinstance(ret, StreamResponse)
+        return ret
+
+    def __await__(self) -> Generator[Any, None, StreamResponse]:
+        return self._iter().__await__()
+
+    def _raise_allowed_methods(self) -> NoReturn:
+        allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
+        raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
+
+
+class ResourcesView(Sized, 
Iterable[AbstractResource], Container[AbstractResource]): + def __init__(self, resources: List[AbstractResource]) -> None: + self._resources = resources + + def __len__(self) -> int: + return len(self._resources) + + def __iter__(self) -> Iterator[AbstractResource]: + yield from self._resources + + def __contains__(self, resource: object) -> bool: + return resource in self._resources + + +class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]): + def __init__(self, resources: List[AbstractResource]): + self._routes: List[AbstractRoute] = [] + for resource in resources: + for route in resource: + self._routes.append(route) + + def __len__(self) -> int: + return len(self._routes) + + def __iter__(self) -> Iterator[AbstractRoute]: + yield from self._routes + + def __contains__(self, route: object) -> bool: + return route in self._routes + + +class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]): + + NAME_SPLIT_RE = re.compile(r"[.:-]") + + def __init__(self) -> None: + super().__init__() + self._resources: List[AbstractResource] = [] + self._named_resources: Dict[str, AbstractResource] = {} + self._resource_index: dict[str, list[AbstractResource]] = {} + self._matched_sub_app_resources: List[MatchedSubAppResource] = [] + + async def resolve(self, request: Request) -> UrlMappingMatchInfo: + resource_index = self._resource_index + allowed_methods: Set[str] = set() + + # Walk the url parts looking for candidates. We walk the url backwards + # to ensure the most explicit match is found first. If there are multiple + # candidates for a given url part because there are multiple resources + # registered for the same canonical path, we resolve them in a linear + # fashion to ensure registration order is respected. + url_part = request.rel_url.path_safe + while url_part: + for candidate in resource_index.get(url_part, ()): + match_dict, allowed = await candidate.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + if url_part == "/": + break + url_part = url_part.rpartition("/")[0] or "/" + + # + # We didn't find any candidates, so we'll try the matched sub-app + # resources which we have to walk in a linear fashion because they + # have regex/wildcard match rules and we cannot index them. 
+ # + # For most cases we do not expect there to be many of these since + # currently they are only added by `add_domain` + # + for resource in self._matched_sub_app_resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + + if allowed_methods: + return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) + + return MatchInfoError(HTTPNotFound()) + + def __iter__(self) -> Iterator[str]: + return iter(self._named_resources) + + def __len__(self) -> int: + return len(self._named_resources) + + def __contains__(self, resource: object) -> bool: + return resource in self._named_resources + + def __getitem__(self, name: str) -> AbstractResource: + return self._named_resources[name] + + def resources(self) -> ResourcesView: + return ResourcesView(self._resources) + + def routes(self) -> RoutesView: + return RoutesView(self._resources) + + def named_resources(self) -> Mapping[str, AbstractResource]: + return MappingProxyType(self._named_resources) + + def register_resource(self, resource: AbstractResource) -> None: + assert isinstance( + resource, AbstractResource + ), f"Instance of AbstractResource class is required, got {resource!r}" + if self.frozen: + raise RuntimeError("Cannot register a resource into frozen router.") + + name = resource.name + + if name is not None: + parts = self.NAME_SPLIT_RE.split(name) + for part in parts: + if keyword.iskeyword(part): + raise ValueError( + f"Incorrect route name {name!r}, " + "python keywords cannot be used " + "for route name" + ) + if not part.isidentifier(): + raise ValueError( + "Incorrect route name {!r}, " + "the name should be a sequence of " + "python identifiers separated " + "by dash, dot or column".format(name) + ) + if name in self._named_resources: + raise ValueError( + "Duplicate {!r}, " + "already handled by {!r}".format(name, self._named_resources[name]) + ) + self._named_resources[name] = resource + self._resources.append(resource) + + if isinstance(resource, MatchedSubAppResource): + # We cannot index match sub-app resources because they have match rules + self._matched_sub_app_resources.append(resource) + else: + self.index_resource(resource) + + def _get_resource_index_key(self, resource: AbstractResource) -> str: + """Return a key to index the resource in the resource index.""" + if "{" in (index_key := resource.canonical): + # strip at the first { to allow for variables, and than + # rpartition at / to allow for variable parts in the path + # For example if the canonical path is `/core/locations{tail:.*}` + # the index key will be `/core` since index is based on the + # url parts split by `/` + index_key = index_key.partition("{")[0].rpartition("/")[0] + return index_key.rstrip("/") or "/" + + def index_resource(self, resource: AbstractResource) -> None: + """Add a resource to the resource index.""" + resource_key = self._get_resource_index_key(resource) + # There may be multiple resources for a canonical path + # so we keep them in a list to ensure that registration + # order is respected. 
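+        # For example (hypothetical handlers), both of the following resources
+        # are indexed under the key "/users", and resolve() then tries them in
+        # registration order:
+        #
+        #     app.router.add_get("/users", list_users)      # PlainResource
+        #     app.router.add_get("/users/{id}", show_user)  # DynamicResource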
+ self._resource_index.setdefault(resource_key, []).append(resource) + + def unindex_resource(self, resource: AbstractResource) -> None: + """Remove a resource from the resource index.""" + resource_key = self._get_resource_index_key(resource) + self._resource_index[resource_key].remove(resource) + + def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: + if path and not path.startswith("/"): + raise ValueError("path should be started with / or be empty") + # Reuse last added resource if path and name are the same + if self._resources: + resource = self._resources[-1] + if resource.name == name and resource.raw_match(path): + return cast(Resource, resource) + if not ("{" in path or "}" in path or ROUTE_RE.search(path)): + resource = PlainResource(path, name=name) + self.register_resource(resource) + return resource + resource = DynamicResource(path, name=name) + self.register_resource(resource) + return resource + + def add_route( + self, + method: str, + path: str, + handler: Union[Handler, Type[AbstractView]], + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + ) -> AbstractRoute: + resource = self.add_resource(path, name=name) + return resource.add_route(method, handler, expect_handler=expect_handler) + + def add_static( + self, + prefix: str, + path: PathLike, + *, + name: Optional[str] = None, + expect_handler: Optional[_ExpectHandler] = None, + chunk_size: int = 256 * 1024, + show_index: bool = False, + follow_symlinks: bool = False, + append_version: bool = False, + ) -> AbstractResource: + """Add static files view. + + prefix - url prefix + path - folder with files + + """ + assert prefix.startswith("/") + if prefix.endswith("/"): + prefix = prefix[:-1] + resource = StaticResource( + prefix, + path, + name=name, + expect_handler=expect_handler, + chunk_size=chunk_size, + show_index=show_index, + follow_symlinks=follow_symlinks, + append_version=append_version, + ) + self.register_resource(resource) + return resource + + def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method HEAD.""" + return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs) + + def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method OPTIONS.""" + return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs) + + def add_get( + self, + path: str, + handler: Handler, + *, + name: Optional[str] = None, + allow_head: bool = True, + **kwargs: Any, + ) -> AbstractRoute: + """Shortcut for add_route with method GET. + + If allow_head is true, another + route is added allowing head requests to the same endpoint. 
+ """ + resource = self.add_resource(path, name=name) + if allow_head: + resource.add_route(hdrs.METH_HEAD, handler, **kwargs) + return resource.add_route(hdrs.METH_GET, handler, **kwargs) + + def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method POST.""" + return self.add_route(hdrs.METH_POST, path, handler, **kwargs) + + def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PUT.""" + return self.add_route(hdrs.METH_PUT, path, handler, **kwargs) + + def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method PATCH.""" + return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs) + + def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute: + """Shortcut for add_route with method DELETE.""" + return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) + + def add_view( + self, path: str, handler: Type[AbstractView], **kwargs: Any + ) -> AbstractRoute: + """Shortcut for add_route with ANY methods for a class-based view.""" + return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) + + def freeze(self) -> None: + super().freeze() + for resource in self._resources: + resource.freeze() + + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + """Append routes to route table. + + Parameter should be a sequence of RouteDef objects. + + Returns a list of registered AbstractRoute instances. + """ + registered_routes = [] + for route_def in routes: + registered_routes.extend(route_def.register(self)) + return registered_routes + + +def _quote_path(value: str) -> str: + if YARL_VERSION < (1, 6): + value = value.replace("%", "%25") + return URL.build(path=value, encoded=False).raw_path + + +def _unquote_path_safe(value: str) -> str: + if "%" not in value: + return value + return value.replace("%2F", "/").replace("%25", "%") + + +def _requote_path(value: str) -> str: + # Quote non-ascii characters and other characters which must be quoted, + # but preserve existing %-sequences. 
+    result = _quote_path(value)
+    if "%" in value:
+        result = result.replace("%25", "%")
+    return result
diff --git a/deepseek/lib/python3.10/site-packages/aiohttp/worker.py b/deepseek/lib/python3.10/site-packages/aiohttp/worker.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b30769733663fb45f17362291dcc3a5753abbc1
--- /dev/null
+++ b/deepseek/lib/python3.10/site-packages/aiohttp/worker.py
@@ -0,0 +1,247 @@
+"""Async gunicorn worker for aiohttp.web"""
+
+import asyncio
+import os
+import re
+import signal
+import sys
+from types import FrameType
+from typing import Any, Awaitable, Callable, Optional, Union  # noqa
+
+from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
+from gunicorn.workers import base
+
+from aiohttp import web
+
+from .helpers import set_result
+from .web_app import Application
+from .web_log import AccessLogger
+
+try:
+    import ssl
+
+    SSLContext = ssl.SSLContext
+except ImportError:  # pragma: no cover
+    ssl = None  # type: ignore[assignment]
+    SSLContext = object  # type: ignore[misc,assignment]
+
+
+__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
+
+
+class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
+
+    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
+    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
+
+    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
+        super().__init__(*args, **kw)
+
+        self._task: Optional[asyncio.Task[None]] = None
+        self.exit_code = 0
+        self._notify_waiter: Optional[asyncio.Future[bool]] = None
+
+    def init_process(self) -> None:
+        # create a new event loop after fork
+        asyncio.get_event_loop().close()
+
+        self.loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self.loop)
+
+        super().init_process()
+
+    def run(self) -> None:
+        self._task = self.loop.create_task(self._run())
+
+        try:  # ignore all finalization problems
+            self.loop.run_until_complete(self._task)
+        except Exception:
+            self.log.exception("Exception in gunicorn worker")
+        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
+        self.loop.close()
+
+        sys.exit(self.exit_code)
+
+    async def _run(self) -> None:
+        runner = None
+        if isinstance(self.wsgi, Application):
+            app = self.wsgi
+        elif asyncio.iscoroutinefunction(self.wsgi):
+            wsgi = await self.wsgi()
+            if isinstance(wsgi, web.AppRunner):
+                runner = wsgi
+                app = runner.app
+            else:
+                app = wsgi
+        else:
+            raise RuntimeError(
+                "wsgi app should be either an Application or "
+                "an async function returning an Application, got {}".format(self.wsgi)
+            )
+
+        if runner is None:
+            access_log = self.log.access_log if self.cfg.accesslog else None
+            runner = web.AppRunner(
+                app,
+                logger=self.log,
+                keepalive_timeout=self.cfg.keepalive,
+                access_log=access_log,
+                access_log_format=self._get_valid_log_format(
+                    self.cfg.access_log_format
+                ),
+                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
+            )
+        await runner.setup()
+
+        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
+
+        assert runner is not None
+        server = runner.server
+        assert server is not None
+        for sock in self.sockets:
+            site = web.SockSite(
+                runner,
+                sock,
+                ssl_context=ctx,
+            )
+            await site.start()
+
+        # If our parent changed then we shut down.
+        pid = os.getpid()
+        try:
+            while self.alive:  # type: ignore[has-type]
+                self.notify()
+
+                cnt = server.requests_count
+                if self.max_requests and cnt > self.max_requests:
+                    self.alive = False
+                    self.log.info("Max requests, shutting down: %s", self)
+
+                elif pid == os.getpid() and self.ppid != os.getppid():
+                    self.alive = False
+                    self.log.info("Parent changed, shutting down: %s", self)
+                else:
+                    await self._wait_next_notify()
+        except BaseException:
+            pass
+
+        await runner.cleanup()
+
+    def _wait_next_notify(self) -> "asyncio.Future[bool]":
+        self._notify_waiter_done()
+
+        loop = self.loop
+        assert loop is not None
+        self._notify_waiter = waiter = loop.create_future()
+        self.loop.call_later(1.0, self._notify_waiter_done, waiter)
+
+        return waiter
+
+    def _notify_waiter_done(
+        self, waiter: Optional["asyncio.Future[bool]"] = None
+    ) -> None:
+        if waiter is None:
+            waiter = self._notify_waiter
+        if waiter is not None:
+            set_result(waiter, True)
+
+        if waiter is self._notify_waiter:
+            self._notify_waiter = None
+
+    def init_signals(self) -> None:
+        # Set up signals through the event loop API.
+
+        self.loop.add_signal_handler(
+            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGINT, self.handle_quit, signal.SIGINT, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
+        )
+
+        self.loop.add_signal_handler(
+            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
+        )
+
+        # Don't let SIGTERM and SIGUSR1 disturb active requests
+        # by interrupting system calls
+        signal.siginterrupt(signal.SIGTERM, False)
+        signal.siginterrupt(signal.SIGUSR1, False)
+        # Reset signals so Gunicorn doesn't swallow subprocess return codes
+        # See: https://github.com/aio-libs/aiohttp/issues/6130
+
+    def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
+        self.alive = False
+
+        # worker_int callback
+        self.cfg.worker_int(self)
+
+        # wake up the closing process
+        self._notify_waiter_done()
+
+    def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
+        self.alive = False
+        self.exit_code = 1
+        self.cfg.worker_abort(self)
+        sys.exit(1)
+
+    @staticmethod
+    def _create_ssl_context(cfg: Any) -> "SSLContext":
+        """Create an SSLContext instance for use in asyncio.create_server.
+
+        See ssl.SSLSocket.__init__ for more details.
+        """
+        if ssl is None:  # pragma: no cover
+            raise RuntimeError("SSL is not supported.")
+
+        ctx = ssl.SSLContext(cfg.ssl_version)
+        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
+        ctx.verify_mode = cfg.cert_reqs
+        if cfg.ca_certs:
+            ctx.load_verify_locations(cfg.ca_certs)
+        if cfg.ciphers:
+            ctx.set_ciphers(cfg.ciphers)
+        return ctx
+
+    def _get_valid_log_format(self, source_format: str) -> str:
+        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
+            return self.DEFAULT_AIOHTTP_LOG_FORMAT
+        elif re.search(r"%\([^\)]+\)", source_format):
+            raise ValueError(
+                "Gunicorn's style options in the form of `%(name)s` are not "
+                "supported for the log formatting. Please use aiohttp's "
+                "format specification to configure access log formatting: "
+                "http://docs.aiohttp.org/en/stable/logging.html"
+                "#format-specification"
+            )
+        else:
+            return source_format
+
+
+class GunicornUVLoopWebWorker(GunicornWebWorker):
+    def init_process(self) -> None:
+        import uvloop
+
+        # Close any existing event loop before setting a
+        # new policy.
+        asyncio.get_event_loop().close()
+
+        # Set up the uvloop policy, so that every
+        # asyncio.get_event_loop() call will create an instance
+        # of the uvloop event loop.
+        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+
+        super().init_process()
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_check.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_check.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0e499be27ef360ce3fad4b42ea712e1a35d4a5c3
Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_check.cpython-310.pyc differ
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d63ab1495e6e87da6eb7a8b25112611c2e267edb
Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_dataclasses.cpython-310.pyc differ
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_diff.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_diff.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5d20cd71defb8090bb071876a177d5daff1c5ef3
Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_diff.cpython-310.pyc differ
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_extendpickle.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_extendpickle.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6bfedc9c314b34f73b94157700488374420c3b90
Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_extendpickle.cpython-310.pyc differ
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0a07bfcdb56e73c057bc93c06fa6bf542ca5bb35
Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_functors.cpython-310.pyc differ
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_module.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_module.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7185d67891a93b82d8043597145fdae62169d006
Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_module.cpython-310.pyc differ
diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_nested.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_nested.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3666243ff2c8de2d6ae1b92152e248d5b316c3e1
Binary files /dev/null and
b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_nested.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_properties.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_properties.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c1148b47296f5c466ca6852e221859a7aed7ac7a Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_properties.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_recursive.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_recursive.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6cceeedeac03a6cea3f65b64c3f5723caba07107 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_recursive.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_registered.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_registered.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9bba888437b9e87659f8ff362e660c0a026a18b8 Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_registered.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_selected.cpython-310.pyc b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_selected.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0b231f364db5693c195fe5d42fdb5ea3e559abf Binary files /dev/null and b/deepseek/lib/python3.10/site-packages/dill/tests/__pycache__/test_selected.cpython-310.pyc differ diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/INSTALLER b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/LICENSE b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..aaf210b1d01da693fda15995ec973ed21cd968c3 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/LICENSE @@ -0,0 +1,18 @@ +Copyright © 2017 Erez Shinan + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/METADATA b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..6d5f341b993998b289ea5f9093de3ca1488652c2 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/METADATA @@ -0,0 +1,47 @@ +Metadata-Version: 2.1 +Name: lark +Version: 1.2.2 +Summary: a modern parsing library +Author-email: Erez Shinan +License: MIT +Project-URL: Homepage, https://github.com/lark-parser/lark +Project-URL: Download, https://github.com/lark-parser/lark/tarball/master +Keywords: Earley,LALR,parser,parsing,ast +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: General +Classifier: Topic :: Text Processing :: Linguistic +Classifier: License :: OSI Approved :: MIT License +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Provides-Extra: atomic_cache +Requires-Dist: atomicwrites ; extra == 'atomic_cache' +Provides-Extra: interegular +Requires-Dist: interegular <0.4.0,>=0.3.1 ; extra == 'interegular' +Provides-Extra: nearley +Requires-Dist: js2py ; extra == 'nearley' +Provides-Extra: regex +Requires-Dist: regex ; extra == 'regex' + +Lark is a modern general-purpose parsing library for Python. +With Lark, you can parse any context-free grammar, efficiently, with very little code. +Main Features: +- Builds a parse-tree (AST) automagically, based on the structure of the grammar +- Earley parser +- Can parse all context-free grammars +- Full support for ambiguous grammars +- LALR(1) parser +- Fast and light, competitive with PLY +- Can generate a stand-alone parser +- CYK parser, for highly ambiguous grammars +- EBNF grammar +- Unicode fully supported +- Automatic line & column tracking +- Standard library of terminals (strings, numbers, names, etc.) +- Import grammars from Nearley.js +- Extensive test suite +- And much more! +Since version 1.2, only Python versions 3.8 and up are supported. 
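Reviewer's note on the lark wheel above: the METADATA advertises Earley/LALR parsing of EBNF grammars with automatic tree construction. A minimal sketch of that documented public API (the grammar and the Eval transformer are illustrative assumptions, not shipped files):

    from lark import Lark, Transformer

    # EBNF-style grammar; lark builds the parse tree from the rule structure.
    grammar = r"""
        ?expr: expr "+" term   -> add
             | term
        ?term: term "*" NUMBER -> mul
             | NUMBER
        %import common.NUMBER
        %import common.WS
        %ignore WS
    """

    class Eval(Transformer):
        # Transformer visits tokens by default, so terminal callbacks work too.
        def NUMBER(self, token):
            return float(token)

        def add(self, items):
            left, right = items
            return left + right

        def mul(self, items):
            left, right = items
            return left * right

    parser = Lark(grammar, start="expr", parser="earley")  # Earley handles left recursion
    print(Eval().transform(parser.parse("1 + 2 * 3")))     # 7.0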
diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..c7f35d922906b54d10966dee5c4d98a3adb12da4 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/RECORD @@ -0,0 +1,83 @@ +lark-1.2.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +lark-1.2.2.dist-info/LICENSE,sha256=Lu5g9S1OETV7-J5ysDTQUOKF5H_aE2HlZi-zIu4n13E,1055 +lark-1.2.2.dist-info/METADATA,sha256=S-69HuNJr0ktlvb7J5XE48ghb_6ahYn8ksdW9HcB-d0,1831 +lark-1.2.2.dist-info/RECORD,, +lark-1.2.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +lark-1.2.2.dist-info/WHEEL,sha256=HiCZjzuy6Dw0hdX5R3LCFPDmFS4BWl8H-8W39XfmgX4,91 +lark-1.2.2.dist-info/entry_points.txt,sha256=WXYg_uCUdFlxQDPUhli3HFah37bNNFQfXLdzCqsacGI,61 +lark-1.2.2.dist-info/top_level.txt,sha256=dyS6jg8hCHHkXWvsfcIMO8rjlv_bdzAxiE0lkkzJ5hk,5 +lark/__init__.py,sha256=bc0tK7h7XwHA-Y4vVeJoNIqSMA-MHVTihq8yy795WXo,744 +lark/__pycache__/__init__.cpython-310.pyc,, +lark/__pycache__/ast_utils.cpython-310.pyc,, +lark/__pycache__/common.cpython-310.pyc,, +lark/__pycache__/exceptions.cpython-310.pyc,, +lark/__pycache__/grammar.cpython-310.pyc,, +lark/__pycache__/indenter.cpython-310.pyc,, +lark/__pycache__/lark.cpython-310.pyc,, +lark/__pycache__/lexer.cpython-310.pyc,, +lark/__pycache__/load_grammar.cpython-310.pyc,, +lark/__pycache__/parse_tree_builder.cpython-310.pyc,, +lark/__pycache__/parser_frontends.cpython-310.pyc,, +lark/__pycache__/reconstruct.cpython-310.pyc,, +lark/__pycache__/tree.cpython-310.pyc,, +lark/__pycache__/tree_matcher.cpython-310.pyc,, +lark/__pycache__/tree_templates.cpython-310.pyc,, +lark/__pycache__/utils.cpython-310.pyc,, +lark/__pycache__/visitors.cpython-310.pyc,, +lark/__pyinstaller/__init__.py,sha256=_PpFm44f_mwHlCpvYgv9ZgubLfNDc3PlePVir4sxRfI,182 +lark/__pyinstaller/__pycache__/__init__.cpython-310.pyc,, +lark/__pyinstaller/__pycache__/hook-lark.cpython-310.pyc,, +lark/__pyinstaller/hook-lark.py,sha256=5aFHiZWVHPRdHT8qnb4kW4JSOql5GusHodHR25_q9sU,599 +lark/ast_utils.py,sha256=jwn44ocNQhZGbfcFsEZnwi_gGvPbNgzjQ-0RuEtwDzI,2117 +lark/common.py,sha256=M9-CFAUP3--OkftyyWjke-Kc1-pQMczT1MluHCFwdy4,3008 +lark/exceptions.py,sha256=g76ygMPfSMl6ukKqFAZVpR2EAJTOOdyfJ_ALXc_MCR8,10939 +lark/grammar.py,sha256=DR17QSLSKCRhMOqx2UQh4n-Ywu4CD-wjdQxtuM8OHkY,3665 +lark/grammars/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +lark/grammars/__pycache__/__init__.cpython-310.pyc,, +lark/grammars/common.lark,sha256=FV9xGIPiPqHRM4ULAxP6jApXRTVsSwbOe697I9s7DLs,885 +lark/grammars/lark.lark,sha256=nq1NTZYqm_DPI2mjRIlpd3ZcxPjGhapA4GUzkcfBTQs,1541 +lark/grammars/python.lark,sha256=WMakTkpzCqOd0jUjYONI3LOnSy2KRN9NoL9pFtAZYCI,10641 +lark/grammars/unicode.lark,sha256=d9YCz0XWimdl4F8M5YCptavBcFG9D58Yd4aMwxjYtEI,96 +lark/indenter.py,sha256=L5uNDYUMNrk4ZTWKmW0Tu-H-3GGErLOHygMC32N_twE,4221 +lark/lark.py,sha256=_IHWmTxt43kfd9eYVtwx58zEWWSFAq9_gKH7Oeu5PZs,28184 +lark/lexer.py,sha256=OwgQPCpQ-vUi-2aeZztsydd4DLkEgCbZeucvEPvHFi4,24037 +lark/load_grammar.py,sha256=WYZDxyO6omhA8NKyMjSckfAMwVKuIMF3liiYXE_-kHo,53946 +lark/parse_tree_builder.py,sha256=jT_3gCEkBGZoTXAWSnhMn1kRuJILWB-E7XkUciYNHI4,14412 +lark/parser_frontends.py,sha256=mxMXxux2hkfTfE859wuVp4-Fr1no6YVEUt8toDjEdPQ,10165 +lark/parsers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +lark/parsers/__pycache__/__init__.cpython-310.pyc,, +lark/parsers/__pycache__/cyk.cpython-310.pyc,, 
+lark/parsers/__pycache__/earley.cpython-310.pyc,, +lark/parsers/__pycache__/earley_common.cpython-310.pyc,, +lark/parsers/__pycache__/earley_forest.cpython-310.pyc,, +lark/parsers/__pycache__/grammar_analysis.cpython-310.pyc,, +lark/parsers/__pycache__/lalr_analysis.cpython-310.pyc,, +lark/parsers/__pycache__/lalr_interactive_parser.cpython-310.pyc,, +lark/parsers/__pycache__/lalr_parser.cpython-310.pyc,, +lark/parsers/__pycache__/lalr_parser_state.cpython-310.pyc,, +lark/parsers/__pycache__/xearley.cpython-310.pyc,, +lark/parsers/cyk.py,sha256=c3GLk3kq23Xwb8MqUOjvivwP488KJY6NUWgxqeR5980,12192 +lark/parsers/earley.py,sha256=03sW9vfBkcH4NR72EBt8HkndDKSVSH3IdRnDulXWy24,15117 +lark/parsers/earley_common.py,sha256=e2e6NrNucw-WMiNV8HqQ_TpGx6P7v_S8f5aEcF0Tkqo,1620 +lark/parsers/earley_forest.py,sha256=w4JTb4tVMewue8dL-gCO96-Uo0wd4BbQUfSfIhr7txY,31332 +lark/parsers/grammar_analysis.py,sha256=rQ4Sn9EP8gjXGTZXEiWLW0KByPPpeKpN5hSIQZgNl3I,7141 +lark/parsers/lalr_analysis.py,sha256=DGHFk2tIluIyeFEVFfsMRU77DVbd598IJnUUOXO04yo,12207 +lark/parsers/lalr_interactive_parser.py,sha256=LsgfT1gdne8pXHTCsN6bl6zD6Pdh2dDqp1rIWOzp7Yw,5757 +lark/parsers/lalr_parser.py,sha256=6U8jP1AlUsuGxgJBWMq15WuGuyaolsLPevcf8HZ_zZk,4586 +lark/parsers/lalr_parser_state.py,sha256=QZ12p4CtvcvFAIKIqkeDBJYgEU3ntQllBJDYXb419ls,3793 +lark/parsers/xearley.py,sha256=DboXMNtuN0G-SXrrDm5zgUDUekz85h0Rih2PRvcf1LM,7825 +lark/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +lark/reconstruct.py,sha256=s7CevBXchUG_fe2otdAITxIaSXCEIiSjy4Sbh5QC0hs,3763 +lark/tools/__init__.py,sha256=FeKYmVUjXSt-vlQm2ktyWkcxaOCTOkZnHD_kOUWjUuA,2469 +lark/tools/__pycache__/__init__.cpython-310.pyc,, +lark/tools/__pycache__/nearley.cpython-310.pyc,, +lark/tools/__pycache__/serialize.cpython-310.pyc,, +lark/tools/__pycache__/standalone.cpython-310.pyc,, +lark/tools/nearley.py,sha256=QaLYdW6mYQdDq8JKMisV3lvPqzF0wPgu8q8BtsSA33g,6265 +lark/tools/serialize.py,sha256=nwt46LNxkDm0T_Uh9k2wS4fcfgvZQ2dy4-YC_aKhTQk,965 +lark/tools/standalone.py,sha256=6eXDqBuzZSpE5BGZm_Fh6X5yRhAPYxNVyl2aUU3ABzA,5627 +lark/tree.py,sha256=aWWHMazid8bbJanhmCjK9XK2jRFJ6N6WmlwXJGTsz28,8522 +lark/tree_matcher.py,sha256=jHdZJggn405SXmPpGf9U9HLrrsfP4eNNZaj267UTB00,6003 +lark/tree_templates.py,sha256=sSnfw1m8txAkJOYhcQrooG7xajVyVplunzTnNsxY720,6139 +lark/utils.py,sha256=3qd1-c0YgHYklvx1hA28qF7N_Ty1Zz6TbtCFMzQanNk,11270 +lark/visitors.py,sha256=VJ3T1m8p78MwXJotpOAvn06mYEqKyuIlhsAF51U-a3w,21422 diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/REQUESTED b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..71360e028d9f29e8cf66c9737e4ab9a7a4d352e6 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: setuptools (72.2.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/entry_points.txt b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..ec317d7da483edbd9ff23577367c3a6fa5de9525 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/entry_points.txt @@ -0,0 +1,2 @@ 
+[pyinstaller40] +hook-dirs = lark.__pyinstaller:get_hook_dirs diff --git a/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/top_level.txt b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..bc30e96adc75f793da4efdd21e3aff9b501659ed --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/lark-1.2.2.dist-info/top_level.txt @@ -0,0 +1 @@ +lark diff --git a/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..769a48c8265408ec08d9ba7835893423f57500f9 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/RECORD @@ -0,0 +1,68 @@ +mistral_common-1.5.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +mistral_common-1.5.1.dist-info/LICENCE,sha256=Xtb3nndzS1pgdA3YIa9eysmm8zcJyGDupOIPy2zKf8w,11339 +mistral_common-1.5.1.dist-info/METADATA,sha256=RVU_zGf7zKTqd7cwh1kUzadNl2Lj54tQl0WWCHZjYQE,4580 +mistral_common-1.5.1.dist-info/RECORD,, +mistral_common-1.5.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common-1.5.1.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88 +mistral_common/__init__.py,sha256=8rmxh34XkVT7yu5zgYupipfDrKs2hgGCjOMEIK3E4oI,22 +mistral_common/__pycache__/__init__.cpython-310.pyc,, +mistral_common/__pycache__/base.cpython-310.pyc,, +mistral_common/__pycache__/exceptions.cpython-310.pyc,, +mistral_common/__pycache__/multimodal.cpython-310.pyc,, +mistral_common/base.py,sha256=16qnCXyrVWlynGPo7iuQeJZenaXqSC7ufavi-K5inpQ,231 +mistral_common/data/mistral_instruct_tokenizer_240216.model.v2,sha256=N_ADdN6khljuj10PIYlbm8VcsBA5OWB8gYW_0cbKH4k,587404 +mistral_common/data/mistral_instruct_tokenizer_240323.model.v3,sha256=mt3Ivc5ZiESK6Btykzb0OoEmIWCujadgZ0utq51MfTM,587591 +mistral_common/data/mistral_instruct_tokenizer_241114.model.v7,sha256=G5aLjcNS9CGSNnM3x4zMYeHq3cbWQaV5Ny1PIGlL63o,587562 +mistral_common/data/mistral_instruct_tokenizer_241114.model.v7m1,sha256=G5aLjcNS9CGSNnM3x4zMYeHq3cbWQaV5Ny1PIGlL63o,587562 +mistral_common/data/tekken_240718.json,sha256=7M0WZdLkd2l8M8t_Dapvbf78V6CmvOtm1L5SlS-CdRY,14801223 +mistral_common/data/tekken_240911.json,sha256=OGsfmPummzjD3lEqTrYC3GmpXa4OVObOBI6j4pomJ6g,19280967 +mistral_common/data/tokenizer.model.v1,sha256=2t_VbXZnFcYdLveApSWrQ7jm2k3mhlvaPZX9714TQFU,493443 +mistral_common/exceptions.py,sha256=GnJzDnKHfMv5HrzqktuywU7hvy5IbKOGXsX119H6sUQ,1807 +mistral_common/multimodal.py,sha256=2ilSLB1Lo1aZaGRHBcJ3w3AQU-cyS2cGIzX0hRqsn9A,2240 +mistral_common/protocol/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/__pycache__/base.cpython-310.pyc,, +mistral_common/protocol/__pycache__/utils.cpython-310.pyc,, +mistral_common/protocol/base.py,sha256=h2-KAFw6u2Lx3MBGcc73c_994WwxQI5g0Hei5Ppw0pY,502 +mistral_common/protocol/embedding/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/embedding/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/embedding/__pycache__/request.cpython-310.pyc,, +mistral_common/protocol/embedding/__pycache__/response.cpython-310.pyc,, +mistral_common/protocol/embedding/request.py,sha256=zujGdPW8QALCecqZ3rEJb5CE7iDq9_Xy_llazzXRD-U,401 
+mistral_common/protocol/embedding/response.py,sha256=-Cf8ONDSzyi0aGasKck8uV84GYpOxywIceriDBWMwtY,874 +mistral_common/protocol/instruct/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/protocol/instruct/__pycache__/__init__.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/messages.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/normalize.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/request.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/response.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/tool_calls.cpython-310.pyc,, +mistral_common/protocol/instruct/__pycache__/validator.cpython-310.pyc,, +mistral_common/protocol/instruct/messages.py,sha256=Qe2RW65CgzqFHoOfSTONXIYpQkt1ETxaOkZDVjccCLE,3205 +mistral_common/protocol/instruct/normalize.py,sha256=Hn0hI8m22sjyqJhvruzH_ijW7igjNnPzjn4qyYVP_ic,11228 +mistral_common/protocol/instruct/request.py,sha256=NGqMVRb5g2EIru3mDev8Amy9NpnJKOf7PhwQokdBm2Y,874 +mistral_common/protocol/instruct/response.py,sha256=0uaMkl8R_MISwl3iV-0VDvJKq0e0pZcuPU4IVS-BBrA,1918 +mistral_common/protocol/instruct/tool_calls.py,sha256=OFIxehAacudwddbc_WTbOsB8k9PRm55tzb7HMko4tEg,1044 +mistral_common/protocol/instruct/validator.py,sha256=-Mr9ME8bdWz01t3pb-IphV4SJJPD0Rz7iy3zTRZQUo8,13614 +mistral_common/protocol/utils.py,sha256=HMFv5gXiirPvbBRvJShJhH3P7q5cUXPKQrxCZHpR674,73 +mistral_common/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/__pycache__/__init__.cpython-310.pyc,, +mistral_common/tokens/instruct/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/instruct/__pycache__/__init__.cpython-310.pyc,, +mistral_common/tokens/instruct/__pycache__/request.cpython-310.pyc,, +mistral_common/tokens/instruct/request.py,sha256=1dSMNFQIms-RPZHhsj1gOjVNX4tXQfpqa-66rDGzcVE,693 +mistral_common/tokens/tokenizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +mistral_common/tokens/tokenizers/__pycache__/__init__.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/base.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/mistral.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/multimodal.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/sentencepiece.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/tekken.cpython-310.pyc,, +mistral_common/tokens/tokenizers/__pycache__/utils.cpython-310.pyc,, +mistral_common/tokens/tokenizers/base.py,sha256=ws_wBjkNF5KrGOKaPF7r02lv4yYg5862jzDVtzk21jA,5522 +mistral_common/tokens/tokenizers/mistral.py,sha256=O2Xb84Qkg2Koz9Ar0pu0_b2RsDZK-OqI95tAd8-AZmE,10893 +mistral_common/tokens/tokenizers/multimodal.py,sha256=g2qgE6pEdXxL7bsxzOcIgoXtICqcLnPsmNN54fpQf7o,5453 +mistral_common/tokens/tokenizers/sentencepiece.py,sha256=0G8vShiUXCYKuYmbnWzasri3c9Slj0ZLDCrNH7XzOa0,26335 +mistral_common/tokens/tokenizers/tekken.py,sha256=ArYcbBEhRhPYhz3YJcW35FrLHk_s-QtOWMaSsiqGPa4,11225 +mistral_common/tokens/tokenizers/utils.py,sha256=M1nsMshZOQecXZSnkhLMcJ0shT9894A3L6VJRaErTfU,187 diff --git a/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/REQUESTED b/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git 
a/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..d73ccaae8e0eea45949b0957a5af034099b36aa4 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/mistral_common-1.5.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.9.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/LICENSE b/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..19b305b00060a774a9180fb916c14b49edb2008f --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/LICENSE @@ -0,0 +1,32 @@ +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. 
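The closing paragraph of the license above notes that compiler-generated code is not standalone and must be linked against this support library. A small hedged sketch of using that runtime directly (field names and values are illustrative; Struct and json_format are among the google.protobuf modules listed in the RECORD below):

    from google.protobuf import json_format, struct_pb2

    # Struct is a well-known type bundled with the runtime; it behaves
    # like a dict of JSON-compatible values.
    payload = struct_pb2.Struct()
    payload.update({"model": "deepseek", "temperature": 0.7})
    print(json_format.MessageToJson(payload))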
diff --git a/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/RECORD b/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..fa2942112d6869c9aca08ab9c45bb7e92fc6a1cd --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/RECORD @@ -0,0 +1,117 @@ +google/_upb/_message.abi3.so,sha256=NNXmkM6cyUNtdgk5dC1t8aCvsljfioOf5_9IxJgJXzA,390920 +google/protobuf/__init__.py,sha256=Pbsntkpd1YjH8vuMHb6EUmx6js-38EyvuwCf59a3dVc,346 +google/protobuf/__pycache__/__init__.cpython-310.pyc,, +google/protobuf/__pycache__/any.cpython-310.pyc,, +google/protobuf/__pycache__/any_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/api_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/descriptor.cpython-310.pyc,, +google/protobuf/__pycache__/descriptor_database.cpython-310.pyc,, +google/protobuf/__pycache__/descriptor_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/descriptor_pool.cpython-310.pyc,, +google/protobuf/__pycache__/duration.cpython-310.pyc,, +google/protobuf/__pycache__/duration_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/empty_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/field_mask_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/json_format.cpython-310.pyc,, +google/protobuf/__pycache__/message.cpython-310.pyc,, +google/protobuf/__pycache__/message_factory.cpython-310.pyc,, +google/protobuf/__pycache__/proto.cpython-310.pyc,, +google/protobuf/__pycache__/proto_builder.cpython-310.pyc,, +google/protobuf/__pycache__/proto_json.cpython-310.pyc,, +google/protobuf/__pycache__/reflection.cpython-310.pyc,, +google/protobuf/__pycache__/runtime_version.cpython-310.pyc,, +google/protobuf/__pycache__/service.cpython-310.pyc,, +google/protobuf/__pycache__/service_reflection.cpython-310.pyc,, +google/protobuf/__pycache__/source_context_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/struct_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/symbol_database.cpython-310.pyc,, +google/protobuf/__pycache__/text_encoding.cpython-310.pyc,, +google/protobuf/__pycache__/text_format.cpython-310.pyc,, +google/protobuf/__pycache__/timestamp.cpython-310.pyc,, +google/protobuf/__pycache__/timestamp_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/type_pb2.cpython-310.pyc,, +google/protobuf/__pycache__/unknown_fields.cpython-310.pyc,, +google/protobuf/__pycache__/wrappers_pb2.cpython-310.pyc,, +google/protobuf/any.py,sha256=AZuOL26Bo8AFFUjHLhh_OQP2ceUJEgOUTqImjxXAJkc,975 +google/protobuf/any_pb2.py,sha256=awlm1xSyLM88AznVAEYF2JLh82eOTGwLCGGrv2ledZk,1725 +google/protobuf/api_pb2.py,sha256=mHriS-plVj7lxD32ay1oyzwWw-FYC9K3MwFRFv3vxNs,3145 +google/protobuf/compiler/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +google/protobuf/compiler/__pycache__/__init__.cpython-310.pyc,, +google/protobuf/compiler/__pycache__/plugin_pb2.cpython-310.pyc,, +google/protobuf/compiler/plugin_pb2.py,sha256=u51cOYDRPW10SdBuqMzT3Hp4ExJc3BcRXsJDkPsXW4c,3797 +google/protobuf/descriptor.py,sha256=xM9LaJQJbyt0fxMdJwZdSzCS8tZK4LvHw5Yd2F9KgKU,52253 +google/protobuf/descriptor_database.py,sha256=GDiSu-vBZBZ-L1YHQXSTsbsJMRNY-20icb6pj3ER8E8,5444 +google/protobuf/descriptor_pb2.py,sha256=PEZq8dpSWcavPLjUyMJEhOTiwGn_IiSCprdWU4xO334,343365 +google/protobuf/descriptor_pool.py,sha256=DA5XTv-jmRCJ1O4b_Yswg93KzmFpzaWOPofRGzhXeBY,48430 +google/protobuf/duration.py,sha256=vQTwVyiiyGm3Wy3LW8ohA3tkGkrUKoTn_p4SdEBU8bM,2672 +google/protobuf/duration_pb2.py,sha256=t1v8AhPwBCFdSA_6i0JOyqmln4MUCNjsRa6HhLn97Uw,1805 
+google/protobuf/empty_pb2.py,sha256=k93ObVBZryfOTH9iTHaOBE5ZpWVSeiF-y0x1BctQ2xM,1669 +google/protobuf/field_mask_pb2.py,sha256=Cjl6_9cwkRnHO6Lc4cBfRl5L86E8Q_8pExGQ2jruwks,1765 +google/protobuf/internal/__init__.py,sha256=8d_k1ksNWIuqPDEEEtOjgC3Xx8kAXD2-04R7mxJlSbs,272 +google/protobuf/internal/__pycache__/__init__.cpython-310.pyc,, +google/protobuf/internal/__pycache__/_parameterized.cpython-310.pyc,, +google/protobuf/internal/__pycache__/api_implementation.cpython-310.pyc,, +google/protobuf/internal/__pycache__/builder.cpython-310.pyc,, +google/protobuf/internal/__pycache__/containers.cpython-310.pyc,, +google/protobuf/internal/__pycache__/decoder.cpython-310.pyc,, +google/protobuf/internal/__pycache__/encoder.cpython-310.pyc,, +google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-310.pyc,, +google/protobuf/internal/__pycache__/extension_dict.cpython-310.pyc,, +google/protobuf/internal/__pycache__/field_mask.cpython-310.pyc,, +google/protobuf/internal/__pycache__/message_listener.cpython-310.pyc,, +google/protobuf/internal/__pycache__/python_edition_defaults.cpython-310.pyc,, +google/protobuf/internal/__pycache__/python_message.cpython-310.pyc,, +google/protobuf/internal/__pycache__/testing_refleaks.cpython-310.pyc,, +google/protobuf/internal/__pycache__/type_checkers.cpython-310.pyc,, +google/protobuf/internal/__pycache__/well_known_types.cpython-310.pyc,, +google/protobuf/internal/__pycache__/wire_format.cpython-310.pyc,, +google/protobuf/internal/_parameterized.py,sha256=_LLIH2kmUrI1hZfUlIF8OBcBbbQXgRnm39uB9TpzaHU,14073 +google/protobuf/internal/api_implementation.py,sha256=Qnq9L9thCvgdxlhnGsaNrSCVXmMq_wCZ7-ooRNLVtzs,4787 +google/protobuf/internal/builder.py,sha256=2veSGrr1WphCBOGE3wNXKbVPBkY1-LlSCsKOQH2Nudk,4015 +google/protobuf/internal/containers.py,sha256=CQ0R54YddBf2uWnDqMUnaevr79BdBb1fYM33qsnYSxY,21722 +google/protobuf/internal/decoder.py,sha256=7nDfjyHd67pwky4hNyO_gZKm0IggiAkHF2IfUlO_gMY,36727 +google/protobuf/internal/encoder.py,sha256=Vujp3bU10dLBasUnRaGZKD-ZTLq7zEGA8wKh7mVLR-g,27297 +google/protobuf/internal/enum_type_wrapper.py,sha256=PNhK87a_NP1JIfFHuYFibpE4hHdHYawXwqZxMEtvsvo,3747 +google/protobuf/internal/extension_dict.py,sha256=7bT-5iqa_qw4wkk3QNtCPzGlfPU2h9FDyc5TjF2wiTo,7225 +google/protobuf/internal/field_mask.py,sha256=Ek2eDU8mY1Shj-V2wRmOggXummBv_brbL3XOEVFR6c0,10416 +google/protobuf/internal/message_listener.py,sha256=uh8viU_MvWdDe4Kl14CromKVFAzBMPlMzFZ4vew_UJc,2008 +google/protobuf/internal/python_edition_defaults.py,sha256=72ruAhyM3WEiE8I29ZJZIRp_dOLJoZuBDedecOAW7aQ,434 +google/protobuf/internal/python_message.py,sha256=GkE2xJ3KuVKA3jZonHTgaxOijVzPSQdHMZQTfH10Xzk,58179 +google/protobuf/internal/testing_refleaks.py,sha256=Pp-e8isZv-IwZDOzPaLo9WujUXj_XghNrbV-rHswvL4,4080 +google/protobuf/internal/type_checkers.py,sha256=1W7k9lfyeWML2Hl461xGsKCFJiN63uKBT6vyIKKz9go,15471 +google/protobuf/internal/well_known_types.py,sha256=dv8F2oJXfU2hlBaVbfJ3bWs97bEm1FfKzWHr1-nazSM,22705 +google/protobuf/internal/wire_format.py,sha256=EbAXZdb23iCObCZxNgaMx8-VRF2UjgyPrBCTtV10Rx8,7087 +google/protobuf/json_format.py,sha256=d-27JdC0vA_-A1V3yTuRqR70e4xuXUHy3nbJWs-oLc8,37260 +google/protobuf/message.py,sha256=usc6ma5tUR66le_XDFC6ce7VRX3VvQlrRFCvjshxI-k,14042 +google/protobuf/message_factory.py,sha256=hsMGMC6BJ3ik5vjGGeIG57WLInAf1Vt8G1528XKBprc,8262 +google/protobuf/proto.py,sha256=R-vAuadXJgPhWCeU9nHOQPmAlCvAgnkHIny7DzkkyXo,3500 +google/protobuf/proto_builder.py,sha256=pGU2L_pPEYkylZkrvHMCUH2PFWvc9wI-awwT7F5i740,4203 
+google/protobuf/proto_json.py,sha256=fUy0Vb4m_831-oabn7JbzmyipcoJpQWtBdgTMoj8Yp4,3094 +google/protobuf/pyext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +google/protobuf/pyext/__pycache__/__init__.cpython-310.pyc,, +google/protobuf/pyext/__pycache__/cpp_message.cpython-310.pyc,, +google/protobuf/pyext/cpp_message.py,sha256=8uSrWX9kD3HPRhntvTPc4bgnfQ2BzX9FPC73CgifXAw,1715 +google/protobuf/reflection.py,sha256=aC4b3fcWr0rYi9DAk29dX3-WW0QxrRUOErzUOywxjsg,2893 +google/protobuf/runtime_version.py,sha256=7HkQnAqahMicPwBDm2oqxQ2FNIFtDx_kvt8iR1a992E,3911 +google/protobuf/service.py,sha256=BkCM4Acflz04MNVtFsPeMVUo_6DDERIq7Dl7g30VGO0,8059 +google/protobuf/service_reflection.py,sha256=WHElGnPgywDtn3X8xKVNsZZOCgJOTzgpAyTd-rmCKGU,10058 +google/protobuf/source_context_pb2.py,sha256=u0jlhFLcTEC3nCW47G30ej1rjpjeuxA9En_u4CmcSIQ,1791 +google/protobuf/struct_pb2.py,sha256=wv-zh5zz7XdrqKNi9_DCdLAbcwS3OUDRtxiaEUgwUyM,3061 +google/protobuf/symbol_database.py,sha256=ruKrtrkuxmFe7uzbJGMgOD7D6Qs2g6jFIRC3aS9NNvU,6709 +google/protobuf/testdata/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +google/protobuf/testdata/__pycache__/__init__.cpython-310.pyc,, +google/protobuf/text_encoding.py,sha256=Ao1Q6OP8i4p8VDtvpe8uW1BjX7aQZvkJggvhFYYrB7w,3621 +google/protobuf/text_format.py,sha256=L-gxTX1L6OEHoVNuFJI2Qtp5QQvsYkJkedCNMtsm55M,63477 +google/protobuf/timestamp.py,sha256=s23LWq6hDiFIeAtVUn8LwfEc5aRM7WAwTz_hCaOVndk,3133 +google/protobuf/timestamp_pb2.py,sha256=Or6x5fjFI8kKMqgAxzlTy3WViAa9jh7uBH2Q_GRSocU,1815 +google/protobuf/type_pb2.py,sha256=CtODh-2e_c1g7BtG7dcNIX6LZtpW4xtHbHldgFZnMrA,5438 +google/protobuf/unknown_fields.py,sha256=RVMDxyiZcObbb40dMK-xXCAvc5pkyLNSL1y2qzPAUbA,3127 +google/protobuf/util/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +google/protobuf/util/__pycache__/__init__.cpython-310.pyc,, +google/protobuf/wrappers_pb2.py,sha256=VYjQ1La95AJtK3zC72LzHJcVPN2M_baS1_LxGK21CsE,3037 +protobuf-5.29.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +protobuf-5.29.1.dist-info/LICENSE,sha256=bl4RcySv2UTc9n82zzKYQ7wakiKajNm7Vz16gxMP6n0,1732 +protobuf-5.29.1.dist-info/METADATA,sha256=mF-v7kpzeK_DdZ2r-q1rTXmztM10h-AIv3U_GVa7aF0,592 +protobuf-5.29.1.dist-info/RECORD,, +protobuf-5.29.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +protobuf-5.29.1.dist-info/WHEEL,sha256=vS6ODHxi8MEFPlH7P2fb6m0CdIf-1Dsx19UF_kqQxLY,110 diff --git a/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..7abeb368128cc07031faedbc8710536299c572bc --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/protobuf-5.29.1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: bazel-wheelmaker 1.0 +Root-Is-Purelib: false +Tag: cp38-abi3-manylinux2014_x86_64 diff --git a/deepseek/lib/python3.10/site-packages/regex-2024.11.6.dist-info/LICENSE.txt b/deepseek/lib/python3.10/site-packages/regex-2024.11.6.dist-info/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..99c19cf844141395a87c34325da3a537f1e95815 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/regex-2024.11.6.dist-info/LICENSE.txt @@ -0,0 +1,208 @@ +This work was derived from the 're' module of CPython 2.6 and CPython 3.1, +copyright (c) 1998-2001 by Secret Labs AB and licensed under CNRI's Python 1.6 +license. 
+ +All additions and alterations are licensed under the Apache 2.0 License. + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Matthew Barnett + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/deepseek/lib/python3.10/site-packages/regex-2024.11.6.dist-info/WHEEL b/deepseek/lib/python3.10/site-packages/regex-2024.11.6.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..7f736e9a4d9f982b35f055c3019059d4bae0e480 --- /dev/null +++ b/deepseek/lib/python3.10/site-packages/regex-2024.11.6.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: setuptools (75.3.0) +Root-Is-Purelib: false +Tag: cp310-cp310-manylinux_2_17_x86_64 +Tag: cp310-cp310-manylinux2014_x86_64 +
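For reference, the aiohttp/worker.py module added earlier in this diff is consumed through gunicorn's --worker-class option. A minimal usage sketch (the module name myapp and the handler are illustrative, not part of the vendored tree):

    # myapp.py
    from aiohttp import web

    async def handle(request: web.Request) -> web.Response:
        return web.Response(text="ok")

    app = web.Application()
    app.router.add_get("/", handle)  # add_get also registers a HEAD route by default

Run it under the worker class defined above, for example:

    gunicorn myapp:app --bind 127.0.0.1:8080 --worker-class aiohttp.GunicornWebWorker

aiohttp.GunicornUVLoopWebWorker can be substituted to run the same app on a uvloop event loop.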