Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- .venv/lib/python3.11/site-packages/aiohttp/_find_header.pxd +2 -0
- .venv/lib/python3.11/site-packages/aiohttp/_headers.pxi +83 -0
- .venv/lib/python3.11/site-packages/aiohttp/abc.py +253 -0
- .venv/lib/python3.11/site-packages/aiohttp/compression_utils.py +173 -0
- .venv/lib/python3.11/site-packages/aiohttp/formdata.py +182 -0
- .venv/lib/python3.11/site-packages/aiohttp/http_exceptions.py +112 -0
- .venv/lib/python3.11/site-packages/aiohttp/http_parser.py +1046 -0
- .venv/lib/python3.11/site-packages/aiohttp/http_writer.py +249 -0
- .venv/lib/python3.11/site-packages/aiohttp/pytest_plugin.py +436 -0
- .venv/lib/python3.11/site-packages/aiohttp/resolver.py +187 -0
- .venv/lib/python3.11/site-packages/aiohttp/test_utils.py +770 -0
- .venv/lib/python3.11/site-packages/aiohttp/typedefs.py +69 -0
- .venv/lib/python3.11/site-packages/aiohttp/web.py +605 -0
- .venv/lib/python3.11/site-packages/aiohttp/web_protocol.py +750 -0
- .venv/lib/python3.11/site-packages/aiohttp/web_routedef.py +214 -0
- .venv/lib/python3.11/site-packages/aiohttp/web_runner.py +399 -0
- .venv/lib/python3.11/site-packages/aiohttp/worker.py +252 -0
- .venv/lib/python3.11/site-packages/filelock/__init__.py +70 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/__init__.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/_api.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/_error.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/_soft.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/_unix.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/_util.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/_windows.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/asyncio.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/__pycache__/version.cpython-311.pyc +0 -0
- .venv/lib/python3.11/site-packages/filelock/_api.py +403 -0
- .venv/lib/python3.11/site-packages/filelock/_error.py +30 -0
- .venv/lib/python3.11/site-packages/filelock/_soft.py +47 -0
- .venv/lib/python3.11/site-packages/filelock/_unix.py +68 -0
- .venv/lib/python3.11/site-packages/filelock/_util.py +52 -0
- .venv/lib/python3.11/site-packages/filelock/_windows.py +65 -0
- .venv/lib/python3.11/site-packages/filelock/asyncio.py +342 -0
- .venv/lib/python3.11/site-packages/filelock/py.typed +0 -0
- .venv/lib/python3.11/site-packages/filelock/version.py +16 -0
- .venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/METADATA +145 -0
- .venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/RECORD +9 -0
- .venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/WHEEL +4 -0
- .venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc +3 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/INSTALLER +1 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE +3 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.APACHE +177 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.BSD +23 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/METADATA +102 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/RECORD +40 -0
- .venv/lib/python3.11/site-packages/packaging-24.2.dist-info/WHEEL +4 -0
- .venv/lib/python3.11/site-packages/pybind11/__pycache__/__init__.cpython-311.pyc +0 -0
.gitattributes
CHANGED
|
@@ -400,3 +400,4 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cudnn/lib/
|
|
| 400 |
.venv/lib/python3.11/site-packages/numpy/ma/tests/__pycache__/test_core.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
| 401 |
.venv/lib/python3.11/site-packages/mistral_common/data/tekken_240718.json filter=lfs diff=lfs merge=lfs -text
|
| 402 |
.venv/lib/python3.11/site-packages/torchgen/__pycache__/gen.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
| 400 |
.venv/lib/python3.11/site-packages/numpy/ma/tests/__pycache__/test_core.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
| 401 |
.venv/lib/python3.11/site-packages/mistral_common/data/tekken_240718.json filter=lfs diff=lfs merge=lfs -text
|
| 402 |
.venv/lib/python3.11/site-packages/torchgen/__pycache__/gen.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
| 403 |
+
.venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
|
.venv/lib/python3.11/site-packages/aiohttp/_find_header.pxd
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "_find_header.h":
|
| 2 |
+
int find_header(char *, int)
|
.venv/lib/python3.11/site-packages/aiohttp/_headers.pxi
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The file is autogenerated from aiohttp/hdrs.py
|
| 2 |
+
# Run ./tools/gen.py to update it after the origin changing.
|
| 3 |
+
|
| 4 |
+
from . import hdrs
|
| 5 |
+
cdef tuple headers = (
|
| 6 |
+
hdrs.ACCEPT,
|
| 7 |
+
hdrs.ACCEPT_CHARSET,
|
| 8 |
+
hdrs.ACCEPT_ENCODING,
|
| 9 |
+
hdrs.ACCEPT_LANGUAGE,
|
| 10 |
+
hdrs.ACCEPT_RANGES,
|
| 11 |
+
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
| 12 |
+
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
| 13 |
+
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
| 14 |
+
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
| 15 |
+
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
| 16 |
+
hdrs.ACCESS_CONTROL_MAX_AGE,
|
| 17 |
+
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
| 18 |
+
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
| 19 |
+
hdrs.AGE,
|
| 20 |
+
hdrs.ALLOW,
|
| 21 |
+
hdrs.AUTHORIZATION,
|
| 22 |
+
hdrs.CACHE_CONTROL,
|
| 23 |
+
hdrs.CONNECTION,
|
| 24 |
+
hdrs.CONTENT_DISPOSITION,
|
| 25 |
+
hdrs.CONTENT_ENCODING,
|
| 26 |
+
hdrs.CONTENT_LANGUAGE,
|
| 27 |
+
hdrs.CONTENT_LENGTH,
|
| 28 |
+
hdrs.CONTENT_LOCATION,
|
| 29 |
+
hdrs.CONTENT_MD5,
|
| 30 |
+
hdrs.CONTENT_RANGE,
|
| 31 |
+
hdrs.CONTENT_TRANSFER_ENCODING,
|
| 32 |
+
hdrs.CONTENT_TYPE,
|
| 33 |
+
hdrs.COOKIE,
|
| 34 |
+
hdrs.DATE,
|
| 35 |
+
hdrs.DESTINATION,
|
| 36 |
+
hdrs.DIGEST,
|
| 37 |
+
hdrs.ETAG,
|
| 38 |
+
hdrs.EXPECT,
|
| 39 |
+
hdrs.EXPIRES,
|
| 40 |
+
hdrs.FORWARDED,
|
| 41 |
+
hdrs.FROM,
|
| 42 |
+
hdrs.HOST,
|
| 43 |
+
hdrs.IF_MATCH,
|
| 44 |
+
hdrs.IF_MODIFIED_SINCE,
|
| 45 |
+
hdrs.IF_NONE_MATCH,
|
| 46 |
+
hdrs.IF_RANGE,
|
| 47 |
+
hdrs.IF_UNMODIFIED_SINCE,
|
| 48 |
+
hdrs.KEEP_ALIVE,
|
| 49 |
+
hdrs.LAST_EVENT_ID,
|
| 50 |
+
hdrs.LAST_MODIFIED,
|
| 51 |
+
hdrs.LINK,
|
| 52 |
+
hdrs.LOCATION,
|
| 53 |
+
hdrs.MAX_FORWARDS,
|
| 54 |
+
hdrs.ORIGIN,
|
| 55 |
+
hdrs.PRAGMA,
|
| 56 |
+
hdrs.PROXY_AUTHENTICATE,
|
| 57 |
+
hdrs.PROXY_AUTHORIZATION,
|
| 58 |
+
hdrs.RANGE,
|
| 59 |
+
hdrs.REFERER,
|
| 60 |
+
hdrs.RETRY_AFTER,
|
| 61 |
+
hdrs.SEC_WEBSOCKET_ACCEPT,
|
| 62 |
+
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
| 63 |
+
hdrs.SEC_WEBSOCKET_KEY,
|
| 64 |
+
hdrs.SEC_WEBSOCKET_KEY1,
|
| 65 |
+
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
| 66 |
+
hdrs.SEC_WEBSOCKET_VERSION,
|
| 67 |
+
hdrs.SERVER,
|
| 68 |
+
hdrs.SET_COOKIE,
|
| 69 |
+
hdrs.TE,
|
| 70 |
+
hdrs.TRAILER,
|
| 71 |
+
hdrs.TRANSFER_ENCODING,
|
| 72 |
+
hdrs.URI,
|
| 73 |
+
hdrs.UPGRADE,
|
| 74 |
+
hdrs.USER_AGENT,
|
| 75 |
+
hdrs.VARY,
|
| 76 |
+
hdrs.VIA,
|
| 77 |
+
hdrs.WWW_AUTHENTICATE,
|
| 78 |
+
hdrs.WANT_DIGEST,
|
| 79 |
+
hdrs.WARNING,
|
| 80 |
+
hdrs.X_FORWARDED_FOR,
|
| 81 |
+
hdrs.X_FORWARDED_HOST,
|
| 82 |
+
hdrs.X_FORWARDED_PROTO,
|
| 83 |
+
)
|
.venv/lib/python3.11/site-packages/aiohttp/abc.py
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import socket
|
| 4 |
+
import zlib
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from collections.abc import Sized
|
| 7 |
+
from http.cookies import BaseCookie, Morsel
|
| 8 |
+
from typing import (
|
| 9 |
+
TYPE_CHECKING,
|
| 10 |
+
Any,
|
| 11 |
+
Awaitable,
|
| 12 |
+
Callable,
|
| 13 |
+
Dict,
|
| 14 |
+
Generator,
|
| 15 |
+
Iterable,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Tuple,
|
| 19 |
+
TypedDict,
|
| 20 |
+
Union,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from multidict import CIMultiDict
|
| 24 |
+
from yarl import URL
|
| 25 |
+
|
| 26 |
+
from .typedefs import LooseCookies
|
| 27 |
+
|
| 28 |
+
if TYPE_CHECKING:
|
| 29 |
+
from .web_app import Application
|
| 30 |
+
from .web_exceptions import HTTPException
|
| 31 |
+
from .web_request import BaseRequest, Request
|
| 32 |
+
from .web_response import StreamResponse
|
| 33 |
+
else:
|
| 34 |
+
BaseRequest = Request = Application = StreamResponse = None
|
| 35 |
+
HTTPException = None
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class AbstractRouter(ABC):
|
| 39 |
+
def __init__(self) -> None:
|
| 40 |
+
self._frozen = False
|
| 41 |
+
|
| 42 |
+
def post_init(self, app: Application) -> None:
|
| 43 |
+
"""Post init stage.
|
| 44 |
+
|
| 45 |
+
Not an abstract method for sake of backward compatibility,
|
| 46 |
+
but if the router wants to be aware of the application
|
| 47 |
+
it can override this.
|
| 48 |
+
"""
|
| 49 |
+
|
| 50 |
+
@property
|
| 51 |
+
def frozen(self) -> bool:
|
| 52 |
+
return self._frozen
|
| 53 |
+
|
| 54 |
+
def freeze(self) -> None:
|
| 55 |
+
"""Freeze router."""
|
| 56 |
+
self._frozen = True
|
| 57 |
+
|
| 58 |
+
@abstractmethod
|
| 59 |
+
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
| 60 |
+
"""Return MATCH_INFO for given request"""
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class AbstractMatchInfo(ABC):
|
| 64 |
+
|
| 65 |
+
__slots__ = ()
|
| 66 |
+
|
| 67 |
+
@property # pragma: no branch
|
| 68 |
+
@abstractmethod
|
| 69 |
+
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
| 70 |
+
"""Execute matched request handler"""
|
| 71 |
+
|
| 72 |
+
@property
|
| 73 |
+
@abstractmethod
|
| 74 |
+
def expect_handler(
|
| 75 |
+
self,
|
| 76 |
+
) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
|
| 77 |
+
"""Expect handler for 100-continue processing"""
|
| 78 |
+
|
| 79 |
+
@property # pragma: no branch
|
| 80 |
+
@abstractmethod
|
| 81 |
+
def http_exception(self) -> Optional[HTTPException]:
|
| 82 |
+
"""HTTPException instance raised on router's resolving, or None"""
|
| 83 |
+
|
| 84 |
+
@abstractmethod # pragma: no branch
|
| 85 |
+
def get_info(self) -> Dict[str, Any]:
|
| 86 |
+
"""Return a dict with additional info useful for introspection"""
|
| 87 |
+
|
| 88 |
+
@property # pragma: no branch
|
| 89 |
+
@abstractmethod
|
| 90 |
+
def apps(self) -> Tuple[Application, ...]:
|
| 91 |
+
"""Stack of nested applications.
|
| 92 |
+
|
| 93 |
+
Top level application is left-most element.
|
| 94 |
+
|
| 95 |
+
"""
|
| 96 |
+
|
| 97 |
+
@abstractmethod
|
| 98 |
+
def add_app(self, app: Application) -> None:
|
| 99 |
+
"""Add application to the nested apps stack."""
|
| 100 |
+
|
| 101 |
+
@abstractmethod
|
| 102 |
+
def freeze(self) -> None:
|
| 103 |
+
"""Freeze the match info.
|
| 104 |
+
|
| 105 |
+
The method is called after route resolution.
|
| 106 |
+
|
| 107 |
+
After the call .add_app() is forbidden.
|
| 108 |
+
|
| 109 |
+
"""
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class AbstractView(ABC):
|
| 113 |
+
"""Abstract class based view."""
|
| 114 |
+
|
| 115 |
+
def __init__(self, request: Request) -> None:
|
| 116 |
+
self._request = request
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def request(self) -> Request:
|
| 120 |
+
"""Request instance."""
|
| 121 |
+
return self._request
|
| 122 |
+
|
| 123 |
+
@abstractmethod
|
| 124 |
+
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
| 125 |
+
"""Execute the view handler."""
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
class ResolveResult(TypedDict):
|
| 129 |
+
"""Resolve result.
|
| 130 |
+
|
| 131 |
+
This is the result returned from an AbstractResolver's
|
| 132 |
+
resolve method.
|
| 133 |
+
|
| 134 |
+
:param hostname: The hostname that was provided.
|
| 135 |
+
:param host: The IP address that was resolved.
|
| 136 |
+
:param port: The port that was resolved.
|
| 137 |
+
:param family: The address family that was resolved.
|
| 138 |
+
:param proto: The protocol that was resolved.
|
| 139 |
+
:param flags: The flags that were resolved.
|
| 140 |
+
"""
|
| 141 |
+
|
| 142 |
+
hostname: str
|
| 143 |
+
host: str
|
| 144 |
+
port: int
|
| 145 |
+
family: int
|
| 146 |
+
proto: int
|
| 147 |
+
flags: int
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
class AbstractResolver(ABC):
|
| 151 |
+
"""Abstract DNS resolver."""
|
| 152 |
+
|
| 153 |
+
@abstractmethod
|
| 154 |
+
async def resolve(
|
| 155 |
+
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
|
| 156 |
+
) -> List[ResolveResult]:
|
| 157 |
+
"""Return IP address for given hostname"""
|
| 158 |
+
|
| 159 |
+
@abstractmethod
|
| 160 |
+
async def close(self) -> None:
|
| 161 |
+
"""Release resolver"""
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
if TYPE_CHECKING:
|
| 165 |
+
IterableBase = Iterable[Morsel[str]]
|
| 166 |
+
else:
|
| 167 |
+
IterableBase = Iterable
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class AbstractCookieJar(Sized, IterableBase):
|
| 174 |
+
"""Abstract Cookie Jar."""
|
| 175 |
+
|
| 176 |
+
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 177 |
+
self._loop = loop or asyncio.get_running_loop()
|
| 178 |
+
|
| 179 |
+
@property
|
| 180 |
+
@abstractmethod
|
| 181 |
+
def quote_cookie(self) -> bool:
|
| 182 |
+
"""Return True if cookies should be quoted."""
|
| 183 |
+
|
| 184 |
+
@abstractmethod
|
| 185 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
| 186 |
+
"""Clear all cookies if no predicate is passed."""
|
| 187 |
+
|
| 188 |
+
@abstractmethod
|
| 189 |
+
def clear_domain(self, domain: str) -> None:
|
| 190 |
+
"""Clear all cookies for domain and all subdomains."""
|
| 191 |
+
|
| 192 |
+
@abstractmethod
|
| 193 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
| 194 |
+
"""Update cookies."""
|
| 195 |
+
|
| 196 |
+
@abstractmethod
|
| 197 |
+
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
| 198 |
+
"""Return the jar's cookies filtered by their attributes."""
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class AbstractStreamWriter(ABC):
|
| 202 |
+
"""Abstract stream writer."""
|
| 203 |
+
|
| 204 |
+
buffer_size: int = 0
|
| 205 |
+
output_size: int = 0
|
| 206 |
+
length: Optional[int] = 0
|
| 207 |
+
|
| 208 |
+
@abstractmethod
|
| 209 |
+
async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
|
| 210 |
+
"""Write chunk into stream."""
|
| 211 |
+
|
| 212 |
+
@abstractmethod
|
| 213 |
+
async def write_eof(self, chunk: bytes = b"") -> None:
|
| 214 |
+
"""Write last chunk."""
|
| 215 |
+
|
| 216 |
+
@abstractmethod
|
| 217 |
+
async def drain(self) -> None:
|
| 218 |
+
"""Flush the write buffer."""
|
| 219 |
+
|
| 220 |
+
@abstractmethod
|
| 221 |
+
def enable_compression(
|
| 222 |
+
self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
|
| 223 |
+
) -> None:
|
| 224 |
+
"""Enable HTTP body compression"""
|
| 225 |
+
|
| 226 |
+
@abstractmethod
|
| 227 |
+
def enable_chunking(self) -> None:
|
| 228 |
+
"""Enable HTTP chunked mode"""
|
| 229 |
+
|
| 230 |
+
@abstractmethod
|
| 231 |
+
async def write_headers(
|
| 232 |
+
self, status_line: str, headers: "CIMultiDict[str]"
|
| 233 |
+
) -> None:
|
| 234 |
+
"""Write HTTP headers"""
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class AbstractAccessLogger(ABC):
|
| 238 |
+
"""Abstract writer to access log."""
|
| 239 |
+
|
| 240 |
+
__slots__ = ("logger", "log_format")
|
| 241 |
+
|
| 242 |
+
def __init__(self, logger: logging.Logger, log_format: str) -> None:
|
| 243 |
+
self.logger = logger
|
| 244 |
+
self.log_format = log_format
|
| 245 |
+
|
| 246 |
+
@abstractmethod
|
| 247 |
+
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
| 248 |
+
"""Emit log to logger."""
|
| 249 |
+
|
| 250 |
+
@property
|
| 251 |
+
def enabled(self) -> bool:
|
| 252 |
+
"""Check if logger is enabled."""
|
| 253 |
+
return True
|
.venv/lib/python3.11/site-packages/aiohttp/compression_utils.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import zlib
|
| 3 |
+
from concurrent.futures import Executor
|
| 4 |
+
from typing import Optional, cast
|
| 5 |
+
|
| 6 |
+
try:
|
| 7 |
+
try:
|
| 8 |
+
import brotlicffi as brotli
|
| 9 |
+
except ImportError:
|
| 10 |
+
import brotli
|
| 11 |
+
|
| 12 |
+
HAS_BROTLI = True
|
| 13 |
+
except ImportError: # pragma: no cover
|
| 14 |
+
HAS_BROTLI = False
|
| 15 |
+
|
| 16 |
+
MAX_SYNC_CHUNK_SIZE = 1024
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
def encoding_to_mode(
|
| 20 |
+
encoding: Optional[str] = None,
|
| 21 |
+
suppress_deflate_header: bool = False,
|
| 22 |
+
) -> int:
|
| 23 |
+
if encoding == "gzip":
|
| 24 |
+
return 16 + zlib.MAX_WBITS
|
| 25 |
+
|
| 26 |
+
return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ZlibBaseHandler:
|
| 30 |
+
def __init__(
|
| 31 |
+
self,
|
| 32 |
+
mode: int,
|
| 33 |
+
executor: Optional[Executor] = None,
|
| 34 |
+
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
|
| 35 |
+
):
|
| 36 |
+
self._mode = mode
|
| 37 |
+
self._executor = executor
|
| 38 |
+
self._max_sync_chunk_size = max_sync_chunk_size
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class ZLibCompressor(ZlibBaseHandler):
|
| 42 |
+
def __init__(
|
| 43 |
+
self,
|
| 44 |
+
encoding: Optional[str] = None,
|
| 45 |
+
suppress_deflate_header: bool = False,
|
| 46 |
+
level: Optional[int] = None,
|
| 47 |
+
wbits: Optional[int] = None,
|
| 48 |
+
strategy: int = zlib.Z_DEFAULT_STRATEGY,
|
| 49 |
+
executor: Optional[Executor] = None,
|
| 50 |
+
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
|
| 51 |
+
):
|
| 52 |
+
super().__init__(
|
| 53 |
+
mode=(
|
| 54 |
+
encoding_to_mode(encoding, suppress_deflate_header)
|
| 55 |
+
if wbits is None
|
| 56 |
+
else wbits
|
| 57 |
+
),
|
| 58 |
+
executor=executor,
|
| 59 |
+
max_sync_chunk_size=max_sync_chunk_size,
|
| 60 |
+
)
|
| 61 |
+
if level is None:
|
| 62 |
+
self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
|
| 63 |
+
else:
|
| 64 |
+
self._compressor = zlib.compressobj(
|
| 65 |
+
wbits=self._mode, strategy=strategy, level=level
|
| 66 |
+
)
|
| 67 |
+
self._compress_lock = asyncio.Lock()
|
| 68 |
+
|
| 69 |
+
def compress_sync(self, data: bytes) -> bytes:
|
| 70 |
+
return self._compressor.compress(data)
|
| 71 |
+
|
| 72 |
+
async def compress(self, data: bytes) -> bytes:
|
| 73 |
+
"""Compress the data and returned the compressed bytes.
|
| 74 |
+
|
| 75 |
+
Note that flush() must be called after the last call to compress()
|
| 76 |
+
|
| 77 |
+
If the data size is large than the max_sync_chunk_size, the compression
|
| 78 |
+
will be done in the executor. Otherwise, the compression will be done
|
| 79 |
+
in the event loop.
|
| 80 |
+
"""
|
| 81 |
+
async with self._compress_lock:
|
| 82 |
+
# To ensure the stream is consistent in the event
|
| 83 |
+
# there are multiple writers, we need to lock
|
| 84 |
+
# the compressor so that only one writer can
|
| 85 |
+
# compress at a time.
|
| 86 |
+
if (
|
| 87 |
+
self._max_sync_chunk_size is not None
|
| 88 |
+
and len(data) > self._max_sync_chunk_size
|
| 89 |
+
):
|
| 90 |
+
return await asyncio.get_running_loop().run_in_executor(
|
| 91 |
+
self._executor, self._compressor.compress, data
|
| 92 |
+
)
|
| 93 |
+
return self.compress_sync(data)
|
| 94 |
+
|
| 95 |
+
def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
|
| 96 |
+
return self._compressor.flush(mode)
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
class ZLibDecompressor(ZlibBaseHandler):
|
| 100 |
+
def __init__(
|
| 101 |
+
self,
|
| 102 |
+
encoding: Optional[str] = None,
|
| 103 |
+
suppress_deflate_header: bool = False,
|
| 104 |
+
executor: Optional[Executor] = None,
|
| 105 |
+
max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
|
| 106 |
+
):
|
| 107 |
+
super().__init__(
|
| 108 |
+
mode=encoding_to_mode(encoding, suppress_deflate_header),
|
| 109 |
+
executor=executor,
|
| 110 |
+
max_sync_chunk_size=max_sync_chunk_size,
|
| 111 |
+
)
|
| 112 |
+
self._decompressor = zlib.decompressobj(wbits=self._mode)
|
| 113 |
+
|
| 114 |
+
def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
|
| 115 |
+
return self._decompressor.decompress(data, max_length)
|
| 116 |
+
|
| 117 |
+
async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
|
| 118 |
+
"""Decompress the data and return the decompressed bytes.
|
| 119 |
+
|
| 120 |
+
If the data size is large than the max_sync_chunk_size, the decompression
|
| 121 |
+
will be done in the executor. Otherwise, the decompression will be done
|
| 122 |
+
in the event loop.
|
| 123 |
+
"""
|
| 124 |
+
if (
|
| 125 |
+
self._max_sync_chunk_size is not None
|
| 126 |
+
and len(data) > self._max_sync_chunk_size
|
| 127 |
+
):
|
| 128 |
+
return await asyncio.get_running_loop().run_in_executor(
|
| 129 |
+
self._executor, self._decompressor.decompress, data, max_length
|
| 130 |
+
)
|
| 131 |
+
return self.decompress_sync(data, max_length)
|
| 132 |
+
|
| 133 |
+
def flush(self, length: int = 0) -> bytes:
|
| 134 |
+
return (
|
| 135 |
+
self._decompressor.flush(length)
|
| 136 |
+
if length > 0
|
| 137 |
+
else self._decompressor.flush()
|
| 138 |
+
)
|
| 139 |
+
|
| 140 |
+
@property
|
| 141 |
+
def eof(self) -> bool:
|
| 142 |
+
return self._decompressor.eof
|
| 143 |
+
|
| 144 |
+
@property
|
| 145 |
+
def unconsumed_tail(self) -> bytes:
|
| 146 |
+
return self._decompressor.unconsumed_tail
|
| 147 |
+
|
| 148 |
+
@property
|
| 149 |
+
def unused_data(self) -> bytes:
|
| 150 |
+
return self._decompressor.unused_data
|
| 151 |
+
|
| 152 |
+
|
| 153 |
+
class BrotliDecompressor:
|
| 154 |
+
# Supports both 'brotlipy' and 'Brotli' packages
|
| 155 |
+
# since they share an import name. The top branches
|
| 156 |
+
# are for 'brotlipy' and bottom branches for 'Brotli'
|
| 157 |
+
def __init__(self) -> None:
|
| 158 |
+
if not HAS_BROTLI:
|
| 159 |
+
raise RuntimeError(
|
| 160 |
+
"The brotli decompression is not available. "
|
| 161 |
+
"Please install `Brotli` module"
|
| 162 |
+
)
|
| 163 |
+
self._obj = brotli.Decompressor()
|
| 164 |
+
|
| 165 |
+
def decompress_sync(self, data: bytes) -> bytes:
|
| 166 |
+
if hasattr(self._obj, "decompress"):
|
| 167 |
+
return cast(bytes, self._obj.decompress(data))
|
| 168 |
+
return cast(bytes, self._obj.process(data))
|
| 169 |
+
|
| 170 |
+
def flush(self) -> bytes:
|
| 171 |
+
if hasattr(self._obj, "flush"):
|
| 172 |
+
return cast(bytes, self._obj.flush())
|
| 173 |
+
return b""
|
.venv/lib/python3.11/site-packages/aiohttp/formdata.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import warnings
|
| 3 |
+
from typing import Any, Iterable, List, Optional
|
| 4 |
+
from urllib.parse import urlencode
|
| 5 |
+
|
| 6 |
+
from multidict import MultiDict, MultiDictProxy
|
| 7 |
+
|
| 8 |
+
from . import hdrs, multipart, payload
|
| 9 |
+
from .helpers import guess_filename
|
| 10 |
+
from .payload import Payload
|
| 11 |
+
|
| 12 |
+
__all__ = ("FormData",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FormData:
|
| 16 |
+
"""Helper class for form body generation.
|
| 17 |
+
|
| 18 |
+
Supports multipart/form-data and application/x-www-form-urlencoded.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
def __init__(
|
| 22 |
+
self,
|
| 23 |
+
fields: Iterable[Any] = (),
|
| 24 |
+
quote_fields: bool = True,
|
| 25 |
+
charset: Optional[str] = None,
|
| 26 |
+
*,
|
| 27 |
+
default_to_multipart: bool = False,
|
| 28 |
+
) -> None:
|
| 29 |
+
self._writer = multipart.MultipartWriter("form-data")
|
| 30 |
+
self._fields: List[Any] = []
|
| 31 |
+
self._is_multipart = default_to_multipart
|
| 32 |
+
self._is_processed = False
|
| 33 |
+
self._quote_fields = quote_fields
|
| 34 |
+
self._charset = charset
|
| 35 |
+
|
| 36 |
+
if isinstance(fields, dict):
|
| 37 |
+
fields = list(fields.items())
|
| 38 |
+
elif not isinstance(fields, (list, tuple)):
|
| 39 |
+
fields = (fields,)
|
| 40 |
+
self.add_fields(*fields)
|
| 41 |
+
|
| 42 |
+
@property
|
| 43 |
+
def is_multipart(self) -> bool:
|
| 44 |
+
return self._is_multipart
|
| 45 |
+
|
| 46 |
+
def add_field(
|
| 47 |
+
self,
|
| 48 |
+
name: str,
|
| 49 |
+
value: Any,
|
| 50 |
+
*,
|
| 51 |
+
content_type: Optional[str] = None,
|
| 52 |
+
filename: Optional[str] = None,
|
| 53 |
+
content_transfer_encoding: Optional[str] = None,
|
| 54 |
+
) -> None:
|
| 55 |
+
|
| 56 |
+
if isinstance(value, io.IOBase):
|
| 57 |
+
self._is_multipart = True
|
| 58 |
+
elif isinstance(value, (bytes, bytearray, memoryview)):
|
| 59 |
+
msg = (
|
| 60 |
+
"In v4, passing bytes will no longer create a file field. "
|
| 61 |
+
"Please explicitly use the filename parameter or pass a BytesIO object."
|
| 62 |
+
)
|
| 63 |
+
if filename is None and content_transfer_encoding is None:
|
| 64 |
+
warnings.warn(msg, DeprecationWarning)
|
| 65 |
+
filename = name
|
| 66 |
+
|
| 67 |
+
type_options: MultiDict[str] = MultiDict({"name": name})
|
| 68 |
+
if filename is not None and not isinstance(filename, str):
|
| 69 |
+
raise TypeError("filename must be an instance of str. Got: %s" % filename)
|
| 70 |
+
if filename is None and isinstance(value, io.IOBase):
|
| 71 |
+
filename = guess_filename(value, name)
|
| 72 |
+
if filename is not None:
|
| 73 |
+
type_options["filename"] = filename
|
| 74 |
+
self._is_multipart = True
|
| 75 |
+
|
| 76 |
+
headers = {}
|
| 77 |
+
if content_type is not None:
|
| 78 |
+
if not isinstance(content_type, str):
|
| 79 |
+
raise TypeError(
|
| 80 |
+
"content_type must be an instance of str. Got: %s" % content_type
|
| 81 |
+
)
|
| 82 |
+
headers[hdrs.CONTENT_TYPE] = content_type
|
| 83 |
+
self._is_multipart = True
|
| 84 |
+
if content_transfer_encoding is not None:
|
| 85 |
+
if not isinstance(content_transfer_encoding, str):
|
| 86 |
+
raise TypeError(
|
| 87 |
+
"content_transfer_encoding must be an instance"
|
| 88 |
+
" of str. Got: %s" % content_transfer_encoding
|
| 89 |
+
)
|
| 90 |
+
msg = (
|
| 91 |
+
"content_transfer_encoding is deprecated. "
|
| 92 |
+
"To maintain compatibility with v4 please pass a BytesPayload."
|
| 93 |
+
)
|
| 94 |
+
warnings.warn(msg, DeprecationWarning)
|
| 95 |
+
self._is_multipart = True
|
| 96 |
+
|
| 97 |
+
self._fields.append((type_options, headers, value))
|
| 98 |
+
|
| 99 |
+
def add_fields(self, *fields: Any) -> None:
|
| 100 |
+
to_add = list(fields)
|
| 101 |
+
|
| 102 |
+
while to_add:
|
| 103 |
+
rec = to_add.pop(0)
|
| 104 |
+
|
| 105 |
+
if isinstance(rec, io.IOBase):
|
| 106 |
+
k = guess_filename(rec, "unknown")
|
| 107 |
+
self.add_field(k, rec) # type: ignore[arg-type]
|
| 108 |
+
|
| 109 |
+
elif isinstance(rec, (MultiDictProxy, MultiDict)):
|
| 110 |
+
to_add.extend(rec.items())
|
| 111 |
+
|
| 112 |
+
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
|
| 113 |
+
k, fp = rec
|
| 114 |
+
self.add_field(k, fp) # type: ignore[arg-type]
|
| 115 |
+
|
| 116 |
+
else:
|
| 117 |
+
raise TypeError(
|
| 118 |
+
"Only io.IOBase, multidict and (name, file) "
|
| 119 |
+
"pairs allowed, use .add_field() for passing "
|
| 120 |
+
"more complex parameters, got {!r}".format(rec)
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
def _gen_form_urlencoded(self) -> payload.BytesPayload:
|
| 124 |
+
# form data (x-www-form-urlencoded)
|
| 125 |
+
data = []
|
| 126 |
+
for type_options, _, value in self._fields:
|
| 127 |
+
data.append((type_options["name"], value))
|
| 128 |
+
|
| 129 |
+
charset = self._charset if self._charset is not None else "utf-8"
|
| 130 |
+
|
| 131 |
+
if charset == "utf-8":
|
| 132 |
+
content_type = "application/x-www-form-urlencoded"
|
| 133 |
+
else:
|
| 134 |
+
content_type = "application/x-www-form-urlencoded; charset=%s" % charset
|
| 135 |
+
|
| 136 |
+
return payload.BytesPayload(
|
| 137 |
+
urlencode(data, doseq=True, encoding=charset).encode(),
|
| 138 |
+
content_type=content_type,
|
| 139 |
+
)
|
| 140 |
+
|
| 141 |
+
    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format.

        May be called only once: a second call raises RuntimeError because
        the accumulated fields have already been appended to the writer.
        """
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                # Honor an explicit Content-Type if the caller supplied one;
                # otherwise let get_payload pick one for the value.
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                # Surface unserializable values as a TypeError with full
                # context, chained to the original failure.
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer
|
| 177 |
+
|
| 178 |
+
def __call__(self) -> Payload:
|
| 179 |
+
if self._is_multipart:
|
| 180 |
+
return self._gen_form_data()
|
| 181 |
+
else:
|
| 182 |
+
return self._gen_form_urlencoded()
|
.venv/lib/python3.11/site-packages/aiohttp/http_exceptions.py
ADDED
|
@@ -0,0 +1,112 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low-level http related exceptions."""
|
| 2 |
+
|
| 3 |
+
from textwrap import indent
|
| 4 |
+
from typing import Optional, Union
|
| 5 |
+
|
| 6 |
+
from .typedefs import _CIMultiDict
|
| 7 |
+
|
| 8 |
+
__all__ = ("HttpProcessingError",)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; subclasses override code/message.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        # Fall back to the class-level code when none is supplied.
        if code is not None:
            self.code = code
        self.message = message
        self.headers = headers

    def __str__(self) -> str:
        return f"{self.code}, message:\n{indent(self.message, '  ')}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class BadHttpMessage(HttpProcessingError):
    """Base 400 error for malformed HTTP messages."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Store the message in args so the exception reprs/pickles like a
        # plain Exception(message).
        self.args = (message,)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class HttpBadRequest(BadHttpMessage):
    """400 Bad Request.

    Same code/message as BadHttpMessage; kept as a distinct type so callers
    can catch it specifically.
    """

    code = 400
    message = "Bad Request"
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors."""
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class LineTooLong(BadHttpMessage):
    """Raised when an HTTP line exceeds the configured size limit.

    ``limit`` and ``actual_size`` are passed as strings (possibly
    "Unknown") and only interpolated into the message.
    """

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        self.args = (line, limit, actual_size)
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
class InvalidHeader(BadHttpMessage):
    """Raised for a header name or value the parser rejects."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        # Keep a str form of the header on the instance for callers;
        # the message itself uses the raw value's repr.
        if isinstance(hdr, bytes):
            hdr_s = hdr.decode(errors="backslashreplace")
        else:
            hdr_s = hdr
        super().__init__(f"Invalid HTTP header: {hdr!r}")
        self.hdr = hdr_s
        self.args = (hdr,)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class BadStatusLine(BadHttpMessage):
    """Raised for an unparsable HTTP start line."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        # Defensive: normalize non-str input (e.g. bytes) to its repr.
        if not isinstance(line, str):
            line = repr(line)
        message = error if error else f"Bad status line {line!r}"
        super().__init__(message)
        self.line = line
        self.args = (line,)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class BadHttpMethod(BadStatusLine):
    """Invalid HTTP method in status line."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        # Provide a method-specific default message; an explicit error wins.
        super().__init__(line, error or f"Bad HTTP method in status line {line!r}")
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
class InvalidURLError(BadHttpMessage):
    """Raised when the request target (URL) cannot be parsed."""

    pass
|
.venv/lib/python3.11/site-packages/aiohttp/http_parser.py
ADDED
|
@@ -0,0 +1,1046 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import re
|
| 4 |
+
import string
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from enum import IntEnum
|
| 7 |
+
from typing import (
|
| 8 |
+
Any,
|
| 9 |
+
ClassVar,
|
| 10 |
+
Final,
|
| 11 |
+
Generic,
|
| 12 |
+
List,
|
| 13 |
+
Literal,
|
| 14 |
+
NamedTuple,
|
| 15 |
+
Optional,
|
| 16 |
+
Pattern,
|
| 17 |
+
Set,
|
| 18 |
+
Tuple,
|
| 19 |
+
Type,
|
| 20 |
+
TypeVar,
|
| 21 |
+
Union,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
| 25 |
+
from yarl import URL
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .base_protocol import BaseProtocol
|
| 29 |
+
from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
|
| 30 |
+
from .helpers import (
|
| 31 |
+
_EXC_SENTINEL,
|
| 32 |
+
DEBUG,
|
| 33 |
+
EMPTY_BODY_METHODS,
|
| 34 |
+
EMPTY_BODY_STATUS_CODES,
|
| 35 |
+
NO_EXTENSIONS,
|
| 36 |
+
BaseTimerContext,
|
| 37 |
+
set_exception,
|
| 38 |
+
)
|
| 39 |
+
from .http_exceptions import (
|
| 40 |
+
BadHttpMessage,
|
| 41 |
+
BadHttpMethod,
|
| 42 |
+
BadStatusLine,
|
| 43 |
+
ContentEncodingError,
|
| 44 |
+
ContentLengthError,
|
| 45 |
+
InvalidHeader,
|
| 46 |
+
InvalidURLError,
|
| 47 |
+
LineTooLong,
|
| 48 |
+
TransferEncodingError,
|
| 49 |
+
)
|
| 50 |
+
from .http_writer import HttpVersion, HttpVersion10
|
| 51 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 52 |
+
from .typedefs import RawHeaders
|
| 53 |
+
|
| 54 |
+
__all__ = (
|
| 55 |
+
"HeadersParser",
|
| 56 |
+
"HttpParser",
|
| 57 |
+
"HttpRequestParser",
|
| 58 |
+
"HttpResponseParser",
|
| 59 |
+
"RawRequestMessage",
|
| 60 |
+
"RawResponseMessage",
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
# Line separators accepted by the feed functions (lax parsing allows bare LF).
_SEP = Literal[b"\r\n", b"\n"]

ASCIISET: Final[Set[str]] = set(string.printable)

# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
#     method = token
#     tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#             "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
#     token = 1*tchar
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
# Matches an RFC 9110 token (method names, header field names).
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
# HTTP-version: single major/minor digits only.
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
# Strict decimal digits (no sign/whitespace) for Content-Length validation.
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class RawRequestMessage(NamedTuple):
    """Parsed HTTP request head: request line fields plus headers."""

    method: str
    path: str
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders
    should_close: bool  # True when `Connection: close` was seen
    compression: Optional[str]  # "gzip"/"deflate"/"br" or None
    upgrade: bool
    chunked: bool
    url: URL
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class RawResponseMessage(NamedTuple):
    """Parsed HTTP response head: status line fields plus headers."""

    version: HttpVersion
    code: int
    reason: str
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders
    should_close: bool  # True when `Connection: close` was seen
    compression: Optional[str]  # "gzip"/"deflate"/"br" or None
    upgrade: bool
    chunked: bool
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class ParseState(IntEnum):
    """Payload-parsing mode derived from the message framing headers."""

    PARSE_NONE = 0
    PARSE_LENGTH = 1  # read exactly Content-Length bytes
    PARSE_CHUNKED = 2  # chunked transfer encoding
    PARSE_UNTIL_EOF = 3  # read until connection close
| 115 |
+
|
| 116 |
+
|
| 117 |
+
class ChunkState(IntEnum):
    """Sub-states of the chunked transfer-encoding decoder."""

    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
class HeadersParser:
    """Parse raw HTTP header lines into a case-insensitive multidict.

    Enforces per-field size limits and, when ``lax`` is enabled, accepts
    deprecated obs-fold continuation lines.
    """

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lax: bool = False,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        # lax mode enables obsolete line folding (continuation lines)
        self._lax = lax

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Parse header lines (index 0 is the start line and is skipped).

        Returns the parsed headers plus the raw (name, value) byte pairs.
        Raises InvalidHeader or LineTooLong on malformed input.
        """
        headers: CIMultiDict[str] = CIMultiDict()
        # note: "raw" does not mean inclusion of OWS before/after the field value
        raw_headers = []

        # Header lines start at index 1; iteration stops on the first empty line.
        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            if len(bname) == 0:
                raise InvalidHeader(bname)

            # Reject whitespace at either end of the field name.
            # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
            if {bname[0], bname[-1]} & {32, 9}:  # {" ", "\t"}
                raise InvalidHeader(line)

            bvalue = bvalue.lstrip(b" \t")
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "backslashreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )
            name = bname.decode("utf-8", "surrogateescape")
            if not TOKENRE.fullmatch(name):
                raise InvalidHeader(bname)

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines (only honored in lax mode)
            continuation = self._lax and line and line[0] in (32, 9)  # (' ', '\t')

            # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "backslashreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "backslashreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip(b" \t")
            value = bvalue.decode("utf-8", "surrogateescape")

            # Bare CR/LF/NUL inside a field value is forbidden.
            # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
            if "\n" in value or "\r" in value or "\x00" in value:
                raise InvalidHeader(bvalue)

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
    """Check if the upgrade header is supported."""
    upgrade = headers.get(hdrs.UPGRADE, "")
    return upgrade.lower() in {"tcp", "websocket"}
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
class HttpParser(abc.ABC, Generic[_MsgT]):
    """Incremental HTTP/1.x message parser (shared request/response logic).

    Subclasses supply :meth:`parse_message` (start-line parsing) and
    :meth:`_is_chunked_te`. Feed raw bytes via :meth:`feed_data`; complete
    message heads are returned together with their payload readers.
    """

    # Subclasses may enable lax parsing (bare-LF separators, obs-fold).
    lax: ClassVar[bool] = False

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Accumulated head lines of the message currently being parsed.
        self._lines: List[bytes] = []
        # Bytes received after the last complete separator.
        self._tail = b""
        self._upgraded = False
        # NOTE(review): never read within this class — appears unused here.
        self._payload = None
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(
            max_line_size, max_headers, max_field_size, self.lax
        )

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT: ...

    @abc.abstractmethod
    def _is_chunked_te(self, te: str) -> bool: ...

    def feed_eof(self) -> Optional[_MsgT]:
        """Signal end of stream; may return a partially buffered message."""
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                # NOTE(review): bytes vs. str comparison — always True, so an
                # empty terminator line is unconditionally appended.
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: _SEP = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Feed raw bytes into the parser.

        Returns ``(messages, upgraded, tail)`` where ``messages`` is a list
        of ``(message, payload_reader)`` pairs, ``upgraded`` reports whether
        the connection switched protocols, and ``tail`` is any unconsumed
        data (non-empty after an upgrade).
        """

        messages = []

        # Prepend bytes left over from the previous feed_data call.
        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        should_close = False
        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + len(SEP)
                    continue

                if pos >= start_pos:
                    if should_close:
                        raise BadHttpMessage("Data after `Connection: close`")

                    # line found
                    line = data[start_pos:pos]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._lines.append(line)
                    start_pos = pos + len(SEP)

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            # Always reset the head buffer, even on parse errors.
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            # Shouldn't allow +/- or other number formats.
                            # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
                            # msg.headers is already stripped of leading/trailing wsp
                            if not DIGITS.fullmatch(length_hdr):
                                raise InvalidHeader(CONTENT_LENGTH)

                            return int(length_hdr)

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade and _is_supported_upgrade(
                            msg.headers
                        )

                        method = getattr(msg, "method", self.method)
                        # code is only present on responses
                        code = getattr(msg, "code", 0)

                        assert self.protocol is not None
                        # calculate payload
                        empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
                            method and method in EMPTY_BODY_METHODS
                        )
                        if not empty_body and (
                            ((length is not None and length > 0) or msg.chunked)
                            and not self._upgraded
                        ):
                            # Framed body: Content-Length > 0 or chunked.
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            # CONNECT tunnels behave like an upgrade.
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                        elif not empty_body and length is None and self.read_until_eof:
                            # Unframed body: read until connection close.
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        else:
                            payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                        should_close = msg.should_close
                else:
                    # No complete line yet; stash the remainder for next call.
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
                except BaseException as underlying_exc:
                    reraised_exc = underlying_exc
                    # Optionally wrap payload errors in the configured type.
                    if self.payload_exception is not None:
                        reraised_exc = self.payload_exception(str(underlying_exc))

                    set_exception(
                        self._payload_parser.payload,
                        reraised_exc,
                        underlying_exc,
                    )

                    eof = True
                    data = b""

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns the parsed headers, raw
        header byte pairs, and the derived connection-close / encoding /
        upgrade / chunked flags.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # Fields that must not appear more than once.
        # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
        # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
        singletons = (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_LOCATION,
            hdrs.CONTENT_RANGE,
            hdrs.CONTENT_TYPE,
            hdrs.ETAG,
            hdrs.HOST,
            hdrs.MAX_FORWARDS,
            hdrs.SERVER,
            hdrs.TRANSFER_ENCODING,
            hdrs.USER_AGENT,
        )
        bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
        if bad_hdr is not None:
            raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
            elif v == "upgrade" and headers.get(hdrs.UPGRADE):
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if self._is_chunked_te(te):
                chunked = True

            # Conflicting framing information is a request-smuggling vector.
            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Transfer-Encoding can't be present with Content-Length",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val
|
| 564 |
+
|
| 565 |
+
|
| 566 |
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        """Parse the request line and headers into a RawRequestMessage.

        ``lines[0]`` is the request line; the remaining lines are the
        raw header lines, handed off to ``self.parse_headers``.

        Raises BadHttpMethod / BadStatusLine / LineTooLong /
        InvalidURLError on malformed input.
        """
        # request line: "METHOD SP request-target SP HTTP-version"
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadHttpMethod(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method must be an RFC 9110 token (no separators / controls)
        if not TOKENRE.fullmatch(method):
            raise BadHttpMethod(method)

        # version must match "HTTP/<major>.<minor>"
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # Dispatch on the four RFC 7230 request-target forms.
        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # no Connection header was present in the request
            if version_o <= HttpVersion10:  # HTTP/1.0 defaults to close
                close = True
            else:  # HTTP/1.1 defaults to keep-alive
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )

    def _is_chunked_te(self, te: str) -> bool:
        """Return True if the final Transfer-Encoding coding is "chunked".

        For requests, any other final coding is a hard error (rejecting it
        closes a request-smuggling vector).
        """
        if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
            return True
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
        raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
|
| 664 |
+
|
| 665 |
+
|
| 666 |
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        """Feed raw bytes to the base parser, defaulting the line separator.

        In lax (non-DEBUG) mode a bare LF is accepted as the separator to
        tolerate slightly non-compliant servers.
        """
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        """Parse the status line and headers into a RawResponseMessage.

        Raises BadStatusLine / LineTooLong on malformed input.
        """
        # status line: "HTTP-version SP status-code [SP reason-phrase]"
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(maxsplit=1)
        except ValueError:
            # reason-phrase is optional
            status = status.strip()
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit ASCII number, no padding
        if len(status) != 3 or not DIGITS.fullmatch(status):
            raise BadStatusLine(line)
        status_i = int(status)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # no Connection header: apply RFC 9112 defaults
            if version_o <= HttpVersion10:
                close = True
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
            elif 100 <= status_i < 200 or status_i in {204, 304}:
                # these statuses never carry a body, so the connection can stay open
                close = False
            elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
                close = False
            else:
                # body length is delimited by connection close
                # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
                close = True

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )

    def _is_chunked_te(self, te: str) -> bool:
        """Return True if the final Transfer-Encoding coding is "chunked".

        Unlike the request parser, an unknown final coding is tolerated here
        (the body is then read until connection close).
        """
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
        return te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked"
|
| 753 |
+
|
| 754 |
+
|
| 755 |
+
class HttpPayloadParser:
    """Parse an HTTP message body and feed it into a StreamReader.

    Supports three framings: fixed Content-Length, chunked
    transfer-encoding, and read-until-EOF; optionally wraps the output
    stream in a DeflateBuffer for transparent decompression.
    """

    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
    ) -> None:
        # _length: remaining bytes for Content-Length framing
        self._length = 0
        self._type = ParseState.PARSE_UNTIL_EOF
        # chunked-parser sub-state machine
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        # bytes held over between feed_data() calls (partial line/chunk)
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self._lax = lax
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True
        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        """Handle end of connection.

        EOF terminates an until-EOF body normally, but is a protocol error
        while a Content-Length or chunked body is still incomplete.
        """
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data for satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data for satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed body bytes; return ``(done, leftover_bytes)``.

        ``done`` is True once the body is complete; ``leftover_bytes`` is
        any data beyond the end of this message (start of the next one).
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                # prepend bytes left over from the previous call
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = BadHttpMessage(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        # chunk-size must be pure hex digits
                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        # incomplete size line; wait for more data
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
|
| 947 |
+
|
| 948 |
+
|
| 949 |
+
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        # out: downstream reader that receives decompressed bytes
        self.out = out
        # size: total compressed bytes fed so far (used by feed_eof)
        self.size = 0
        self.encoding = encoding
        # becomes True after the first decompress attempt; guards the
        # one-shot raw-deflate fallback in feed_data
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        else:
            self.decompressor = ZLibDecompressor(encoding=encoding)

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Propagate an exception to the downstream reader."""
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress *chunk* and feed the result downstream."""
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and signal EOF downstream.

        Raises ContentEncodingError if a deflate stream ends truncated.
        """
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        # pass chunk boundaries through to the wrapped reader
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
|
| 1025 |
+
|
| 1026 |
+
|
| 1027 |
+
# Keep explicit references to the pure-Python implementations before the
# optional C-extension versions (if built) shadow the public names.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the faster Cython parser when available and not disabled.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    # Extension not built; the pure-Python implementations remain in use.
    pass
|
.venv/lib/python3.11/site-packages/aiohttp/http_writer.py
ADDED
|
@@ -0,0 +1,249 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Http related parsers and protocol."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import sys
|
| 5 |
+
import zlib
|
| 6 |
+
from typing import ( # noqa
|
| 7 |
+
Any,
|
| 8 |
+
Awaitable,
|
| 9 |
+
Callable,
|
| 10 |
+
Iterable,
|
| 11 |
+
List,
|
| 12 |
+
NamedTuple,
|
| 13 |
+
Optional,
|
| 14 |
+
Union,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
from multidict import CIMultiDict
|
| 18 |
+
|
| 19 |
+
from .abc import AbstractStreamWriter
|
| 20 |
+
from .base_protocol import BaseProtocol
|
| 21 |
+
from .client_exceptions import ClientConnectionResetError
|
| 22 |
+
from .compression_utils import ZLibCompressor
|
| 23 |
+
from .helpers import NO_EXTENSIONS
|
| 24 |
+
|
| 25 |
+
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
# Below this payload size, joining buffers and a single write() beats
# transport.writelines().
MIN_PAYLOAD_FOR_WRITELINES = 2048
IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2)
IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9)
SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9
# writelines is not safe for use
# on Python 3.12+ until 3.12.9
# on Python 3.13+ until 3.13.2
# and on older versions it is not any faster than write
# CVE-2024-12254: https://github.com/python/cpython/pull/127656
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class HttpVersion(NamedTuple):
    """HTTP protocol version as an ordered (major, minor) pair."""

    major: int
    minor: int
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
# Commonly used protocol versions.
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


# Optional async callbacks invoked as body chunks / headers are sent
# (used for tracing hooks).
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class StreamWriter(AbstractStreamWriter):
    """Write an HTTP message body to a transport.

    Supports optional chunked transfer-encoding, optional zlib/deflate
    compression, and Content-Length truncation via ``length``.
    """

    # remaining Content-Length budget; None means unlimited
    length: Optional[int] = None
    # True once chunked transfer-encoding is enabled
    chunked: bool = False
    _eof: bool = False
    _compress: Optional[ZLibCompressor] = None

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol
        self.loop = loop
        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the writer to chunked transfer-encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the body with *encoding* before writing."""
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
        # Raw single-buffer write with accounting; raises if the
        # transport is gone or closing.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    def _writelines(self, chunks: Iterable[bytes]) -> None:
        # Multi-buffer write; falls back to a joined write() where
        # transport.writelines is unsafe (see SKIP_WRITELINES /
        # CVE-2024-12254 note above) or not worth it for small payloads.
        size = 0
        for chunk in chunks:
            size += len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES:
            transport.write(b"".join(chunks))
        else:
            transport.writelines(chunks)

    async def write(
        self,
        chunk: Union[bytes, bytearray, memoryview],
        *,
        drain: bool = True,
        LIMIT: int = 0x10000,
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            if not chunk:
                # compressor buffered everything; nothing to send yet
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                # truncate to the remaining Content-Length budget
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # hex size line + data + CRLF per chunked framing
                self._writelines(
                    (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n")
                )
            else:
                self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    def set_eof(self) -> None:
        """Indicate that the message is complete."""
        self._eof = True

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write a final optional *chunk* and terminate the message.

        Flushes the compressor and, for chunked bodies, emits the
        terminating ``0\\r\\n\\r\\n`` frame. Idempotent after the first call.
        """
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            chunks: List[bytes] = []
            chunks_len = 0
            if chunk and (compressed_chunk := await self._compress.compress(chunk)):
                chunks_len = len(compressed_chunk)
                chunks.append(compressed_chunk)

            flush_chunk = self._compress.flush()
            chunks_len += len(flush_chunk)
            chunks.append(flush_chunk)
            # NOTE(review): relies on the compressor's flush always being
            # non-empty; assert is stripped under -O — confirm acceptable.
            assert chunks_len

            if self.chunked:
                chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n"))
            elif len(chunks) > 1:
                self._writelines(chunks)
            else:
                self._write(chunks[0])
        elif self.chunked:
            if chunk:
                chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, chunk, b"\r\n0\r\n\r\n"))
            else:
                self._write(b"0\r\n\r\n")
        elif chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        protocol = self._protocol
        if protocol.transport is not None and protocol._paused:
            await protocol._drain_helper()
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def _safe_header(string: str) -> str:
|
| 226 |
+
if "\r" in string or "\n" in string:
|
| 227 |
+
raise ValueError(
|
| 228 |
+
"Newline or carriage return detected in headers. "
|
| 229 |
+
"Potential header injection attack."
|
| 230 |
+
)
|
| 231 |
+
return string
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    """Serialize *status_line* plus *headers* into an on-wire byte block.

    Each header name and value is vetted by ``_safe_header`` (rejects
    embedded CR/LF); the block is terminated with a blank line and
    encoded as UTF-8.
    """
    joined = "\r\n".join(
        _safe_header(name) + ": " + _safe_header(value)
        for name, value in headers.items()
    )
    return f"{status_line}\r\n{joined}\r\n\r\n".encode("utf-8")
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
# Default to the pure-Python serializer; replaced below by the
# C-extension version when it is available and extensions are enabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    # Extension not built; keep the pure-Python implementation.
    pass
|
.venv/lib/python3.11/site-packages/aiohttp/pytest_plugin.py
ADDED
|
@@ -0,0 +1,436 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import contextlib
|
| 3 |
+
import inspect
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import (
|
| 6 |
+
Any,
|
| 7 |
+
Awaitable,
|
| 8 |
+
Callable,
|
| 9 |
+
Dict,
|
| 10 |
+
Iterator,
|
| 11 |
+
Optional,
|
| 12 |
+
Protocol,
|
| 13 |
+
Type,
|
| 14 |
+
Union,
|
| 15 |
+
overload,
|
| 16 |
+
)
|
| 17 |
+
|
| 18 |
+
import pytest
|
| 19 |
+
|
| 20 |
+
from .test_utils import (
|
| 21 |
+
BaseTestServer,
|
| 22 |
+
RawTestServer,
|
| 23 |
+
TestClient,
|
| 24 |
+
TestServer,
|
| 25 |
+
loop_context,
|
| 26 |
+
setup_test_loop,
|
| 27 |
+
teardown_test_loop,
|
| 28 |
+
unused_port as _unused_port,
|
| 29 |
+
)
|
| 30 |
+
from .web import Application, BaseRequest, Request
|
| 31 |
+
from .web_protocol import _RequestHandler
|
| 32 |
+
|
| 33 |
+
try:
|
| 34 |
+
import uvloop
|
| 35 |
+
except ImportError: # pragma: no cover
|
| 36 |
+
uvloop = None # type: ignore[assignment]
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class AiohttpClient(Protocol):
|
| 40 |
+
@overload
|
| 41 |
+
async def __call__(
|
| 42 |
+
self,
|
| 43 |
+
__param: Application,
|
| 44 |
+
*,
|
| 45 |
+
server_kwargs: Optional[Dict[str, Any]] = None,
|
| 46 |
+
**kwargs: Any,
|
| 47 |
+
) -> TestClient[Request, Application]: ...
|
| 48 |
+
@overload
|
| 49 |
+
async def __call__(
|
| 50 |
+
self,
|
| 51 |
+
__param: BaseTestServer,
|
| 52 |
+
*,
|
| 53 |
+
server_kwargs: Optional[Dict[str, Any]] = None,
|
| 54 |
+
**kwargs: Any,
|
| 55 |
+
) -> TestClient[BaseRequest, None]: ...
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class AiohttpServer(Protocol):
|
| 59 |
+
def __call__(
|
| 60 |
+
self, app: Application, *, port: Optional[int] = None, **kwargs: Any
|
| 61 |
+
) -> Awaitable[TestServer]: ...
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class AiohttpRawServer(Protocol):
|
| 65 |
+
def __call__(
|
| 66 |
+
self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
|
| 67 |
+
) -> Awaitable[RawTestServer]: ...
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def pytest_addoption(parser): # type: ignore[no-untyped-def]
|
| 71 |
+
parser.addoption(
|
| 72 |
+
"--aiohttp-fast",
|
| 73 |
+
action="store_true",
|
| 74 |
+
default=False,
|
| 75 |
+
help="run tests faster by disabling extra checks",
|
| 76 |
+
)
|
| 77 |
+
parser.addoption(
|
| 78 |
+
"--aiohttp-loop",
|
| 79 |
+
action="store",
|
| 80 |
+
default="pyloop",
|
| 81 |
+
help="run tests with specific loop: pyloop, uvloop or all",
|
| 82 |
+
)
|
| 83 |
+
parser.addoption(
|
| 84 |
+
"--aiohttp-enable-loop-debug",
|
| 85 |
+
action="store_true",
|
| 86 |
+
default=False,
|
| 87 |
+
help="enable event loop debug mode",
|
| 88 |
+
)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
|
| 92 |
+
"""Set up pytest fixture.
|
| 93 |
+
|
| 94 |
+
Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
|
| 95 |
+
"""
|
| 96 |
+
func = fixturedef.func
|
| 97 |
+
|
| 98 |
+
if inspect.isasyncgenfunction(func):
|
| 99 |
+
# async generator fixture
|
| 100 |
+
is_async_gen = True
|
| 101 |
+
elif asyncio.iscoroutinefunction(func):
|
| 102 |
+
# regular async fixture
|
| 103 |
+
is_async_gen = False
|
| 104 |
+
else:
|
| 105 |
+
# not an async fixture, nothing to do
|
| 106 |
+
return
|
| 107 |
+
|
| 108 |
+
strip_request = False
|
| 109 |
+
if "request" not in fixturedef.argnames:
|
| 110 |
+
fixturedef.argnames += ("request",)
|
| 111 |
+
strip_request = True
|
| 112 |
+
|
| 113 |
+
def wrapper(*args, **kwargs): # type: ignore[no-untyped-def]
|
| 114 |
+
request = kwargs["request"]
|
| 115 |
+
if strip_request:
|
| 116 |
+
del kwargs["request"]
|
| 117 |
+
|
| 118 |
+
# if neither the fixture nor the test use the 'loop' fixture,
|
| 119 |
+
# 'getfixturevalue' will fail because the test is not parameterized
|
| 120 |
+
# (this can be removed someday if 'loop' is no longer parameterized)
|
| 121 |
+
if "loop" not in request.fixturenames:
|
| 122 |
+
raise Exception(
|
| 123 |
+
"Asynchronous fixtures must depend on the 'loop' fixture or "
|
| 124 |
+
"be used in tests depending from it."
|
| 125 |
+
)
|
| 126 |
+
|
| 127 |
+
_loop = request.getfixturevalue("loop")
|
| 128 |
+
|
| 129 |
+
if is_async_gen:
|
| 130 |
+
# for async generators, we need to advance the generator once,
|
| 131 |
+
# then advance it again in a finalizer
|
| 132 |
+
gen = func(*args, **kwargs)
|
| 133 |
+
|
| 134 |
+
def finalizer(): # type: ignore[no-untyped-def]
|
| 135 |
+
try:
|
| 136 |
+
return _loop.run_until_complete(gen.__anext__())
|
| 137 |
+
except StopAsyncIteration:
|
| 138 |
+
pass
|
| 139 |
+
|
| 140 |
+
request.addfinalizer(finalizer)
|
| 141 |
+
return _loop.run_until_complete(gen.__anext__())
|
| 142 |
+
else:
|
| 143 |
+
return _loop.run_until_complete(func(*args, **kwargs))
|
| 144 |
+
|
| 145 |
+
fixturedef.func = wrapper
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
@pytest.fixture
|
| 149 |
+
def fast(request): # type: ignore[no-untyped-def]
|
| 150 |
+
"""--fast config option"""
|
| 151 |
+
return request.config.getoption("--aiohttp-fast")
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
@pytest.fixture
|
| 155 |
+
def loop_debug(request): # type: ignore[no-untyped-def]
|
| 156 |
+
"""--enable-loop-debug config option"""
|
| 157 |
+
return request.config.getoption("--aiohttp-enable-loop-debug")
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
@contextlib.contextmanager
|
| 161 |
+
def _runtime_warning_context(): # type: ignore[no-untyped-def]
|
| 162 |
+
"""Context manager which checks for RuntimeWarnings.
|
| 163 |
+
|
| 164 |
+
This exists specifically to
|
| 165 |
+
avoid "coroutine 'X' was never awaited" warnings being missed.
|
| 166 |
+
|
| 167 |
+
If RuntimeWarnings occur in the context a RuntimeError is raised.
|
| 168 |
+
"""
|
| 169 |
+
with warnings.catch_warnings(record=True) as _warnings:
|
| 170 |
+
yield
|
| 171 |
+
rw = [
|
| 172 |
+
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
|
| 173 |
+
for w in _warnings
|
| 174 |
+
if w.category == RuntimeWarning
|
| 175 |
+
]
|
| 176 |
+
if rw:
|
| 177 |
+
raise RuntimeError(
|
| 178 |
+
"{} Runtime Warning{},\n{}".format(
|
| 179 |
+
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
|
| 180 |
+
)
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
@contextlib.contextmanager
|
| 185 |
+
def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
|
| 186 |
+
"""Passthrough loop context.
|
| 187 |
+
|
| 188 |
+
Sets up and tears down a loop unless one is passed in via the loop
|
| 189 |
+
argument when it's passed straight through.
|
| 190 |
+
"""
|
| 191 |
+
if loop:
|
| 192 |
+
# loop already exists, pass it straight through
|
| 193 |
+
yield loop
|
| 194 |
+
else:
|
| 195 |
+
# this shadows loop_context's standard behavior
|
| 196 |
+
loop = setup_test_loop()
|
| 197 |
+
yield loop
|
| 198 |
+
teardown_test_loop(loop, fast=fast)
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
|
| 202 |
+
"""Fix pytest collecting for coroutines."""
|
| 203 |
+
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
|
| 204 |
+
return list(collector._genfunctions(name, obj))
|
| 205 |
+
|
| 206 |
+
|
| 207 |
+
def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
|
| 208 |
+
"""Run coroutines in an event loop instead of a normal function call."""
|
| 209 |
+
fast = pyfuncitem.config.getoption("--aiohttp-fast")
|
| 210 |
+
if asyncio.iscoroutinefunction(pyfuncitem.function):
|
| 211 |
+
existing_loop = pyfuncitem.funcargs.get(
|
| 212 |
+
"proactor_loop"
|
| 213 |
+
) or pyfuncitem.funcargs.get("loop", None)
|
| 214 |
+
with _runtime_warning_context():
|
| 215 |
+
with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
|
| 216 |
+
testargs = {
|
| 217 |
+
arg: pyfuncitem.funcargs[arg]
|
| 218 |
+
for arg in pyfuncitem._fixtureinfo.argnames
|
| 219 |
+
}
|
| 220 |
+
_loop.run_until_complete(pyfuncitem.obj(**testargs))
|
| 221 |
+
|
| 222 |
+
return True
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
def pytest_generate_tests(metafunc): # type: ignore[no-untyped-def]
|
| 226 |
+
if "loop_factory" not in metafunc.fixturenames:
|
| 227 |
+
return
|
| 228 |
+
|
| 229 |
+
loops = metafunc.config.option.aiohttp_loop
|
| 230 |
+
avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
|
| 231 |
+
avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}
|
| 232 |
+
|
| 233 |
+
if uvloop is not None: # pragma: no cover
|
| 234 |
+
avail_factories["uvloop"] = uvloop.EventLoopPolicy
|
| 235 |
+
|
| 236 |
+
if loops == "all":
|
| 237 |
+
loops = "pyloop,uvloop?"
|
| 238 |
+
|
| 239 |
+
factories = {} # type: ignore[var-annotated]
|
| 240 |
+
for name in loops.split(","):
|
| 241 |
+
required = not name.endswith("?")
|
| 242 |
+
name = name.strip(" ?")
|
| 243 |
+
if name not in avail_factories: # pragma: no cover
|
| 244 |
+
if required:
|
| 245 |
+
raise ValueError(
|
| 246 |
+
"Unknown loop '%s', available loops: %s"
|
| 247 |
+
% (name, list(factories.keys()))
|
| 248 |
+
)
|
| 249 |
+
else:
|
| 250 |
+
continue
|
| 251 |
+
factories[name] = avail_factories[name]
|
| 252 |
+
metafunc.parametrize(
|
| 253 |
+
"loop_factory", list(factories.values()), ids=list(factories.keys())
|
| 254 |
+
)
|
| 255 |
+
|
| 256 |
+
|
| 257 |
+
@pytest.fixture
|
| 258 |
+
def loop(loop_factory, fast, loop_debug): # type: ignore[no-untyped-def]
|
| 259 |
+
"""Return an instance of the event loop."""
|
| 260 |
+
policy = loop_factory()
|
| 261 |
+
asyncio.set_event_loop_policy(policy)
|
| 262 |
+
with loop_context(fast=fast) as _loop:
|
| 263 |
+
if loop_debug:
|
| 264 |
+
_loop.set_debug(True) # pragma: no cover
|
| 265 |
+
asyncio.set_event_loop(_loop)
|
| 266 |
+
yield _loop
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
@pytest.fixture
|
| 270 |
+
def proactor_loop(): # type: ignore[no-untyped-def]
|
| 271 |
+
policy = asyncio.WindowsProactorEventLoopPolicy() # type: ignore[attr-defined]
|
| 272 |
+
asyncio.set_event_loop_policy(policy)
|
| 273 |
+
|
| 274 |
+
with loop_context(policy.new_event_loop) as _loop:
|
| 275 |
+
asyncio.set_event_loop(_loop)
|
| 276 |
+
yield _loop
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
@pytest.fixture
|
| 280 |
+
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
|
| 281 |
+
warnings.warn(
|
| 282 |
+
"Deprecated, use aiohttp_unused_port fixture instead",
|
| 283 |
+
DeprecationWarning,
|
| 284 |
+
stacklevel=2,
|
| 285 |
+
)
|
| 286 |
+
return aiohttp_unused_port
|
| 287 |
+
|
| 288 |
+
|
| 289 |
+
@pytest.fixture
|
| 290 |
+
def aiohttp_unused_port() -> Callable[[], int]:
|
| 291 |
+
"""Return a port that is unused on the current host."""
|
| 292 |
+
return _unused_port
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
@pytest.fixture
|
| 296 |
+
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
|
| 297 |
+
"""Factory to create a TestServer instance, given an app.
|
| 298 |
+
|
| 299 |
+
aiohttp_server(app, **kwargs)
|
| 300 |
+
"""
|
| 301 |
+
servers = []
|
| 302 |
+
|
| 303 |
+
async def go(
|
| 304 |
+
app: Application, *, port: Optional[int] = None, **kwargs: Any
|
| 305 |
+
) -> TestServer:
|
| 306 |
+
server = TestServer(app, port=port)
|
| 307 |
+
await server.start_server(loop=loop, **kwargs)
|
| 308 |
+
servers.append(server)
|
| 309 |
+
return server
|
| 310 |
+
|
| 311 |
+
yield go
|
| 312 |
+
|
| 313 |
+
async def finalize() -> None:
|
| 314 |
+
while servers:
|
| 315 |
+
await servers.pop().close()
|
| 316 |
+
|
| 317 |
+
loop.run_until_complete(finalize())
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
@pytest.fixture
|
| 321 |
+
def test_server(aiohttp_server): # type: ignore[no-untyped-def] # pragma: no cover
|
| 322 |
+
warnings.warn(
|
| 323 |
+
"Deprecated, use aiohttp_server fixture instead",
|
| 324 |
+
DeprecationWarning,
|
| 325 |
+
stacklevel=2,
|
| 326 |
+
)
|
| 327 |
+
return aiohttp_server
|
| 328 |
+
|
| 329 |
+
|
| 330 |
+
@pytest.fixture
|
| 331 |
+
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
|
| 332 |
+
"""Factory to create a RawTestServer instance, given a web handler.
|
| 333 |
+
|
| 334 |
+
aiohttp_raw_server(handler, **kwargs)
|
| 335 |
+
"""
|
| 336 |
+
servers = []
|
| 337 |
+
|
| 338 |
+
async def go(
|
| 339 |
+
handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
|
| 340 |
+
) -> RawTestServer:
|
| 341 |
+
server = RawTestServer(handler, port=port)
|
| 342 |
+
await server.start_server(loop=loop, **kwargs)
|
| 343 |
+
servers.append(server)
|
| 344 |
+
return server
|
| 345 |
+
|
| 346 |
+
yield go
|
| 347 |
+
|
| 348 |
+
async def finalize() -> None:
|
| 349 |
+
while servers:
|
| 350 |
+
await servers.pop().close()
|
| 351 |
+
|
| 352 |
+
loop.run_until_complete(finalize())
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
@pytest.fixture
|
| 356 |
+
def raw_test_server( # type: ignore[no-untyped-def] # pragma: no cover
|
| 357 |
+
aiohttp_raw_server,
|
| 358 |
+
):
|
| 359 |
+
warnings.warn(
|
| 360 |
+
"Deprecated, use aiohttp_raw_server fixture instead",
|
| 361 |
+
DeprecationWarning,
|
| 362 |
+
stacklevel=2,
|
| 363 |
+
)
|
| 364 |
+
return aiohttp_raw_server
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
@pytest.fixture
|
| 368 |
+
def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]:
|
| 369 |
+
"""Factory to create a TestClient instance.
|
| 370 |
+
|
| 371 |
+
aiohttp_client(app, **kwargs)
|
| 372 |
+
aiohttp_client(server, **kwargs)
|
| 373 |
+
aiohttp_client(raw_server, **kwargs)
|
| 374 |
+
"""
|
| 375 |
+
clients = []
|
| 376 |
+
|
| 377 |
+
@overload
|
| 378 |
+
async def go(
|
| 379 |
+
__param: Application,
|
| 380 |
+
*,
|
| 381 |
+
server_kwargs: Optional[Dict[str, Any]] = None,
|
| 382 |
+
**kwargs: Any,
|
| 383 |
+
) -> TestClient[Request, Application]: ...
|
| 384 |
+
|
| 385 |
+
@overload
|
| 386 |
+
async def go(
|
| 387 |
+
__param: BaseTestServer,
|
| 388 |
+
*,
|
| 389 |
+
server_kwargs: Optional[Dict[str, Any]] = None,
|
| 390 |
+
**kwargs: Any,
|
| 391 |
+
) -> TestClient[BaseRequest, None]: ...
|
| 392 |
+
|
| 393 |
+
async def go(
|
| 394 |
+
__param: Union[Application, BaseTestServer],
|
| 395 |
+
*args: Any,
|
| 396 |
+
server_kwargs: Optional[Dict[str, Any]] = None,
|
| 397 |
+
**kwargs: Any,
|
| 398 |
+
) -> TestClient[Any, Any]:
|
| 399 |
+
if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type]
|
| 400 |
+
__param, (Application, BaseTestServer)
|
| 401 |
+
):
|
| 402 |
+
__param = __param(loop, *args, **kwargs)
|
| 403 |
+
kwargs = {}
|
| 404 |
+
else:
|
| 405 |
+
assert not args, "args should be empty"
|
| 406 |
+
|
| 407 |
+
if isinstance(__param, Application):
|
| 408 |
+
server_kwargs = server_kwargs or {}
|
| 409 |
+
server = TestServer(__param, loop=loop, **server_kwargs)
|
| 410 |
+
client = TestClient(server, loop=loop, **kwargs)
|
| 411 |
+
elif isinstance(__param, BaseTestServer):
|
| 412 |
+
client = TestClient(__param, loop=loop, **kwargs)
|
| 413 |
+
else:
|
| 414 |
+
raise ValueError("Unknown argument type: %r" % type(__param))
|
| 415 |
+
|
| 416 |
+
await client.start_server()
|
| 417 |
+
clients.append(client)
|
| 418 |
+
return client
|
| 419 |
+
|
| 420 |
+
yield go
|
| 421 |
+
|
| 422 |
+
async def finalize() -> None:
|
| 423 |
+
while clients:
|
| 424 |
+
await clients.pop().close()
|
| 425 |
+
|
| 426 |
+
loop.run_until_complete(finalize())
|
| 427 |
+
|
| 428 |
+
|
| 429 |
+
@pytest.fixture
|
| 430 |
+
def test_client(aiohttp_client): # type: ignore[no-untyped-def] # pragma: no cover
|
| 431 |
+
warnings.warn(
|
| 432 |
+
"Deprecated, use aiohttp_client fixture instead",
|
| 433 |
+
DeprecationWarning,
|
| 434 |
+
stacklevel=2,
|
| 435 |
+
)
|
| 436 |
+
return aiohttp_client
|
.venv/lib/python3.11/site-packages/aiohttp/resolver.py
ADDED
|
@@ -0,0 +1,187 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import socket
|
| 3 |
+
from typing import Any, Dict, List, Optional, Tuple, Type, Union
|
| 4 |
+
|
| 5 |
+
from .abc import AbstractResolver, ResolveResult
|
| 6 |
+
|
| 7 |
+
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import aiodns
|
| 12 |
+
|
| 13 |
+
aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
|
| 14 |
+
except ImportError: # pragma: no cover
|
| 15 |
+
aiodns = None # type: ignore[assignment]
|
| 16 |
+
aiodns_default = False
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
|
| 20 |
+
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ThreadedResolver(AbstractResolver):
|
| 24 |
+
"""Threaded resolver.
|
| 25 |
+
|
| 26 |
+
Uses an Executor for synchronous getaddrinfo() calls.
|
| 27 |
+
concurrent.futures.ThreadPoolExecutor is used by default.
|
| 28 |
+
"""
|
| 29 |
+
|
| 30 |
+
def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 31 |
+
self._loop = loop or asyncio.get_running_loop()
|
| 32 |
+
|
| 33 |
+
async def resolve(
|
| 34 |
+
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
|
| 35 |
+
) -> List[ResolveResult]:
|
| 36 |
+
infos = await self._loop.getaddrinfo(
|
| 37 |
+
host,
|
| 38 |
+
port,
|
| 39 |
+
type=socket.SOCK_STREAM,
|
| 40 |
+
family=family,
|
| 41 |
+
flags=socket.AI_ADDRCONFIG,
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
hosts: List[ResolveResult] = []
|
| 45 |
+
for family, _, proto, _, address in infos:
|
| 46 |
+
if family == socket.AF_INET6:
|
| 47 |
+
if len(address) < 3:
|
| 48 |
+
# IPv6 is not supported by Python build,
|
| 49 |
+
# or IPv6 is not enabled in the host
|
| 50 |
+
continue
|
| 51 |
+
if address[3]:
|
| 52 |
+
# This is essential for link-local IPv6 addresses.
|
| 53 |
+
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
|
| 54 |
+
# getnameinfo() unconditionally, but performance makes sense.
|
| 55 |
+
resolved_host, _port = await self._loop.getnameinfo(
|
| 56 |
+
address, _NAME_SOCKET_FLAGS
|
| 57 |
+
)
|
| 58 |
+
port = int(_port)
|
| 59 |
+
else:
|
| 60 |
+
resolved_host, port = address[:2]
|
| 61 |
+
else: # IPv4
|
| 62 |
+
assert family == socket.AF_INET
|
| 63 |
+
resolved_host, port = address # type: ignore[misc]
|
| 64 |
+
hosts.append(
|
| 65 |
+
ResolveResult(
|
| 66 |
+
hostname=host,
|
| 67 |
+
host=resolved_host,
|
| 68 |
+
port=port,
|
| 69 |
+
family=family,
|
| 70 |
+
proto=proto,
|
| 71 |
+
flags=_NUMERIC_SOCKET_FLAGS,
|
| 72 |
+
)
|
| 73 |
+
)
|
| 74 |
+
|
| 75 |
+
return hosts
|
| 76 |
+
|
| 77 |
+
async def close(self) -> None:
|
| 78 |
+
pass
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class AsyncResolver(AbstractResolver):
|
| 82 |
+
"""Use the `aiodns` package to make asynchronous DNS lookups"""
|
| 83 |
+
|
| 84 |
+
def __init__(
|
| 85 |
+
self,
|
| 86 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 87 |
+
*args: Any,
|
| 88 |
+
**kwargs: Any,
|
| 89 |
+
) -> None:
|
| 90 |
+
if aiodns is None:
|
| 91 |
+
raise RuntimeError("Resolver requires aiodns library")
|
| 92 |
+
|
| 93 |
+
self._resolver = aiodns.DNSResolver(*args, **kwargs)
|
| 94 |
+
|
| 95 |
+
if not hasattr(self._resolver, "gethostbyname"):
|
| 96 |
+
# aiodns 1.1 is not available, fallback to DNSResolver.query
|
| 97 |
+
self.resolve = self._resolve_with_query # type: ignore
|
| 98 |
+
|
| 99 |
+
async def resolve(
|
| 100 |
+
self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
|
| 101 |
+
) -> List[ResolveResult]:
|
| 102 |
+
try:
|
| 103 |
+
resp = await self._resolver.getaddrinfo(
|
| 104 |
+
host,
|
| 105 |
+
port=port,
|
| 106 |
+
type=socket.SOCK_STREAM,
|
| 107 |
+
family=family,
|
| 108 |
+
flags=socket.AI_ADDRCONFIG,
|
| 109 |
+
)
|
| 110 |
+
except aiodns.error.DNSError as exc:
|
| 111 |
+
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
|
| 112 |
+
raise OSError(None, msg) from exc
|
| 113 |
+
hosts: List[ResolveResult] = []
|
| 114 |
+
for node in resp.nodes:
|
| 115 |
+
address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr
|
| 116 |
+
family = node.family
|
| 117 |
+
if family == socket.AF_INET6:
|
| 118 |
+
if len(address) > 3 and address[3]:
|
| 119 |
+
# This is essential for link-local IPv6 addresses.
|
| 120 |
+
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
|
| 121 |
+
# getnameinfo() unconditionally, but performance makes sense.
|
| 122 |
+
result = await self._resolver.getnameinfo(
|
| 123 |
+
(address[0].decode("ascii"), *address[1:]),
|
| 124 |
+
_NAME_SOCKET_FLAGS,
|
| 125 |
+
)
|
| 126 |
+
resolved_host = result.node
|
| 127 |
+
else:
|
| 128 |
+
resolved_host = address[0].decode("ascii")
|
| 129 |
+
port = address[1]
|
| 130 |
+
else: # IPv4
|
| 131 |
+
assert family == socket.AF_INET
|
| 132 |
+
resolved_host = address[0].decode("ascii")
|
| 133 |
+
port = address[1]
|
| 134 |
+
hosts.append(
|
| 135 |
+
ResolveResult(
|
| 136 |
+
hostname=host,
|
| 137 |
+
host=resolved_host,
|
| 138 |
+
port=port,
|
| 139 |
+
family=family,
|
| 140 |
+
proto=0,
|
| 141 |
+
flags=_NUMERIC_SOCKET_FLAGS,
|
| 142 |
+
)
|
| 143 |
+
)
|
| 144 |
+
|
| 145 |
+
if not hosts:
|
| 146 |
+
raise OSError(None, "DNS lookup failed")
|
| 147 |
+
|
| 148 |
+
return hosts
|
| 149 |
+
|
| 150 |
+
async def _resolve_with_query(
|
| 151 |
+
self, host: str, port: int = 0, family: int = socket.AF_INET
|
| 152 |
+
) -> List[Dict[str, Any]]:
|
| 153 |
+
if family == socket.AF_INET6:
|
| 154 |
+
qtype = "AAAA"
|
| 155 |
+
else:
|
| 156 |
+
qtype = "A"
|
| 157 |
+
|
| 158 |
+
try:
|
| 159 |
+
resp = await self._resolver.query(host, qtype)
|
| 160 |
+
except aiodns.error.DNSError as exc:
|
| 161 |
+
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
|
| 162 |
+
raise OSError(None, msg) from exc
|
| 163 |
+
|
| 164 |
+
hosts = []
|
| 165 |
+
for rr in resp:
|
| 166 |
+
hosts.append(
|
| 167 |
+
{
|
| 168 |
+
"hostname": host,
|
| 169 |
+
"host": rr.host,
|
| 170 |
+
"port": port,
|
| 171 |
+
"family": family,
|
| 172 |
+
"proto": 0,
|
| 173 |
+
"flags": socket.AI_NUMERICHOST,
|
| 174 |
+
}
|
| 175 |
+
)
|
| 176 |
+
|
| 177 |
+
if not hosts:
|
| 178 |
+
raise OSError(None, "DNS lookup failed")
|
| 179 |
+
|
| 180 |
+
return hosts
|
| 181 |
+
|
| 182 |
+
async def close(self) -> None:
|
| 183 |
+
self._resolver.cancel()
|
| 184 |
+
|
| 185 |
+
|
| 186 |
+
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
|
| 187 |
+
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
|
.venv/lib/python3.11/site-packages/aiohttp/test_utils.py
ADDED
|
@@ -0,0 +1,770 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities shared by tests."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import contextlib
|
| 5 |
+
import gc
|
| 6 |
+
import inspect
|
| 7 |
+
import ipaddress
|
| 8 |
+
import os
|
| 9 |
+
import socket
|
| 10 |
+
import sys
|
| 11 |
+
import warnings
|
| 12 |
+
from abc import ABC, abstractmethod
|
| 13 |
+
from types import TracebackType
|
| 14 |
+
from typing import (
|
| 15 |
+
TYPE_CHECKING,
|
| 16 |
+
Any,
|
| 17 |
+
Callable,
|
| 18 |
+
Generic,
|
| 19 |
+
Iterator,
|
| 20 |
+
List,
|
| 21 |
+
Optional,
|
| 22 |
+
Type,
|
| 23 |
+
TypeVar,
|
| 24 |
+
cast,
|
| 25 |
+
overload,
|
| 26 |
+
)
|
| 27 |
+
from unittest import IsolatedAsyncioTestCase, mock
|
| 28 |
+
|
| 29 |
+
from aiosignal import Signal
|
| 30 |
+
from multidict import CIMultiDict, CIMultiDictProxy
|
| 31 |
+
from yarl import URL
|
| 32 |
+
|
| 33 |
+
import aiohttp
|
| 34 |
+
from aiohttp.client import (
|
| 35 |
+
_RequestContextManager,
|
| 36 |
+
_RequestOptions,
|
| 37 |
+
_WSRequestContextManager,
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
from . import ClientSession, hdrs
|
| 41 |
+
from .abc import AbstractCookieJar
|
| 42 |
+
from .client_reqrep import ClientResponse
|
| 43 |
+
from .client_ws import ClientWebSocketResponse
|
| 44 |
+
from .helpers import sentinel
|
| 45 |
+
from .http import HttpVersion, RawRequestMessage
|
| 46 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 47 |
+
from .typedefs import StrOrURL
|
| 48 |
+
from .web import (
|
| 49 |
+
Application,
|
| 50 |
+
AppRunner,
|
| 51 |
+
BaseRequest,
|
| 52 |
+
BaseRunner,
|
| 53 |
+
Request,
|
| 54 |
+
Server,
|
| 55 |
+
ServerRunner,
|
| 56 |
+
SockSite,
|
| 57 |
+
UrlMappingMatchInfo,
|
| 58 |
+
)
|
| 59 |
+
from .web_protocol import _RequestHandler
|
| 60 |
+
|
| 61 |
+
if TYPE_CHECKING:
|
| 62 |
+
from ssl import SSLContext
|
| 63 |
+
else:
|
| 64 |
+
SSLContext = None
|
| 65 |
+
|
| 66 |
+
if sys.version_info >= (3, 11) and TYPE_CHECKING:
|
| 67 |
+
from typing import Unpack
|
| 68 |
+
|
| 69 |
+
if sys.version_info >= (3, 11):
|
| 70 |
+
from typing import Self
|
| 71 |
+
else:
|
| 72 |
+
Self = Any
|
| 73 |
+
|
| 74 |
+
_ApplicationNone = TypeVar("_ApplicationNone", Application, None)
|
| 75 |
+
_Request = TypeVar("_Request", bound=BaseRequest)
|
| 76 |
+
|
| 77 |
+
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def get_unused_port_socket(
|
| 81 |
+
host: str, family: socket.AddressFamily = socket.AF_INET
|
| 82 |
+
) -> socket.socket:
|
| 83 |
+
return get_port_socket(host, 0, family)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def get_port_socket(
|
| 87 |
+
host: str, port: int, family: socket.AddressFamily
|
| 88 |
+
) -> socket.socket:
|
| 89 |
+
s = socket.socket(family, socket.SOCK_STREAM)
|
| 90 |
+
if REUSE_ADDRESS:
|
| 91 |
+
# Windows has different semantics for SO_REUSEADDR,
|
| 92 |
+
# so don't set it. Ref:
|
| 93 |
+
# https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
|
| 94 |
+
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
| 95 |
+
s.bind((host, port))
|
| 96 |
+
return s
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def unused_port() -> int:
|
| 100 |
+
"""Return a port that is unused on the current host."""
|
| 101 |
+
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
|
| 102 |
+
s.bind(("127.0.0.1", 0))
|
| 103 |
+
return cast(int, s.getsockname()[1])
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class BaseTestServer(ABC):
    """Abstract base for test servers bound to a local ephemeral port.

    Subclasses provide :meth:`_make_runner`; this class manages the runner
    lifecycle, socket binding, and URL construction.  Intended to be used as
    an async context manager.
    """

    # Prevents pytest from collecting this class as a test case.
    __test__ = False

    def __init__(
        self,
        *,
        scheme: str = "",
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        # scheme: "http"/"https"; auto-detected in start_server() when empty.
        # port: None/0 means "let the OS pick a free port".
        # socket_factory: hook for tests that need a custom bound socket.
        self._loop = loop
        self.runner: Optional[BaseRunner] = None
        self._root: Optional[URL] = None
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        """Set up the runner, bind a socket and start serving.

        Idempotent: returns immediately if the server is already running.
        An ``ssl`` keyword argument, when given, becomes the SSL context for
        the listening site and switches the default scheme to "https".
        """
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
        await self.runner.setup()
        if not self.port:
            self.port = 0
        absolute_host = self.host
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            # Not an IP literal (e.g. a hostname): assume IPv4 semantics.
            version = 4
        if version == 6:
            # IPv6 literals must be bracketed when embedded in a URL.
            absolute_host = f"[{self.host}]"
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        # Re-read host/port from the socket: the OS may have assigned the port.
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets  # type: ignore[attr-defined]
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if not self.scheme:
            self.scheme = "https" if self._ssl else "http"
        self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Create the BaseRunner that will serve requests (subclass hook)."""
        pass

    def make_url(self, path: StrOrURL) -> URL:
        """Return an absolute URL for *path* rooted at this server."""
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.absolute
            return self._root.join(url)
        else:
            # Caller opted out of validation: naive string concatenation.
            return URL(str(self._root) + str(path))

    @property
    def started(self) -> bool:
        # True once start_server() has created the runner.
        return self.runner is not None

    @property
    def closed(self) -> bool:
        # True after close() has completed.
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous context management is intentionally unsupported.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
class TestServer(BaseTestServer):
    """Test server wrapping a web :class:`Application`.

    All lifecycle management is inherited from :class:`BaseTestServer`;
    this subclass only supplies an :class:`AppRunner` for the application.
    """

    def __init__(
        self,
        app: Application,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Build the AppRunner used to serve the wrapped application."""
        return AppRunner(self.app, **kwargs)
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class RawTestServer(BaseTestServer):
    """Test server driving a low-level request handler (no Application)."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
        """Wrap the handler in a web.Server and return its ServerRunner."""
        srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
        return ServerRunner(srv, debug=debug, **kwargs)
|
| 271 |
+
|
| 272 |
+
|
| 273 |
+
class TestClient(Generic[_Request, _ApplicationNone]):
    """
    A test client implementation.

    To write functional tests for aiohttp based servers.

    """

    # Prevents pytest from collecting this class as a test case.
    __test__ = False

    @overload
    def __init__(
        self: "TestClient[Request, Application]",
        server: TestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(
        self: "TestClient[_Request, None]",
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    def __init__(
        self,
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        if not isinstance(server, BaseTestServer):
            raise TypeError(
                "server must be TestServer instance, found type: %r" % type(server)
            )
        self._server = server
        self._loop = loop
        if cookie_jar is None:
            # unsafe=True allows cookies for IP-address hosts (127.0.0.1).
            cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
        self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
        # Disable connection retries so test failures surface immediately.
        self._session._retry_connection = False
        self._closed = False
        # Responses/websockets are tracked so close() can release them all.
        self._responses: List[ClientResponse] = []
        self._websockets: List[ClientWebSocketResponse] = []

    async def start_server(self) -> None:
        """Start the underlying test server (idempotent)."""
        await self._server.start_server(loop=self._loop)

    @property
    def host(self) -> str:
        return self._server.host

    @property
    def port(self) -> Optional[int]:
        return self._server.port

    @property
    def server(self) -> BaseTestServer:
        return self._server

    @property
    def app(self) -> _ApplicationNone:
        # RawTestServer has no ``app`` attribute, hence the getattr default.
        return getattr(self._server, "app", None)  # type: ignore[return-value]

    @property
    def session(self) -> ClientSession:
        """An internal aiohttp.ClientSession.

        Unlike the methods on the TestClient, client session requests
        do not automatically include the host in the url queried, and
        will require an absolute path to the resource.

        """
        return self._session

    def make_url(self, path: StrOrURL) -> URL:
        """Return an absolute URL for *path* rooted at the test server."""
        return self._server.make_url(path)

    async def _request(
        self, method: str, path: StrOrURL, **kwargs: Any
    ) -> ClientResponse:
        resp = await self._session.request(method, self.make_url(path), **kwargs)
        # save it to close later
        self._responses.append(resp)
        return resp

    # For type checkers on Python 3.11+, expose precisely-typed signatures
    # using Unpack[_RequestOptions]; at runtime the ``else`` branch is used.
    if sys.version_info >= (3, 11) and TYPE_CHECKING:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
        ) -> _RequestContextManager: ...

        def get(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def options(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def head(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def post(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def put(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def patch(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def delete(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

    else:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Any
        ) -> _RequestContextManager:
            """Routes a request to tested http server.

            The interface is identical to aiohttp.ClientSession.request,
            except the loop kwarg is overridden by the instance used by the
            test server.

            """
            return _RequestContextManager(self._request(method, path, **kwargs))

        def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP GET request."""
            return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))

        def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP POST request."""
            return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))

        def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP OPTIONS request."""
            return _RequestContextManager(
                self._request(hdrs.METH_OPTIONS, path, **kwargs)
            )

        def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP HEAD request."""
            return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))

        def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PUT request."""
            return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))

        def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PATCH request."""
            return _RequestContextManager(
                self._request(hdrs.METH_PATCH, path, **kwargs)
            )

        def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP DELETE request."""
            return _RequestContextManager(
                self._request(hdrs.METH_DELETE, path, **kwargs)
            )

    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
        """Initiate websocket connection.

        The api corresponds to aiohttp.ClientSession.ws_connect.

        """
        return _WSRequestContextManager(self._ws_connect(path, **kwargs))

    async def _ws_connect(
        self, path: StrOrURL, **kwargs: Any
    ) -> ClientWebSocketResponse:
        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
        # Tracked so close() can shut the websocket down later.
        self._websockets.append(ws)
        return ws

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run on exit when used as a(n) (asynchronous)
        context manager.

        """
        if not self._closed:
            for resp in self._responses:
                resp.close()
            for ws in self._websockets:
                await ws.close()
            await self._session.close()
            await self._server.close()
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous context management is intentionally unsupported.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> Self:
        await self.start_server()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self.close()
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class AioHTTPTestCase(IsolatedAsyncioTestCase):
    """A base class to allow for unittest web applications using aiohttp.

    Provides the following:

    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
        application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
        self.get_application()

    Note that the TestClient's methods are asynchronous: you have to
    execute function on the test client using asynchronous methods.
    """

    async def get_application(self) -> Application:
        """Return the aiohttp.web.Application under test.

        Subclasses should override this coroutine; the default delegates
        to the legacy synchronous hook get_app().
        """
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete synchronous hook for building the application.

        Use .get_application() coroutine instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    async def asyncSetUp(self) -> None:
        # Capture the loop unittest is running us on, then defer to the
        # overridable async setup hook.
        self.loop = asyncio.get_running_loop()
        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        """Create the app, server and client fixtures and start serving."""
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        """Release the client (which also shuts down the server)."""
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient[Request, Application]:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)
|
| 574 |
+
|
| 575 |
+
|
| 576 |
+
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """No-op decorator kept for backward compatibility.

    Historically required on asynchronous AioHTTPTestCase test methods;
    since aiohttp 3.8 it does nothing beyond emitting a DeprecationWarning
    and returning the function unchanged.
    """
    warnings.warn(
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
        DeprecationWarning,
        stacklevel=2,
    )
    return func
|
| 588 |
+
|
| 589 |
+
|
| 590 |
+
# Signature of a factory producing a fresh event loop, e.g. asyncio.new_event_loop.
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
|
| 591 |
+
|
| 592 |
+
|
| 593 |
+
@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.  *fast* skips the
    gc.collect() pass during teardown.
    """
    test_loop = setup_test_loop(loop_factory)
    yield test_loop
    teardown_test_loop(test_loop, fast=fast)
|
| 604 |
+
|
| 605 |
+
|
| 606 |
+
def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create a new event loop, install it as the current one and return it.

    Pair every call with teardown_test_loop once the test is done with
    the loop.
    """
    new_loop = loop_factory()
    asyncio.set_event_loop(new_loop)
    return new_loop
|
| 617 |
+
|
| 618 |
+
|
| 619 |
+
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Stop, close and detach an event loop created by setup_test_loop.

    Unless *fast* is true, a gc.collect() pass is forced so objects that
    hold loop resources are finalized before the next test starts.
    """
    if not loop.is_closed():
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        gc.collect()

    asyncio.set_event_loop(None)
|
| 631 |
+
|
| 632 |
+
|
| 633 |
+
def _create_app_mock() -> mock.MagicMock:
    """Build a MagicMock standing in for a web Application.

    Item access is backed by a real dict so ``app[key] = v`` round-trips,
    and a frozen on_response_prepare signal is attached.
    """

    def _getitem(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def _setitem(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = _getitem
    app.__setitem__ = _setitem

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
|
| 649 |
+
|
| 650 |
+
|
| 651 |
+
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
|
| 652 |
+
transport = mock.Mock()
|
| 653 |
+
|
| 654 |
+
def get_extra_info(key: str) -> Optional[SSLContext]:
|
| 655 |
+
if key == "sslcontext":
|
| 656 |
+
return sslcontext
|
| 657 |
+
else:
|
| 658 |
+
return None
|
| 659 |
+
|
| 660 |
+
transport.get_extra_info.side_effect = get_extra_info
|
| 661 |
+
return transport
|
| 662 |
+
|
| 663 |
+
|
| 664 |
+
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: StreamReader = EMPTY_PAYLOAD,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Create a mocked web.Request for testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    Parameters defaulting to ``sentinel`` get fresh mock objects; *app*
    defaults to a mocked Application and *loop* to the running loop (or a
    mock if none is running).
    """
    task = mock.Mock()
    if loop is ...:
        # no loop passed, try to get the current one if
        # its is running as we need a real loop to create
        # executor jobs to be able to do testing
        # with a real executor
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = mock.Mock()
            loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        # HTTP/1.0 and earlier close the connection after each response.
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    # Wire the (possibly caller-supplied) transport/writer onto the protocol.
    protocol.transport = transport
    protocol.writer = writer

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req
|
| 756 |
+
|
| 757 |
+
|
| 758 |
+
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Return a Mock wrapping a coroutine function.

    Each call of the mock produces a coroutine that raises
    *raise_exception* when one was supplied; otherwise it yields back
    *return_value* (awaiting it first if it is itself awaitable, in which
    case None is returned).
    """

    async def mock_coro(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if inspect.isawaitable(return_value):
            await return_value
            return None
        return return_value

    return mock.Mock(wraps=mock_coro)
|
.venv/lib/python3.11/site-packages/aiohttp/typedefs.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Shared type aliases used across the aiohttp package."""

import json
import os
from typing import (
    TYPE_CHECKING,
    Any,
    Awaitable,
    Callable,
    Iterable,
    Mapping,
    Protocol,
    Tuple,
    Union,
)

from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL, Query as _Query

# Re-exported for public use under the aiohttp namespace.
Query = _Query

# Default JSON (de)serializers used wherever a codec is not supplied.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:
    # Parameterized multidict aliases for type checkers only; at runtime the
    # unparameterized classes are used (subscription may not be supported).
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

# Anything accepted where raw bytes are expected.
Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
# Every shape of header collection the public API accepts.
LooseHeaders = Union[
    Mapping[str, str],
    Mapping[istr, str],
    _CIMultiDict,
    _CIMultiDictProxy,
    Iterable[Tuple[Union[str, istr], str]],
]
# Headers as received on the wire: (name, value) byte pairs.
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# Every shape of cookie collection the public API accepts.
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# A web request handler: coroutine taking a Request, producing a StreamResponse.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]


class Middleware(Protocol):
    # Structural type for middleware callables: (request, next handler) -> response.
    def __call__(
        self, request: "Request", handler: Handler
    ) -> Awaitable["StreamResponse"]: ...


PathLike = Union[str, "os.PathLike[str]"]
|
.venv/lib/python3.11/site-packages/aiohttp/web.py
ADDED
|
@@ -0,0 +1,605 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import socket
|
| 5 |
+
import sys
|
| 6 |
+
import warnings
|
| 7 |
+
from argparse import ArgumentParser
|
| 8 |
+
from collections.abc import Iterable
|
| 9 |
+
from contextlib import suppress
|
| 10 |
+
from importlib import import_module
|
| 11 |
+
from typing import (
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Awaitable,
|
| 15 |
+
Callable,
|
| 16 |
+
Iterable as TypingIterable,
|
| 17 |
+
List,
|
| 18 |
+
Optional,
|
| 19 |
+
Set,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from .abc import AbstractAccessLogger
|
| 26 |
+
from .helpers import AppKey as AppKey
|
| 27 |
+
from .log import access_logger
|
| 28 |
+
from .typedefs import PathLike
|
| 29 |
+
from .web_app import Application as Application, CleanupError as CleanupError
|
| 30 |
+
from .web_exceptions import (
|
| 31 |
+
HTTPAccepted as HTTPAccepted,
|
| 32 |
+
HTTPBadGateway as HTTPBadGateway,
|
| 33 |
+
HTTPBadRequest as HTTPBadRequest,
|
| 34 |
+
HTTPClientError as HTTPClientError,
|
| 35 |
+
HTTPConflict as HTTPConflict,
|
| 36 |
+
HTTPCreated as HTTPCreated,
|
| 37 |
+
HTTPError as HTTPError,
|
| 38 |
+
HTTPException as HTTPException,
|
| 39 |
+
HTTPExpectationFailed as HTTPExpectationFailed,
|
| 40 |
+
HTTPFailedDependency as HTTPFailedDependency,
|
| 41 |
+
HTTPForbidden as HTTPForbidden,
|
| 42 |
+
HTTPFound as HTTPFound,
|
| 43 |
+
HTTPGatewayTimeout as HTTPGatewayTimeout,
|
| 44 |
+
HTTPGone as HTTPGone,
|
| 45 |
+
HTTPInsufficientStorage as HTTPInsufficientStorage,
|
| 46 |
+
HTTPInternalServerError as HTTPInternalServerError,
|
| 47 |
+
HTTPLengthRequired as HTTPLengthRequired,
|
| 48 |
+
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
|
| 49 |
+
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
|
| 50 |
+
HTTPMove as HTTPMove,
|
| 51 |
+
HTTPMovedPermanently as HTTPMovedPermanently,
|
| 52 |
+
HTTPMultipleChoices as HTTPMultipleChoices,
|
| 53 |
+
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
|
| 54 |
+
HTTPNoContent as HTTPNoContent,
|
| 55 |
+
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
|
| 56 |
+
HTTPNotAcceptable as HTTPNotAcceptable,
|
| 57 |
+
HTTPNotExtended as HTTPNotExtended,
|
| 58 |
+
HTTPNotFound as HTTPNotFound,
|
| 59 |
+
HTTPNotImplemented as HTTPNotImplemented,
|
| 60 |
+
HTTPNotModified as HTTPNotModified,
|
| 61 |
+
HTTPOk as HTTPOk,
|
| 62 |
+
HTTPPartialContent as HTTPPartialContent,
|
| 63 |
+
HTTPPaymentRequired as HTTPPaymentRequired,
|
| 64 |
+
HTTPPermanentRedirect as HTTPPermanentRedirect,
|
| 65 |
+
HTTPPreconditionFailed as HTTPPreconditionFailed,
|
| 66 |
+
HTTPPreconditionRequired as HTTPPreconditionRequired,
|
| 67 |
+
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
|
| 68 |
+
HTTPRedirection as HTTPRedirection,
|
| 69 |
+
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
|
| 70 |
+
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
|
| 71 |
+
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
|
| 72 |
+
HTTPRequestTimeout as HTTPRequestTimeout,
|
| 73 |
+
HTTPRequestURITooLong as HTTPRequestURITooLong,
|
| 74 |
+
HTTPResetContent as HTTPResetContent,
|
| 75 |
+
HTTPSeeOther as HTTPSeeOther,
|
| 76 |
+
HTTPServerError as HTTPServerError,
|
| 77 |
+
HTTPServiceUnavailable as HTTPServiceUnavailable,
|
| 78 |
+
HTTPSuccessful as HTTPSuccessful,
|
| 79 |
+
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
|
| 80 |
+
HTTPTooManyRequests as HTTPTooManyRequests,
|
| 81 |
+
HTTPUnauthorized as HTTPUnauthorized,
|
| 82 |
+
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
|
| 83 |
+
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
|
| 84 |
+
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
|
| 85 |
+
HTTPUpgradeRequired as HTTPUpgradeRequired,
|
| 86 |
+
HTTPUseProxy as HTTPUseProxy,
|
| 87 |
+
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
|
| 88 |
+
HTTPVersionNotSupported as HTTPVersionNotSupported,
|
| 89 |
+
NotAppKeyWarning as NotAppKeyWarning,
|
| 90 |
+
)
|
| 91 |
+
from .web_fileresponse import FileResponse as FileResponse
|
| 92 |
+
from .web_log import AccessLogger
|
| 93 |
+
from .web_middlewares import (
|
| 94 |
+
middleware as middleware,
|
| 95 |
+
normalize_path_middleware as normalize_path_middleware,
|
| 96 |
+
)
|
| 97 |
+
from .web_protocol import (
|
| 98 |
+
PayloadAccessError as PayloadAccessError,
|
| 99 |
+
RequestHandler as RequestHandler,
|
| 100 |
+
RequestPayloadError as RequestPayloadError,
|
| 101 |
+
)
|
| 102 |
+
from .web_request import (
|
| 103 |
+
BaseRequest as BaseRequest,
|
| 104 |
+
FileField as FileField,
|
| 105 |
+
Request as Request,
|
| 106 |
+
)
|
| 107 |
+
from .web_response import (
|
| 108 |
+
ContentCoding as ContentCoding,
|
| 109 |
+
Response as Response,
|
| 110 |
+
StreamResponse as StreamResponse,
|
| 111 |
+
json_response as json_response,
|
| 112 |
+
)
|
| 113 |
+
from .web_routedef import (
|
| 114 |
+
AbstractRouteDef as AbstractRouteDef,
|
| 115 |
+
RouteDef as RouteDef,
|
| 116 |
+
RouteTableDef as RouteTableDef,
|
| 117 |
+
StaticDef as StaticDef,
|
| 118 |
+
delete as delete,
|
| 119 |
+
get as get,
|
| 120 |
+
head as head,
|
| 121 |
+
options as options,
|
| 122 |
+
patch as patch,
|
| 123 |
+
post as post,
|
| 124 |
+
put as put,
|
| 125 |
+
route as route,
|
| 126 |
+
static as static,
|
| 127 |
+
view as view,
|
| 128 |
+
)
|
| 129 |
+
from .web_runner import (
|
| 130 |
+
AppRunner as AppRunner,
|
| 131 |
+
BaseRunner as BaseRunner,
|
| 132 |
+
BaseSite as BaseSite,
|
| 133 |
+
GracefulExit as GracefulExit,
|
| 134 |
+
NamedPipeSite as NamedPipeSite,
|
| 135 |
+
ServerRunner as ServerRunner,
|
| 136 |
+
SockSite as SockSite,
|
| 137 |
+
TCPSite as TCPSite,
|
| 138 |
+
UnixSite as UnixSite,
|
| 139 |
+
)
|
| 140 |
+
from .web_server import Server as Server
|
| 141 |
+
from .web_urldispatcher import (
|
| 142 |
+
AbstractResource as AbstractResource,
|
| 143 |
+
AbstractRoute as AbstractRoute,
|
| 144 |
+
DynamicResource as DynamicResource,
|
| 145 |
+
PlainResource as PlainResource,
|
| 146 |
+
PrefixedSubAppResource as PrefixedSubAppResource,
|
| 147 |
+
Resource as Resource,
|
| 148 |
+
ResourceRoute as ResourceRoute,
|
| 149 |
+
StaticResource as StaticResource,
|
| 150 |
+
UrlDispatcher as UrlDispatcher,
|
| 151 |
+
UrlMappingMatchInfo as UrlMappingMatchInfo,
|
| 152 |
+
View as View,
|
| 153 |
+
)
|
| 154 |
+
from .web_ws import (
|
| 155 |
+
WebSocketReady as WebSocketReady,
|
| 156 |
+
WebSocketResponse as WebSocketResponse,
|
| 157 |
+
WSMsgType as WSMsgType,
|
| 158 |
+
)
|
| 159 |
+
|
| 160 |
+
__all__ = (
|
| 161 |
+
# web_app
|
| 162 |
+
"AppKey",
|
| 163 |
+
"Application",
|
| 164 |
+
"CleanupError",
|
| 165 |
+
# web_exceptions
|
| 166 |
+
"NotAppKeyWarning",
|
| 167 |
+
"HTTPAccepted",
|
| 168 |
+
"HTTPBadGateway",
|
| 169 |
+
"HTTPBadRequest",
|
| 170 |
+
"HTTPClientError",
|
| 171 |
+
"HTTPConflict",
|
| 172 |
+
"HTTPCreated",
|
| 173 |
+
"HTTPError",
|
| 174 |
+
"HTTPException",
|
| 175 |
+
"HTTPExpectationFailed",
|
| 176 |
+
"HTTPFailedDependency",
|
| 177 |
+
"HTTPForbidden",
|
| 178 |
+
"HTTPFound",
|
| 179 |
+
"HTTPGatewayTimeout",
|
| 180 |
+
"HTTPGone",
|
| 181 |
+
"HTTPInsufficientStorage",
|
| 182 |
+
"HTTPInternalServerError",
|
| 183 |
+
"HTTPLengthRequired",
|
| 184 |
+
"HTTPMethodNotAllowed",
|
| 185 |
+
"HTTPMisdirectedRequest",
|
| 186 |
+
"HTTPMove",
|
| 187 |
+
"HTTPMovedPermanently",
|
| 188 |
+
"HTTPMultipleChoices",
|
| 189 |
+
"HTTPNetworkAuthenticationRequired",
|
| 190 |
+
"HTTPNoContent",
|
| 191 |
+
"HTTPNonAuthoritativeInformation",
|
| 192 |
+
"HTTPNotAcceptable",
|
| 193 |
+
"HTTPNotExtended",
|
| 194 |
+
"HTTPNotFound",
|
| 195 |
+
"HTTPNotImplemented",
|
| 196 |
+
"HTTPNotModified",
|
| 197 |
+
"HTTPOk",
|
| 198 |
+
"HTTPPartialContent",
|
| 199 |
+
"HTTPPaymentRequired",
|
| 200 |
+
"HTTPPermanentRedirect",
|
| 201 |
+
"HTTPPreconditionFailed",
|
| 202 |
+
"HTTPPreconditionRequired",
|
| 203 |
+
"HTTPProxyAuthenticationRequired",
|
| 204 |
+
"HTTPRedirection",
|
| 205 |
+
"HTTPRequestEntityTooLarge",
|
| 206 |
+
"HTTPRequestHeaderFieldsTooLarge",
|
| 207 |
+
"HTTPRequestRangeNotSatisfiable",
|
| 208 |
+
"HTTPRequestTimeout",
|
| 209 |
+
"HTTPRequestURITooLong",
|
| 210 |
+
"HTTPResetContent",
|
| 211 |
+
"HTTPSeeOther",
|
| 212 |
+
"HTTPServerError",
|
| 213 |
+
"HTTPServiceUnavailable",
|
| 214 |
+
"HTTPSuccessful",
|
| 215 |
+
"HTTPTemporaryRedirect",
|
| 216 |
+
"HTTPTooManyRequests",
|
| 217 |
+
"HTTPUnauthorized",
|
| 218 |
+
"HTTPUnavailableForLegalReasons",
|
| 219 |
+
"HTTPUnprocessableEntity",
|
| 220 |
+
"HTTPUnsupportedMediaType",
|
| 221 |
+
"HTTPUpgradeRequired",
|
| 222 |
+
"HTTPUseProxy",
|
| 223 |
+
"HTTPVariantAlsoNegotiates",
|
| 224 |
+
"HTTPVersionNotSupported",
|
| 225 |
+
# web_fileresponse
|
| 226 |
+
"FileResponse",
|
| 227 |
+
# web_middlewares
|
| 228 |
+
"middleware",
|
| 229 |
+
"normalize_path_middleware",
|
| 230 |
+
# web_protocol
|
| 231 |
+
"PayloadAccessError",
|
| 232 |
+
"RequestHandler",
|
| 233 |
+
"RequestPayloadError",
|
| 234 |
+
# web_request
|
| 235 |
+
"BaseRequest",
|
| 236 |
+
"FileField",
|
| 237 |
+
"Request",
|
| 238 |
+
# web_response
|
| 239 |
+
"ContentCoding",
|
| 240 |
+
"Response",
|
| 241 |
+
"StreamResponse",
|
| 242 |
+
"json_response",
|
| 243 |
+
# web_routedef
|
| 244 |
+
"AbstractRouteDef",
|
| 245 |
+
"RouteDef",
|
| 246 |
+
"RouteTableDef",
|
| 247 |
+
"StaticDef",
|
| 248 |
+
"delete",
|
| 249 |
+
"get",
|
| 250 |
+
"head",
|
| 251 |
+
"options",
|
| 252 |
+
"patch",
|
| 253 |
+
"post",
|
| 254 |
+
"put",
|
| 255 |
+
"route",
|
| 256 |
+
"static",
|
| 257 |
+
"view",
|
| 258 |
+
# web_runner
|
| 259 |
+
"AppRunner",
|
| 260 |
+
"BaseRunner",
|
| 261 |
+
"BaseSite",
|
| 262 |
+
"GracefulExit",
|
| 263 |
+
"ServerRunner",
|
| 264 |
+
"SockSite",
|
| 265 |
+
"TCPSite",
|
| 266 |
+
"UnixSite",
|
| 267 |
+
"NamedPipeSite",
|
| 268 |
+
# web_server
|
| 269 |
+
"Server",
|
| 270 |
+
# web_urldispatcher
|
| 271 |
+
"AbstractResource",
|
| 272 |
+
"AbstractRoute",
|
| 273 |
+
"DynamicResource",
|
| 274 |
+
"PlainResource",
|
| 275 |
+
"PrefixedSubAppResource",
|
| 276 |
+
"Resource",
|
| 277 |
+
"ResourceRoute",
|
| 278 |
+
"StaticResource",
|
| 279 |
+
"UrlDispatcher",
|
| 280 |
+
"UrlMappingMatchInfo",
|
| 281 |
+
"View",
|
| 282 |
+
# web_ws
|
| 283 |
+
"WebSocketReady",
|
| 284 |
+
"WebSocketResponse",
|
| 285 |
+
"WSMsgType",
|
| 286 |
+
# web
|
| 287 |
+
"run_app",
|
| 288 |
+
)
|
| 289 |
+
|
| 290 |
+
|
| 291 |
+
if TYPE_CHECKING:
|
| 292 |
+
from ssl import SSLContext
|
| 293 |
+
else:
|
| 294 |
+
try:
|
| 295 |
+
from ssl import SSLContext
|
| 296 |
+
except ImportError: # pragma: no cover
|
| 297 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 298 |
+
|
| 299 |
+
# Only display warning when using -Wdefault, -We, -X dev or similar.
|
| 300 |
+
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)
|
| 301 |
+
|
| 302 |
+
HostSequence = TypingIterable[str]
|
| 303 |
+
|
| 304 |
+
|
| 305 |
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
) -> None:
    """Set up an AppRunner with one site per host/path/sock and serve forever.

    Internal worker for run_app(); it never returns normally — the caller
    stops it by cancelling the task (or raising GracefulExit from a signal
    handler). Runner cleanup always happens in the ``finally`` block.
    """
    # An internal function to actually do all dirty job for application running
    if asyncio.iscoroutine(app):
        app = await app

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
        shutdown_timeout=shutdown_timeout,
        handler_cancellation=handler_cancellation,
    )

    await runner.setup()

    sites: List[BaseSite] = []

    try:
        if host is not None:
            # A single string (or bytes-like) binds one TCP site; any other
            # iterable binds one site per hostname.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # NOTE: parses as "(path is None and sock is None) or port is not None":
        # bind a default TCP site when nothing else was requested, OR when an
        # explicit port was given even alongside path/sock.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # A single path-like binds one Unix socket site; any other
            # iterable binds one site per path.
            if isinstance(path, (str, os.PathLike)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # A single socket object binds one site; an iterable of sockets
            # binds one site each.
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        while True:
            await asyncio.sleep(3600)
    finally:
        await runner.cleanup()
|
| 441 |
+
|
| 442 |
+
|
| 443 |
+
def _cancel_tasks(
|
| 444 |
+
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
|
| 445 |
+
) -> None:
|
| 446 |
+
if not to_cancel:
|
| 447 |
+
return
|
| 448 |
+
|
| 449 |
+
for task in to_cancel:
|
| 450 |
+
task.cancel()
|
| 451 |
+
|
| 452 |
+
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
|
| 453 |
+
|
| 454 |
+
for task in to_cancel:
|
| 455 |
+
if task.cancelled():
|
| 456 |
+
continue
|
| 457 |
+
if task.exception() is not None:
|
| 458 |
+
loop.call_exception_handler(
|
| 459 |
+
{
|
| 460 |
+
"message": "unhandled exception during asyncio.run() shutdown",
|
| 461 |
+
"exception": task.exception(),
|
| 462 |
+
"task": task,
|
| 463 |
+
}
|
| 464 |
+
)
|
| 465 |
+
|
| 466 |
+
|
| 467 |
+
def run_app(
|
| 468 |
+
app: Union[Application, Awaitable[Application]],
|
| 469 |
+
*,
|
| 470 |
+
host: Optional[Union[str, HostSequence]] = None,
|
| 471 |
+
port: Optional[int] = None,
|
| 472 |
+
path: Union[PathLike, TypingIterable[PathLike], None] = None,
|
| 473 |
+
sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
|
| 474 |
+
shutdown_timeout: float = 60.0,
|
| 475 |
+
keepalive_timeout: float = 75.0,
|
| 476 |
+
ssl_context: Optional[SSLContext] = None,
|
| 477 |
+
print: Optional[Callable[..., None]] = print,
|
| 478 |
+
backlog: int = 128,
|
| 479 |
+
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
| 480 |
+
access_log_format: str = AccessLogger.LOG_FORMAT,
|
| 481 |
+
access_log: Optional[logging.Logger] = access_logger,
|
| 482 |
+
handle_signals: bool = True,
|
| 483 |
+
reuse_address: Optional[bool] = None,
|
| 484 |
+
reuse_port: Optional[bool] = None,
|
| 485 |
+
handler_cancellation: bool = False,
|
| 486 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 487 |
+
) -> None:
|
| 488 |
+
"""Run an app locally"""
|
| 489 |
+
if loop is None:
|
| 490 |
+
loop = asyncio.new_event_loop()
|
| 491 |
+
|
| 492 |
+
# Configure if and only if in debugging mode and using the default logger
|
| 493 |
+
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
|
| 494 |
+
if access_log.level == logging.NOTSET:
|
| 495 |
+
access_log.setLevel(logging.DEBUG)
|
| 496 |
+
if not access_log.hasHandlers():
|
| 497 |
+
access_log.addHandler(logging.StreamHandler())
|
| 498 |
+
|
| 499 |
+
main_task = loop.create_task(
|
| 500 |
+
_run_app(
|
| 501 |
+
app,
|
| 502 |
+
host=host,
|
| 503 |
+
port=port,
|
| 504 |
+
path=path,
|
| 505 |
+
sock=sock,
|
| 506 |
+
shutdown_timeout=shutdown_timeout,
|
| 507 |
+
keepalive_timeout=keepalive_timeout,
|
| 508 |
+
ssl_context=ssl_context,
|
| 509 |
+
print=print,
|
| 510 |
+
backlog=backlog,
|
| 511 |
+
access_log_class=access_log_class,
|
| 512 |
+
access_log_format=access_log_format,
|
| 513 |
+
access_log=access_log,
|
| 514 |
+
handle_signals=handle_signals,
|
| 515 |
+
reuse_address=reuse_address,
|
| 516 |
+
reuse_port=reuse_port,
|
| 517 |
+
handler_cancellation=handler_cancellation,
|
| 518 |
+
)
|
| 519 |
+
)
|
| 520 |
+
|
| 521 |
+
try:
|
| 522 |
+
asyncio.set_event_loop(loop)
|
| 523 |
+
loop.run_until_complete(main_task)
|
| 524 |
+
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
|
| 525 |
+
pass
|
| 526 |
+
finally:
|
| 527 |
+
try:
|
| 528 |
+
main_task.cancel()
|
| 529 |
+
with suppress(asyncio.CancelledError):
|
| 530 |
+
loop.run_until_complete(main_task)
|
| 531 |
+
finally:
|
| 532 |
+
_cancel_tasks(asyncio.all_tasks(loop), loop)
|
| 533 |
+
loop.run_until_complete(loop.shutdown_asyncgens())
|
| 534 |
+
loop.close()
|
| 535 |
+
|
| 536 |
+
|
| 537 |
+
def main(argv: List[str]) -> None:
|
| 538 |
+
arg_parser = ArgumentParser(
|
| 539 |
+
description="aiohttp.web Application server", prog="aiohttp.web"
|
| 540 |
+
)
|
| 541 |
+
arg_parser.add_argument(
|
| 542 |
+
"entry_func",
|
| 543 |
+
help=(
|
| 544 |
+
"Callable returning the `aiohttp.web.Application` instance to "
|
| 545 |
+
"run. Should be specified in the 'module:function' syntax."
|
| 546 |
+
),
|
| 547 |
+
metavar="entry-func",
|
| 548 |
+
)
|
| 549 |
+
arg_parser.add_argument(
|
| 550 |
+
"-H",
|
| 551 |
+
"--hostname",
|
| 552 |
+
help="TCP/IP hostname to serve on (default: localhost)",
|
| 553 |
+
default=None,
|
| 554 |
+
)
|
| 555 |
+
arg_parser.add_argument(
|
| 556 |
+
"-P",
|
| 557 |
+
"--port",
|
| 558 |
+
help="TCP/IP port to serve on (default: %(default)r)",
|
| 559 |
+
type=int,
|
| 560 |
+
default=8080,
|
| 561 |
+
)
|
| 562 |
+
arg_parser.add_argument(
|
| 563 |
+
"-U",
|
| 564 |
+
"--path",
|
| 565 |
+
help="Unix file system path to serve on. Can be combined with hostname "
|
| 566 |
+
"to serve on both Unix and TCP.",
|
| 567 |
+
)
|
| 568 |
+
args, extra_argv = arg_parser.parse_known_args(argv)
|
| 569 |
+
|
| 570 |
+
# Import logic
|
| 571 |
+
mod_str, _, func_str = args.entry_func.partition(":")
|
| 572 |
+
if not func_str or not mod_str:
|
| 573 |
+
arg_parser.error("'entry-func' not in 'module:function' syntax")
|
| 574 |
+
if mod_str.startswith("."):
|
| 575 |
+
arg_parser.error("relative module names not supported")
|
| 576 |
+
try:
|
| 577 |
+
module = import_module(mod_str)
|
| 578 |
+
except ImportError as ex:
|
| 579 |
+
arg_parser.error(f"unable to import {mod_str}: {ex}")
|
| 580 |
+
try:
|
| 581 |
+
func = getattr(module, func_str)
|
| 582 |
+
except AttributeError:
|
| 583 |
+
arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
|
| 584 |
+
|
| 585 |
+
# Compatibility logic
|
| 586 |
+
if args.path is not None and not hasattr(socket, "AF_UNIX"):
|
| 587 |
+
arg_parser.error(
|
| 588 |
+
"file system paths not supported by your operating environment"
|
| 589 |
+
)
|
| 590 |
+
|
| 591 |
+
logging.basicConfig(level=logging.DEBUG)
|
| 592 |
+
|
| 593 |
+
if args.path and args.hostname is None:
|
| 594 |
+
host = port = None
|
| 595 |
+
else:
|
| 596 |
+
host = args.hostname or "localhost"
|
| 597 |
+
port = args.port
|
| 598 |
+
|
| 599 |
+
app = func(extra_argv)
|
| 600 |
+
run_app(app, host=host, port=port, path=args.path)
|
| 601 |
+
arg_parser.exit(message="Stopped\n")
|
| 602 |
+
|
| 603 |
+
|
| 604 |
+
if __name__ == "__main__": # pragma: no branch
|
| 605 |
+
main(sys.argv[1:]) # pragma: no cover
|
.venv/lib/python3.11/site-packages/aiohttp/web_protocol.py
ADDED
|
@@ -0,0 +1,750 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import asyncio.streams
|
| 3 |
+
import sys
|
| 4 |
+
import traceback
|
| 5 |
+
import warnings
|
| 6 |
+
from collections import deque
|
| 7 |
+
from contextlib import suppress
|
| 8 |
+
from html import escape as html_escape
|
| 9 |
+
from http import HTTPStatus
|
| 10 |
+
from logging import Logger
|
| 11 |
+
from typing import (
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
Awaitable,
|
| 15 |
+
Callable,
|
| 16 |
+
Deque,
|
| 17 |
+
Optional,
|
| 18 |
+
Sequence,
|
| 19 |
+
Tuple,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
import attr
|
| 26 |
+
import yarl
|
| 27 |
+
|
| 28 |
+
from .abc import AbstractAccessLogger, AbstractStreamWriter
|
| 29 |
+
from .base_protocol import BaseProtocol
|
| 30 |
+
from .helpers import ceil_timeout
|
| 31 |
+
from .http import (
|
| 32 |
+
HttpProcessingError,
|
| 33 |
+
HttpRequestParser,
|
| 34 |
+
HttpVersion10,
|
| 35 |
+
RawRequestMessage,
|
| 36 |
+
StreamWriter,
|
| 37 |
+
)
|
| 38 |
+
from .http_exceptions import BadHttpMethod
|
| 39 |
+
from .log import access_logger, server_logger
|
| 40 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 41 |
+
from .tcp_helpers import tcp_keepalive
|
| 42 |
+
from .web_exceptions import HTTPException, HTTPInternalServerError
|
| 43 |
+
from .web_log import AccessLogger
|
| 44 |
+
from .web_request import BaseRequest
|
| 45 |
+
from .web_response import Response, StreamResponse
|
| 46 |
+
|
| 47 |
+
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")
|
| 48 |
+
|
| 49 |
+
if TYPE_CHECKING:
|
| 50 |
+
from .web_server import Server
|
| 51 |
+
|
# Factory signature used by Server: build a BaseRequest from the parsed
# message plus the transport-level objects that will serve it.
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Application-level handler: consume a request, produce a response.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

# Sentinel "request" substituted when parsing failed so badly that no real
# request message is available (e.g. an unreadable start line).
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)


class RequestPayloadError(Exception):
    """Payload parsing error."""


class PayloadAccessError(Exception):
    """Payload was accessed after response was sent."""


# Shared singleton fed to the payload stream once the response has been
# sent, so late reads fail fast without allocating a new exception each time.
_PAYLOAD_ACCESS_ERROR = PayloadAccessError()


@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    # HTTP status code to report for the failed request.
    status: int
    # The original parsing exception.
    exc: BaseException
    # Human-readable description of the failure.
    message: str


# Item queued for the handler task: either a parsed request message or
# error info describing why parsing failed, paired with the payload stream.
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
|
| 100 |
+
|
| 101 |
+
class RequestHandler(BaseProtocol):
|
| 102 |
+
"""HTTP protocol implementation.
|
| 103 |
+
|
| 104 |
+
RequestHandler handles incoming HTTP request. It reads request line,
|
| 105 |
+
request headers and request payload and calls handle_request() method.
|
| 106 |
+
By default it always returns with 404 response.
|
| 107 |
+
|
| 108 |
+
RequestHandler handles errors in incoming request, like bad
|
| 109 |
+
status line, bad headers or incomplete payload. If any error occurs,
|
| 110 |
+
connection gets closed.
|
| 111 |
+
|
| 112 |
+
keepalive_timeout -- number of seconds before closing
|
| 113 |
+
keep-alive connection
|
| 114 |
+
|
| 115 |
+
tcp_keepalive -- TCP keep-alive is on, default is on
|
| 116 |
+
|
| 117 |
+
debug -- enable debug mode
|
| 118 |
+
|
| 119 |
+
logger -- custom logger object
|
| 120 |
+
|
| 121 |
+
access_log_class -- custom class for access_logger
|
| 122 |
+
|
| 123 |
+
access_log -- custom logging object
|
| 124 |
+
|
| 125 |
+
access_log_format -- access log format string
|
| 126 |
+
|
| 127 |
+
loop -- Optional event loop
|
| 128 |
+
|
| 129 |
+
max_line_size -- Optional maximum header line size
|
| 130 |
+
|
| 131 |
+
max_field_size -- Optional maximum header field size
|
| 132 |
+
|
| 133 |
+
max_headers -- Optional maximum header size
|
| 134 |
+
|
| 135 |
+
timeout_ceil_threshold -- Optional value to specify
|
| 136 |
+
threshold to ceil() timeout
|
| 137 |
+
values
|
| 138 |
+
|
| 139 |
+
"""
|
| 140 |
+
|
| 141 |
+
__slots__ = (
|
| 142 |
+
"_request_count",
|
| 143 |
+
"_keepalive",
|
| 144 |
+
"_manager",
|
| 145 |
+
"_request_handler",
|
| 146 |
+
"_request_factory",
|
| 147 |
+
"_tcp_keepalive",
|
| 148 |
+
"_next_keepalive_close_time",
|
| 149 |
+
"_keepalive_handle",
|
| 150 |
+
"_keepalive_timeout",
|
| 151 |
+
"_lingering_time",
|
| 152 |
+
"_messages",
|
| 153 |
+
"_message_tail",
|
| 154 |
+
"_handler_waiter",
|
| 155 |
+
"_waiter",
|
| 156 |
+
"_task_handler",
|
| 157 |
+
"_upgrade",
|
| 158 |
+
"_payload_parser",
|
| 159 |
+
"_request_parser",
|
| 160 |
+
"_reading_paused",
|
| 161 |
+
"logger",
|
| 162 |
+
"debug",
|
| 163 |
+
"access_log",
|
| 164 |
+
"access_logger",
|
| 165 |
+
"_close",
|
| 166 |
+
"_force_close",
|
| 167 |
+
"_current_request",
|
| 168 |
+
"_timeout_ceil_threshold",
|
| 169 |
+
"_request_in_progress",
|
| 170 |
+
)
|
| 171 |
+
|
| 172 |
+
def __init__(
    self,
    manager: "Server",
    *,
    loop: asyncio.AbstractEventLoop,
    # Default should be high enough that it's likely longer than a reverse proxy.
    keepalive_timeout: float = 3630,
    tcp_keepalive: bool = True,
    logger: Logger = server_logger,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log: Logger = access_logger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    debug: bool = False,
    max_line_size: int = 8190,
    max_headers: int = 32768,
    max_field_size: int = 8190,
    lingering_time: float = 10.0,
    read_bufsize: int = 2**16,
    auto_decompress: bool = True,
    timeout_ceil_threshold: float = 5,
):
    """Initialize per-connection state and the HTTP request parser.

    *manager* is the owning Server; its request_handler/request_factory
    are cached locally so they survive until connection_lost() clears them.
    """
    super().__init__(loop)

    # _request_count is the number of requests processed with the same connection.
    self._request_count = 0
    self._keepalive = False
    self._current_request: Optional[BaseRequest] = None
    self._manager: Optional[Server] = manager
    self._request_handler: Optional[_RequestHandler] = manager.request_handler
    self._request_factory: Optional[_RequestFactory] = manager.request_factory

    self._tcp_keepalive = tcp_keepalive
    # placeholder to be replaced on keepalive timeout setup
    self._next_keepalive_close_time = 0.0
    self._keepalive_handle: Optional[asyncio.Handle] = None
    self._keepalive_timeout = keepalive_timeout
    self._lingering_time = float(lingering_time)

    # Parsed-but-unhandled (message, payload) pairs (HTTP pipelining).
    self._messages: Deque[_MsgType] = deque()
    self._message_tail = b""

    self._waiter: Optional[asyncio.Future[None]] = None
    self._handler_waiter: Optional[asyncio.Future[None]] = None
    self._task_handler: Optional[asyncio.Task[None]] = None

    self._upgrade = False
    self._payload_parser: Any = None
    self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
        self,
        loop,
        read_bufsize,
        max_line_size=max_line_size,
        max_field_size=max_field_size,
        max_headers=max_headers,
        payload_exception=RequestPayloadError,
        auto_decompress=auto_decompress,
    )

    # Fall back to the default of 5 when timeout_ceil_threshold is not
    # convertible to float, rather than failing connection setup.
    self._timeout_ceil_threshold: float = 5
    try:
        self._timeout_ceil_threshold = float(timeout_ceil_threshold)
    except (TypeError, ValueError):
        pass

    self.logger = logger
    self.debug = debug
    self.access_log = access_log
    if access_log:
        self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
            access_log, access_log_format
        )
    else:
        self.access_logger = None

    self._close = False
    self._force_close = False
    self._request_in_progress = False
|
| 249 |
+
|
| 250 |
+
def __repr__(self) -> str:
    """Return a short debug representation including connection state."""
    state = "connected" if self.transport is not None else "disconnected"
    return f"<{self.__class__.__name__} {state}>"
|
| 255 |
+
|
| 256 |
+
@property
def keepalive_timeout(self) -> float:
    """Number of seconds an idle keep-alive connection stays open."""
    return self._keepalive_timeout
|
| 259 |
+
|
| 260 |
+
async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
    """Do worker process exit preparations.

    We need to clean up everything and stop accepting requests.
    It is especially important for keep-alive connections.
    """
    self._force_close = True

    if self._keepalive_handle is not None:
        self._keepalive_handle.cancel()

    # Wait for graceful handler completion
    if self._request_in_progress:
        # The future is only created when we are shutting
        # down while the handler is still processing a request
        # to avoid creating a future for every request.
        self._handler_waiter = self._loop.create_future()
        try:
            async with ceil_timeout(timeout):
                await self._handler_waiter
        except (asyncio.CancelledError, asyncio.TimeoutError):
            self._handler_waiter = None
            # Re-raise only when this task itself is being cancelled;
            # Task.cancelling() bookkeeping exists on 3.11+ only.
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise
    # Then cancel handler and wait
    try:
        async with ceil_timeout(timeout):
            if self._current_request is not None:
                self._current_request._cancel(asyncio.CancelledError())

            if self._task_handler is not None and not self._task_handler.done():
                # shield() so a timeout here cancels the wait, not the handler.
                await asyncio.shield(self._task_handler)
    except (asyncio.CancelledError, asyncio.TimeoutError):
        if (
            sys.version_info >= (3, 11)
            and (task := asyncio.current_task())
            and task.cancelling()
        ):
            raise

    # force-close non-idle handler
    if self._task_handler is not None:
        self._task_handler.cancel()

    self.force_close()
|
| 309 |
+
|
| 310 |
+
def connection_made(self, transport: asyncio.BaseTransport) -> None:
    """Protocol callback: a client connected; start the request-handling task."""
    super().connection_made(transport)

    real_transport = cast(asyncio.Transport, transport)
    if self._tcp_keepalive:
        tcp_keepalive(real_transport)

    assert self._manager is not None
    self._manager.connection_made(self, real_transport)

    loop = self._loop
    if sys.version_info >= (3, 12):
        # eager_start (3.12+) runs the coroutine up to its first await
        # synchronously, avoiding an extra event-loop hop.
        task = asyncio.Task(self.start(), loop=loop, eager_start=True)
    else:
        task = loop.create_task(self.start())
    self._task_handler = task
|
| 326 |
+
|
| 327 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
    """Protocol callback: tear down all per-connection state.

    Setting _manager to None makes any repeated invocation a no-op.
    """
    if self._manager is None:
        return
    self._manager.connection_lost(self, exc)

    # Grab value before setting _manager to None.
    handler_cancellation = self._manager.handler_cancellation

    self.force_close()
    super().connection_lost(exc)
    self._manager = None
    self._request_factory = None
    self._request_handler = None
    self._request_parser = None

    if self._keepalive_handle is not None:
        self._keepalive_handle.cancel()

    if self._current_request is not None:
        if exc is None:
            exc = ConnectionResetError("Connection lost")
        self._current_request._cancel(exc)

    if handler_cancellation and self._task_handler is not None:
        self._task_handler.cancel()

    self._task_handler = None

    if self._payload_parser is not None:
        self._payload_parser.feed_eof()
        self._payload_parser = None
|
| 358 |
+
|
| 359 |
+
def set_parser(self, parser: Any) -> None:
    """Install *parser* (actual type is WebReader) as the payload parser.

    Any bytes buffered in ``_message_tail`` are flushed into the new
    parser immediately.
    """
    assert self._payload_parser is None

    self._payload_parser = parser

    tail = self._message_tail
    if tail:
        parser.feed_data(tail)
        self._message_tail = b""
|
| 368 |
+
|
| 369 |
+
def eof_received(self) -> None:
    """Ignore EOF from the peer; teardown happens via connection_lost()."""
|
| 371 |
+
|
| 372 |
+
def data_received(self, data: bytes) -> None:
    """Protocol callback: feed raw bytes to the request or payload parser."""
    # Once closing, incoming bytes are simply dropped.
    if self._force_close or self._close:
        return
    # parse http messages
    messages: Sequence[_MsgType]
    if self._payload_parser is None and not self._upgrade:
        assert self._request_parser is not None
        try:
            messages, upgraded, tail = self._request_parser.feed_data(data)
        except HttpProcessingError as exc:
            # Turn a malformed request into a synthetic 400 message so the
            # normal handler loop can produce the error response.
            messages = [
                (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
            ]
            upgraded = False
            tail = b""

        for msg, payload in messages or ():
            self._request_count += 1
            self._messages.append((msg, payload))

        waiter = self._waiter
        if messages and waiter is not None and not waiter.done():
            # don't set result twice
            waiter.set_result(None)

        self._upgrade = upgraded
        if upgraded and tail:
            self._message_tail = tail

    # no parser, just store
    elif self._payload_parser is None and self._upgrade and data:
        self._message_tail += data

    # feed payload
    elif data:
        eof, tail = self._payload_parser.feed_data(data)
        if eof:
            self.close()
|
| 410 |
+
|
| 411 |
+
def keep_alive(self, val: bool) -> None:
    """Switch keep-alive connection mode on or off.

    :param bool val: new state.

    Any pending keep-alive close timer is cancelled.
    """
    self._keepalive = val
    handle = self._keepalive_handle
    if handle:
        handle.cancel()
        self._keepalive_handle = None
|
| 420 |
+
|
| 421 |
+
def close(self) -> None:
    """Close connection.

    Stop accepting new pipelining messages and close
    connection when handlers done processing messages.
    """
    self._close = True
    waiter = self._waiter
    if waiter:
        waiter.cancel()
|
| 430 |
+
|
| 431 |
+
def force_close(self) -> None:
    """Forcefully close connection: cancel the waiter and drop the transport."""
    self._force_close = True
    waiter = self._waiter
    if waiter:
        waiter.cancel()
    transport = self.transport
    if transport is not None:
        transport.close()
        self.transport = None
|
| 439 |
+
|
| 440 |
+
def log_access(
    self, request: BaseRequest, response: StreamResponse, time: float
) -> None:
    """Emit an access-log record for *request*, when access logging is enabled.

    *time* is the loop timestamp at which request processing started; the
    logger receives the elapsed duration.
    """
    access_logger = self.access_logger
    if access_logger is None or not access_logger.enabled:
        return
    access_logger.log(request, response, self._loop.time() - time)
|
| 445 |
+
|
| 446 |
+
def log_debug(self, *args: Any, **kw: Any) -> None:
    """Forward to ``logger.debug`` only when debug mode is enabled."""
    if not self.debug:
        return
    self.logger.debug(*args, **kw)
|
| 449 |
+
|
| 450 |
+
def log_exception(self, *args: Any, **kw: Any) -> None:
    """Unconditionally log an exception via the configured logger."""
    self.logger.exception(*args, **kw)
|
| 452 |
+
|
| 453 |
+
def _process_keepalive(self) -> None:
    """Timer callback: close an idle keep-alive connection past its deadline."""
    self._keepalive_handle = None
    if self._force_close or not self._keepalive:
        return

    loop = self._loop
    now = loop.time()
    close_time = self._next_keepalive_close_time
    if now < close_time:
        # Keep alive close check fired too early, reschedule
        self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
        return

    # handler in idle state
    if self._waiter and not self._waiter.done():
        self.force_close()
|
| 469 |
+
|
| 470 |
+
async def _handle_request(
    self,
    request: BaseRequest,
    start_time: float,
    request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
) -> Tuple[StreamResponse, bool]:
    """Run *request_handler* on *request* and finish the response.

    Maps handler failures to error responses (HTTPException -> itself,
    TimeoutError -> 504, other exceptions -> 500).  Returns the
    (response, reset) pair from finish_response(); *reset* is True when
    the client disconnected prematurely.
    """
    self._request_in_progress = True
    try:
        try:
            self._current_request = request
            resp = await request_handler(request)
        finally:
            self._current_request = None
    except HTTPException as exc:
        resp = exc
        resp, reset = await self.finish_response(request, resp, start_time)
    except asyncio.CancelledError:
        raise
    except asyncio.TimeoutError as exc:
        self.log_debug("Request handler timed out.", exc_info=exc)
        resp = self.handle_error(request, 504)
        resp, reset = await self.finish_response(request, resp, start_time)
    except Exception as exc:
        resp = self.handle_error(request, 500, exc)
        resp, reset = await self.finish_response(request, resp, start_time)
    else:
        # Deprecation warning (See #2415)
        if getattr(resp, "__http_exception__", False):
            warnings.warn(
                "returning HTTPException object is deprecated "
                "(#2415) and will be removed, "
                "please raise the exception instead",
                DeprecationWarning,
            )

        resp, reset = await self.finish_response(request, resp, start_time)
    finally:
        self._request_in_progress = False
        # Wake up shutdown() if it is waiting for this handler to finish.
        if self._handler_waiter is not None:
            self._handler_waiter.set_result(None)

    return resp, reset
|
| 512 |
+
|
| 513 |
+
async def start(self) -> None:
    """Process incoming request.

    It reads request line, request headers and request payload, then
    calls handle_request() method. Subclass has to override
    handle_request(). start() handles various exceptions in request
    or response handling. Connection is being closed always unless
    keep_alive(True) specified.
    """
    loop = self._loop
    handler = asyncio.current_task(loop)
    assert handler is not None
    manager = self._manager
    assert manager is not None
    keepalive_timeout = self._keepalive_timeout
    resp = None
    assert self._request_factory is not None
    assert self._request_handler is not None

    while not self._force_close:
        if not self._messages:
            try:
                # wait for next request
                self._waiter = loop.create_future()
                await self._waiter
            finally:
                self._waiter = None

        message, payload = self._messages.popleft()

        start = loop.time()

        manager.requests_count += 1
        writer = StreamWriter(self, loop)
        if isinstance(message, _ErrInfo):
            # make request_factory work
            request_handler = self._make_error_handler(message)
            message = ERROR
        else:
            request_handler = self._request_handler

        request = self._request_factory(message, payload, self, writer, handler)
        try:
            # a new task is used for copy context vars (#3406)
            coro = self._handle_request(request, start, request_handler)
            if sys.version_info >= (3, 12):
                task = asyncio.Task(coro, loop=loop, eager_start=True)
            else:
                task = loop.create_task(coro)
            try:
                resp, reset = await task
            except ConnectionError:
                self.log_debug("Ignored premature client disconnection")
                break

            # Drop the processed task from asyncio.Task.all_tasks() early
            del task
            if reset:
                self.log_debug("Ignored premature client disconnection 2")
                break

            # notify server about keep-alive
            self._keepalive = bool(resp.keep_alive)

            # check payload
            if not payload.is_eof():
                lingering_time = self._lingering_time
                if not self._force_close and lingering_time:
                    self.log_debug(
                        "Start lingering close timer for %s sec.", lingering_time
                    )

                    now = loop.time()
                    end_t = now + lingering_time

                    try:
                        # Drain the unread payload so the next pipelined
                        # request can be parsed from the same connection.
                        while not payload.is_eof() and now < end_t:
                            async with ceil_timeout(end_t - now):
                                # read and ignore
                                await payload.readany()
                            now = loop.time()
                    except (asyncio.CancelledError, asyncio.TimeoutError):
                        if (
                            sys.version_info >= (3, 11)
                            and (t := asyncio.current_task())
                            and t.cancelling()
                        ):
                            raise

                # if payload still uncompleted
                if not payload.is_eof() and not self._force_close:
                    self.log_debug("Uncompleted request.")
                    self.close()

                payload.set_exception(_PAYLOAD_ACCESS_ERROR)

        except asyncio.CancelledError:
            self.log_debug("Ignored premature client disconnection")
            raise
        except Exception as exc:
            self.log_exception("Unhandled exception", exc_info=exc)
            self.force_close()
        finally:
            if self.transport is None and resp is not None:
                self.log_debug("Ignored premature client disconnection.")
            elif not self._force_close:
                if self._keepalive and not self._close:
                    # start keep-alive timer
                    if keepalive_timeout is not None:
                        now = loop.time()
                        close_time = now + keepalive_timeout
                        self._next_keepalive_close_time = close_time
                        if self._keepalive_handle is None:
                            self._keepalive_handle = loop.call_at(
                                close_time, self._process_keepalive
                            )
                else:
                    break

    # remove handler, close transport if no handlers left
    if not self._force_close:
        self._task_handler = None
        if self.transport is not None:
            self.transport.close()
|
| 637 |
+
|
| 638 |
+
async def finish_response(
    self, request: BaseRequest, resp: StreamResponse, start_time: float
) -> Tuple[StreamResponse, bool]:
    """Prepare the response and write_eof, then log access.

    This has to be called within the context of any exception so the
    access logger can get exception information.  Returns True (in the
    second tuple element) if the client disconnects prematurely.
    """
    request._finish()
    if self._request_parser is not None:
        self._request_parser.set_upgraded(False)
        self._upgrade = False
        if self._message_tail:
            # Bytes buffered during an upgrade attempt are replayed
            # through the regular request parser.
            self._request_parser.feed_data(self._message_tail)
            self._message_tail = b""
    try:
        prepare_meth = resp.prepare
    except AttributeError:
        # The handler returned something that is not a response object
        # (or returned nothing at all): log it and substitute a 500.
        if resp is None:
            self.log_exception("Missing return statement on request handler")
        else:
            self.log_exception(
                "Web-handler should return a response instance, "
                "got {!r}".format(resp)
            )
        exc = HTTPInternalServerError()
        resp = Response(
            status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
        )
        prepare_meth = resp.prepare
    try:
        await prepare_meth(request)
        await resp.write_eof()
    except ConnectionError:
        self.log_access(request, resp, start_time)
        return resp, True

    self.log_access(request, resp, start_time)
    return resp, False
|
| 679 |
+
|
| 680 |
+
def handle_error(
    self,
    request: BaseRequest,
    status: int = 500,
    exc: Optional[BaseException] = None,
    message: Optional[str] = None,
) -> StreamResponse:
    """Handle errors.

    Returns HTTP response with specific status code. Logs additional
    information. It always closes current connection.

    :raises ConnectionError: if response data was already sent, so no
        error response can be written.
    """
    if self._request_count == 1 and isinstance(exc, BadHttpMethod):
        # BadHttpMethod is common when a client sends non-HTTP
        # or encrypted traffic to an HTTP port. This is expected
        # to happen when connected to the public internet so we log
        # it at the debug level as to not fill logs with noise.
        self.logger.debug(
            "Error handling request from %s", request.remote, exc_info=exc
        )
    else:
        self.log_exception(
            "Error handling request from %s", request.remote, exc_info=exc
        )

    # some data already got sent, connection is broken
    if request.writer.output_size > 0:
        raise ConnectionError(
            "Response is sent already, cannot send another response "
            "with the error message"
        )

    ct = "text/plain"
    if status == HTTPStatus.INTERNAL_SERVER_ERROR:
        title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
        msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
        tb = None
        if self.debug:
            # Traceback rendering is best-effort; never let it raise here.
            with suppress(Exception):
                tb = traceback.format_exc()

        if "text/html" in request.headers.get("Accept", ""):
            if tb:
                tb = html_escape(tb)
                msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
            message = (
                "<html><head>"
                "<title>{title}</title>"
                "</head><body>\n<h1>{title}</h1>"
                "\n{msg}\n</body></html>\n"
            ).format(title=title, msg=msg)
            ct = "text/html"
        else:
            if tb:
                msg = tb
            message = title + "\n\n" + msg

    resp = Response(status=status, text=message, content_type=ct)
    resp.force_close()

    return resp
|
| 741 |
+
|
| 742 |
+
def _make_error_handler(
    self, err_info: _ErrInfo
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
    """Build a request handler that renders *err_info* as an error response."""
    status, exc, message = err_info.status, err_info.exc, err_info.message

    async def handler(request: BaseRequest) -> StreamResponse:
        return self.handle_error(request, status, exc, message)

    return handler
|
.venv/lib/python3.11/site-packages/aiohttp/web_routedef.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import os # noqa
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Callable,
|
| 7 |
+
Dict,
|
| 8 |
+
Iterator,
|
| 9 |
+
List,
|
| 10 |
+
Optional,
|
| 11 |
+
Sequence,
|
| 12 |
+
Type,
|
| 13 |
+
Union,
|
| 14 |
+
overload,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
import attr
|
| 18 |
+
|
| 19 |
+
from . import hdrs
|
| 20 |
+
from .abc import AbstractView
|
| 21 |
+
from .typedefs import Handler, PathLike
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:
|
| 24 |
+
from .web_request import Request
|
| 25 |
+
from .web_response import StreamResponse
|
| 26 |
+
from .web_urldispatcher import AbstractRoute, UrlDispatcher
|
| 27 |
+
else:
|
| 28 |
+
Request = StreamResponse = UrlDispatcher = AbstractRoute = None
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
__all__ = (
|
| 32 |
+
"AbstractRouteDef",
|
| 33 |
+
"RouteDef",
|
| 34 |
+
"StaticDef",
|
| 35 |
+
"RouteTableDef",
|
| 36 |
+
"head",
|
| 37 |
+
"options",
|
| 38 |
+
"get",
|
| 39 |
+
"post",
|
| 40 |
+
"patch",
|
| 41 |
+
"put",
|
| 42 |
+
"delete",
|
| 43 |
+
"route",
|
| 44 |
+
"view",
|
| 45 |
+
"static",
|
| 46 |
+
)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class AbstractRouteDef(abc.ABC):
    """Base class for route definitions that can register themselves on a router."""

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Register this definition on *router* and return the created routes."""
        pass  # pragma: no cover
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
_HandlerType = Union[Type[AbstractView], Handler]
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """Immutable definition of one route: method, path, handler and kwargs."""

    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extras = "".join(
            f", {key}={val!r}" for key, val in sorted(self.kwargs.items())
        )
        return (
            f"<RouteDef {self.method} {self.path} "
            f"-> {self.handler.__name__!r}{extras}>"
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add this route to *router*, preferring the per-method helper."""
        if self.method not in hdrs.METH_ALL:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]
        add_method = getattr(router, "add_" + self.method.lower())
        return [add_method(self.path, self.handler, **self.kwargs)]
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """Immutable definition of a static-resource route: prefix, path, kwargs."""

    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extras = "".join(
            f", {key}={val!r}" for key, val in sorted(self.kwargs.items())
        )
        return f"<StaticDef {self.prefix} -> {self.path}{extras}>"

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add the static resource to *router* and return its routes."""
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        routes = resource.get_info().get("routes", {})
        return list(routes.values())
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Create a RouteDef; extra kwargs are later forwarded to the router."""
    return RouteDef(method, path, handler, kwargs)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for route() with the HEAD method."""
    return route(hdrs.METH_HEAD, path, handler, **kwargs)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for route() with the OPTIONS method."""
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """Shortcut for route() with the GET method.

    *name* and *allow_head* are forwarded (via RouteDef.register) to the
    router's ``add_get()``.
    """
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for route() with the POST method."""
    return route(hdrs.METH_POST, path, handler, **kwargs)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for route() with the PUT method."""
    return route(hdrs.METH_PUT, path, handler, **kwargs)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for route() with the PATCH method."""
    return route(hdrs.METH_PATCH, path, handler, **kwargs)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for route() with the DELETE method."""
    return route(hdrs.METH_DELETE, path, handler, **kwargs)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Route any HTTP method (METH_ANY) to a class-based view."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Create a StaticDef; kwargs are later forwarded to the router's add_static()."""
    return StaticDef(prefix, path, kwargs)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
_Deco = Callable[[_HandlerType], _HandlerType]
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table.

    Collects route definitions via decorator methods (``@routes.get(...)``)
    so they can later be registered on an application's router.  Behaves
    as a read-only sequence of AbstractRouteDef.
    """

    def __init__(self) -> None:
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef: ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Return a decorator that records a route for *method* and *path*."""

        def inner(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return inner

    def head(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the HEAD method."""
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the GET method."""
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the POST method."""
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the PUT method."""
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the PATCH method."""
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the DELETE method."""
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def options(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with the OPTIONS method."""
        return self.route(hdrs.METH_OPTIONS, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for route() with METH_ANY (class-based views)."""
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Record a static-files entry; unlike the HTTP verbs, not a decorator."""
        self._items.append(StaticDef(prefix, path, kwargs))
|
.venv/lib/python3.11/site-packages/aiohttp/web_runner.py
ADDED
|
@@ -0,0 +1,399 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import signal
|
| 3 |
+
import socket
|
| 4 |
+
import warnings
|
| 5 |
+
from abc import ABC, abstractmethod
|
| 6 |
+
from typing import TYPE_CHECKING, Any, List, Optional, Set
|
| 7 |
+
|
| 8 |
+
from yarl import URL
|
| 9 |
+
|
| 10 |
+
from .typedefs import PathLike
|
| 11 |
+
from .web_app import Application
|
| 12 |
+
from .web_server import Server
|
| 13 |
+
|
| 14 |
+
if TYPE_CHECKING:
|
| 15 |
+
from ssl import SSLContext
|
| 16 |
+
else:
|
| 17 |
+
try:
|
| 18 |
+
from ssl import SSLContext
|
| 19 |
+
except ImportError: # pragma: no cover
|
| 20 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 21 |
+
|
| 22 |
+
__all__ = (
|
| 23 |
+
"BaseSite",
|
| 24 |
+
"TCPSite",
|
| 25 |
+
"UnixSite",
|
| 26 |
+
"NamedPipeSite",
|
| 27 |
+
"SockSite",
|
| 28 |
+
"BaseRunner",
|
| 29 |
+
"AppRunner",
|
| 30 |
+
"ServerRunner",
|
| 31 |
+
"GracefulExit",
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class GracefulExit(SystemExit):
    """SystemExit subclass raised to request an orderly server shutdown."""

    # Exit status associated with a graceful shutdown.
    code = 1
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _raise_graceful_exit() -> None:
    # Signal-handler callback (see BaseRunner.setup): unwinds via GracefulExit.
    raise GracefulExit()
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class BaseSite(ABC):
    """Abstract base for serving endpoints attached to a BaseRunner.

    Subclasses implement :meth:`start` (which must call ``super().start()``
    to register with the runner) and the :attr:`name` property.
    """

    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        if shutdown_timeout != 60.0:
            # Deprecated path: the per-site timeout is copied onto the runner,
            # which owns shutdown timing now.
            msg = "shutdown_timeout should be set on BaseRunner"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            runner._shutdown_timeout = shutdown_timeout
        self._runner = runner
        self._ssl_context = ssl_context
        self._backlog = backlog
        # Set by subclasses' start(); None until then.
        self._server: Optional[asyncio.AbstractServer] = None

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable description of the listening endpoint."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        # Base implementation only registers the site with its runner;
        # subclasses create the actual asyncio server.
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Close the underlying server (if started) and unregister the site."""
        self._runner._check_site(self)
        if self._server is not None:  # Maybe not started yet
            self._server.close()

        self._runner._unreg_site(self)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class TCPSite(BaseSite):
    """Site serving the runner's handler on a TCP host/port."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        if port is None:
            # Default port depends on whether TLS is configured.
            port = 8443 if self._ssl_context else 8080
        self._port = port
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        # URL-style description, e.g. "http://0.0.0.0:8080".
        scheme = "https" if self._ssl_context else "http"
        host = "0.0.0.0" if not self._host else self._host
        return str(URL.build(scheme=scheme, host=host, port=self._port))

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class UnixSite(BaseSite):
    """Site serving the runner's handler on a Unix domain socket path."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: PathLike,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        scheme = "https" if self._ssl_context else "http"
        return f"{scheme}://unix:{self._path}:"

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_unix_server(
            server,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class NamedPipeSite(BaseSite):
    """Site serving on a Windows named pipe (proactor event loop only)."""

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        # start_serving_pipe exists only on the Windows proactor loop,
        # so reject any other loop implementation up front.
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        return self._path

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe returns a list; keep the first (only) server.
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]
|
| 199 |
+
|
| 200 |
+
|
| 201 |
+
class SockSite(BaseSite):
    """Site serving on an already-created (and bound) socket object."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        # Precompute the display name from the socket family/address.
        scheme = "https" if self._ssl_context else "http"
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            host, port = sock.getsockname()[:2]
            name = str(URL.build(scheme=scheme, host=host, port=port))
        self._name = name

    @property
    def name(self) -> str:
        return self._name

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )
|
| 240 |
+
|
| 241 |
+
|
| 242 |
+
class BaseRunner(ABC):
    """Abstract base managing a Server's lifecycle and its attached sites.

    Subclasses provide :meth:`_make_server`, :meth:`shutdown` and
    :meth:`_cleanup_server`.
    """

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout")

    def __init__(
        self,
        *,
        handle_signals: bool = False,
        shutdown_timeout: float = 60.0,
        **kwargs: Any,
    ) -> None:
        self._handle_signals = handle_signals
        # Extra kwargs are forwarded by subclasses when building the server.
        self._kwargs = kwargs
        self._server: Optional[Server] = None
        self._sites: List[BaseSite] = []
        self._shutdown_timeout = shutdown_timeout

    @property
    def server(self) -> Optional[Server]:
        """The created Server, or None before setup()."""
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """Socket addresses of all started sites (getsockname() results)."""
        ret: List[Any] = []
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets  # type: ignore[attr-defined]
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        """Snapshot (copy) of the registered sites."""
        return set(self._sites)

    async def setup(self) -> None:
        """Install signal handlers (optionally) and create the server."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Call any shutdown hooks to help server close gracefully."""

    async def cleanup(self) -> None:
        """Stop all sites, shut the server down, and undo setup()."""
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()

        if self._server:  # If setup succeeded
            # Yield to event loop to ensure incoming requests prior to stopping the sites
            # have all started to be handled before we proceed to close idle connections.
            await asyncio.sleep(0)
            self._server.pre_shutdown()
            await self.shutdown()
            await self._server.shutdown(self._shutdown_timeout)
        await self._cleanup_server()

        self._server = None
        if self._handle_signals:
            loop = asyncio.get_running_loop()
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        """Build and return the Server instance (subclass hook)."""
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        """Release server-specific resources (subclass hook)."""
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        # Called by BaseSite.start(); duplicate registration is an error.
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        # Sanity check used by BaseSite.stop().
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        """No-op: a bare Server has no application shutdown hooks."""

    async def _make_server(self) -> Server:
        # The Server instance was supplied up front; nothing to build.
        return self._web_server

    async def _cleanup_server(self) -> None:
        """No-op: nothing extra to release for a bare Server."""
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        """The wrapped Application."""
        return self._app

    async def shutdown(self) -> None:
        # Delegate to the application's on_shutdown machinery.
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        # Bind the app to the loop, run startup hooks, then freeze it so no
        # further routes/signals can be added once serving begins.
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()

        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        await self._app.cleanup()
|
.venv/lib/python3.11/site-packages/aiohttp/worker.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Async gunicorn worker for aiohttp.web"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
from types import FrameType
|
| 9 |
+
from typing import TYPE_CHECKING, Any, Optional
|
| 10 |
+
|
| 11 |
+
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
|
| 12 |
+
from gunicorn.workers import base
|
| 13 |
+
|
| 14 |
+
from aiohttp import web
|
| 15 |
+
|
| 16 |
+
from .helpers import set_result
|
| 17 |
+
from .web_app import Application
|
| 18 |
+
from .web_log import AccessLogger
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
import ssl
|
| 22 |
+
|
| 23 |
+
SSLContext = ssl.SSLContext
|
| 24 |
+
else:
|
| 25 |
+
try:
|
| 26 |
+
import ssl
|
| 27 |
+
|
| 28 |
+
SSLContext = ssl.SSLContext
|
| 29 |
+
except ImportError: # pragma: no cover
|
| 30 |
+
ssl = None # type: ignore[assignment]
|
| 31 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
    """Gunicorn worker that serves an aiohttp Application on an asyncio loop.

    Fix vs. original: removed the dead no-op self-assignment ``runner = runner``
    in :meth:`_run`.
    """

    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default

    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
        super().__init__(*args, **kw)

        self._task: Optional[asyncio.Task[None]] = None
        self.exit_code = 0
        self._notify_waiter: Optional[asyncio.Future[bool]] = None

    def init_process(self) -> None:
        # create new event_loop after fork
        asyncio.get_event_loop().close()

        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        super().init_process()

    def run(self) -> None:
        """Drive the async serving task to completion, then exit the process."""
        self._task = self.loop.create_task(self._run())

        try:  # ignore all finalization problems
            self.loop.run_until_complete(self._task)
        except Exception:
            self.log.exception("Exception in gunicorn worker")
        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
        self.loop.close()

        sys.exit(self.exit_code)

    async def _run(self) -> None:
        """Set up an AppRunner for self.wsgi, serve the inherited sockets,
        and monitor liveness until shutdown is requested."""
        runner = None
        if isinstance(self.wsgi, Application):
            app = self.wsgi
        elif asyncio.iscoroutinefunction(self.wsgi):
            wsgi = await self.wsgi()
            if isinstance(wsgi, web.AppRunner):
                runner = wsgi
                app = runner.app
            else:
                app = wsgi
        else:
            raise RuntimeError(
                "wsgi app should be either Application or "
                "async function returning Application, got {}".format(self.wsgi)
            )

        if runner is None:
            access_log = self.log.access_log if self.cfg.accesslog else None
            runner = web.AppRunner(
                app,
                logger=self.log,
                keepalive_timeout=self.cfg.keepalive,
                access_log=access_log,
                access_log_format=self._get_valid_log_format(
                    self.cfg.access_log_format
                ),
                # Leave a 5% margin inside gunicorn's graceful timeout.
                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
            )
        await runner.setup()

        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None

        # runner is assigned on every path above; assert narrows Optional.
        assert runner is not None
        server = runner.server
        assert server is not None
        for sock in self.sockets:
            site = web.SockSite(
                runner,
                sock,
                ssl_context=ctx,
            )
            await site.start()

        # If our parent changed then we shut down.
        pid = os.getpid()
        try:
            while self.alive:  # type: ignore[has-type]
                self.notify()

                cnt = server.requests_count
                if self.max_requests and cnt > self.max_requests:
                    self.alive = False
                    self.log.info("Max requests, shutting down: %s", self)

                elif pid == os.getpid() and self.ppid != os.getppid():
                    self.alive = False
                    self.log.info("Parent changed, shutting down: %s", self)
                else:
                    await self._wait_next_notify()
        except BaseException:
            pass

        await runner.cleanup()

    def _wait_next_notify(self) -> "asyncio.Future[bool]":
        """Return a future resolved by the next notification tick (<= 1s)."""
        self._notify_waiter_done()

        loop = self.loop
        assert loop is not None
        self._notify_waiter = waiter = loop.create_future()
        self.loop.call_later(1.0, self._notify_waiter_done, waiter)

        return waiter

    def _notify_waiter_done(
        self, waiter: Optional["asyncio.Future[bool]"] = None
    ) -> None:
        """Resolve *waiter* (or the current one) and clear it if current."""
        if waiter is None:
            waiter = self._notify_waiter
        if waiter is not None:
            set_result(waiter, True)

        if waiter is self._notify_waiter:
            self._notify_waiter = None

    def init_signals(self) -> None:
        # Set up signals through the event loop API.

        self.loop.add_signal_handler(
            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
        )

        self.loop.add_signal_handler(
            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
        )

        self.loop.add_signal_handler(
            signal.SIGINT, self.handle_quit, signal.SIGINT, None
        )

        self.loop.add_signal_handler(
            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
        )

        self.loop.add_signal_handler(
            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
        )

        self.loop.add_signal_handler(
            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
        )

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)
        # Reset signals so Gunicorn doesn't swallow subprocess return codes
        # See: https://github.com/aio-libs/aiohttp/issues/6130

    def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
        """Request a graceful stop of the serving loop."""
        self.alive = False

        # worker_int callback
        self.cfg.worker_int(self)

        # wakeup closing process
        self._notify_waiter_done()

    def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
        """Abort immediately with exit code 1."""
        self.alive = False
        self.exit_code = 1
        self.cfg.worker_abort(self)
        sys.exit(1)

    @staticmethod
    def _create_ssl_context(cfg: Any) -> "SSLContext":
        """Creates SSLContext instance for usage in asyncio.create_server.

        See ssl.SSLSocket.__init__ for more details.
        """
        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")

        ctx = ssl.SSLContext(cfg.ssl_version)
        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
        ctx.verify_mode = cfg.cert_reqs
        if cfg.ca_certs:
            ctx.load_verify_locations(cfg.ca_certs)
        if cfg.ciphers:
            ctx.set_ciphers(cfg.ciphers)
        return ctx

    def _get_valid_log_format(self, source_format: str) -> str:
        """Translate gunicorn's default format; reject %()-style formats."""
        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
            return self.DEFAULT_AIOHTTP_LOG_FORMAT
        elif re.search(r"%\([^\)]+\)", source_format):
            raise ValueError(
                "Gunicorn's style options in form of `%(name)s` are not "
                "supported for the log formatting. Please use aiohttp's "
                "format specification to configure access log formatting: "
                "http://docs.aiohttp.org/en/stable/logging.html"
                "#format-specification"
            )
        else:
            return source_format
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class GunicornUVLoopWebWorker(GunicornWebWorker):
    """GunicornWebWorker variant that installs the uvloop event-loop policy."""

    def init_process(self) -> None:
        # uvloop is imported lazily so the base worker stays usable without it.
        import uvloop

        # Close any existing event loop before setting a
        # new policy.
        asyncio.get_event_loop().close()

        # Setup uvloop policy, so that every
        # asyncio.get_event_loop() will create an instance
        # of uvloop event loop.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        super().init_process()
|
.venv/lib/python3.11/site-packages/filelock/__init__.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
A platform independent file lock that supports the with-statement.
|
| 3 |
+
|
| 4 |
+
.. autodata:: filelock.__version__
|
| 5 |
+
:no-value:
|
| 6 |
+
|
| 7 |
+
"""
|
| 8 |
+
|
| 9 |
+
from __future__ import annotations
|
| 10 |
+
|
| 11 |
+
import sys
|
| 12 |
+
import warnings
|
| 13 |
+
from typing import TYPE_CHECKING
|
| 14 |
+
|
| 15 |
+
from ._api import AcquireReturnProxy, BaseFileLock
|
| 16 |
+
from ._error import Timeout
|
| 17 |
+
from ._soft import SoftFileLock
|
| 18 |
+
from ._unix import UnixFileLock, has_fcntl
|
| 19 |
+
from ._windows import WindowsFileLock
|
| 20 |
+
from .asyncio import (
|
| 21 |
+
AsyncAcquireReturnProxy,
|
| 22 |
+
AsyncSoftFileLock,
|
| 23 |
+
AsyncUnixFileLock,
|
| 24 |
+
AsyncWindowsFileLock,
|
| 25 |
+
BaseAsyncFileLock,
|
| 26 |
+
)
|
| 27 |
+
from .version import version
|
| 28 |
+
|
| 29 |
+
#: version of the project as a string
|
| 30 |
+
__version__: str = version
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
# Pick the concrete lock implementations for the current platform:
# Windows -> WindowsFileLock; POSIX with fcntl -> UnixFileLock;
# otherwise fall back to the advisory SoftFileLock (with a warning).
if sys.platform == "win32":  # pragma: win32 cover
    _FileLock: type[BaseFileLock] = WindowsFileLock
    _AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock
else:  # pragma: win32 no cover # noqa: PLR5501
    if has_fcntl:
        _FileLock: type[BaseFileLock] = UnixFileLock
        _AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock
    else:
        _FileLock = SoftFileLock
        _AsyncFileLock = AsyncSoftFileLock
        # NOTE(review): `warnings` is a module import, so this check is
        # always true — presumably defensive; confirm against upstream.
        if warnings is not None:
            warnings.warn("only soft file lock is available", stacklevel=2)

# Static type checkers see a fixed concrete class; at runtime the alias
# points at whichever implementation was selected above.
if TYPE_CHECKING:
    FileLock = SoftFileLock
    AsyncFileLock = AsyncSoftFileLock
else:
    #: Alias for the lock, which should be used for the current platform.
    FileLock = _FileLock
    AsyncFileLock = _AsyncFileLock


__all__ = [
    "AcquireReturnProxy",
    "AsyncAcquireReturnProxy",
    "AsyncFileLock",
    "AsyncSoftFileLock",
    "AsyncUnixFileLock",
    "AsyncWindowsFileLock",
    "BaseAsyncFileLock",
    "BaseFileLock",
    "FileLock",
    "SoftFileLock",
    "Timeout",
    "UnixFileLock",
    "WindowsFileLock",
    "__version__",
]
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (1.83 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/_api.cpython-311.pyc
ADDED
|
Binary file (17.9 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/_error.cpython-311.pyc
ADDED
|
Binary file (1.94 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/_soft.cpython-311.pyc
ADDED
|
Binary file (2.69 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/_unix.cpython-311.pyc
ADDED
|
Binary file (3.72 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/_util.cpython-311.pyc
ADDED
|
Binary file (2.17 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/_windows.cpython-311.pyc
ADDED
|
Binary file (3.65 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/asyncio.cpython-311.pyc
ADDED
|
Binary file (16.6 kB). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/__pycache__/version.cpython-311.pyc
ADDED
|
Binary file (639 Bytes). View file
|
|
|
.venv/lib/python3.11/site-packages/filelock/_api.py
ADDED
|
@@ -0,0 +1,403 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import contextlib
|
| 4 |
+
import inspect
|
| 5 |
+
import logging
|
| 6 |
+
import os
|
| 7 |
+
import time
|
| 8 |
+
import warnings
|
| 9 |
+
from abc import ABCMeta, abstractmethod
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from threading import local
|
| 12 |
+
from typing import TYPE_CHECKING, Any, cast
|
| 13 |
+
from weakref import WeakValueDictionary
|
| 14 |
+
|
| 15 |
+
from ._error import Timeout
|
| 16 |
+
|
| 17 |
+
if TYPE_CHECKING:
|
| 18 |
+
import sys
|
| 19 |
+
from types import TracebackType
|
| 20 |
+
|
| 21 |
+
if sys.version_info >= (3, 11): # pragma: no cover (py311+)
|
| 22 |
+
from typing import Self
|
| 23 |
+
else: # pragma: no cover (<py311)
|
| 24 |
+
from typing_extensions import Self
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
_LOGGER = logging.getLogger("filelock")
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
# This is a helper class which is returned by :meth:`BaseFileLock.acquire` and wraps the lock to make sure __enter__
|
| 31 |
+
# is not called twice when entering the with statement. If we would simply return *self*, the lock would be acquired
|
| 32 |
+
# again in the *__enter__* method of the BaseFileLock, but not released again automatically. issue #37 (memory leak)
|
| 33 |
+
class AcquireReturnProxy:
|
| 34 |
+
"""A context-aware object that will release the lock file when exiting."""
|
| 35 |
+
|
| 36 |
+
def __init__(self, lock: BaseFileLock) -> None:
|
| 37 |
+
self.lock = lock
|
| 38 |
+
|
| 39 |
+
def __enter__(self) -> BaseFileLock:
|
| 40 |
+
return self.lock
|
| 41 |
+
|
| 42 |
+
def __exit__(
|
| 43 |
+
self,
|
| 44 |
+
exc_type: type[BaseException] | None,
|
| 45 |
+
exc_value: BaseException | None,
|
| 46 |
+
traceback: TracebackType | None,
|
| 47 |
+
) -> None:
|
| 48 |
+
self.lock.release()
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
@dataclass
|
| 52 |
+
class FileLockContext:
|
| 53 |
+
"""A dataclass which holds the context for a ``BaseFileLock`` object."""
|
| 54 |
+
|
| 55 |
+
# The context is held in a separate class to allow optional use of thread local storage via the
|
| 56 |
+
# ThreadLocalFileContext class.
|
| 57 |
+
|
| 58 |
+
#: The path to the lock file.
|
| 59 |
+
lock_file: str
|
| 60 |
+
|
| 61 |
+
#: The default timeout value.
|
| 62 |
+
timeout: float
|
| 63 |
+
|
| 64 |
+
#: The mode for the lock files
|
| 65 |
+
mode: int
|
| 66 |
+
|
| 67 |
+
#: Whether the lock should be blocking or not
|
| 68 |
+
blocking: bool
|
| 69 |
+
|
| 70 |
+
#: The file descriptor for the *_lock_file* as it is returned by the os.open() function, not None when lock held
|
| 71 |
+
lock_file_fd: int | None = None
|
| 72 |
+
|
| 73 |
+
#: The lock counter is used for implementing the nested locking mechanism.
|
| 74 |
+
lock_counter: int = 0 # When the lock is acquired is increased and the lock is only released, when this value is 0
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class ThreadLocalFileContext(FileLockContext, local):
|
| 78 |
+
"""A thread local version of the ``FileLockContext`` class."""
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class FileLockMeta(ABCMeta):
|
| 82 |
+
def __call__( # noqa: PLR0913
|
| 83 |
+
cls,
|
| 84 |
+
lock_file: str | os.PathLike[str],
|
| 85 |
+
timeout: float = -1,
|
| 86 |
+
mode: int = 0o644,
|
| 87 |
+
thread_local: bool = True, # noqa: FBT001, FBT002
|
| 88 |
+
*,
|
| 89 |
+
blocking: bool = True,
|
| 90 |
+
is_singleton: bool = False,
|
| 91 |
+
**kwargs: Any, # capture remaining kwargs for subclasses # noqa: ANN401
|
| 92 |
+
) -> BaseFileLock:
|
| 93 |
+
if is_singleton:
|
| 94 |
+
instance = cls._instances.get(str(lock_file)) # type: ignore[attr-defined]
|
| 95 |
+
if instance:
|
| 96 |
+
params_to_check = {
|
| 97 |
+
"thread_local": (thread_local, instance.is_thread_local()),
|
| 98 |
+
"timeout": (timeout, instance.timeout),
|
| 99 |
+
"mode": (mode, instance.mode),
|
| 100 |
+
"blocking": (blocking, instance.blocking),
|
| 101 |
+
}
|
| 102 |
+
|
| 103 |
+
non_matching_params = {
|
| 104 |
+
name: (passed_param, set_param)
|
| 105 |
+
for name, (passed_param, set_param) in params_to_check.items()
|
| 106 |
+
if passed_param != set_param
|
| 107 |
+
}
|
| 108 |
+
if not non_matching_params:
|
| 109 |
+
return cast("BaseFileLock", instance)
|
| 110 |
+
|
| 111 |
+
# parameters do not match; raise error
|
| 112 |
+
msg = "Singleton lock instances cannot be initialized with differing arguments"
|
| 113 |
+
msg += "\nNon-matching arguments: "
|
| 114 |
+
for param_name, (passed_param, set_param) in non_matching_params.items():
|
| 115 |
+
msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)"
|
| 116 |
+
raise ValueError(msg)
|
| 117 |
+
|
| 118 |
+
# Workaround to make `__init__`'s params optional in subclasses
|
| 119 |
+
# E.g. virtualenv changes the signature of the `__init__` method in the `BaseFileLock` class descendant
|
| 120 |
+
# (https://github.com/tox-dev/filelock/pull/340)
|
| 121 |
+
|
| 122 |
+
all_params = {
|
| 123 |
+
"timeout": timeout,
|
| 124 |
+
"mode": mode,
|
| 125 |
+
"thread_local": thread_local,
|
| 126 |
+
"blocking": blocking,
|
| 127 |
+
"is_singleton": is_singleton,
|
| 128 |
+
**kwargs,
|
| 129 |
+
}
|
| 130 |
+
|
| 131 |
+
present_params = inspect.signature(cls.__init__).parameters # type: ignore[misc]
|
| 132 |
+
init_params = {key: value for key, value in all_params.items() if key in present_params}
|
| 133 |
+
|
| 134 |
+
instance = super().__call__(lock_file, **init_params)
|
| 135 |
+
|
| 136 |
+
if is_singleton:
|
| 137 |
+
cls._instances[str(lock_file)] = instance # type: ignore[attr-defined]
|
| 138 |
+
|
| 139 |
+
return cast("BaseFileLock", instance)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
class BaseFileLock(contextlib.ContextDecorator, metaclass=FileLockMeta):
|
| 143 |
+
"""Abstract base class for a file lock object."""
|
| 144 |
+
|
| 145 |
+
_instances: WeakValueDictionary[str, BaseFileLock]
|
| 146 |
+
|
| 147 |
+
def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None:
|
| 148 |
+
"""Setup unique state for lock subclasses."""
|
| 149 |
+
super().__init_subclass__(**kwargs)
|
| 150 |
+
cls._instances = WeakValueDictionary()
|
| 151 |
+
|
| 152 |
+
def __init__( # noqa: PLR0913
|
| 153 |
+
self,
|
| 154 |
+
lock_file: str | os.PathLike[str],
|
| 155 |
+
timeout: float = -1,
|
| 156 |
+
mode: int = 0o644,
|
| 157 |
+
thread_local: bool = True, # noqa: FBT001, FBT002
|
| 158 |
+
*,
|
| 159 |
+
blocking: bool = True,
|
| 160 |
+
is_singleton: bool = False,
|
| 161 |
+
) -> None:
|
| 162 |
+
"""
|
| 163 |
+
Create a new lock object.
|
| 164 |
+
|
| 165 |
+
:param lock_file: path to the file
|
| 166 |
+
:param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
|
| 167 |
+
the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
|
| 168 |
+
to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
|
| 169 |
+
:param mode: file permissions for the lockfile
|
| 170 |
+
:param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
|
| 171 |
+
``False`` then the lock will be reentrant across threads.
|
| 172 |
+
:param blocking: whether the lock should be blocking or not
|
| 173 |
+
:param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
|
| 174 |
+
per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
|
| 175 |
+
to pass the same object around.
|
| 176 |
+
|
| 177 |
+
"""
|
| 178 |
+
self._is_thread_local = thread_local
|
| 179 |
+
self._is_singleton = is_singleton
|
| 180 |
+
|
| 181 |
+
# Create the context. Note that external code should not work with the context directly and should instead use
|
| 182 |
+
# properties of this class.
|
| 183 |
+
kwargs: dict[str, Any] = {
|
| 184 |
+
"lock_file": os.fspath(lock_file),
|
| 185 |
+
"timeout": timeout,
|
| 186 |
+
"mode": mode,
|
| 187 |
+
"blocking": blocking,
|
| 188 |
+
}
|
| 189 |
+
self._context: FileLockContext = (ThreadLocalFileContext if thread_local else FileLockContext)(**kwargs)
|
| 190 |
+
|
| 191 |
+
def is_thread_local(self) -> bool:
|
| 192 |
+
""":return: a flag indicating if this lock is thread local or not"""
|
| 193 |
+
return self._is_thread_local
|
| 194 |
+
|
| 195 |
+
@property
|
| 196 |
+
def is_singleton(self) -> bool:
|
| 197 |
+
""":return: a flag indicating if this lock is singleton or not"""
|
| 198 |
+
return self._is_singleton
|
| 199 |
+
|
| 200 |
+
@property
|
| 201 |
+
def lock_file(self) -> str:
|
| 202 |
+
""":return: path to the lock file"""
|
| 203 |
+
return self._context.lock_file
|
| 204 |
+
|
| 205 |
+
@property
|
| 206 |
+
def timeout(self) -> float:
|
| 207 |
+
"""
|
| 208 |
+
:return: the default timeout value, in seconds
|
| 209 |
+
|
| 210 |
+
.. versionadded:: 2.0.0
|
| 211 |
+
"""
|
| 212 |
+
return self._context.timeout
|
| 213 |
+
|
| 214 |
+
@timeout.setter
|
| 215 |
+
def timeout(self, value: float | str) -> None:
|
| 216 |
+
"""
|
| 217 |
+
Change the default timeout value.
|
| 218 |
+
|
| 219 |
+
:param value: the new value, in seconds
|
| 220 |
+
|
| 221 |
+
"""
|
| 222 |
+
self._context.timeout = float(value)
|
| 223 |
+
|
| 224 |
+
@property
|
| 225 |
+
def blocking(self) -> bool:
|
| 226 |
+
""":return: whether the locking is blocking or not"""
|
| 227 |
+
return self._context.blocking
|
| 228 |
+
|
| 229 |
+
@blocking.setter
|
| 230 |
+
def blocking(self, value: bool) -> None:
|
| 231 |
+
"""
|
| 232 |
+
Change the default blocking value.
|
| 233 |
+
|
| 234 |
+
:param value: the new value as bool
|
| 235 |
+
|
| 236 |
+
"""
|
| 237 |
+
self._context.blocking = value
|
| 238 |
+
|
| 239 |
+
@property
|
| 240 |
+
def mode(self) -> int:
|
| 241 |
+
""":return: the file permissions for the lockfile"""
|
| 242 |
+
return self._context.mode
|
| 243 |
+
|
| 244 |
+
@abstractmethod
|
| 245 |
+
def _acquire(self) -> None:
|
| 246 |
+
"""If the file lock could be acquired, self._context.lock_file_fd holds the file descriptor of the lock file."""
|
| 247 |
+
raise NotImplementedError
|
| 248 |
+
|
| 249 |
+
@abstractmethod
|
| 250 |
+
def _release(self) -> None:
|
| 251 |
+
"""Releases the lock and sets self._context.lock_file_fd to None."""
|
| 252 |
+
raise NotImplementedError
|
| 253 |
+
|
| 254 |
+
@property
|
| 255 |
+
def is_locked(self) -> bool:
|
| 256 |
+
"""
|
| 257 |
+
|
| 258 |
+
:return: A boolean indicating if the lock file is holding the lock currently.
|
| 259 |
+
|
| 260 |
+
.. versionchanged:: 2.0.0
|
| 261 |
+
|
| 262 |
+
This was previously a method and is now a property.
|
| 263 |
+
"""
|
| 264 |
+
return self._context.lock_file_fd is not None
|
| 265 |
+
|
| 266 |
+
@property
|
| 267 |
+
def lock_counter(self) -> int:
|
| 268 |
+
""":return: The number of times this lock has been acquired (but not yet released)."""
|
| 269 |
+
return self._context.lock_counter
|
| 270 |
+
|
| 271 |
+
def acquire(
|
| 272 |
+
self,
|
| 273 |
+
timeout: float | None = None,
|
| 274 |
+
poll_interval: float = 0.05,
|
| 275 |
+
*,
|
| 276 |
+
poll_intervall: float | None = None,
|
| 277 |
+
blocking: bool | None = None,
|
| 278 |
+
) -> AcquireReturnProxy:
|
| 279 |
+
"""
|
| 280 |
+
Try to acquire the file lock.
|
| 281 |
+
|
| 282 |
+
:param timeout: maximum wait time for acquiring the lock, ``None`` means use the default :attr:`~timeout` is and
|
| 283 |
+
if ``timeout < 0``, there is no timeout and this method will block until the lock could be acquired
|
| 284 |
+
:param poll_interval: interval of trying to acquire the lock file
|
| 285 |
+
:param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
|
| 286 |
+
:param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
|
| 287 |
+
first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
|
| 288 |
+
:raises Timeout: if fails to acquire lock within the timeout period
|
| 289 |
+
:return: a context object that will unlock the file when the context is exited
|
| 290 |
+
|
| 291 |
+
.. code-block:: python
|
| 292 |
+
|
| 293 |
+
# You can use this method in the context manager (recommended)
|
| 294 |
+
with lock.acquire():
|
| 295 |
+
pass
|
| 296 |
+
|
| 297 |
+
# Or use an equivalent try-finally construct:
|
| 298 |
+
lock.acquire()
|
| 299 |
+
try:
|
| 300 |
+
pass
|
| 301 |
+
finally:
|
| 302 |
+
lock.release()
|
| 303 |
+
|
| 304 |
+
.. versionchanged:: 2.0.0
|
| 305 |
+
|
| 306 |
+
This method returns now a *proxy* object instead of *self*,
|
| 307 |
+
so that it can be used in a with statement without side effects.
|
| 308 |
+
|
| 309 |
+
"""
|
| 310 |
+
# Use the default timeout, if no timeout is provided.
|
| 311 |
+
if timeout is None:
|
| 312 |
+
timeout = self._context.timeout
|
| 313 |
+
|
| 314 |
+
if blocking is None:
|
| 315 |
+
blocking = self._context.blocking
|
| 316 |
+
|
| 317 |
+
if poll_intervall is not None:
|
| 318 |
+
msg = "use poll_interval instead of poll_intervall"
|
| 319 |
+
warnings.warn(msg, DeprecationWarning, stacklevel=2)
|
| 320 |
+
poll_interval = poll_intervall
|
| 321 |
+
|
| 322 |
+
# Increment the number right at the beginning. We can still undo it, if something fails.
|
| 323 |
+
self._context.lock_counter += 1
|
| 324 |
+
|
| 325 |
+
lock_id = id(self)
|
| 326 |
+
lock_filename = self.lock_file
|
| 327 |
+
start_time = time.perf_counter()
|
| 328 |
+
try:
|
| 329 |
+
while True:
|
| 330 |
+
if not self.is_locked:
|
| 331 |
+
_LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
|
| 332 |
+
self._acquire()
|
| 333 |
+
if self.is_locked:
|
| 334 |
+
_LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
|
| 335 |
+
break
|
| 336 |
+
if blocking is False:
|
| 337 |
+
_LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
|
| 338 |
+
raise Timeout(lock_filename) # noqa: TRY301
|
| 339 |
+
if 0 <= timeout < time.perf_counter() - start_time:
|
| 340 |
+
_LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
|
| 341 |
+
raise Timeout(lock_filename) # noqa: TRY301
|
| 342 |
+
msg = "Lock %s not acquired on %s, waiting %s seconds ..."
|
| 343 |
+
_LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
|
| 344 |
+
time.sleep(poll_interval)
|
| 345 |
+
except BaseException: # Something did go wrong, so decrement the counter.
|
| 346 |
+
self._context.lock_counter = max(0, self._context.lock_counter - 1)
|
| 347 |
+
raise
|
| 348 |
+
return AcquireReturnProxy(lock=self)
|
| 349 |
+
|
| 350 |
+
def release(self, force: bool = False) -> None: # noqa: FBT001, FBT002
|
| 351 |
+
"""
|
| 352 |
+
Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
|
| 353 |
+
Also note, that the lock file itself is not automatically deleted.
|
| 354 |
+
|
| 355 |
+
:param force: If true, the lock counter is ignored and the lock is released in every case/
|
| 356 |
+
|
| 357 |
+
"""
|
| 358 |
+
if self.is_locked:
|
| 359 |
+
self._context.lock_counter -= 1
|
| 360 |
+
|
| 361 |
+
if self._context.lock_counter == 0 or force:
|
| 362 |
+
lock_id, lock_filename = id(self), self.lock_file
|
| 363 |
+
|
| 364 |
+
_LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
|
| 365 |
+
self._release()
|
| 366 |
+
self._context.lock_counter = 0
|
| 367 |
+
_LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
|
| 368 |
+
|
| 369 |
+
def __enter__(self) -> Self:
|
| 370 |
+
"""
|
| 371 |
+
Acquire the lock.
|
| 372 |
+
|
| 373 |
+
:return: the lock object
|
| 374 |
+
|
| 375 |
+
"""
|
| 376 |
+
self.acquire()
|
| 377 |
+
return self
|
| 378 |
+
|
| 379 |
+
def __exit__(
|
| 380 |
+
self,
|
| 381 |
+
exc_type: type[BaseException] | None,
|
| 382 |
+
exc_value: BaseException | None,
|
| 383 |
+
traceback: TracebackType | None,
|
| 384 |
+
) -> None:
|
| 385 |
+
"""
|
| 386 |
+
Release the lock.
|
| 387 |
+
|
| 388 |
+
:param exc_type: the exception type if raised
|
| 389 |
+
:param exc_value: the exception value if raised
|
| 390 |
+
:param traceback: the exception traceback if raised
|
| 391 |
+
|
| 392 |
+
"""
|
| 393 |
+
self.release()
|
| 394 |
+
|
| 395 |
+
def __del__(self) -> None:
|
| 396 |
+
"""Called when the lock object is deleted."""
|
| 397 |
+
self.release(force=True)
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
__all__ = [
|
| 401 |
+
"AcquireReturnProxy",
|
| 402 |
+
"BaseFileLock",
|
| 403 |
+
]
|
.venv/lib/python3.11/site-packages/filelock/_error.py
ADDED
|
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from typing import Any
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class Timeout(TimeoutError): # noqa: N818
|
| 7 |
+
"""Raised when the lock could not be acquired in *timeout* seconds."""
|
| 8 |
+
|
| 9 |
+
def __init__(self, lock_file: str) -> None:
|
| 10 |
+
super().__init__()
|
| 11 |
+
self._lock_file = lock_file
|
| 12 |
+
|
| 13 |
+
def __reduce__(self) -> str | tuple[Any, ...]:
|
| 14 |
+
return self.__class__, (self._lock_file,) # Properly pickle the exception
|
| 15 |
+
|
| 16 |
+
def __str__(self) -> str:
|
| 17 |
+
return f"The file lock '{self._lock_file}' could not be acquired."
|
| 18 |
+
|
| 19 |
+
def __repr__(self) -> str:
|
| 20 |
+
return f"{self.__class__.__name__}({self.lock_file!r})"
|
| 21 |
+
|
| 22 |
+
@property
|
| 23 |
+
def lock_file(self) -> str:
|
| 24 |
+
""":return: The path of the file lock."""
|
| 25 |
+
return self._lock_file
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
__all__ = [
|
| 29 |
+
"Timeout",
|
| 30 |
+
]
|
.venv/lib/python3.11/site-packages/filelock/_soft.py
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from errno import EACCES, EEXIST
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
|
| 9 |
+
from ._api import BaseFileLock
|
| 10 |
+
from ._util import ensure_directory_exists, raise_on_not_writable_file
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class SoftFileLock(BaseFileLock):
|
| 14 |
+
"""Simply watches the existence of the lock file."""
|
| 15 |
+
|
| 16 |
+
def _acquire(self) -> None:
|
| 17 |
+
raise_on_not_writable_file(self.lock_file)
|
| 18 |
+
ensure_directory_exists(self.lock_file)
|
| 19 |
+
# first check for exists and read-only mode as the open will mask this case as EEXIST
|
| 20 |
+
flags = (
|
| 21 |
+
os.O_WRONLY # open for writing only
|
| 22 |
+
| os.O_CREAT
|
| 23 |
+
| os.O_EXCL # together with above raise EEXIST if the file specified by filename exists
|
| 24 |
+
| os.O_TRUNC # truncate the file to zero byte
|
| 25 |
+
)
|
| 26 |
+
try:
|
| 27 |
+
file_handler = os.open(self.lock_file, flags, self._context.mode)
|
| 28 |
+
except OSError as exception: # re-raise unless expected exception
|
| 29 |
+
if not (
|
| 30 |
+
exception.errno == EEXIST # lock already exist
|
| 31 |
+
or (exception.errno == EACCES and sys.platform == "win32") # has no access to this lock
|
| 32 |
+
): # pragma: win32 no cover
|
| 33 |
+
raise
|
| 34 |
+
else:
|
| 35 |
+
self._context.lock_file_fd = file_handler
|
| 36 |
+
|
| 37 |
+
def _release(self) -> None:
|
| 38 |
+
assert self._context.lock_file_fd is not None # noqa: S101
|
| 39 |
+
os.close(self._context.lock_file_fd) # the lock file is definitely not None
|
| 40 |
+
self._context.lock_file_fd = None
|
| 41 |
+
with suppress(OSError): # the file is already deleted and that's what we want
|
| 42 |
+
Path(self.lock_file).unlink()
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
__all__ = [
|
| 46 |
+
"SoftFileLock",
|
| 47 |
+
]
|
.venv/lib/python3.11/site-packages/filelock/_unix.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from errno import ENOSYS
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from typing import cast
|
| 9 |
+
|
| 10 |
+
from ._api import BaseFileLock
|
| 11 |
+
from ._util import ensure_directory_exists
|
| 12 |
+
|
| 13 |
+
#: a flag to indicate if the fcntl API is available
|
| 14 |
+
has_fcntl = False
|
| 15 |
+
if sys.platform == "win32": # pragma: win32 cover
|
| 16 |
+
|
| 17 |
+
class UnixFileLock(BaseFileLock):
|
| 18 |
+
"""Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
|
| 19 |
+
|
| 20 |
+
def _acquire(self) -> None:
|
| 21 |
+
raise NotImplementedError
|
| 22 |
+
|
| 23 |
+
def _release(self) -> None:
|
| 24 |
+
raise NotImplementedError
|
| 25 |
+
|
| 26 |
+
else: # pragma: win32 no cover
|
| 27 |
+
try:
|
| 28 |
+
import fcntl
|
| 29 |
+
except ImportError:
|
| 30 |
+
pass
|
| 31 |
+
else:
|
| 32 |
+
has_fcntl = True
|
| 33 |
+
|
| 34 |
+
class UnixFileLock(BaseFileLock):
|
| 35 |
+
"""Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
|
| 36 |
+
|
| 37 |
+
def _acquire(self) -> None:
|
| 38 |
+
ensure_directory_exists(self.lock_file)
|
| 39 |
+
open_flags = os.O_RDWR | os.O_TRUNC
|
| 40 |
+
if not Path(self.lock_file).exists():
|
| 41 |
+
open_flags |= os.O_CREAT
|
| 42 |
+
fd = os.open(self.lock_file, open_flags, self._context.mode)
|
| 43 |
+
with suppress(PermissionError): # This locked is not owned by this UID
|
| 44 |
+
os.fchmod(fd, self._context.mode)
|
| 45 |
+
try:
|
| 46 |
+
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
|
| 47 |
+
except OSError as exception:
|
| 48 |
+
os.close(fd)
|
| 49 |
+
if exception.errno == ENOSYS: # NotImplemented error
|
| 50 |
+
msg = "FileSystem does not appear to support flock; use SoftFileLock instead"
|
| 51 |
+
raise NotImplementedError(msg) from exception
|
| 52 |
+
else:
|
| 53 |
+
self._context.lock_file_fd = fd
|
| 54 |
+
|
| 55 |
+
def _release(self) -> None:
|
| 56 |
+
# Do not remove the lockfile:
|
| 57 |
+
# https://github.com/tox-dev/py-filelock/issues/31
|
| 58 |
+
# https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
|
| 59 |
+
fd = cast("int", self._context.lock_file_fd)
|
| 60 |
+
self._context.lock_file_fd = None
|
| 61 |
+
fcntl.flock(fd, fcntl.LOCK_UN)
|
| 62 |
+
os.close(fd)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
__all__ = [
|
| 66 |
+
"UnixFileLock",
|
| 67 |
+
"has_fcntl",
|
| 68 |
+
]
|
.venv/lib/python3.11/site-packages/filelock/_util.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import stat
|
| 5 |
+
import sys
|
| 6 |
+
from errno import EACCES, EISDIR
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def raise_on_not_writable_file(filename: str) -> None:
|
| 11 |
+
"""
|
| 12 |
+
Raise an exception if attempting to open the file for writing would fail.
|
| 13 |
+
|
| 14 |
+
This is done so files that will never be writable can be separated from files that are writable but currently
|
| 15 |
+
locked.
|
| 16 |
+
|
| 17 |
+
:param filename: file to check
|
| 18 |
+
:raises OSError: as if the file was opened for writing.
|
| 19 |
+
|
| 20 |
+
"""
|
| 21 |
+
try: # use stat to do exists + can write to check without race condition
|
| 22 |
+
file_stat = os.stat(filename) # noqa: PTH116
|
| 23 |
+
except OSError:
|
| 24 |
+
return # swallow does not exist or other errors
|
| 25 |
+
|
| 26 |
+
if file_stat.st_mtime != 0: # if os.stat returns but modification is zero that's an invalid os.stat - ignore it
|
| 27 |
+
if not (file_stat.st_mode & stat.S_IWUSR):
|
| 28 |
+
raise PermissionError(EACCES, "Permission denied", filename)
|
| 29 |
+
|
| 30 |
+
if stat.S_ISDIR(file_stat.st_mode):
|
| 31 |
+
if sys.platform == "win32": # pragma: win32 cover
|
| 32 |
+
# On Windows, this is PermissionError
|
| 33 |
+
raise PermissionError(EACCES, "Permission denied", filename)
|
| 34 |
+
else: # pragma: win32 no cover # noqa: RET506
|
| 35 |
+
# On linux / macOS, this is IsADirectoryError
|
| 36 |
+
raise IsADirectoryError(EISDIR, "Is a directory", filename)
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def ensure_directory_exists(filename: Path | str) -> None:
|
| 40 |
+
"""
|
| 41 |
+
Ensure the directory containing the file exists (create it if necessary).
|
| 42 |
+
|
| 43 |
+
:param filename: file.
|
| 44 |
+
|
| 45 |
+
"""
|
| 46 |
+
Path(filename).parent.mkdir(parents=True, exist_ok=True)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
__all__ = [
|
| 50 |
+
"ensure_directory_exists",
|
| 51 |
+
"raise_on_not_writable_file",
|
| 52 |
+
]
|
.venv/lib/python3.11/site-packages/filelock/_windows.py
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from errno import EACCES
|
| 7 |
+
from pathlib import Path
|
| 8 |
+
from typing import cast
|
| 9 |
+
|
| 10 |
+
from ._api import BaseFileLock
|
| 11 |
+
from ._util import ensure_directory_exists, raise_on_not_writable_file
|
| 12 |
+
|
| 13 |
+
if sys.platform == "win32": # pragma: win32 cover
|
| 14 |
+
import msvcrt
|
| 15 |
+
|
| 16 |
+
class WindowsFileLock(BaseFileLock):
|
| 17 |
+
"""Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""
|
| 18 |
+
|
| 19 |
+
def _acquire(self) -> None:
|
| 20 |
+
raise_on_not_writable_file(self.lock_file)
|
| 21 |
+
ensure_directory_exists(self.lock_file)
|
| 22 |
+
flags = (
|
| 23 |
+
os.O_RDWR # open for read and write
|
| 24 |
+
| os.O_CREAT # create file if not exists
|
| 25 |
+
| os.O_TRUNC # truncate file if not empty
|
| 26 |
+
)
|
| 27 |
+
try:
|
| 28 |
+
fd = os.open(self.lock_file, flags, self._context.mode)
|
| 29 |
+
except OSError as exception:
|
| 30 |
+
if exception.errno != EACCES: # has no access to this lock
|
| 31 |
+
raise
|
| 32 |
+
else:
|
| 33 |
+
try:
|
| 34 |
+
msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
|
| 35 |
+
except OSError as exception:
|
| 36 |
+
os.close(fd) # close file first
|
| 37 |
+
if exception.errno != EACCES: # file is already locked
|
| 38 |
+
raise
|
| 39 |
+
else:
|
| 40 |
+
self._context.lock_file_fd = fd
|
| 41 |
+
|
| 42 |
+
def _release(self) -> None:
|
| 43 |
+
fd = cast("int", self._context.lock_file_fd)
|
| 44 |
+
self._context.lock_file_fd = None
|
| 45 |
+
msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
|
| 46 |
+
os.close(fd)
|
| 47 |
+
|
| 48 |
+
with suppress(OSError): # Probably another instance of the application hat acquired the file lock.
|
| 49 |
+
Path(self.lock_file).unlink()
|
| 50 |
+
|
| 51 |
+
else: # pragma: win32 no cover
|
| 52 |
+
|
| 53 |
+
class WindowsFileLock(BaseFileLock):
|
| 54 |
+
"""Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""
|
| 55 |
+
|
| 56 |
+
def _acquire(self) -> None:
|
| 57 |
+
raise NotImplementedError
|
| 58 |
+
|
| 59 |
+
def _release(self) -> None:
|
| 60 |
+
raise NotImplementedError
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
__all__ = [
|
| 64 |
+
"WindowsFileLock",
|
| 65 |
+
]
|
.venv/lib/python3.11/site-packages/filelock/asyncio.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""An asyncio-based implementation of the file lock.""" # noqa: A005
|
| 2 |
+
|
| 3 |
+
from __future__ import annotations
|
| 4 |
+
|
| 5 |
+
import asyncio
|
| 6 |
+
import contextlib
|
| 7 |
+
import logging
|
| 8 |
+
import os
|
| 9 |
+
import time
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from threading import local
|
| 12 |
+
from typing import TYPE_CHECKING, Any, Callable, NoReturn, cast
|
| 13 |
+
|
| 14 |
+
from ._api import BaseFileLock, FileLockContext, FileLockMeta
|
| 15 |
+
from ._error import Timeout
|
| 16 |
+
from ._soft import SoftFileLock
|
| 17 |
+
from ._unix import UnixFileLock
|
| 18 |
+
from ._windows import WindowsFileLock
|
| 19 |
+
|
| 20 |
+
if TYPE_CHECKING:
|
| 21 |
+
import sys
|
| 22 |
+
from concurrent import futures
|
| 23 |
+
from types import TracebackType
|
| 24 |
+
|
| 25 |
+
if sys.version_info >= (3, 11): # pragma: no cover (py311+)
|
| 26 |
+
from typing import Self
|
| 27 |
+
else: # pragma: no cover (<py311)
|
| 28 |
+
from typing_extensions import Self
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
# Module-level logger shared by the async lock implementations below.
_LOGGER = logging.getLogger("filelock")
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@dataclass
class AsyncFileLockContext(FileLockContext):
    """A dataclass which holds the context for a ``BaseAsyncFileLock`` object."""

    #: Whether the platform-specific acquire/release should run in an executor
    #: (keeps blocking file-system calls off the event loop)
    run_in_executor: bool = True

    #: The executor used when ``run_in_executor`` is true; ``None`` selects the loop's default executor
    executor: futures.Executor | None = None

    #: The event loop used to schedule executor work; ``None`` means use the currently running loop
    loop: asyncio.AbstractEventLoop | None = None
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class AsyncThreadLocalFileContext(AsyncFileLockContext, local):
    """A thread-local version of the ``AsyncFileLockContext`` class: each thread sees its own lock state."""
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class AsyncAcquireReturnProxy:
    """Async context manager handed out by ``acquire``; releases the wrapped lock on exit."""

    def __init__(self, lock: BaseAsyncFileLock) -> None:
        """Remember the lock object so it can be released when the context exits."""
        self.lock = lock

    async def __aenter__(self) -> BaseAsyncFileLock:
        """Hand back the (already acquired) lock object."""
        return self.lock

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """Release the lock, whether or not an exception occurred inside the context."""
        await self.lock.release()
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
class AsyncFileLockMeta(FileLockMeta):
    """Metaclass that validates and forwards the async-specific constructor arguments."""

    def __call__(  # type: ignore[override]  # noqa: PLR0913
        cls,  # noqa: N805
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = False,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
        loop: asyncio.AbstractEventLoop | None = None,
        run_in_executor: bool = True,
        executor: futures.Executor | None = None,
    ) -> BaseAsyncFileLock:
        # A thread-local context cannot be shared with executor threads, so the combination is rejected.
        if thread_local and run_in_executor:
            msg = "run_in_executor is not supported when thread_local is True"
            raise ValueError(msg)
        init_kwargs = {
            "lock_file": lock_file,
            "timeout": timeout,
            "mode": mode,
            "thread_local": thread_local,
            "blocking": blocking,
            "is_singleton": is_singleton,
            "loop": loop,
            "run_in_executor": run_in_executor,
            "executor": executor,
        }
        return cast("BaseAsyncFileLock", super().__call__(**init_kwargs))
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class BaseAsyncFileLock(BaseFileLock, metaclass=AsyncFileLockMeta):
    """Base class for asynchronous file locks."""

    def __init__(  # noqa: PLR0913
        self,
        lock_file: str | os.PathLike[str],
        timeout: float = -1,
        mode: int = 0o644,
        thread_local: bool = False,  # noqa: FBT001, FBT002
        *,
        blocking: bool = True,
        is_singleton: bool = False,
        loop: asyncio.AbstractEventLoop | None = None,
        run_in_executor: bool = True,
        executor: futures.Executor | None = None,
    ) -> None:
        """
        Create a new lock object.

        :param lock_file: path to the file
        :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
            the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
            to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
        :param mode: file permissions for the lockfile
        :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
            ``False`` then the lock will be reentrant across threads.
        :param blocking: whether the lock should be blocking or not
        :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
            per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
            to pass the same object around.
        :param loop: The event loop to use. If not specified, the running event loop will be used.
        :param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor.
        :param executor: The executor to use. If not specified, the default executor will be used.

        """
        self._is_thread_local = thread_local
        self._is_singleton = is_singleton

        # Create the context. Note that external code should not work with the context directly and should instead use
        # properties of this class.
        kwargs: dict[str, Any] = {
            "lock_file": os.fspath(lock_file),
            "timeout": timeout,
            "mode": mode,
            "blocking": blocking,
            "loop": loop,
            "run_in_executor": run_in_executor,
            "executor": executor,
        }
        # Thread-local contexts give each thread its own lock counter/fd; otherwise state is shared.
        self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)(
            **kwargs
        )

    @property
    def run_in_executor(self) -> bool:
        """::return: whether the internal acquire/release methods run in an executor."""
        return self._context.run_in_executor

    @property
    def executor(self) -> futures.Executor | None:
        """::return: the executor used for internal methods (``None`` means the loop's default)."""
        return self._context.executor

    @executor.setter
    def executor(self, value: futures.Executor | None) -> None:  # pragma: no cover
        """
        Change the executor.

        :param value: the new executor or ``None``
        :type value: futures.Executor | None

        """
        self._context.executor = value

    @property
    def loop(self) -> asyncio.AbstractEventLoop | None:
        """::return: the event loop used to schedule executor work (``None`` means the running loop)."""
        return self._context.loop

    async def acquire(  # type: ignore[override]
        self,
        timeout: float | None = None,
        poll_interval: float = 0.05,
        *,
        blocking: bool | None = None,
    ) -> AsyncAcquireReturnProxy:
        """
        Try to acquire the file lock.

        :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default
            :attr:`~BaseFileLock.timeout`; if ``timeout < 0``, there is no timeout and
            this method will block until the lock could be acquired
        :param poll_interval: interval of trying to acquire the lock file
        :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
            first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
        :raises Timeout: if fails to acquire lock within the timeout period
        :return: a context object that will unlock the file when the context is exited

        .. code-block:: python

            # You can use this method in the context manager (recommended)
            with lock.acquire():
                pass

            # Or use an equivalent try-finally construct:
            lock.acquire()
            try:
                pass
            finally:
                lock.release()

        """
        # Use the default timeout, if no timeout is provided.
        if timeout is None:
            timeout = self._context.timeout

        if blocking is None:
            blocking = self._context.blocking

        # Increment the number right at the beginning. We can still undo it, if something fails.
        self._context.lock_counter += 1

        lock_id = id(self)
        lock_filename = self.lock_file
        start_time = time.perf_counter()
        try:
            while True:
                if not self.is_locked:
                    _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
                    await self._run_internal_method(self._acquire)
                if self.is_locked:
                    _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
                    break
                if blocking is False:
                    _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                if 0 <= timeout < time.perf_counter() - start_time:
                    _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
                    raise Timeout(lock_filename)  # noqa: TRY301
                msg = "Lock %s not acquired on %s, waiting %s seconds ..."
                _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
                await asyncio.sleep(poll_interval)
        except BaseException:  # Something did go wrong, so decrement the counter.
            self._context.lock_counter = max(0, self._context.lock_counter - 1)
            raise
        return AsyncAcquireReturnProxy(lock=self)

    async def release(self, force: bool = False) -> None:  # type: ignore[override]  # noqa: FBT001, FBT002
        """
        Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
        Also note, that the lock file itself is not automatically deleted.

        :param force: If true, the lock counter is ignored and the lock is released in every case.

        """
        if self.is_locked:
            self._context.lock_counter -= 1

            if self._context.lock_counter == 0 or force:
                lock_id, lock_filename = id(self), self.lock_file

                _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
                await self._run_internal_method(self._release)
                self._context.lock_counter = 0
                _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)

    async def _run_internal_method(self, method: Callable[[], Any]) -> None:
        # Dispatch the platform-specific _acquire/_release without blocking the event loop:
        # await coroutines directly, offload plain callables to an executor when configured.
        if asyncio.iscoroutinefunction(method):
            await method()
        elif self.run_in_executor:
            loop = self.loop or asyncio.get_running_loop()
            await loop.run_in_executor(self.executor, method)
        else:
            method()

    def __enter__(self) -> NoReturn:
        """
        Replace old __enter__ method to avoid using it.

        NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD.

        :return: none
        :rtype: NoReturn
        """
        msg = "Do not use `with` for asyncio locks, use `async with` instead."
        raise NotImplementedError(msg)

    async def __aenter__(self) -> Self:
        """
        Acquire the lock.

        :return: the lock object

        """
        await self.acquire()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """
        Release the lock.

        :param exc_type: the exception type if raised
        :param exc_value: the exception value if raised
        :param traceback: the exception traceback if raised

        """
        await self.release()

    def __del__(self) -> None:
        """Called when the lock object is deleted."""
        # RuntimeError is suppressed for the "no running event loop" case during interpreter teardown.
        with contextlib.suppress(RuntimeError):
            loop = self.loop or asyncio.get_running_loop()
            if not loop.is_running():  # pragma: no cover
                loop.run_until_complete(self.release(force=True))
            else:
                # NOTE(review): the created task is not retained; it may be garbage-collected
                # before the release completes — confirm this is the intended best-effort cleanup.
                loop.create_task(self.release(force=True))
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock):
    """Simply watches the existence of the lock file (async variant of :class:`SoftFileLock`)."""
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock):
    """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems (async variant)."""
|
| 330 |
+
|
| 331 |
+
|
| 332 |
+
class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock):
    """Uses the :func:`msvcrt.locking` to hard lock the lock file on windows systems (async variant)."""
|
| 334 |
+
|
| 335 |
+
|
| 336 |
+
# Public API of this module; star imports and documentation tooling rely on this list.
__all__ = [
    "AsyncAcquireReturnProxy",
    "AsyncSoftFileLock",
    "AsyncUnixFileLock",
    "AsyncWindowsFileLock",
    "BaseAsyncFileLock",
]
|
.venv/lib/python3.11/site-packages/filelock/py.typed
ADDED
|
File without changes
|
.venv/lib/python3.11/site-packages/filelock/version.py
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# file generated by setuptools_scm
# don't change, don't track in version control

# TYPE_CHECKING is defined locally (instead of importing from typing) so this module has
# zero runtime import cost; static type checkers still see the precise alias below.
TYPE_CHECKING = False
if TYPE_CHECKING:
    from typing import Tuple, Union

    VERSION_TUPLE = Tuple[Union[int, str], ...]
else:
    VERSION_TUPLE = object

version: str
__version__: str
__version_tuple__: VERSION_TUPLE
version_tuple: VERSION_TUPLE

__version__ = version = '3.17.0'
__version_tuple__ = version_tuple = (3, 17, 0)
|
.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.4
|
| 2 |
+
Name: jiter
|
| 3 |
+
Version: 0.8.2
|
| 4 |
+
Classifier: Development Status :: 4 - Beta
|
| 5 |
+
Classifier: Programming Language :: Python
|
| 6 |
+
Classifier: Programming Language :: Python :: 3
|
| 7 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 8 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 9 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 10 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 11 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 12 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 13 |
+
Classifier: Intended Audience :: Developers
|
| 14 |
+
Classifier: Intended Audience :: Information Technology
|
| 15 |
+
Classifier: Intended Audience :: System Administrators
|
| 16 |
+
Classifier: License :: OSI Approved :: MIT License
|
| 17 |
+
Classifier: Operating System :: Unix
|
| 18 |
+
Classifier: Operating System :: POSIX :: Linux
|
| 19 |
+
Classifier: Environment :: Console
|
| 20 |
+
Classifier: Environment :: MacOS X
|
| 21 |
+
Classifier: Topic :: File Formats :: JSON
|
| 22 |
+
Classifier: Framework :: Pydantic :: 2
|
| 23 |
+
Summary: Fast iterable JSON parser.
|
| 24 |
+
Keywords: JSON,parsing,deserialization,iter
|
| 25 |
+
Home-Page: https://github.com/pydantic/jiter/
|
| 26 |
+
Author: Samuel Colvin <samuel@pydantic.dev>
|
| 27 |
+
Author-email: Samuel Colvin <s@muelcolvin.com>
|
| 28 |
+
License: MIT
|
| 29 |
+
Requires-Python: >=3.8
|
| 30 |
+
Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
|
| 31 |
+
Project-URL: Source Code, https://github.com/pydantic/jiter/
|
| 32 |
+
|
| 33 |
+
# jiter
|
| 34 |
+
|
| 35 |
+
[](https://github.com/pydantic/jiter/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
|
| 36 |
+
[](https://pypi.python.org/pypi/jiter)
|
| 37 |
+
[](https://github.com/pydantic/jiter)
|
| 38 |
+
[](https://github.com/pydantic/jiter/blob/main/LICENSE)
|
| 39 |
+
|
| 40 |
+
This is a standalone version of the JSON parser used in `pydantic-core`. The recommendation is to only use this package directly if you do not use `pydantic`.
|
| 41 |
+
|
| 42 |
+
The API is extremely minimal:
|
| 43 |
+
|
| 44 |
+
```python
|
| 45 |
+
def from_json(
|
| 46 |
+
json_data: bytes,
|
| 47 |
+
/,
|
| 48 |
+
*,
|
| 49 |
+
allow_inf_nan: bool = True,
|
| 50 |
+
cache_mode: Literal[True, False, "all", "keys", "none"] = "all",
|
| 51 |
+
partial_mode: Literal[True, False, "off", "on", "trailing-strings"] = False,
|
| 52 |
+
catch_duplicate_keys: bool = False,
|
| 53 |
+
float_mode: Literal["float", "decimal", "lossless-float"] = False,
|
| 54 |
+
) -> Any:
|
| 55 |
+
"""
|
| 56 |
+
Parse input bytes into a JSON object.
|
| 57 |
+
|
| 58 |
+
Arguments:
|
| 59 |
+
json_data: The JSON data to parse
|
| 60 |
+
    allow_inf_nan: Whether to allow infinity (`Infinity` and `-Infinity`) and `NaN` values to float fields.
|
| 61 |
+
Defaults to True.
|
| 62 |
+
cache_mode: cache Python strings to improve performance at the cost of some memory usage
|
| 63 |
+
- True / 'all' - cache all strings
|
| 64 |
+
- 'keys' - cache only object keys
|
| 65 |
+
- False / 'none' - cache nothing
|
| 66 |
+
partial_mode: How to handle incomplete strings:
|
| 67 |
+
- False / 'off' - raise an exception if the input is incomplete
|
| 68 |
+
- True / 'on' - allow incomplete JSON but discard the last string if it is incomplete
|
| 69 |
+
- 'trailing-strings' - allow incomplete JSON, and include the last incomplete string in the output
|
| 70 |
+
catch_duplicate_keys: if True, raise an exception if objects contain the same key multiple times
|
| 71 |
+
float_mode: How to return floats: as a `float`, `Decimal` or `LosslessFloat`
|
| 72 |
+
|
| 73 |
+
Returns:
|
| 74 |
+
Python object built from the JSON input.
|
| 75 |
+
"""
|
| 76 |
+
|
| 77 |
+
def cache_clear() -> None:
|
| 78 |
+
"""
|
| 79 |
+
Reset the string cache.
|
| 80 |
+
"""
|
| 81 |
+
|
| 82 |
+
def cache_usage() -> int:
|
| 83 |
+
"""
|
| 84 |
+
get the size of the string cache.
|
| 85 |
+
|
| 86 |
+
Returns:
|
| 87 |
+
Size of the string cache in bytes.
|
| 88 |
+
"""
|
| 89 |
+
```
|
| 90 |
+
## Examples
|
| 91 |
+
|
| 92 |
+
The main function provided by Jiter is `from_json()`, which accepts a bytes object containing JSON and returns a Python dictionary, list or other value.
|
| 93 |
+
|
| 94 |
+
```python
|
| 95 |
+
import jiter
|
| 96 |
+
|
| 97 |
+
json_data = b'{"name": "John", "age": 30}'
|
| 98 |
+
parsed_data = jiter.from_json(json_data)
|
| 99 |
+
print(parsed_data) # Output: {'name': 'John', 'age': 30}
|
| 100 |
+
```
|
| 101 |
+
|
| 102 |
+
### Handling Partial JSON
|
| 103 |
+
|
| 104 |
+
Incomplete JSON objects can be parsed using the `partial_mode=` parameter.
|
| 105 |
+
|
| 106 |
+
```python
|
| 107 |
+
import jiter
|
| 108 |
+
|
| 109 |
+
partial_json = b'{"name": "John", "age": 30, "city": "New Yor'
|
| 110 |
+
|
| 111 |
+
# Raise error on incomplete JSON
|
| 112 |
+
try:
|
| 113 |
+
jiter.from_json(partial_json, partial_mode=False)
|
| 114 |
+
except ValueError as e:
|
| 115 |
+
print(f"Error: {e}")
|
| 116 |
+
|
| 117 |
+
# Parse incomplete JSON, discarding incomplete last field
|
| 118 |
+
result = jiter.from_json(partial_json, partial_mode=True)
|
| 119 |
+
print(result) # Output: {'name': 'John', 'age': 30}
|
| 120 |
+
|
| 121 |
+
# Parse incomplete JSON, including incomplete last field
|
| 122 |
+
result = jiter.from_json(partial_json, partial_mode='trailing-strings')
|
| 123 |
+
print(result) # Output: {'name': 'John', 'age': 30, 'city': 'New Yor'}
|
| 124 |
+
```
|
| 125 |
+
|
| 126 |
+
### Catching Duplicate Keys
|
| 127 |
+
|
| 128 |
+
The `catch_duplicate_keys=True` option can be used to raise a `ValueError` if an object contains duplicate keys.
|
| 129 |
+
|
| 130 |
+
```python
|
| 131 |
+
import jiter
|
| 132 |
+
|
| 133 |
+
json_with_dupes = b'{"foo": 1, "foo": 2}'
|
| 134 |
+
|
| 135 |
+
# Default behavior (last value wins)
|
| 136 |
+
result = jiter.from_json(json_with_dupes)
|
| 137 |
+
print(result) # Output: {'foo': 2}
|
| 138 |
+
|
| 139 |
+
# Catch duplicate keys
|
| 140 |
+
try:
|
| 141 |
+
jiter.from_json(json_with_dupes, catch_duplicate_keys=True)
|
| 142 |
+
except ValueError as e:
|
| 143 |
+
print(f"Error: {e}")
|
| 144 |
+
```
|
| 145 |
+
|
.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
jiter-0.8.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
jiter-0.8.2.dist-info/METADATA,sha256=VdOT_6YbBf3ENlJDsdCNVUJ61N14uIaTHyVcHScgGb8,5177
|
| 3 |
+
jiter-0.8.2.dist-info/RECORD,,
|
| 4 |
+
jiter-0.8.2.dist-info/WHEEL,sha256=qfXqQP1Fc7f0pAAAyf6-qTIv7nE_-wkHw_y9EwRAQFw,129
|
| 5 |
+
jiter/__init__.py,sha256=Fp9HkOixiYYDSiC_80vmiJ_sCoCGT8OAh48yltm0lP0,103
|
| 6 |
+
jiter/__init__.pyi,sha256=AEs-Zbzf7c2r5vUTpTjxkLBuN7KnfFTURrWrZJAZnQY,2363
|
| 7 |
+
jiter/__pycache__/__init__.cpython-311.pyc,,
|
| 8 |
+
jiter/jiter.cpython-311-x86_64-linux-gnu.so,sha256=InrLB6omAldF_KvC5g9Dbq1pCwv5BQg15z84hzzhh5Q,812104
|
| 9 |
+
jiter/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: maturin (1.7.7)
|
| 3 |
+
Root-Is-Purelib: false
|
| 4 |
+
Tag: cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64
|
.venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:b5e6d7050e58b5bab524e6e6be4ec4c1ff728845920bddd8e8acd823b43b5980
|
| 3 |
+
size 284937
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
This software is made available under the terms of *either* of the licenses
|
| 2 |
+
found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
|
| 3 |
+
under the terms of *both* these licenses.
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.APACHE
ADDED
|
@@ -0,0 +1,177 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
|
| 2 |
+
Apache License
|
| 3 |
+
Version 2.0, January 2004
|
| 4 |
+
http://www.apache.org/licenses/
|
| 5 |
+
|
| 6 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
| 7 |
+
|
| 8 |
+
1. Definitions.
|
| 9 |
+
|
| 10 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
| 11 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
| 12 |
+
|
| 13 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
| 14 |
+
the copyright owner that is granting the License.
|
| 15 |
+
|
| 16 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
| 17 |
+
other entities that control, are controlled by, or are under common
|
| 18 |
+
control with that entity. For the purposes of this definition,
|
| 19 |
+
"control" means (i) the power, direct or indirect, to cause the
|
| 20 |
+
direction or management of such entity, whether by contract or
|
| 21 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
| 22 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
| 23 |
+
|
| 24 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
| 25 |
+
exercising permissions granted by this License.
|
| 26 |
+
|
| 27 |
+
"Source" form shall mean the preferred form for making modifications,
|
| 28 |
+
including but not limited to software source code, documentation
|
| 29 |
+
source, and configuration files.
|
| 30 |
+
|
| 31 |
+
"Object" form shall mean any form resulting from mechanical
|
| 32 |
+
transformation or translation of a Source form, including but
|
| 33 |
+
not limited to compiled object code, generated documentation,
|
| 34 |
+
and conversions to other media types.
|
| 35 |
+
|
| 36 |
+
"Work" shall mean the work of authorship, whether in Source or
|
| 37 |
+
Object form, made available under the License, as indicated by a
|
| 38 |
+
copyright notice that is included in or attached to the work
|
| 39 |
+
(an example is provided in the Appendix below).
|
| 40 |
+
|
| 41 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
| 42 |
+
form, that is based on (or derived from) the Work and for which the
|
| 43 |
+
editorial revisions, annotations, elaborations, or other modifications
|
| 44 |
+
represent, as a whole, an original work of authorship. For the purposes
|
| 45 |
+
of this License, Derivative Works shall not include works that remain
|
| 46 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
| 47 |
+
the Work and Derivative Works thereof.
|
| 48 |
+
|
| 49 |
+
"Contribution" shall mean any work of authorship, including
|
| 50 |
+
the original version of the Work and any modifications or additions
|
| 51 |
+
to that Work or Derivative Works thereof, that is intentionally
|
| 52 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
| 53 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
| 54 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
| 55 |
+
means any form of electronic, verbal, or written communication sent
|
| 56 |
+
to the Licensor or its representatives, including but not limited to
|
| 57 |
+
communication on electronic mailing lists, source code control systems,
|
| 58 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
| 59 |
+
Licensor for the purpose of discussing and improving the Work, but
|
| 60 |
+
excluding communication that is conspicuously marked or otherwise
|
| 61 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
| 62 |
+
|
| 63 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
| 64 |
+
on behalf of whom a Contribution has been received by Licensor and
|
| 65 |
+
subsequently incorporated within the Work.
|
| 66 |
+
|
| 67 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
| 68 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 69 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 70 |
+
copyright license to reproduce, prepare Derivative Works of,
|
| 71 |
+
publicly display, publicly perform, sublicense, and distribute the
|
| 72 |
+
Work and such Derivative Works in Source or Object form.
|
| 73 |
+
|
| 74 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
| 75 |
+
this License, each Contributor hereby grants to You a perpetual,
|
| 76 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
| 77 |
+
(except as stated in this section) patent license to make, have made,
|
| 78 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
| 79 |
+
where such license applies only to those patent claims licensable
|
| 80 |
+
by such Contributor that are necessarily infringed by their
|
| 81 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
| 82 |
+
with the Work to which such Contribution(s) was submitted. If You
|
| 83 |
+
institute patent litigation against any entity (including a
|
| 84 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
| 85 |
+
or a Contribution incorporated within the Work constitutes direct
|
| 86 |
+
or contributory patent infringement, then any patent licenses
|
| 87 |
+
granted to You under this License for that Work shall terminate
|
| 88 |
+
as of the date such litigation is filed.
|
| 89 |
+
|
| 90 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
| 91 |
+
Work or Derivative Works thereof in any medium, with or without
|
| 92 |
+
modifications, and in Source or Object form, provided that You
|
| 93 |
+
meet the following conditions:
|
| 94 |
+
|
| 95 |
+
(a) You must give any other recipients of the Work or
|
| 96 |
+
Derivative Works a copy of this License; and
|
| 97 |
+
|
| 98 |
+
(b) You must cause any modified files to carry prominent notices
|
| 99 |
+
stating that You changed the files; and
|
| 100 |
+
|
| 101 |
+
(c) You must retain, in the Source form of any Derivative Works
|
| 102 |
+
that You distribute, all copyright, patent, trademark, and
|
| 103 |
+
attribution notices from the Source form of the Work,
|
| 104 |
+
excluding those notices that do not pertain to any part of
|
| 105 |
+
the Derivative Works; and
|
| 106 |
+
|
| 107 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
| 108 |
+
distribution, then any Derivative Works that You distribute must
|
| 109 |
+
include a readable copy of the attribution notices contained
|
| 110 |
+
within such NOTICE file, excluding those notices that do not
|
| 111 |
+
pertain to any part of the Derivative Works, in at least one
|
| 112 |
+
of the following places: within a NOTICE text file distributed
|
| 113 |
+
as part of the Derivative Works; within the Source form or
|
| 114 |
+
documentation, if provided along with the Derivative Works; or,
|
| 115 |
+
within a display generated by the Derivative Works, if and
|
| 116 |
+
wherever such third-party notices normally appear. The contents
|
| 117 |
+
of the NOTICE file are for informational purposes only and
|
| 118 |
+
do not modify the License. You may add Your own attribution
|
| 119 |
+
notices within Derivative Works that You distribute, alongside
|
| 120 |
+
or as an addendum to the NOTICE text from the Work, provided
|
| 121 |
+
that such additional attribution notices cannot be construed
|
| 122 |
+
as modifying the License.
|
| 123 |
+
|
| 124 |
+
You may add Your own copyright statement to Your modifications and
|
| 125 |
+
may provide additional or different license terms and conditions
|
| 126 |
+
for use, reproduction, or distribution of Your modifications, or
|
| 127 |
+
for any such Derivative Works as a whole, provided Your use,
|
| 128 |
+
reproduction, and distribution of the Work otherwise complies with
|
| 129 |
+
the conditions stated in this License.
|
| 130 |
+
|
| 131 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
| 132 |
+
any Contribution intentionally submitted for inclusion in the Work
|
| 133 |
+
by You to the Licensor shall be under the terms and conditions of
|
| 134 |
+
this License, without any additional terms or conditions.
|
| 135 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
| 136 |
+
the terms of any separate license agreement you may have executed
|
| 137 |
+
with Licensor regarding such Contributions.
|
| 138 |
+
|
| 139 |
+
6. Trademarks. This License does not grant permission to use the trade
|
| 140 |
+
names, trademarks, service marks, or product names of the Licensor,
|
| 141 |
+
except as required for reasonable and customary use in describing the
|
| 142 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
| 143 |
+
|
| 144 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
| 145 |
+
agreed to in writing, Licensor provides the Work (and each
|
| 146 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
| 147 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
| 148 |
+
implied, including, without limitation, any warranties or conditions
|
| 149 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
| 150 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
| 151 |
+
appropriateness of using or redistributing the Work and assume any
|
| 152 |
+
risks associated with Your exercise of permissions under this License.
|
| 153 |
+
|
| 154 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
| 155 |
+
whether in tort (including negligence), contract, or otherwise,
|
| 156 |
+
unless required by applicable law (such as deliberate and grossly
|
| 157 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
| 158 |
+
liable to You for damages, including any direct, indirect, special,
|
| 159 |
+
incidental, or consequential damages of any character arising as a
|
| 160 |
+
result of this License or out of the use or inability to use the
|
| 161 |
+
Work (including but not limited to damages for loss of goodwill,
|
| 162 |
+
work stoppage, computer failure or malfunction, or any and all
|
| 163 |
+
other commercial damages or losses), even if such Contributor
|
| 164 |
+
has been advised of the possibility of such damages.
|
| 165 |
+
|
| 166 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
| 167 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
| 168 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
| 169 |
+
or other liability obligations and/or rights consistent with this
|
| 170 |
+
License. However, in accepting such obligations, You may act only
|
| 171 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
| 172 |
+
of any other Contributor, and only if You agree to indemnify,
|
| 173 |
+
defend, and hold each Contributor harmless for any liability
|
| 174 |
+
incurred by, or claims asserted against, such Contributor by reason
|
| 175 |
+
of your accepting any such warranty or additional liability.
|
| 176 |
+
|
| 177 |
+
END OF TERMS AND CONDITIONS
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.BSD
ADDED
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright (c) Donald Stufft and individual contributors.
|
| 2 |
+
All rights reserved.
|
| 3 |
+
|
| 4 |
+
Redistribution and use in source and binary forms, with or without
|
| 5 |
+
modification, are permitted provided that the following conditions are met:
|
| 6 |
+
|
| 7 |
+
1. Redistributions of source code must retain the above copyright notice,
|
| 8 |
+
this list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 11 |
+
notice, this list of conditions and the following disclaimer in the
|
| 12 |
+
documentation and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
| 15 |
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
| 16 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
| 17 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
| 18 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
| 19 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
| 20 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
| 21 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
| 22 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
| 23 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.3
|
| 2 |
+
Name: packaging
|
| 3 |
+
Version: 24.2
|
| 4 |
+
Summary: Core utilities for Python packages
|
| 5 |
+
Author-email: Donald Stufft <donald@stufft.io>
|
| 6 |
+
Requires-Python: >=3.8
|
| 7 |
+
Description-Content-Type: text/x-rst
|
| 8 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 9 |
+
Classifier: Intended Audience :: Developers
|
| 10 |
+
Classifier: License :: OSI Approved :: Apache Software License
|
| 11 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 12 |
+
Classifier: Programming Language :: Python
|
| 13 |
+
Classifier: Programming Language :: Python :: 3
|
| 14 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
| 15 |
+
Classifier: Programming Language :: Python :: 3.8
|
| 16 |
+
Classifier: Programming Language :: Python :: 3.9
|
| 17 |
+
Classifier: Programming Language :: Python :: 3.10
|
| 18 |
+
Classifier: Programming Language :: Python :: 3.11
|
| 19 |
+
Classifier: Programming Language :: Python :: 3.12
|
| 20 |
+
Classifier: Programming Language :: Python :: 3.13
|
| 21 |
+
Classifier: Programming Language :: Python :: Implementation :: CPython
|
| 22 |
+
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
| 23 |
+
Classifier: Typing :: Typed
|
| 24 |
+
Project-URL: Documentation, https://packaging.pypa.io/
|
| 25 |
+
Project-URL: Source, https://github.com/pypa/packaging
|
| 26 |
+
|
| 27 |
+
packaging
|
| 28 |
+
=========
|
| 29 |
+
|
| 30 |
+
.. start-intro
|
| 31 |
+
|
| 32 |
+
Reusable core utilities for various Python Packaging
|
| 33 |
+
`interoperability specifications <https://packaging.python.org/specifications/>`_.
|
| 34 |
+
|
| 35 |
+
This library provides utilities that implement the interoperability
|
| 36 |
+
specifications which have clearly one correct behaviour (eg: :pep:`440`)
|
| 37 |
+
or benefit greatly from having a single shared implementation (eg: :pep:`425`).
|
| 38 |
+
|
| 39 |
+
.. end-intro
|
| 40 |
+
|
| 41 |
+
The ``packaging`` project includes the following: version handling, specifiers,
|
| 42 |
+
markers, requirements, tags, utilities.
|
| 43 |
+
|
| 44 |
+
Documentation
|
| 45 |
+
-------------
|
| 46 |
+
|
| 47 |
+
The `documentation`_ provides information and the API for the following:
|
| 48 |
+
|
| 49 |
+
- Version Handling
|
| 50 |
+
- Specifiers
|
| 51 |
+
- Markers
|
| 52 |
+
- Requirements
|
| 53 |
+
- Tags
|
| 54 |
+
- Utilities
|
| 55 |
+
|
| 56 |
+
Installation
|
| 57 |
+
------------
|
| 58 |
+
|
| 59 |
+
Use ``pip`` to install these utilities::
|
| 60 |
+
|
| 61 |
+
pip install packaging
|
| 62 |
+
|
| 63 |
+
The ``packaging`` library uses calendar-based versioning (``YY.N``).
|
| 64 |
+
|
| 65 |
+
Discussion
|
| 66 |
+
----------
|
| 67 |
+
|
| 68 |
+
If you run into bugs, you can file them in our `issue tracker`_.
|
| 69 |
+
|
| 70 |
+
You can also join ``#pypa`` on Freenode to ask questions or get involved.
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
.. _`documentation`: https://packaging.pypa.io/
|
| 74 |
+
.. _`issue tracker`: https://github.com/pypa/packaging/issues
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
Code of Conduct
|
| 78 |
+
---------------
|
| 79 |
+
|
| 80 |
+
Everyone interacting in the packaging project's codebases, issue trackers, chat
|
| 81 |
+
rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
|
| 82 |
+
|
| 83 |
+
.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
|
| 84 |
+
|
| 85 |
+
Contributing
|
| 86 |
+
------------
|
| 87 |
+
|
| 88 |
+
The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
|
| 89 |
+
well as how to report a potential security issue. The documentation for this
|
| 90 |
+
project also covers information about `project development`_ and `security`_.
|
| 91 |
+
|
| 92 |
+
.. _`project development`: https://packaging.pypa.io/en/latest/development/
|
| 93 |
+
.. _`security`: https://packaging.pypa.io/en/latest/security/
|
| 94 |
+
|
| 95 |
+
Project History
|
| 96 |
+
---------------
|
| 97 |
+
|
| 98 |
+
Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
|
| 99 |
+
recent changes and project history.
|
| 100 |
+
|
| 101 |
+
.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
|
| 102 |
+
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
packaging-24.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
|
| 3 |
+
packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
|
| 4 |
+
packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
|
| 5 |
+
packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204
|
| 6 |
+
packaging-24.2.dist-info/RECORD,,
|
| 7 |
+
packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
|
| 8 |
+
packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494
|
| 9 |
+
packaging/__pycache__/__init__.cpython-311.pyc,,
|
| 10 |
+
packaging/__pycache__/_elffile.cpython-311.pyc,,
|
| 11 |
+
packaging/__pycache__/_manylinux.cpython-311.pyc,,
|
| 12 |
+
packaging/__pycache__/_musllinux.cpython-311.pyc,,
|
| 13 |
+
packaging/__pycache__/_parser.cpython-311.pyc,,
|
| 14 |
+
packaging/__pycache__/_structures.cpython-311.pyc,,
|
| 15 |
+
packaging/__pycache__/_tokenizer.cpython-311.pyc,,
|
| 16 |
+
packaging/__pycache__/markers.cpython-311.pyc,,
|
| 17 |
+
packaging/__pycache__/metadata.cpython-311.pyc,,
|
| 18 |
+
packaging/__pycache__/requirements.cpython-311.pyc,,
|
| 19 |
+
packaging/__pycache__/specifiers.cpython-311.pyc,,
|
| 20 |
+
packaging/__pycache__/tags.cpython-311.pyc,,
|
| 21 |
+
packaging/__pycache__/utils.cpython-311.pyc,,
|
| 22 |
+
packaging/__pycache__/version.cpython-311.pyc,,
|
| 23 |
+
packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306
|
| 24 |
+
packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612
|
| 25 |
+
packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
|
| 26 |
+
packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
|
| 27 |
+
packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
|
| 28 |
+
packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
|
| 29 |
+
packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715
|
| 30 |
+
packaging/licenses/__pycache__/__init__.cpython-311.pyc,,
|
| 31 |
+
packaging/licenses/__pycache__/_spdx.cpython-311.pyc,,
|
| 32 |
+
packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
|
| 33 |
+
packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561
|
| 34 |
+
packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762
|
| 35 |
+
packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 36 |
+
packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
|
| 37 |
+
packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074
|
| 38 |
+
packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014
|
| 39 |
+
packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
|
| 40 |
+
packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676
|
.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: flit 3.10.1
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
.venv/lib/python3.11/site-packages/pybind11/__pycache__/__init__.cpython-311.pyc
ADDED
|
Binary file (732 Bytes). View file
|
|
|