diff --git a/.gitattributes b/.gitattributes
index d01719e98163a2babc6d51c6c6186bddc244e080..000db3eff2ae0d935b72e3252ca177fa09735206 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -400,3 +400,4 @@ tuning-competition-baseline/.venv/lib/python3.11/site-packages/nvidia/cudnn/lib/
.venv/lib/python3.11/site-packages/numpy/ma/tests/__pycache__/test_core.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
.venv/lib/python3.11/site-packages/mistral_common/data/tekken_240718.json filter=lfs diff=lfs merge=lfs -text
.venv/lib/python3.11/site-packages/torchgen/__pycache__/gen.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
+.venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc filter=lfs diff=lfs merge=lfs -text
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/_find_header.pxd b/.venv/lib/python3.11/site-packages/aiohttp/_find_header.pxd
new file mode 100644
index 0000000000000000000000000000000000000000..37a6c37268ee30b182fd77d109688d35d5577c7f
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/_find_header.pxd
@@ -0,0 +1,2 @@
+cdef extern from "_find_header.h":
+ int find_header(char *, int)
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/_headers.pxi b/.venv/lib/python3.11/site-packages/aiohttp/_headers.pxi
new file mode 100644
index 0000000000000000000000000000000000000000..3744721d4786a6c79b90aa349c8d02fa66204ecc
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/_headers.pxi
@@ -0,0 +1,83 @@
+# The file is autogenerated from aiohttp/hdrs.py
+# Run ./tools/gen.py to update it after the origin changes.
+
+from . import hdrs
+cdef tuple headers = (
+ hdrs.ACCEPT,
+ hdrs.ACCEPT_CHARSET,
+ hdrs.ACCEPT_ENCODING,
+ hdrs.ACCEPT_LANGUAGE,
+ hdrs.ACCEPT_RANGES,
+ hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
+ hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
+ hdrs.ACCESS_CONTROL_ALLOW_METHODS,
+ hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
+ hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
+ hdrs.ACCESS_CONTROL_MAX_AGE,
+ hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
+ hdrs.ACCESS_CONTROL_REQUEST_METHOD,
+ hdrs.AGE,
+ hdrs.ALLOW,
+ hdrs.AUTHORIZATION,
+ hdrs.CACHE_CONTROL,
+ hdrs.CONNECTION,
+ hdrs.CONTENT_DISPOSITION,
+ hdrs.CONTENT_ENCODING,
+ hdrs.CONTENT_LANGUAGE,
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_MD5,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TRANSFER_ENCODING,
+ hdrs.CONTENT_TYPE,
+ hdrs.COOKIE,
+ hdrs.DATE,
+ hdrs.DESTINATION,
+ hdrs.DIGEST,
+ hdrs.ETAG,
+ hdrs.EXPECT,
+ hdrs.EXPIRES,
+ hdrs.FORWARDED,
+ hdrs.FROM,
+ hdrs.HOST,
+ hdrs.IF_MATCH,
+ hdrs.IF_MODIFIED_SINCE,
+ hdrs.IF_NONE_MATCH,
+ hdrs.IF_RANGE,
+ hdrs.IF_UNMODIFIED_SINCE,
+ hdrs.KEEP_ALIVE,
+ hdrs.LAST_EVENT_ID,
+ hdrs.LAST_MODIFIED,
+ hdrs.LINK,
+ hdrs.LOCATION,
+ hdrs.MAX_FORWARDS,
+ hdrs.ORIGIN,
+ hdrs.PRAGMA,
+ hdrs.PROXY_AUTHENTICATE,
+ hdrs.PROXY_AUTHORIZATION,
+ hdrs.RANGE,
+ hdrs.REFERER,
+ hdrs.RETRY_AFTER,
+ hdrs.SEC_WEBSOCKET_ACCEPT,
+ hdrs.SEC_WEBSOCKET_EXTENSIONS,
+ hdrs.SEC_WEBSOCKET_KEY,
+ hdrs.SEC_WEBSOCKET_KEY1,
+ hdrs.SEC_WEBSOCKET_PROTOCOL,
+ hdrs.SEC_WEBSOCKET_VERSION,
+ hdrs.SERVER,
+ hdrs.SET_COOKIE,
+ hdrs.TE,
+ hdrs.TRAILER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.URI,
+ hdrs.UPGRADE,
+ hdrs.USER_AGENT,
+ hdrs.VARY,
+ hdrs.VIA,
+ hdrs.WWW_AUTHENTICATE,
+ hdrs.WANT_DIGEST,
+ hdrs.WARNING,
+ hdrs.X_FORWARDED_FOR,
+ hdrs.X_FORWARDED_HOST,
+ hdrs.X_FORWARDED_PROTO,
+)
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/abc.py b/.venv/lib/python3.11/site-packages/aiohttp/abc.py
new file mode 100644
index 0000000000000000000000000000000000000000..5794a9108b076a81120dc09bbcca892dd9fcf1f3
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/abc.py
@@ -0,0 +1,253 @@
+import asyncio
+import logging
+import socket
+import zlib
+from abc import ABC, abstractmethod
+from collections.abc import Sized
+from http.cookies import BaseCookie, Morsel
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Dict,
+ Generator,
+ Iterable,
+ List,
+ Optional,
+ Tuple,
+ TypedDict,
+ Union,
+)
+
+from multidict import CIMultiDict
+from yarl import URL
+
+from .typedefs import LooseCookies
+
+if TYPE_CHECKING:
+ from .web_app import Application
+ from .web_exceptions import HTTPException
+ from .web_request import BaseRequest, Request
+ from .web_response import StreamResponse
+else:
+ BaseRequest = Request = Application = StreamResponse = None
+ HTTPException = None
+
+
+class AbstractRouter(ABC):
+ def __init__(self) -> None:
+ self._frozen = False
+
+ def post_init(self, app: Application) -> None:
+ """Post init stage.
+
+ Not an abstract method for sake of backward compatibility,
+ but if the router wants to be aware of the application
+ it can override this.
+ """
+
+ @property
+ def frozen(self) -> bool:
+ return self._frozen
+
+ def freeze(self) -> None:
+ """Freeze router."""
+ self._frozen = True
+
+ @abstractmethod
+ async def resolve(self, request: Request) -> "AbstractMatchInfo":
+ """Return MATCH_INFO for given request"""
+
+
+class AbstractMatchInfo(ABC):
+
+ __slots__ = ()
+
+ @property # pragma: no branch
+ @abstractmethod
+ def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
+ """Execute matched request handler"""
+
+ @property
+ @abstractmethod
+ def expect_handler(
+ self,
+ ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]:
+ """Expect handler for 100-continue processing"""
+
+ @property # pragma: no branch
+ @abstractmethod
+ def http_exception(self) -> Optional[HTTPException]:
+ """HTTPException instance raised on router's resolving, or None"""
+
+ @abstractmethod # pragma: no branch
+ def get_info(self) -> Dict[str, Any]:
+ """Return a dict with additional info useful for introspection"""
+
+ @property # pragma: no branch
+ @abstractmethod
+ def apps(self) -> Tuple[Application, ...]:
+ """Stack of nested applications.
+
+ Top level application is left-most element.
+
+ """
+
+ @abstractmethod
+ def add_app(self, app: Application) -> None:
+ """Add application to the nested apps stack."""
+
+ @abstractmethod
+ def freeze(self) -> None:
+ """Freeze the match info.
+
+ The method is called after route resolution.
+
+ After the call .add_app() is forbidden.
+
+ """
+
+
+class AbstractView(ABC):
+ """Abstract class based view."""
+
+ def __init__(self, request: Request) -> None:
+ self._request = request
+
+ @property
+ def request(self) -> Request:
+ """Request instance."""
+ return self._request
+
+ @abstractmethod
+ def __await__(self) -> Generator[Any, None, StreamResponse]:
+ """Execute the view handler."""
+
+
+class ResolveResult(TypedDict):
+ """Resolve result.
+
+ This is the result returned from an AbstractResolver's
+ resolve method.
+
+ :param hostname: The hostname that was provided.
+ :param host: The IP address that was resolved.
+ :param port: The port that was resolved.
+ :param family: The address family that was resolved.
+ :param proto: The protocol that was resolved.
+ :param flags: The flags that were resolved.
+ """
+
+ hostname: str
+ host: str
+ port: int
+ family: int
+ proto: int
+ flags: int
+
+
+class AbstractResolver(ABC):
+ """Abstract DNS resolver."""
+
+ @abstractmethod
+ async def resolve(
+ self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
+ ) -> List[ResolveResult]:
+ """Return IP address for given hostname"""
+
+ @abstractmethod
+ async def close(self) -> None:
+ """Release resolver"""
+
+
+if TYPE_CHECKING:
+ IterableBase = Iterable[Morsel[str]]
+else:
+ IterableBase = Iterable
+
+
+ClearCookiePredicate = Callable[["Morsel[str]"], bool]
+
+
+class AbstractCookieJar(Sized, IterableBase):
+ """Abstract Cookie Jar."""
+
+ def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
+ self._loop = loop or asyncio.get_running_loop()
+
+ @property
+ @abstractmethod
+ def quote_cookie(self) -> bool:
+ """Return True if cookies should be quoted."""
+
+ @abstractmethod
+ def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
+ """Clear all cookies if no predicate is passed."""
+
+ @abstractmethod
+ def clear_domain(self, domain: str) -> None:
+ """Clear all cookies for domain and all subdomains."""
+
+ @abstractmethod
+ def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
+ """Update cookies."""
+
+ @abstractmethod
+ def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
+ """Return the jar's cookies filtered by their attributes."""
+
+
+class AbstractStreamWriter(ABC):
+ """Abstract stream writer."""
+
+ buffer_size: int = 0
+ output_size: int = 0
+ length: Optional[int] = 0
+
+ @abstractmethod
+ async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
+ """Write chunk into stream."""
+
+ @abstractmethod
+ async def write_eof(self, chunk: bytes = b"") -> None:
+ """Write last chunk."""
+
+ @abstractmethod
+ async def drain(self) -> None:
+ """Flush the write buffer."""
+
+ @abstractmethod
+ def enable_compression(
+ self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
+ ) -> None:
+ """Enable HTTP body compression"""
+
+ @abstractmethod
+ def enable_chunking(self) -> None:
+ """Enable HTTP chunked mode"""
+
+ @abstractmethod
+ async def write_headers(
+ self, status_line: str, headers: "CIMultiDict[str]"
+ ) -> None:
+ """Write HTTP headers"""
+
+
+class AbstractAccessLogger(ABC):
+ """Abstract writer to access log."""
+
+ __slots__ = ("logger", "log_format")
+
+ def __init__(self, logger: logging.Logger, log_format: str) -> None:
+ self.logger = logger
+ self.log_format = log_format
+
+ @abstractmethod
+ def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
+ """Emit log to logger."""
+
+ @property
+ def enabled(self) -> bool:
+ """Check if logger is enabled."""
+ return True
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/compression_utils.py b/.venv/lib/python3.11/site-packages/aiohttp/compression_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebe8857f487466049c8c6e9e825da7aea6c9d7b9
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/compression_utils.py
@@ -0,0 +1,173 @@
+import asyncio
+import zlib
+from concurrent.futures import Executor
+from typing import Optional, cast
+
+try:
+ try:
+ import brotlicffi as brotli
+ except ImportError:
+ import brotli
+
+ HAS_BROTLI = True
+except ImportError: # pragma: no cover
+ HAS_BROTLI = False
+
+MAX_SYNC_CHUNK_SIZE = 1024
+
+
+def encoding_to_mode(
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+) -> int:
+ if encoding == "gzip":
+ return 16 + zlib.MAX_WBITS
+
+ return -zlib.MAX_WBITS if suppress_deflate_header else zlib.MAX_WBITS
+
+
+class ZlibBaseHandler:
+ def __init__(
+ self,
+ mode: int,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ self._mode = mode
+ self._executor = executor
+ self._max_sync_chunk_size = max_sync_chunk_size
+
+
+class ZLibCompressor(ZlibBaseHandler):
+ def __init__(
+ self,
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+ level: Optional[int] = None,
+ wbits: Optional[int] = None,
+ strategy: int = zlib.Z_DEFAULT_STRATEGY,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ super().__init__(
+ mode=(
+ encoding_to_mode(encoding, suppress_deflate_header)
+ if wbits is None
+ else wbits
+ ),
+ executor=executor,
+ max_sync_chunk_size=max_sync_chunk_size,
+ )
+ if level is None:
+ self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
+ else:
+ self._compressor = zlib.compressobj(
+ wbits=self._mode, strategy=strategy, level=level
+ )
+ self._compress_lock = asyncio.Lock()
+
+ def compress_sync(self, data: bytes) -> bytes:
+ return self._compressor.compress(data)
+
+ async def compress(self, data: bytes) -> bytes:
+ """Compress the data and returned the compressed bytes.
+
+ Note that flush() must be called after the last call to compress()
+
+ If the data size is larger than the max_sync_chunk_size, the compression
+ will be done in the executor. Otherwise, the compression will be done
+ in the event loop.
+ """
+ async with self._compress_lock:
+ # To ensure the stream is consistent in the event
+ # there are multiple writers, we need to lock
+ # the compressor so that only one writer can
+ # compress at a time.
+ if (
+ self._max_sync_chunk_size is not None
+ and len(data) > self._max_sync_chunk_size
+ ):
+ return await asyncio.get_running_loop().run_in_executor(
+ self._executor, self._compressor.compress, data
+ )
+ return self.compress_sync(data)
+
+ def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
+ return self._compressor.flush(mode)
+
+
+class ZLibDecompressor(ZlibBaseHandler):
+ def __init__(
+ self,
+ encoding: Optional[str] = None,
+ suppress_deflate_header: bool = False,
+ executor: Optional[Executor] = None,
+ max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
+ ):
+ super().__init__(
+ mode=encoding_to_mode(encoding, suppress_deflate_header),
+ executor=executor,
+ max_sync_chunk_size=max_sync_chunk_size,
+ )
+ self._decompressor = zlib.decompressobj(wbits=self._mode)
+
+ def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
+ return self._decompressor.decompress(data, max_length)
+
+ async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
+ """Decompress the data and return the decompressed bytes.
+
+ If the data size is larger than the max_sync_chunk_size, the decompression
+ will be done in the executor. Otherwise, the decompression will be done
+ in the event loop.
+ """
+ if (
+ self._max_sync_chunk_size is not None
+ and len(data) > self._max_sync_chunk_size
+ ):
+ return await asyncio.get_running_loop().run_in_executor(
+ self._executor, self._decompressor.decompress, data, max_length
+ )
+ return self.decompress_sync(data, max_length)
+
+ def flush(self, length: int = 0) -> bytes:
+ return (
+ self._decompressor.flush(length)
+ if length > 0
+ else self._decompressor.flush()
+ )
+
+ @property
+ def eof(self) -> bool:
+ return self._decompressor.eof
+
+ @property
+ def unconsumed_tail(self) -> bytes:
+ return self._decompressor.unconsumed_tail
+
+ @property
+ def unused_data(self) -> bytes:
+ return self._decompressor.unused_data
+
+
+class BrotliDecompressor:
+ # Supports both 'brotlipy' and 'Brotli' packages
+ # since they share an import name. The top branches
+ # are for 'brotlipy' and bottom branches for 'Brotli'
+ def __init__(self) -> None:
+ if not HAS_BROTLI:
+ raise RuntimeError(
+ "The brotli decompression is not available. "
+ "Please install `Brotli` module"
+ )
+ self._obj = brotli.Decompressor()
+
+ def decompress_sync(self, data: bytes) -> bytes:
+ if hasattr(self._obj, "decompress"):
+ return cast(bytes, self._obj.decompress(data))
+ return cast(bytes, self._obj.process(data))
+
+ def flush(self) -> bytes:
+ if hasattr(self._obj, "flush"):
+ return cast(bytes, self._obj.flush())
+ return b""
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/formdata.py b/.venv/lib/python3.11/site-packages/aiohttp/formdata.py
new file mode 100644
index 0000000000000000000000000000000000000000..73056f4bc45f2ec140c00e7a3983e7e0e21c4343
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/formdata.py
@@ -0,0 +1,182 @@
+import io
+import warnings
+from typing import Any, Iterable, List, Optional
+from urllib.parse import urlencode
+
+from multidict import MultiDict, MultiDictProxy
+
+from . import hdrs, multipart, payload
+from .helpers import guess_filename
+from .payload import Payload
+
+__all__ = ("FormData",)
+
+
+class FormData:
+ """Helper class for form body generation.
+
+ Supports multipart/form-data and application/x-www-form-urlencoded.
+ """
+
+ def __init__(
+ self,
+ fields: Iterable[Any] = (),
+ quote_fields: bool = True,
+ charset: Optional[str] = None,
+ *,
+ default_to_multipart: bool = False,
+ ) -> None:
+ self._writer = multipart.MultipartWriter("form-data")
+ self._fields: List[Any] = []
+ self._is_multipart = default_to_multipart
+ self._is_processed = False
+ self._quote_fields = quote_fields
+ self._charset = charset
+
+ if isinstance(fields, dict):
+ fields = list(fields.items())
+ elif not isinstance(fields, (list, tuple)):
+ fields = (fields,)
+ self.add_fields(*fields)
+
+ @property
+ def is_multipart(self) -> bool:
+ return self._is_multipart
+
+ def add_field(
+ self,
+ name: str,
+ value: Any,
+ *,
+ content_type: Optional[str] = None,
+ filename: Optional[str] = None,
+ content_transfer_encoding: Optional[str] = None,
+ ) -> None:
+
+ if isinstance(value, io.IOBase):
+ self._is_multipart = True
+ elif isinstance(value, (bytes, bytearray, memoryview)):
+ msg = (
+ "In v4, passing bytes will no longer create a file field. "
+ "Please explicitly use the filename parameter or pass a BytesIO object."
+ )
+ if filename is None and content_transfer_encoding is None:
+ warnings.warn(msg, DeprecationWarning)
+ filename = name
+
+ type_options: MultiDict[str] = MultiDict({"name": name})
+ if filename is not None and not isinstance(filename, str):
+ raise TypeError("filename must be an instance of str. Got: %s" % filename)
+ if filename is None and isinstance(value, io.IOBase):
+ filename = guess_filename(value, name)
+ if filename is not None:
+ type_options["filename"] = filename
+ self._is_multipart = True
+
+ headers = {}
+ if content_type is not None:
+ if not isinstance(content_type, str):
+ raise TypeError(
+ "content_type must be an instance of str. Got: %s" % content_type
+ )
+ headers[hdrs.CONTENT_TYPE] = content_type
+ self._is_multipart = True
+ if content_transfer_encoding is not None:
+ if not isinstance(content_transfer_encoding, str):
+ raise TypeError(
+ "content_transfer_encoding must be an instance"
+ " of str. Got: %s" % content_transfer_encoding
+ )
+ msg = (
+ "content_transfer_encoding is deprecated. "
+ "To maintain compatibility with v4 please pass a BytesPayload."
+ )
+ warnings.warn(msg, DeprecationWarning)
+ self._is_multipart = True
+
+ self._fields.append((type_options, headers, value))
+
+ def add_fields(self, *fields: Any) -> None:
+ to_add = list(fields)
+
+ while to_add:
+ rec = to_add.pop(0)
+
+ if isinstance(rec, io.IOBase):
+ k = guess_filename(rec, "unknown")
+ self.add_field(k, rec) # type: ignore[arg-type]
+
+ elif isinstance(rec, (MultiDictProxy, MultiDict)):
+ to_add.extend(rec.items())
+
+ elif isinstance(rec, (list, tuple)) and len(rec) == 2:
+ k, fp = rec
+ self.add_field(k, fp) # type: ignore[arg-type]
+
+ else:
+ raise TypeError(
+ "Only io.IOBase, multidict and (name, file) "
+ "pairs allowed, use .add_field() for passing "
+ "more complex parameters, got {!r}".format(rec)
+ )
+
+ def _gen_form_urlencoded(self) -> payload.BytesPayload:
+ # form data (x-www-form-urlencoded)
+ data = []
+ for type_options, _, value in self._fields:
+ data.append((type_options["name"], value))
+
+ charset = self._charset if self._charset is not None else "utf-8"
+
+ if charset == "utf-8":
+ content_type = "application/x-www-form-urlencoded"
+ else:
+ content_type = "application/x-www-form-urlencoded; charset=%s" % charset
+
+ return payload.BytesPayload(
+ urlencode(data, doseq=True, encoding=charset).encode(),
+ content_type=content_type,
+ )
+
+ def _gen_form_data(self) -> multipart.MultipartWriter:
+ """Encode a list of fields using the multipart/form-data MIME format"""
+ if self._is_processed:
+ raise RuntimeError("Form data has been processed already")
+ for dispparams, headers, value in self._fields:
+ try:
+ if hdrs.CONTENT_TYPE in headers:
+ part = payload.get_payload(
+ value,
+ content_type=headers[hdrs.CONTENT_TYPE],
+ headers=headers,
+ encoding=self._charset,
+ )
+ else:
+ part = payload.get_payload(
+ value, headers=headers, encoding=self._charset
+ )
+ except Exception as exc:
+ raise TypeError(
+ "Can not serialize value type: %r\n "
+ "headers: %r\n value: %r" % (type(value), headers, value)
+ ) from exc
+
+ if dispparams:
+ part.set_content_disposition(
+ "form-data", quote_fields=self._quote_fields, **dispparams
+ )
+ # FIXME cgi.FieldStorage doesn't like body parts with
+ # Content-Length which were sent via chunked transfer encoding
+ assert part.headers is not None
+ part.headers.popall(hdrs.CONTENT_LENGTH, None)
+
+ self._writer.append_payload(part)
+
+ self._is_processed = True
+ return self._writer
+
+ def __call__(self) -> Payload:
+ if self._is_multipart:
+ return self._gen_form_data()
+ else:
+ return self._gen_form_urlencoded()
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/http_exceptions.py b/.venv/lib/python3.11/site-packages/aiohttp/http_exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..b8dda999acf599c1dbb616c20037c255ba27aa5b
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/http_exceptions.py
@@ -0,0 +1,112 @@
+"""Low-level http related exceptions."""
+
+from textwrap import indent
+from typing import Optional, Union
+
+from .typedefs import _CIMultiDict
+
+__all__ = ("HttpProcessingError",)
+
+
+class HttpProcessingError(Exception):
+ """HTTP error.
+
+ Shortcut for raising HTTP errors with custom code, message and headers.
+
+ code: HTTP Error code.
+ message: (optional) Error message.
+ headers: (optional) Headers to be sent in response, a list of pairs
+ """
+
+ code = 0
+ message = ""
+ headers = None
+
+ def __init__(
+ self,
+ *,
+ code: Optional[int] = None,
+ message: str = "",
+ headers: Optional[_CIMultiDict] = None,
+ ) -> None:
+ if code is not None:
+ self.code = code
+ self.headers = headers
+ self.message = message
+
+ def __str__(self) -> str:
+ msg = indent(self.message, " ")
+ return f"{self.code}, message:\n{msg}"
+
+ def __repr__(self) -> str:
+ return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
+
+
+class BadHttpMessage(HttpProcessingError):
+
+ code = 400
+ message = "Bad Request"
+
+ def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
+ super().__init__(message=message, headers=headers)
+ self.args = (message,)
+
+
+class HttpBadRequest(BadHttpMessage):
+
+ code = 400
+ message = "Bad Request"
+
+
+class PayloadEncodingError(BadHttpMessage):
+ """Base class for payload errors"""
+
+
+class ContentEncodingError(PayloadEncodingError):
+ """Content encoding error."""
+
+
+class TransferEncodingError(PayloadEncodingError):
+ """transfer encoding error."""
+
+
+class ContentLengthError(PayloadEncodingError):
+ """Not enough data for satisfy content length header."""
+
+
+class LineTooLong(BadHttpMessage):
+ def __init__(
+ self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
+ ) -> None:
+ super().__init__(
+ f"Got more than {limit} bytes ({actual_size}) when reading {line}."
+ )
+ self.args = (line, limit, actual_size)
+
+
+class InvalidHeader(BadHttpMessage):
+ def __init__(self, hdr: Union[bytes, str]) -> None:
+ hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
+ super().__init__(f"Invalid HTTP header: {hdr!r}")
+ self.hdr = hdr_s
+ self.args = (hdr,)
+
+
+class BadStatusLine(BadHttpMessage):
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+ if not isinstance(line, str):
+ line = repr(line)
+ super().__init__(error or f"Bad status line {line!r}")
+ self.args = (line,)
+ self.line = line
+
+
+class BadHttpMethod(BadStatusLine):
+ """Invalid HTTP method in status line."""
+
+ def __init__(self, line: str = "", error: Optional[str] = None) -> None:
+ super().__init__(line, error or f"Bad HTTP method in status line {line!r}")
+
+
+class InvalidURLError(BadHttpMessage):
+ pass
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/http_parser.py b/.venv/lib/python3.11/site-packages/aiohttp/http_parser.py
new file mode 100644
index 0000000000000000000000000000000000000000..1b8b5b4d49e1cc14a9d9659da7f5533d77a44cc7
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/http_parser.py
@@ -0,0 +1,1046 @@
+import abc
+import asyncio
+import re
+import string
+from contextlib import suppress
+from enum import IntEnum
+from typing import (
+ Any,
+ ClassVar,
+ Final,
+ Generic,
+ List,
+ Literal,
+ NamedTuple,
+ Optional,
+ Pattern,
+ Set,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, istr
+from yarl import URL
+
+from . import hdrs
+from .base_protocol import BaseProtocol
+from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
+from .helpers import (
+ _EXC_SENTINEL,
+ DEBUG,
+ EMPTY_BODY_METHODS,
+ EMPTY_BODY_STATUS_CODES,
+ NO_EXTENSIONS,
+ BaseTimerContext,
+ set_exception,
+)
+from .http_exceptions import (
+ BadHttpMessage,
+ BadHttpMethod,
+ BadStatusLine,
+ ContentEncodingError,
+ ContentLengthError,
+ InvalidHeader,
+ InvalidURLError,
+ LineTooLong,
+ TransferEncodingError,
+)
+from .http_writer import HttpVersion, HttpVersion10
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import RawHeaders
+
+__all__ = (
+ "HeadersParser",
+ "HttpParser",
+ "HttpRequestParser",
+ "HttpResponseParser",
+ "RawRequestMessage",
+ "RawResponseMessage",
+)
+
+_SEP = Literal[b"\r\n", b"\n"]
+
+ASCIISET: Final[Set[str]] = set(string.printable)
+
+# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
+# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
+#
+# method = token
+# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
+# "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
+# token = 1*tchar
+_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
+TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
+VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
+DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
+HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
+
+
+class RawRequestMessage(NamedTuple):
+ method: str
+ path: str
+ version: HttpVersion
+ headers: "CIMultiDictProxy[str]"
+ raw_headers: RawHeaders
+ should_close: bool
+ compression: Optional[str]
+ upgrade: bool
+ chunked: bool
+ url: URL
+
+
+class RawResponseMessage(NamedTuple):
+ version: HttpVersion
+ code: int
+ reason: str
+ headers: CIMultiDictProxy[str]
+ raw_headers: RawHeaders
+ should_close: bool
+ compression: Optional[str]
+ upgrade: bool
+ chunked: bool
+
+
+_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
+
+
+class ParseState(IntEnum):
+
+ PARSE_NONE = 0
+ PARSE_LENGTH = 1
+ PARSE_CHUNKED = 2
+ PARSE_UNTIL_EOF = 3
+
+
+class ChunkState(IntEnum):
+ PARSE_CHUNKED_SIZE = 0
+ PARSE_CHUNKED_CHUNK = 1
+ PARSE_CHUNKED_CHUNK_EOF = 2
+ PARSE_MAYBE_TRAILERS = 3
+ PARSE_TRAILERS = 4
+
+
+class HeadersParser:
+ def __init__(
+ self,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ lax: bool = False,
+ ) -> None:
+ self.max_line_size = max_line_size
+ self.max_headers = max_headers
+ self.max_field_size = max_field_size
+ self._lax = lax
+
+ def parse_headers(
+ self, lines: List[bytes]
+ ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
+ headers: CIMultiDict[str] = CIMultiDict()
+ # note: "raw" does not mean inclusion of OWS before/after the field value
+ raw_headers = []
+
+ lines_idx = 1
+ line = lines[1]
+ line_count = len(lines)
+
+ while line:
+ # Parse initial header name : value pair.
+ try:
+ bname, bvalue = line.split(b":", 1)
+ except ValueError:
+ raise InvalidHeader(line) from None
+
+ if len(bname) == 0:
+ raise InvalidHeader(bname)
+
+ # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
+ if {bname[0], bname[-1]} & {32, 9}: # {" ", "\t"}
+ raise InvalidHeader(line)
+
+ bvalue = bvalue.lstrip(b" \t")
+ if len(bname) > self.max_field_size:
+ raise LineTooLong(
+ "request header name {}".format(
+ bname.decode("utf8", "backslashreplace")
+ ),
+ str(self.max_field_size),
+ str(len(bname)),
+ )
+ name = bname.decode("utf-8", "surrogateescape")
+ if not TOKENRE.fullmatch(name):
+ raise InvalidHeader(bname)
+
+ header_length = len(bvalue)
+
+ # next line
+ lines_idx += 1
+ line = lines[lines_idx]
+
+ # consume continuation lines
+ continuation = self._lax and line and line[0] in (32, 9) # (' ', '\t')
+
+ # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
+ if continuation:
+ bvalue_lst = [bvalue]
+ while continuation:
+ header_length += len(line)
+ if header_length > self.max_field_size:
+ raise LineTooLong(
+ "request header field {}".format(
+ bname.decode("utf8", "backslashreplace")
+ ),
+ str(self.max_field_size),
+ str(header_length),
+ )
+ bvalue_lst.append(line)
+
+ # next line
+ lines_idx += 1
+ if lines_idx < line_count:
+ line = lines[lines_idx]
+ if line:
+ continuation = line[0] in (32, 9) # (' ', '\t')
+ else:
+ line = b""
+ break
+ bvalue = b"".join(bvalue_lst)
+ else:
+ if header_length > self.max_field_size:
+ raise LineTooLong(
+ "request header field {}".format(
+ bname.decode("utf8", "backslashreplace")
+ ),
+ str(self.max_field_size),
+ str(header_length),
+ )
+
+ bvalue = bvalue.strip(b" \t")
+ value = bvalue.decode("utf-8", "surrogateescape")
+
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
+ if "\n" in value or "\r" in value or "\x00" in value:
+ raise InvalidHeader(bvalue)
+
+ headers.add(name, value)
+ raw_headers.append((bname, bvalue))
+
+ return (CIMultiDictProxy(headers), tuple(raw_headers))
+
+
+def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
+ """Check if the upgrade header is supported."""
+ return headers.get(hdrs.UPGRADE, "").lower() in {"tcp", "websocket"}
+
+
+class HttpParser(abc.ABC, Generic[_MsgT]):
+ lax: ClassVar[bool] = False
+
+ def __init__(
+ self,
+ protocol: Optional[BaseProtocol] = None,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ limit: int = 2**16,
+ max_line_size: int = 8190,
+ max_headers: int = 32768,
+ max_field_size: int = 8190,
+ timer: Optional[BaseTimerContext] = None,
+ code: Optional[int] = None,
+ method: Optional[str] = None,
+ payload_exception: Optional[Type[BaseException]] = None,
+ response_with_body: bool = True,
+ read_until_eof: bool = False,
+ auto_decompress: bool = True,
+ ) -> None:
+ self.protocol = protocol
+ self.loop = loop
+ self.max_line_size = max_line_size
+ self.max_headers = max_headers
+ self.max_field_size = max_field_size
+ self.timer = timer
+ self.code = code
+ self.method = method
+ self.payload_exception = payload_exception
+ self.response_with_body = response_with_body
+ self.read_until_eof = read_until_eof
+
+ self._lines: List[bytes] = []
+ self._tail = b""
+ self._upgraded = False
+ self._payload = None
+ self._payload_parser: Optional[HttpPayloadParser] = None
+ self._auto_decompress = auto_decompress
+ self._limit = limit
+ self._headers_parser = HeadersParser(
+ max_line_size, max_headers, max_field_size, self.lax
+ )
+
+ @abc.abstractmethod
+ def parse_message(self, lines: List[bytes]) -> _MsgT: ...
+
+ @abc.abstractmethod
+ def _is_chunked_te(self, te: str) -> bool: ...
+
+ def feed_eof(self) -> Optional[_MsgT]:
+ if self._payload_parser is not None:
+ self._payload_parser.feed_eof()
+ self._payload_parser = None
+ else:
+ # try to extract partial message
+ if self._tail:
+ self._lines.append(self._tail)
+
+ if self._lines:
+ if self._lines[-1] != "\r\n":
+ self._lines.append(b"")
+ with suppress(Exception):
+ return self.parse_message(self._lines)
+ return None
+
    def feed_data(
        self,
        data: bytes,
        SEP: _SEP = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Feed raw bytes into the parser.

        Returns (list of (message, payload) pairs, upgraded flag, tail of
        unconsumed bytes). The uppercase parameters are pre-bound default
        locals for speed — callers normally pass only ``data`` (and SEP).
        """

        messages = []

        # Re-glue any partial line saved from the previous call.
        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        should_close = False
        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + len(SEP)
                    continue

                if pos >= start_pos:
                    if should_close:
                        raise BadHttpMessage("Data after `Connection: close`")

                    # line found
                    line = data[start_pos:pos]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._lines.append(line)
                    start_pos = pos + len(SEP)

                    # \r\n\r\n found
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            # Shouldn't allow +/- or other number formats.
                            # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
                            # msg.headers is already stripped of leading/trailing wsp
                            if not DIGITS.fullmatch(length_hdr):
                                raise InvalidHeader(CONTENT_LENGTH)

                            return int(length_hdr)

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade and _is_supported_upgrade(
                            msg.headers
                        )

                        method = getattr(msg, "method", self.method)
                        # code is only present on responses
                        code = getattr(msg, "code", 0)

                        assert self.protocol is not None
                        # calculate payload
                        empty_body = code in EMPTY_BODY_STATUS_CODES or bool(
                            method and method in EMPTY_BODY_METHODS
                        )
                        if not empty_body and (
                            ((length is not None and length > 0) or msg.chunked)
                            and not self._upgraded
                        ):
                            # Body with a known length or chunked framing.
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            # CONNECT switches to a tunnel: treat as upgraded.
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                        elif not empty_body and length is None and self.read_until_eof:
                            # No framing info: read the body until EOF.
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        else:
                            payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                        should_close = msg.should_close
                else:
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
                except BaseException as underlying_exc:
                    reraised_exc = underlying_exc
                    if self.payload_exception is not None:
                        reraised_exc = self.payload_exception(str(underlying_exc))

                    set_exception(
                        self._payload_parser.payload,
                        reraised_exc,
                        underlying_exc,
                    )

                    eof = True
                    data = b""

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data
+
+ def parse_headers(
+ self, lines: List[bytes]
+ ) -> Tuple[
+ "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
+ ]:
+ """Parses RFC 5322 headers from a stream.
+
+ Line continuations are supported. Returns list of header name
+ and value pairs. Header name is in upper case.
+ """
+ headers, raw_headers = self._headers_parser.parse_headers(lines)
+ close_conn = None
+ encoding = None
+ upgrade = False
+ chunked = False
+
+ # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
+ singletons = (
+ hdrs.CONTENT_LENGTH,
+ hdrs.CONTENT_LOCATION,
+ hdrs.CONTENT_RANGE,
+ hdrs.CONTENT_TYPE,
+ hdrs.ETAG,
+ hdrs.HOST,
+ hdrs.MAX_FORWARDS,
+ hdrs.SERVER,
+ hdrs.TRANSFER_ENCODING,
+ hdrs.USER_AGENT,
+ )
+ bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
+ if bad_hdr is not None:
+ raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")
+
+ # keep-alive
+ conn = headers.get(hdrs.CONNECTION)
+ if conn:
+ v = conn.lower()
+ if v == "close":
+ close_conn = True
+ elif v == "keep-alive":
+ close_conn = False
+ # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
+ elif v == "upgrade" and headers.get(hdrs.UPGRADE):
+ upgrade = True
+
+ # encoding
+ enc = headers.get(hdrs.CONTENT_ENCODING)
+ if enc:
+ enc = enc.lower()
+ if enc in ("gzip", "deflate", "br"):
+ encoding = enc
+
+ # chunking
+ te = headers.get(hdrs.TRANSFER_ENCODING)
+ if te is not None:
+ if self._is_chunked_te(te):
+ chunked = True
+
+ if hdrs.CONTENT_LENGTH in headers:
+ raise BadHttpMessage(
+ "Transfer-Encoding can't be present with Content-Length",
+ )
+
+ return (headers, raw_headers, close_conn, encoding, upgrade, chunked)
+
    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        # While upgraded, feed_data() stops header parsing and leaves the
        # remaining bytes for the caller to consume.
        self._upgraded = val
+
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        """Parse the request line, target URL and headers into a RawRequestMessage."""
        # request line
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadHttpMethod(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method
        if not TOKENRE.fullmatch(method):
            raise BadHttpMethod(method)

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 must asks to not close
                close = True
            else:  # HTTP 1.1 must ask to close.
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )

    def _is_chunked_te(self, te: str) -> bool:
        # Only the final transfer-coding may be "chunked"; anything else in a
        # request is rejected outright.
        if te.rsplit(",", maxsplit=1)[-1].strip(" \t").lower() == "chunked":
            return True
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.3
        raise BadHttpMessage("Request has invalid `Transfer-Encoding`")
+
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        # Outside DEBUG mode responses are parsed leniently: a bare LF is
        # accepted as the line separator.
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        """Parse the status line and headers into a RawResponseMessage."""
        status_line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version_s, rest = status_line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(status_line) from None

        # Split off the (optional) reason phrase.
        parts = rest.split(maxsplit=1)
        if len(parts) == 2:
            status_s, reason = parts
        else:
            status_s, reason = rest.strip(), ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        version_match = VERSRE.fullmatch(version_s)
        if version_match is None:
            raise BadStatusLine(status_line)
        version_o = HttpVersion(
            int(version_match.group(1)), int(version_match.group(2))
        )

        # The status code is a three-digit ASCII number, no padding
        if len(status_s) != 3 or not DIGITS.fullmatch(status_s):
            raise BadStatusLine(status_line)
        status_i = int(status_s)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            close = self._default_close(version_o, status_i, headers)

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )

    @staticmethod
    def _default_close(
        version_o: HttpVersion, status_i: int, headers: "CIMultiDictProxy[str]"
    ) -> bool:
        """Decide keep-alive when the response carried no Connection header."""
        if version_o <= HttpVersion10:
            return True
        # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
        if 100 <= status_i < 200 or status_i in {204, 304}:
            return False
        if hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
            return False
        # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
        return True

    def _is_chunked_te(self, te: str) -> bool:
        # https://www.rfc-editor.org/rfc/rfc9112#section-6.3-2.4.2
        final_coding = te.rsplit(",", maxsplit=1)[-1]
        return final_coding.strip(" \t").lower() == "chunked"
+
+
class HttpPayloadParser:
    """Incremental parser for an HTTP message body.

    Depending on the framing headers the body is read with a fixed
    Content-Length, chunked transfer coding, or until EOF; when enabled,
    bytes are routed through a DeflateBuffer for decompression.
    """

    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
    ) -> None:
        self._length = 0
        self._type = ParseState.PARSE_UNTIL_EOF
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self._lax = lax
        # True once the whole body has been consumed (or none is expected).
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True
        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        """Signal EOF; raises when the body is known to be truncated."""
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data for satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data for satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed body bytes; returns (body finished, unconsumed tail bytes)."""
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            # Re-glue the partial line left over from the previous call.
            if self._chunk_tail:
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                            # Verify no LF in the chunk-extension
                            if b"\n" in (ext := chunk[i:pos]):
                                exc = BadHttpMessage(
                                    f"Unexpected LF in chunk-extension: {ext!r}"
                                )
                                set_exception(self.payload, exc)
                                raise exc
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        if not re.fullmatch(HEXDIGITS, size_b):
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if self._lax and chunk.startswith(b"\r"):
                        chunk = chunk[1:]
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
+
+
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        self.out = out
        self.size = 0
        self.encoding = encoding
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding != "br":
            # zlib handles both gzip and deflate content-codings
            self.decompressor = ZLibDecompressor(encoding=encoding)
        elif HAS_BROTLI:
            self.decompressor = BrotliDecompressor()
        else:  # pragma: no cover
            raise ContentEncodingError(
                "Can not decode content-encoding: brotli (br). "
                "Please install `Brotli`"
            )

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Propagate *exc* to the wrapped output stream."""
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress *chunk* and forward the plaintext downstream."""
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        is_first_deflate = (
            not self._started_decoding and self.encoding == "deflate"
        )
        if is_first_deflate and chunk[0] & 0xF != 8:
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and signal EOF downstream."""
        trailing = self.decompressor.flush()

        if trailing or self.size > 0:
            self.out.feed_data(trailing, len(trailing))
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
+
+
# Pure-Python implementations stay importable under the *Py aliases even when
# the C-accelerated versions replace the public names below.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the Cython-based parser when extensions are enabled.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/http_writer.py b/.venv/lib/python3.11/site-packages/aiohttp/http_writer.py
new file mode 100644
index 0000000000000000000000000000000000000000..e031a97708dc7fd15ec5e8a1e63fb0eab24f2a49
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/http_writer.py
@@ -0,0 +1,249 @@
+"""Http related parsers and protocol."""
+
+import asyncio
+import sys
+import zlib
+from typing import ( # noqa
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ List,
+ NamedTuple,
+ Optional,
+ Union,
+)
+
+from multidict import CIMultiDict
+
+from .abc import AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .client_exceptions import ClientConnectionResetError
+from .compression_utils import ZLibCompressor
+from .helpers import NO_EXTENSIONS
+
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")


# Below this size joining chunks and a single write() beats writelines().
MIN_PAYLOAD_FOR_WRITELINES = 2048
IS_PY313_BEFORE_313_2 = (3, 13, 0) <= sys.version_info < (3, 13, 2)
IS_PY_BEFORE_312_9 = sys.version_info < (3, 12, 9)
SKIP_WRITELINES = IS_PY313_BEFORE_313_2 or IS_PY_BEFORE_312_9
# writelines is not safe for use
# on Python 3.12+ until 3.12.9
# on Python 3.13+ until 3.13.2
# and on older versions it not any faster than write
# CVE-2024-12254: https://github.com/python/cpython/pull/127656


class HttpVersion(NamedTuple):
    # (major, minor) pair, e.g. HttpVersion(1, 1) for HTTP/1.1.
    major: int
    minor: int


HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


# Optional tracing callbacks awaited before a body chunk / the headers go out.
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
+
+
class StreamWriter(AbstractStreamWriter):
    """Write an HTTP message (headers + body) to the protocol's transport."""

    # Remaining Content-Length budget; None means unlimited.
    length: Optional[int] = None
    # Whether the body is framed with chunked transfer coding.
    chunked: bool = False
    _eof: bool = False
    _compress: Optional[ZLibCompressor] = None

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol
        self.loop = loop
        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Underlying asyncio transport (None when disconnected)."""
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        """The owning protocol instance."""
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch body framing to chunked transfer coding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the outgoing body with the given content-coding."""
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None:
        # Synchronous single-buffer write; raises if the transport is gone.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    def _writelines(self, chunks: Iterable[bytes]) -> None:
        # Multi-buffer write; falls back to a joined write() where
        # transport.writelines is unsafe or not worthwhile (see CVE note above).
        size = 0
        for chunk in chunks:
            size += len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self._protocol.transport
        if transport is None or transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        if SKIP_WRITELINES or size < MIN_PAYLOAD_FOR_WRITELINES:
            transport.write(b"".join(chunks))
        else:
            transport.writelines(chunks)

    async def write(
        self,
        chunk: Union[bytes, bytearray, memoryview],
        *,
        drain: bool = True,
        LIMIT: int = 0x10000,
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            if not chunk:
                return

        if self.length is not None:
            # Truncate to the declared Content-Length budget.
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                self._writelines(
                    (f"{len(chunk):x}\r\n".encode("ascii"), chunk, b"\r\n")
                )
            else:
                self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    def set_eof(self) -> None:
        """Indicate that the message is complete."""
        self._eof = True

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Write the final chunk (if any), flush compression, and close the body."""
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            chunks: List[bytes] = []
            chunks_len = 0
            if chunk and (compressed_chunk := await self._compress.compress(chunk)):
                chunks_len = len(compressed_chunk)
                chunks.append(compressed_chunk)

            flush_chunk = self._compress.flush()
            chunks_len += len(flush_chunk)
            chunks.append(flush_chunk)
            assert chunks_len

            if self.chunked:
                chunk_len_pre = f"{chunks_len:x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, *chunks, b"\r\n0\r\n\r\n"))
            elif len(chunks) > 1:
                self._writelines(chunks)
            else:
                self._write(chunks[0])
        elif self.chunked:
            if chunk:
                chunk_len_pre = f"{len(chunk):x}\r\n".encode("ascii")
                self._writelines((chunk_len_pre, chunk, b"\r\n0\r\n\r\n"))
            else:
                self._write(b"0\r\n\r\n")
        elif chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        protocol = self._protocol
        if protocol.transport is not None and protocol._paused:
            await protocol._drain_helper()
+
+
+def _safe_header(string: str) -> str:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return detected in headers. "
+ "Potential header injection attack."
+ )
+ return string
+
+
+def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
+ headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
+ line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
+ return line.encode("utf-8")
+
+
# Default to the pure-Python serializer; swap in the C extension below when it
# is importable and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    pass
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/pytest_plugin.py b/.venv/lib/python3.11/site-packages/aiohttp/pytest_plugin.py
new file mode 100644
index 0000000000000000000000000000000000000000..7ce60faa4a4f628a75fe68e94b7f6de6acccc9af
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/pytest_plugin.py
@@ -0,0 +1,436 @@
+import asyncio
+import contextlib
+import inspect
+import warnings
+from typing import (
+ Any,
+ Awaitable,
+ Callable,
+ Dict,
+ Iterator,
+ Optional,
+ Protocol,
+ Type,
+ Union,
+ overload,
+)
+
+import pytest
+
+from .test_utils import (
+ BaseTestServer,
+ RawTestServer,
+ TestClient,
+ TestServer,
+ loop_context,
+ setup_test_loop,
+ teardown_test_loop,
+ unused_port as _unused_port,
+)
+from .web import Application, BaseRequest, Request
+from .web_protocol import _RequestHandler
+
+try:
+ import uvloop
+except ImportError: # pragma: no cover
+ uvloop = None # type: ignore[assignment]
+
+
class AiohttpClient(Protocol):
    """Call signature of the ``aiohttp_client`` fixture.

    Accepts either an Application or a pre-built test server and resolves
    to a started TestClient.
    """

    @overload
    async def __call__(
        self,
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...
    @overload
    async def __call__(
        self,
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...
+
+
class AiohttpServer(Protocol):
    """Call signature of the ``aiohttp_server`` fixture."""

    def __call__(
        self, app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[TestServer]: ...
+
+
class AiohttpRawServer(Protocol):
    """Call signature of the ``aiohttp_raw_server`` fixture."""

    def __call__(
        self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> Awaitable[RawTestServer]: ...
+
+
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register aiohttp-specific command line options with pytest."""
    option_specs = (
        (
            "--aiohttp-fast",
            "store_true",
            False,
            "run tests faster by disabling extra checks",
        ),
        (
            "--aiohttp-loop",
            "store",
            "pyloop",
            "run tests with specific loop: pyloop, uvloop or all",
        ),
        (
            "--aiohttp-enable-loop-debug",
            "store_true",
            False,
            "enable event loop debug mode",
        ),
    )
    for flag, action, default, help_text in option_specs:
        parser.addoption(flag, action=action, default=default, help=help_text)
+
+
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if inspect.isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    # Inject the 'request' fixture if the wrapped fixture did not ask for it;
    # remember to strip it back out before calling the original function.
    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    # Replace the fixture function so pytest calls the synchronous wrapper.
    fixturedef.func = wrapper
+
+
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """--fast config option"""
    return request.config.getoption("--aiohttp-fast")
+
+
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """--enable-loop-debug config option"""
    return request.config.getoption("--aiohttp-enable-loop-debug")
+
+
@contextlib.contextmanager
def _runtime_warning_context():  # type: ignore[no-untyped-def]
    """Context manager which checks for RuntimeWarnings.

    This exists specifically to
    avoid "coroutine 'X' was never awaited" warnings being missed.

    If RuntimeWarnings occur in the context a RuntimeError is raised.
    """
    with warnings.catch_warnings(record=True) as _warnings:
        yield
        # Collect only RuntimeWarning entries recorded while the body ran.
        rw = [
            "{w.filename}:{w.lineno}:{w.message}".format(w=w)
            for w in _warnings
            if w.category == RuntimeWarning
        ]
        if rw:
            raise RuntimeError(
                "{} Runtime Warning{},\n{}".format(
                    len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
                )
            )
+
+
+@contextlib.contextmanager
+def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
+ """Passthrough loop context.
+
+ Sets up and tears down a loop unless one is passed in via the loop
+ argument when it's passed straight through.
+ """
+ if loop:
+ # loop already exists, pass it straight through
+ yield loop
+ else:
+ # this shadows loop_context's standard behavior
+ loop = setup_test_loop()
+ yield loop
+ teardown_test_loop(loop, fast=fast)
+
+
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    # Only intercept names pytest would collect that are coroutine functions;
    # returning None lets the default collection proceed.
    if not (collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj)):
        return None
    return list(collector._genfunctions(name, obj))
+
+
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call.

    Returns True only when the item was actually handled here (i.e. it is a
    coroutine test); returning None lets pytest invoke plain test functions
    with its default calling convention.
    """
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if asyncio.iscoroutinefunction(pyfuncitem.function):
        existing_loop = pyfuncitem.funcargs.get(
            "proactor_loop"
        ) or pyfuncitem.funcargs.get("loop", None)
        with _runtime_warning_context():
            with _passthrough_loop_context(existing_loop, fast=fast) as _loop:
                testargs = {
                    arg: pyfuncitem.funcargs[arg]
                    for arg in pyfuncitem._fixtureinfo.argnames
                }
                _loop.run_until_complete(pyfuncitem.obj(**testargs))

        # Bug fix: claim the call only for coroutine tests. An unconditional
        # `return True` at function level tells pytest every item was already
        # executed, silently skipping all non-async test functions.
        return True
    return None
+
+
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from the --aiohttp-loop option."""
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories: Dict[str, Type[asyncio.AbstractEventLoopPolicy]]
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if loops == "all":
        # uvloop is optional ("?" suffix): skipped when not installed.
        loops = "pyloop,uvloop?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        # a trailing "?" marks the loop as optional rather than required
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
+
+
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    # Install the parametrized policy first so loop_context() builds the
    # requested loop flavour (pyloop / uvloop).
    asyncio.set_event_loop_policy(loop_factory())
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
+
+
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Event loop created by the Windows proactor event loop policy."""
    proactor_policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(proactor_policy)

    with loop_context(proactor_policy.new_event_loop) as new_loop:
        asyncio.set_event_loop(new_loop)
        yield new_loop
+
+
@pytest.fixture
def unused_port(aiohttp_unused_port: Callable[[], int]) -> Callable[[], int]:
    """Deprecated alias for the aiohttp_unused_port fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_unused_port fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_unused_port
+
+
@pytest.fixture
def aiohttp_unused_port() -> Callable[[], int]:
    """Return a callable that picks a port unused on the current host."""
    return _unused_port
+
+
@pytest.fixture
def aiohttp_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpServer]:
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    started_servers = []

    async def start(
        app: Application, *, port: Optional[int] = None, **kwargs: Any
    ) -> TestServer:
        test_server = TestServer(app, port=port)
        await test_server.start_server(loop=loop, **kwargs)
        started_servers.append(test_server)
        return test_server

    yield start

    # Teardown: close every server spawned through this fixture.
    async def shutdown_all() -> None:
        while started_servers:
            await started_servers.pop().close()

    loop.run_until_complete(shutdown_all())
+
+
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the aiohttp_server fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_server
+
+
@pytest.fixture
def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawServer]:
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    spawned = []

    async def start(
        handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any
    ) -> RawTestServer:
        raw_server = RawTestServer(handler, port=port)
        await raw_server.start_server(loop=loop, **kwargs)
        spawned.append(raw_server)
        return raw_server

    yield start

    # Teardown: close every raw server spawned through this fixture.
    async def shutdown_all() -> None:
        while spawned:
            await spawned.pop().close()

    loop.run_until_complete(shutdown_all())
+
+
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def] # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias for the aiohttp_raw_server fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_raw_server fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_raw_server
+
+
@pytest.fixture
def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    @overload
    async def go(
        __param: Application,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Request, Application]: ...

    @overload
    async def go(
        __param: BaseTestServer,
        *,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[BaseRequest, None]: ...

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any,
    ) -> TestClient[Any, Any]:
        # Legacy support: a factory callable may be passed instead of an
        # app/server; it is invoked with the loop plus the positional args,
        # and the kwargs are consumed by the factory rather than the client.
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        # Dispatch on the resulting object: wrap an Application in a
        # TestServer first, use a BaseTestServer directly.
        if isinstance(__param, Application):
            server_kwargs = server_kwargs or {}
            server = TestServer(__param, loop=loop, **server_kwargs)
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    # Teardown: close every client created through this fixture.
    async def finalize() -> None:
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
+
+
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def] # pragma: no cover
    """Deprecated alias for the aiohttp_client fixture."""
    warnings.warn(
        "Deprecated, use aiohttp_client fixture instead",
        DeprecationWarning,
        stacklevel=2,
    )
    return aiohttp_client
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/resolver.py b/.venv/lib/python3.11/site-packages/aiohttp/resolver.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c744514faead59eaf29d24609efc7f563716ca0
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/resolver.py
@@ -0,0 +1,187 @@
+import asyncio
+import socket
+from typing import Any, Dict, List, Optional, Tuple, Type, Union
+
+from .abc import AbstractResolver, ResolveResult
+
+__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
+
+
try:
    import aiodns

    # Only prefer aiodns by default when it is new enough to offer a full
    # getaddrinfo()-style API (older releases only expose query/gethostbyname).
    aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
except ImportError:  # pragma: no cover
    # aiodns is optional; fall back to the threaded resolver below.
    aiodns = None  # type: ignore[assignment]
    aiodns_default = False


# Flags marking results as already-numeric so they need no re-resolution.
_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
# Flags for getnameinfo(): return numeric host and service strings.
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
+
+
class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # Fall back to the running loop; raises RuntimeError outside a loop.
        self._loop = loop or asyncio.get_running_loop()

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host*/*port* via the loop's executor-backed getaddrinfo().

        Returns one ResolveResult per usable address; unusable IPv6
        entries are silently skipped.
        """
        infos = await self._loop.getaddrinfo(
            host,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=socket.AI_ADDRCONFIG,
        )

        hosts: List[ResolveResult] = []
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3]:
                    # address[3] is the scope id; non-zero means link-local.
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    resolved_host, _port = await self._loop.getnameinfo(
                        address, _NAME_SOCKET_FLAGS
                    )
                    port = int(_port)
                else:
                    resolved_host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host, port = address  # type: ignore[misc]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=proto,
                    # Results are numeric already; callers can skip lookups.
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        return hosts

    async def close(self) -> None:
        # Nothing to release; present to satisfy AbstractResolver.
        pass
+
+
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._resolver = aiodns.DNSResolver(*args, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host*/*port* via aiodns getaddrinfo().

        Raises OSError when the lookup fails or yields no usable addresses.
        """
        try:
            resp = await self._resolver.getaddrinfo(
                host,
                port=port,
                type=socket.SOCK_STREAM,
                family=family,
                flags=socket.AI_ADDRCONFIG,
            )
        except aiodns.error.DNSError as exc:
            # DNSError args are conventionally (errno, message). Require at
            # least two entries before indexing args[1] — the previous
            # `>= 1` guard raised IndexError on single-argument errors.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc
        hosts: List[ResolveResult] = []
        for node in resp.nodes:
            address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr
            family = node.family
            if family == socket.AF_INET6:
                if len(address) > 3 and address[3]:
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    result = await self._resolver.getnameinfo(
                        (address[0].decode("ascii"), *address[1:]),
                        _NAME_SOCKET_FLAGS,
                    )
                    resolved_host = result.node
                else:
                    resolved_host = address[0].decode("ascii")
                    port = address[1]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host = address[0].decode("ascii")
                port = address[1]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=0,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Fallback resolution via plain A/AAAA queries (aiodns < 1.1)."""
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # Same (errno, message) guard as in resolve() — see note there.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def close(self) -> None:
        """Cancel all outstanding aiodns queries."""
        self._resolver.cancel()
+
+
# Pick the aiodns-backed resolver when a getaddrinfo-capable aiodns is
# installed, otherwise fall back to the threaded getaddrinfo() resolver.
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/test_utils.py b/.venv/lib/python3.11/site-packages/aiohttp/test_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..be6e9b3353eecdd4ad359bb1dc1fc378a4f9a236
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/test_utils.py
@@ -0,0 +1,770 @@
+"""Utilities shared by tests."""
+
+import asyncio
+import contextlib
+import gc
+import inspect
+import ipaddress
+import os
+import socket
+import sys
+import warnings
+from abc import ABC, abstractmethod
+from types import TracebackType
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Generic,
+ Iterator,
+ List,
+ Optional,
+ Type,
+ TypeVar,
+ cast,
+ overload,
+)
+from unittest import IsolatedAsyncioTestCase, mock
+
+from aiosignal import Signal
+from multidict import CIMultiDict, CIMultiDictProxy
+from yarl import URL
+
+import aiohttp
+from aiohttp.client import (
+ _RequestContextManager,
+ _RequestOptions,
+ _WSRequestContextManager,
+)
+
+from . import ClientSession, hdrs
+from .abc import AbstractCookieJar
+from .client_reqrep import ClientResponse
+from .client_ws import ClientWebSocketResponse
+from .helpers import sentinel
+from .http import HttpVersion, RawRequestMessage
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .typedefs import StrOrURL
+from .web import (
+ Application,
+ AppRunner,
+ BaseRequest,
+ BaseRunner,
+ Request,
+ Server,
+ ServerRunner,
+ SockSite,
+ UrlMappingMatchInfo,
+)
+from .web_protocol import _RequestHandler
+
+if TYPE_CHECKING:
+ from ssl import SSLContext
+else:
+ SSLContext = None
+
+if sys.version_info >= (3, 11) and TYPE_CHECKING:
+ from typing import Unpack
+
+if sys.version_info >= (3, 11):
+ from typing import Self
+else:
+ Self = Any
+
+_ApplicationNone = TypeVar("_ApplicationNone", Application, None)
+_Request = TypeVar("_Request", bound=BaseRequest)
+
+REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"
+
+
def get_unused_port_socket(
    host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
    """Bind and return a socket on *host* with an OS-assigned free port."""
    # Port 0 asks the OS to pick any unused port.
    return get_port_socket(host, 0, family)
+
+
def get_port_socket(
    host: str, port: int, family: socket.AddressFamily
) -> socket.socket:
    """Create a TCP socket bound to ``(host, port)``."""
    sock = socket.socket(family, socket.SOCK_STREAM)
    if REUSE_ADDRESS:
        # SO_REUSEADDR has different (unsafe) semantics on Windows, so it is
        # only set on POSIX platforms. Ref:
        # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind((host, port))
    return sock
+
+
def unused_port() -> int:
    """Return a port that is unused on the current host."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # Binding to port 0 makes the OS choose a currently free port.
        probe.bind(("127.0.0.1", 0))
        free_port: int = probe.getsockname()[1]
    finally:
        probe.close()
    return free_port
+
+
class BaseTestServer(ABC):
    """Base for test servers: owns the socket, runner and root URL.

    Subclasses provide the actual runner via :meth:`_make_runner`.
    """

    # Prevent pytest from collecting this class as a test case.
    __test__ = False

    def __init__(
        self,
        *,
        scheme: str = "",
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self._loop = loop
        self.runner: Optional[BaseRunner] = None
        self._root: Optional[URL] = None
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        """Bind a socket and start serving; no-op if already started."""
        if self.runner:
            return
        self._loop = loop
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(handler_cancellation=True, **kwargs)
        await self.runner.setup()
        if not self.port:
            # Port 0 lets the OS assign a free port; read it back below.
            self.port = 0
        absolute_host = self.host
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            # Not an IP literal (e.g. a hostname); treat as IPv4.
            version = 4
        if version == 6:
            # IPv6 literals must be bracketed in a URL authority.
            absolute_host = f"[{self.host}]"
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        server = site._server
        assert server is not None
        sockets = server.sockets  # type: ignore[attr-defined]
        assert sockets is not None
        # Re-read the port actually bound (relevant when port 0 was used).
        self.port = sockets[0].getsockname()[1]
        if not self.scheme:
            self.scheme = "https" if self._ssl else "http"
        self._root = URL(f"{self.scheme}://{absolute_host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Create the runner that serves requests (subclass hook)."""
        pass

    def make_url(self, path: StrOrURL) -> URL:
        """Return an absolute URL for *path* on this server."""
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.absolute
            return self._root.join(url)
        else:
            return URL(str(self._root) + str(path))

    @property
    def started(self) -> bool:
        # True once start_server() has created the runner.
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous `with` is forbidden: startup/teardown are coroutines.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
+
+
class TestServer(BaseTestServer):
    """A test server that serves a full :class:`Application`."""

    def __init__(
        self,
        app: Application,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Build an AppRunner around the wrapped application."""
        return AppRunner(self.app, **kwargs)
+
+
class RawTestServer(BaseTestServer):
    """A test server that runs a bare request handler (no Application)."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: str = "",
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
        """Wrap the handler in a low-level Server and its ServerRunner."""
        srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
        return ServerRunner(srv, debug=debug, **kwargs)
+
+
class TestClient(Generic[_Request, _ApplicationNone]):
    """
    A test client implementation.

    To write functional tests for aiohttp based servers.

    """

    # Prevent pytest from collecting this class as a test case.
    __test__ = False

    @overload
    def __init__(
        self: "TestClient[Request, Application]",
        server: TestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    @overload
    def __init__(
        self: "TestClient[_Request, None]",
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        **kwargs: Any,
    ) -> None: ...
    def __init__(
        self,
        server: BaseTestServer,
        *,
        cookie_jar: Optional[AbstractCookieJar] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any,
    ) -> None:
        if not isinstance(server, BaseTestServer):
            raise TypeError(
                "server must be TestServer instance, found type: %r" % type(server)
            )
        self._server = server
        self._loop = loop
        if cookie_jar is None:
            # unsafe=True permits cookies for bare-IP hosts like 127.0.0.1.
            cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
        self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
        # Keep tests deterministic: disable transparent connection retries.
        self._session._retry_connection = False
        self._closed = False
        self._responses: List[ClientResponse] = []
        self._websockets: List[ClientWebSocketResponse] = []

    async def start_server(self) -> None:
        await self._server.start_server(loop=self._loop)

    @property
    def host(self) -> str:
        return self._server.host

    @property
    def port(self) -> Optional[int]:
        return self._server.port

    @property
    def server(self) -> BaseTestServer:
        return self._server

    @property
    def app(self) -> _ApplicationNone:
        # None when wrapping a RawTestServer, which has no `app` attribute.
        return getattr(self._server, "app", None)  # type: ignore[return-value]

    @property
    def session(self) -> ClientSession:
        """An internal aiohttp.ClientSession.

        Unlike the methods on the TestClient, client session requests
        do not automatically include the host in the url queried, and
        will require an absolute path to the resource.

        """
        return self._session

    def make_url(self, path: StrOrURL) -> URL:
        return self._server.make_url(path)

    async def _request(
        self, method: str, path: StrOrURL, **kwargs: Any
    ) -> ClientResponse:
        resp = await self._session.request(method, self.make_url(path), **kwargs)
        # save it to close later
        self._responses.append(resp)
        return resp

    # Typing-only stubs: on 3.11+ type checkers get precise kwargs via
    # Unpack; the runtime implementations live in the `else` branch.
    if sys.version_info >= (3, 11) and TYPE_CHECKING:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Unpack[_RequestOptions]
        ) -> _RequestContextManager: ...

        def get(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def options(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def head(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def post(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def put(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def patch(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

        def delete(
            self,
            path: StrOrURL,
            **kwargs: Unpack[_RequestOptions],
        ) -> _RequestContextManager: ...

    else:

        def request(
            self, method: str, path: StrOrURL, **kwargs: Any
        ) -> _RequestContextManager:
            """Routes a request to tested http server.

            The interface is identical to aiohttp.ClientSession.request,
            except the loop kwarg is overridden by the instance used by the
            test server.

            """
            return _RequestContextManager(self._request(method, path, **kwargs))

        def get(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP GET request."""
            return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))

        def post(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP POST request."""
            return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))

        def options(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP OPTIONS request."""
            return _RequestContextManager(
                self._request(hdrs.METH_OPTIONS, path, **kwargs)
            )

        def head(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP HEAD request."""
            return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))

        def put(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PUT request."""
            return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))

        def patch(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP PATCH request."""
            return _RequestContextManager(
                self._request(hdrs.METH_PATCH, path, **kwargs)
            )

        def delete(self, path: StrOrURL, **kwargs: Any) -> _RequestContextManager:
            """Perform an HTTP DELETE request."""
            return _RequestContextManager(
                self._request(hdrs.METH_DELETE, path, **kwargs)
            )

    def ws_connect(self, path: StrOrURL, **kwargs: Any) -> _WSRequestContextManager:
        """Initiate websocket connection.

        The api corresponds to aiohttp.ClientSession.ws_connect.

        """
        return _WSRequestContextManager(self._ws_connect(path, **kwargs))

    async def _ws_connect(
        self, path: StrOrURL, **kwargs: Any
    ) -> ClientWebSocketResponse:
        ws = await self._session.ws_connect(self.make_url(path), **kwargs)
        # Track the websocket so close() can shut it down later.
        self._websockets.append(ws)
        return ws

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run on exit when used as a(n) (asynchronous)
        context manager.

        """
        if not self._closed:
            for resp in self._responses:
                resp.close()
            for ws in self._websockets:
                await ws.close()
            await self._session.close()
            await self._server.close()
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous `with` is forbidden: startup/teardown are coroutines.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> Self:
        await self.start_server()
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        await self.close()
+
+
class AioHTTPTestCase(IsolatedAsyncioTestCase):
    """A base class to allow for unittest web applications using aiohttp.

    Provides the following:

    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
      application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
      self.get_application()

    Note that the TestClient's methods are asynchronous: you have to
    execute function on the test client using asynchronous methods.
    """

    async def get_application(self) -> Application:
        """Get application.

        This method should be overridden
        to return the aiohttp.web.Application
        object to test.
        """
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete method used to constructing web application.

        Use .get_application() coroutine instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    async def asyncSetUp(self) -> None:
        # The loop is created by IsolatedAsyncioTestCase; expose it for tests.
        self.loop = asyncio.get_running_loop()
        return await self.setUpAsync()

    async def setUpAsync(self) -> None:
        # Build app -> server -> client, then start serving.
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    async def asyncTearDown(self) -> None:
        return await self.tearDownAsync()

    async def tearDownAsync(self) -> None:
        # Closing the client also closes its server.
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient[Request, Application]:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)
+
+
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """No-op decorator kept for backward compatibility.

    Historically required on async AioHTTPTestCase test methods; since
    aiohttp 3.8 it only emits a DeprecationWarning and returns *func*
    unchanged.
    """
    warnings.warn(
        "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+",
        DeprecationWarning,
        stacklevel=2,
    )
    return func
+
+
+_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
+
+
@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """Context manager providing an event loop for tests.

    The loop is created with setup_test_loop() on entry and torn down with
    teardown_test_loop() on exit.
    """
    test_loop = setup_test_loop(loop_factory)
    yield test_loop
    teardown_test_loop(test_loop, fast=fast)
+
+
def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create a fresh event loop and install it as the current one.

    Callers are expected to invoke teardown_test_loop() once finished.
    """
    new_loop = loop_factory()
    asyncio.set_event_loop(new_loop)
    return new_loop
+
+
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Stop, close and uninstall an event_loop created by setup_test_loop."""
    if not loop.is_closed():
        # Let already-scheduled callbacks run once, then stop and close.
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        # Collect reference cycles so unclosed-resource warnings fire now.
        gc.collect()

    asyncio.set_event_loop(None)
+
+
def _create_app_mock() -> mock.MagicMock:
    """Return a MagicMock standing in for a web Application.

    The mock supports ``app[key]`` item access backed by a private dict and
    exposes a frozen ``on_response_prepare`` signal.
    """

    def _get_item(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def _set_item(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock(spec=Application)
    app.__app_dict = {}
    app.__getitem__ = _get_item
    app.__setitem__ = _set_item

    app._debug = False
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
+
+
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
    """Return a mock transport whose extra info exposes *sslcontext*."""
    transport = mock.Mock()

    def get_extra_info(key: str) -> Optional[SSLContext]:
        # Only the "sslcontext" key is answered; everything else is None.
        return sslcontext if key == "sslcontext" else None

    transport.get_extra_info.side_effect = get_extra_info
    return transport
+
+
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: StreamReader = EMPTY_PAYLOAD,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024**2,
    loop: Any = ...,
) -> Request:
    """Creates mocked web.Request testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.
    """
    task = mock.Mock()
    if loop is ...:
        # no loop passed, try to get the current one if
        # its is running as we need a real loop to create
        # executor jobs to be able to do testing
        # with a real executor
        try:
            loop = asyncio.get_running_loop()
        except RuntimeError:
            loop = mock.Mock()
            loop.create_future.return_value = ()

    if version < HttpVersion(1, 1):
        # HTTP/1.0 and earlier have no persistent connections.
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    # Derive chunked transfer-coding from the supplied headers.
    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        # Mock writer with awaitable no-op write methods.
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    protocol.transport = transport
    protocol.writer = writer

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    # Attach (possibly empty) match_info so req.match_info/req.app work.
    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req
+
+
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Return a Mock wrapping a coroutine.

    The coroutine raises *raise_exception* when provided; otherwise it
    returns *return_value* — awaiting it first (and yielding None) when the
    value is itself awaitable.
    """

    async def stub(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if inspect.isawaitable(return_value):
            # Preserve historical behavior: await the value, return None.
            await return_value
            return None
        return return_value

    return mock.Mock(wraps=stub)
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/typedefs.py b/.venv/lib/python3.11/site-packages/aiohttp/typedefs.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc8c0825b4e522f7d1b6cf0058564f322fb5a905
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/typedefs.py
@@ -0,0 +1,69 @@
+import json
+import os
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ Mapping,
+ Protocol,
+ Tuple,
+ Union,
+)
+
+from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
+from yarl import URL, Query as _Query
+
# Re-export yarl's query type under aiohttp's public name.
Query = _Query

# Default (de)serializers used wherever a json= parameter is accepted.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:
    # Parametrized multidict aliases for type checkers only; at runtime the
    # bare classes are used instead (subscripting is typing-only here).
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
# Anything accepted where HTTP headers are expected.
LooseHeaders = Union[
    Mapping[str, str],
    Mapping[istr, str],
    _CIMultiDict,
    _CIMultiDictProxy,
    Iterable[Tuple[Union[str, istr], str]],
]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# Anything accepted where cookies are expected.
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# Signature of a web request handler.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
+
+
class Middleware(Protocol):
    """Structural type for aiohttp middleware callables.

    A middleware receives the request and the next handler in the chain
    and returns an awaitable resolving to the response.
    """

    def __call__(
        self, request: "Request", handler: Handler
    ) -> Awaitable["StreamResponse"]: ...


# A filesystem path given as a string or an os.PathLike.
PathLike = Union[str, "os.PathLike[str]"]
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/web.py b/.venv/lib/python3.11/site-packages/aiohttp/web.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6ab6f6fad49307152b8ffa488fb87d9daf7594d
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/web.py
@@ -0,0 +1,605 @@
+import asyncio
+import logging
+import os
+import socket
+import sys
+import warnings
+from argparse import ArgumentParser
+from collections.abc import Iterable
+from contextlib import suppress
+from importlib import import_module
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable as TypingIterable,
+ List,
+ Optional,
+ Set,
+ Type,
+ Union,
+ cast,
+)
+
+from .abc import AbstractAccessLogger
+from .helpers import AppKey as AppKey
+from .log import access_logger
+from .typedefs import PathLike
+from .web_app import Application as Application, CleanupError as CleanupError
+from .web_exceptions import (
+ HTTPAccepted as HTTPAccepted,
+ HTTPBadGateway as HTTPBadGateway,
+ HTTPBadRequest as HTTPBadRequest,
+ HTTPClientError as HTTPClientError,
+ HTTPConflict as HTTPConflict,
+ HTTPCreated as HTTPCreated,
+ HTTPError as HTTPError,
+ HTTPException as HTTPException,
+ HTTPExpectationFailed as HTTPExpectationFailed,
+ HTTPFailedDependency as HTTPFailedDependency,
+ HTTPForbidden as HTTPForbidden,
+ HTTPFound as HTTPFound,
+ HTTPGatewayTimeout as HTTPGatewayTimeout,
+ HTTPGone as HTTPGone,
+ HTTPInsufficientStorage as HTTPInsufficientStorage,
+ HTTPInternalServerError as HTTPInternalServerError,
+ HTTPLengthRequired as HTTPLengthRequired,
+ HTTPMethodNotAllowed as HTTPMethodNotAllowed,
+ HTTPMisdirectedRequest as HTTPMisdirectedRequest,
+ HTTPMove as HTTPMove,
+ HTTPMovedPermanently as HTTPMovedPermanently,
+ HTTPMultipleChoices as HTTPMultipleChoices,
+ HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
+ HTTPNoContent as HTTPNoContent,
+ HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
+ HTTPNotAcceptable as HTTPNotAcceptable,
+ HTTPNotExtended as HTTPNotExtended,
+ HTTPNotFound as HTTPNotFound,
+ HTTPNotImplemented as HTTPNotImplemented,
+ HTTPNotModified as HTTPNotModified,
+ HTTPOk as HTTPOk,
+ HTTPPartialContent as HTTPPartialContent,
+ HTTPPaymentRequired as HTTPPaymentRequired,
+ HTTPPermanentRedirect as HTTPPermanentRedirect,
+ HTTPPreconditionFailed as HTTPPreconditionFailed,
+ HTTPPreconditionRequired as HTTPPreconditionRequired,
+ HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
+ HTTPRedirection as HTTPRedirection,
+ HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
+ HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
+ HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
+ HTTPRequestTimeout as HTTPRequestTimeout,
+ HTTPRequestURITooLong as HTTPRequestURITooLong,
+ HTTPResetContent as HTTPResetContent,
+ HTTPSeeOther as HTTPSeeOther,
+ HTTPServerError as HTTPServerError,
+ HTTPServiceUnavailable as HTTPServiceUnavailable,
+ HTTPSuccessful as HTTPSuccessful,
+ HTTPTemporaryRedirect as HTTPTemporaryRedirect,
+ HTTPTooManyRequests as HTTPTooManyRequests,
+ HTTPUnauthorized as HTTPUnauthorized,
+ HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
+ HTTPUnprocessableEntity as HTTPUnprocessableEntity,
+ HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
+ HTTPUpgradeRequired as HTTPUpgradeRequired,
+ HTTPUseProxy as HTTPUseProxy,
+ HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
+ HTTPVersionNotSupported as HTTPVersionNotSupported,
+ NotAppKeyWarning as NotAppKeyWarning,
+)
+from .web_fileresponse import FileResponse as FileResponse
+from .web_log import AccessLogger
+from .web_middlewares import (
+ middleware as middleware,
+ normalize_path_middleware as normalize_path_middleware,
+)
+from .web_protocol import (
+ PayloadAccessError as PayloadAccessError,
+ RequestHandler as RequestHandler,
+ RequestPayloadError as RequestPayloadError,
+)
+from .web_request import (
+ BaseRequest as BaseRequest,
+ FileField as FileField,
+ Request as Request,
+)
+from .web_response import (
+ ContentCoding as ContentCoding,
+ Response as Response,
+ StreamResponse as StreamResponse,
+ json_response as json_response,
+)
+from .web_routedef import (
+ AbstractRouteDef as AbstractRouteDef,
+ RouteDef as RouteDef,
+ RouteTableDef as RouteTableDef,
+ StaticDef as StaticDef,
+ delete as delete,
+ get as get,
+ head as head,
+ options as options,
+ patch as patch,
+ post as post,
+ put as put,
+ route as route,
+ static as static,
+ view as view,
+)
+from .web_runner import (
+ AppRunner as AppRunner,
+ BaseRunner as BaseRunner,
+ BaseSite as BaseSite,
+ GracefulExit as GracefulExit,
+ NamedPipeSite as NamedPipeSite,
+ ServerRunner as ServerRunner,
+ SockSite as SockSite,
+ TCPSite as TCPSite,
+ UnixSite as UnixSite,
+)
+from .web_server import Server as Server
+from .web_urldispatcher import (
+ AbstractResource as AbstractResource,
+ AbstractRoute as AbstractRoute,
+ DynamicResource as DynamicResource,
+ PlainResource as PlainResource,
+ PrefixedSubAppResource as PrefixedSubAppResource,
+ Resource as Resource,
+ ResourceRoute as ResourceRoute,
+ StaticResource as StaticResource,
+ UrlDispatcher as UrlDispatcher,
+ UrlMappingMatchInfo as UrlMappingMatchInfo,
+ View as View,
+)
+from .web_ws import (
+ WebSocketReady as WebSocketReady,
+ WebSocketResponse as WebSocketResponse,
+ WSMsgType as WSMsgType,
+)
+
+__all__ = (
+ # web_app
+ "AppKey",
+ "Application",
+ "CleanupError",
+ # web_exceptions
+ "NotAppKeyWarning",
+ "HTTPAccepted",
+ "HTTPBadGateway",
+ "HTTPBadRequest",
+ "HTTPClientError",
+ "HTTPConflict",
+ "HTTPCreated",
+ "HTTPError",
+ "HTTPException",
+ "HTTPExpectationFailed",
+ "HTTPFailedDependency",
+ "HTTPForbidden",
+ "HTTPFound",
+ "HTTPGatewayTimeout",
+ "HTTPGone",
+ "HTTPInsufficientStorage",
+ "HTTPInternalServerError",
+ "HTTPLengthRequired",
+ "HTTPMethodNotAllowed",
+ "HTTPMisdirectedRequest",
+ "HTTPMove",
+ "HTTPMovedPermanently",
+ "HTTPMultipleChoices",
+ "HTTPNetworkAuthenticationRequired",
+ "HTTPNoContent",
+ "HTTPNonAuthoritativeInformation",
+ "HTTPNotAcceptable",
+ "HTTPNotExtended",
+ "HTTPNotFound",
+ "HTTPNotImplemented",
+ "HTTPNotModified",
+ "HTTPOk",
+ "HTTPPartialContent",
+ "HTTPPaymentRequired",
+ "HTTPPermanentRedirect",
+ "HTTPPreconditionFailed",
+ "HTTPPreconditionRequired",
+ "HTTPProxyAuthenticationRequired",
+ "HTTPRedirection",
+ "HTTPRequestEntityTooLarge",
+ "HTTPRequestHeaderFieldsTooLarge",
+ "HTTPRequestRangeNotSatisfiable",
+ "HTTPRequestTimeout",
+ "HTTPRequestURITooLong",
+ "HTTPResetContent",
+ "HTTPSeeOther",
+ "HTTPServerError",
+ "HTTPServiceUnavailable",
+ "HTTPSuccessful",
+ "HTTPTemporaryRedirect",
+ "HTTPTooManyRequests",
+ "HTTPUnauthorized",
+ "HTTPUnavailableForLegalReasons",
+ "HTTPUnprocessableEntity",
+ "HTTPUnsupportedMediaType",
+ "HTTPUpgradeRequired",
+ "HTTPUseProxy",
+ "HTTPVariantAlsoNegotiates",
+ "HTTPVersionNotSupported",
+ # web_fileresponse
+ "FileResponse",
+ # web_middlewares
+ "middleware",
+ "normalize_path_middleware",
+ # web_protocol
+ "PayloadAccessError",
+ "RequestHandler",
+ "RequestPayloadError",
+ # web_request
+ "BaseRequest",
+ "FileField",
+ "Request",
+ # web_response
+ "ContentCoding",
+ "Response",
+ "StreamResponse",
+ "json_response",
+ # web_routedef
+ "AbstractRouteDef",
+ "RouteDef",
+ "RouteTableDef",
+ "StaticDef",
+ "delete",
+ "get",
+ "head",
+ "options",
+ "patch",
+ "post",
+ "put",
+ "route",
+ "static",
+ "view",
+ # web_runner
+ "AppRunner",
+ "BaseRunner",
+ "BaseSite",
+ "GracefulExit",
+ "ServerRunner",
+ "SockSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ # web_server
+ "Server",
+ # web_urldispatcher
+ "AbstractResource",
+ "AbstractRoute",
+ "DynamicResource",
+ "PlainResource",
+ "PrefixedSubAppResource",
+ "Resource",
+ "ResourceRoute",
+ "StaticResource",
+ "UrlDispatcher",
+ "UrlMappingMatchInfo",
+ "View",
+ # web_ws
+ "WebSocketReady",
+ "WebSocketResponse",
+ "WSMsgType",
+ # web
+ "run_app",
+)
+
+
if TYPE_CHECKING:
    from ssl import SSLContext
else:
    try:
        from ssl import SSLContext
    except ImportError:  # pragma: no cover
        # Placeholder so annotations referencing SSLContext still resolve
        # when Python was built without the ssl module.
        SSLContext = object  # type: ignore[misc,assignment]

# Only display warning when using -Wdefault, -We, -X dev or similar.
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)

# `host` arguments accept either a single hostname or an iterable of them.
HostSequence = TypingIterable[str]
+
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
) -> None:
    """Set up an AppRunner, start one site per requested endpoint, serve forever.

    An internal function to actually do all dirty job for application
    running: awaits the app if it is a coroutine, builds TCP/Unix/socket
    sites, then sleeps until cancelled.  The runner is always cleaned up.
    """
    if asyncio.iscoroutine(app):
        app = await app

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
        shutdown_timeout=shutdown_timeout,
        handler_cancellation=handler_cancellation,
    )

    await runner.setup()

    sites: List[BaseSite] = []

    try:
        if host is not None:
            # A single host string yields one TCP site; an iterable yields
            # one site per host, all sharing the same port.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # NOTE: parses as `(path is None and sock is None) or port is not None`
        # -- a default TCP site is created when no endpoint was requested at
        # all, or when an explicit port accompanies path/sock.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # One Unix-domain site per path (single value or iterable).
            if isinstance(path, (str, os.PathLike)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # One site per pre-created socket (single value or iterable).
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        while True:
            await asyncio.sleep(3600)
    finally:
        # Runs on cancellation too: stop sites and clean the runner up.
        await runner.cleanup()
+
+
+def _cancel_tasks(
+ to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
+) -> None:
+ if not to_cancel:
+ return
+
+ for task in to_cancel:
+ task.cancel()
+
+ loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
+
+ for task in to_cancel:
+ if task.cancelled():
+ continue
+ if task.exception() is not None:
+ loop.call_exception_handler(
+ {
+ "message": "unhandled exception during asyncio.run() shutdown",
+ "exception": task.exception(),
+ "task": task,
+ }
+ )
+
+
def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally"""
    # A caller-provided loop is used as-is; otherwise a fresh one is created
    # (and installed as the current loop below, just before serving).
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
            handler_cancellation=handler_cancellation,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        try:
            # Cancel the serving task first so its `finally` (runner cleanup)
            # runs to completion before the loop is torn down.
            main_task.cancel()
            with suppress(asyncio.CancelledError):
                loop.run_until_complete(main_task)
        finally:
            # Then cancel whatever is still pending, drain async generators,
            # and close the loop.
            _cancel_tasks(asyncio.all_tasks(loop), loop)
            loop.run_until_complete(loop.shutdown_asyncgens())
            loop.close()
+
+
def main(argv: List[str]) -> None:
    """CLI entry point (``python -m aiohttp.web``).

    Imports the ``module:function`` named on the command line, calls it with
    the remaining arguments to obtain the Application, and serves it.
    """
    arg_parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    arg_parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    arg_parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: localhost)",
        default=None,
    )
    arg_parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default=8080,
    )
    arg_parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Can be combined with hostname "
        "to serve on both Unix and TCP.",
    )
    # Unrecognized arguments are passed through to the entry function.
    args, extra_argv = arg_parser.parse_known_args(argv)

    # Import logic
    # NOTE: arg_parser.error() raises SystemExit, stopping execution here.
    mod_str, _, func_str = args.entry_func.partition(":")
    if not func_str or not mod_str:
        arg_parser.error("'entry-func' not in 'module:function' syntax")
    if mod_str.startswith("."):
        arg_parser.error("relative module names not supported")
    try:
        module = import_module(mod_str)
    except ImportError as ex:
        arg_parser.error(f"unable to import {mod_str}: {ex}")
    try:
        func = getattr(module, func_str)
    except AttributeError:
        arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")

    # Compatibility logic
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        arg_parser.error(
            "file system paths not supported by your operating environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    # A bare --path serves only on the Unix socket; otherwise TCP defaults
    # (localhost:8080) apply alongside any Unix path.
    if args.path and args.hostname is None:
        host = port = None
    else:
        host = args.hostname or "localhost"
        port = args.port

    app = func(extra_argv)
    run_app(app, host=host, port=port, path=args.path)
    arg_parser.exit(message="Stopped\n")
+
+
+if __name__ == "__main__": # pragma: no branch
+ main(sys.argv[1:]) # pragma: no cover
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/web_protocol.py b/.venv/lib/python3.11/site-packages/aiohttp/web_protocol.py
new file mode 100644
index 0000000000000000000000000000000000000000..32f503474a9ce62be2c19c98ebf989cbe018cd0c
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/web_protocol.py
@@ -0,0 +1,750 @@
+import asyncio
+import asyncio.streams
+import sys
+import traceback
+import warnings
+from collections import deque
+from contextlib import suppress
+from html import escape as html_escape
+from http import HTTPStatus
+from logging import Logger
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Deque,
+ Optional,
+ Sequence,
+ Tuple,
+ Type,
+ Union,
+ cast,
+)
+
+import attr
+import yarl
+
+from .abc import AbstractAccessLogger, AbstractStreamWriter
+from .base_protocol import BaseProtocol
+from .helpers import ceil_timeout
+from .http import (
+ HttpProcessingError,
+ HttpRequestParser,
+ HttpVersion10,
+ RawRequestMessage,
+ StreamWriter,
+)
+from .http_exceptions import BadHttpMethod
+from .log import access_logger, server_logger
+from .streams import EMPTY_PAYLOAD, StreamReader
+from .tcp_helpers import tcp_keepalive
+from .web_exceptions import HTTPException, HTTPInternalServerError
+from .web_log import AccessLogger
+from .web_request import BaseRequest
+from .web_response import Response, StreamResponse
+
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")

if TYPE_CHECKING:
    from .web_server import Server


# Signature of the request factory: builds a BaseRequest from the parsed
# message, its payload stream, this protocol, the response writer and the
# handler task.
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Signature of the user-facing request handler.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

# Placeholder request message used when parsing failed, so the normal
# request pipeline can still run and produce an error response.
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)
+
+
class RequestPayloadError(Exception):
    """Raised when the request payload cannot be read or parsed."""
+
+
class PayloadAccessError(Exception):
    """Raised on payload access after the response has been sent."""
+
+
+_PAYLOAD_ACCESS_ERROR = PayloadAccessError()
+
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    # Details of a parsing error, queued in place of a request message so
    # start() can produce an error response for it.
    status: int
    exc: BaseException
    message: str
+
+
+_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
+
+
+class RequestHandler(BaseProtocol):
+ """HTTP protocol implementation.
+
+ RequestHandler handles incoming HTTP request. It reads request line,
+ request headers and request payload and calls handle_request() method.
+ By default it always returns with 404 response.
+
+ RequestHandler handles errors in incoming request, like bad
+ status line, bad headers or incomplete payload. If any error occurs,
+ connection gets closed.
+
+ keepalive_timeout -- number of seconds before closing
+ keep-alive connection
+
+ tcp_keepalive -- TCP keep-alive is on, default is on
+
+ debug -- enable debug mode
+
+ logger -- custom logger object
+
+ access_log_class -- custom class for access_logger
+
+ access_log -- custom logging object
+
+ access_log_format -- access log format string
+
+ loop -- Optional event loop
+
+ max_line_size -- Optional maximum header line size
+
+ max_field_size -- Optional maximum header field size
+
+ max_headers -- Optional maximum header size
+
+ timeout_ceil_threshold -- Optional value to specify
+ threshold to ceil() timeout
+ values
+
+ """
+
+ __slots__ = (
+ "_request_count",
+ "_keepalive",
+ "_manager",
+ "_request_handler",
+ "_request_factory",
+ "_tcp_keepalive",
+ "_next_keepalive_close_time",
+ "_keepalive_handle",
+ "_keepalive_timeout",
+ "_lingering_time",
+ "_messages",
+ "_message_tail",
+ "_handler_waiter",
+ "_waiter",
+ "_task_handler",
+ "_upgrade",
+ "_payload_parser",
+ "_request_parser",
+ "_reading_paused",
+ "logger",
+ "debug",
+ "access_log",
+ "access_logger",
+ "_close",
+ "_force_close",
+ "_current_request",
+ "_timeout_ceil_threshold",
+ "_request_in_progress",
+ )
+
    def __init__(
        self,
        manager: "Server",
        *,
        loop: asyncio.AbstractEventLoop,
        # Default should be high enough that it's likely longer than a reverse proxy.
        keepalive_timeout: float = 3630,
        tcp_keepalive: bool = True,
        logger: Logger = server_logger,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        access_log: Logger = access_logger,
        access_log_format: str = AccessLogger.LOG_FORMAT,
        debug: bool = False,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2**16,
        auto_decompress: bool = True,
        timeout_ceil_threshold: float = 5,
    ):
        super().__init__(loop)

        # _request_count is the number of requests processed with the same connection.
        self._request_count = 0
        self._keepalive = False
        self._current_request: Optional[BaseRequest] = None
        self._manager: Optional[Server] = manager
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._next_keepalive_close_time = 0.0
        self._keepalive_handle: Optional[asyncio.Handle] = None
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)

        # Parsed-but-not-yet-handled (message, payload) pairs (HTTP pipelining).
        self._messages: Deque[_MsgType] = deque()
        self._message_tail = b""

        self._waiter: Optional[asyncio.Future[None]] = None
        self._handler_waiter: Optional[asyncio.Future[None]] = None
        self._task_handler: Optional[asyncio.Task[None]] = None

        self._upgrade = False
        self._payload_parser: Any = None
        self._request_parser: Optional[HttpRequestParser] = HttpRequestParser(
            self,
            loop,
            read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
            auto_decompress=auto_decompress,
        )

        # Defensive: keep the default threshold if the value is not numeric.
        self._timeout_ceil_threshold: float = 5
        try:
            self._timeout_ceil_threshold = float(timeout_ceil_threshold)
        except (TypeError, ValueError):
            pass

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger: Optional[AbstractAccessLogger] = access_log_class(
                access_log, access_log_format
            )
        else:
            self.access_logger = None

        self._close = False
        self._force_close = False
        self._request_in_progress = False
+
+ def __repr__(self) -> str:
+ return "<{} {}>".format(
+ self.__class__.__name__,
+ "connected" if self.transport is not None else "disconnected",
+ )
+
    @property
    def keepalive_timeout(self) -> float:
        """Seconds an idle keep-alive connection is kept open."""
        return self._keepalive_timeout
+
    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.
        """
        self._force_close = True

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        # Wait for graceful handler completion
        if self._request_in_progress:
            # The future is only created when we are shutting
            # down while the handler is still processing a request
            # to avoid creating a future for every request.
            self._handler_waiter = self._loop.create_future()
            try:
                async with ceil_timeout(timeout):
                    await self._handler_waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._handler_waiter = None
                # On 3.11+ re-raise when this coroutine itself is being
                # cancelled from outside; a mere timeout is swallowed.
                if (
                    sys.version_info >= (3, 11)
                    and (task := asyncio.current_task())
                    and task.cancelling()
                ):
                    raise
        # Then cancel handler and wait
        try:
            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())

                if self._task_handler is not None and not self._task_handler.done():
                    # shield: the timeout should not cancel the handler task
                    # itself, only stop us waiting for it.
                    await asyncio.shield(self._task_handler)
        except (asyncio.CancelledError, asyncio.TimeoutError):
            if (
                sys.version_info >= (3, 11)
                and (task := asyncio.current_task())
                and task.cancelling()
            ):
                raise

        # force-close non-idle handler
        if self._task_handler is not None:
            self._task_handler.cancel()

        self.force_close()
+
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """Register the new connection and start the request-processing task."""
        super().connection_made(transport)

        real_transport = cast(asyncio.Transport, transport)
        if self._tcp_keepalive:
            tcp_keepalive(real_transport)

        assert self._manager is not None
        self._manager.connection_made(self, real_transport)

        loop = self._loop
        if sys.version_info >= (3, 12):
            # eager_start (3.12+) runs the task synchronously up to its
            # first suspension point.
            task = asyncio.Task(self.start(), loop=loop, eager_start=True)
        else:
            task = loop.create_task(self.start())
        self._task_handler = task
+
    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """Tear down protocol state when the transport is gone."""
        if self._manager is None:
            # Already torn down (the manager is cleared below on first call).
            return
        self._manager.connection_lost(self, exc)

        # Grab value before setting _manager to None.
        handler_cancellation = self._manager.handler_cancellation

        self.force_close()
        super().connection_lost(exc)
        self._manager = None
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if handler_cancellation and self._task_handler is not None:
            self._task_handler.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
+
    def set_parser(self, parser: Any) -> None:
        """Install *parser* as the payload parser for upgraded connections."""
        # Actual type is WebReader
        assert self._payload_parser is None

        self._payload_parser = parser

        # Feed any bytes that were buffered before the parser was installed.
        if self._message_tail:
            self._payload_parser.feed_data(self._message_tail)
            self._message_tail = b""
+
    def eof_received(self) -> None:
        # Nothing to do here; returning None lets the transport close as
        # usual, and connection_lost() performs the actual teardown.
        pass
+
    def data_received(self, data: bytes) -> None:
        """Feed raw bytes to the request parser or the payload parser."""
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # Queue the error so start() produces a 400 response for it.
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            # Wake start() if it is waiting for the next request.
            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()
+
+ def keep_alive(self, val: bool) -> None:
+ """Set keep-alive connection mode.
+
+ :param bool val: new state.
+ """
+ self._keepalive = val
+ if self._keepalive_handle:
+ self._keepalive_handle.cancel()
+ self._keepalive_handle = None
+
+ def close(self) -> None:
+ """Close connection.
+
+ Stop accepting new pipelining messages and close
+ connection when handlers done processing messages.
+ """
+ self._close = True
+ if self._waiter:
+ self._waiter.cancel()
+
+ def force_close(self) -> None:
+ """Forcefully close connection."""
+ self._force_close = True
+ if self._waiter:
+ self._waiter.cancel()
+ if self.transport is not None:
+ self.transport.close()
+ self.transport = None
+
    def log_access(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> None:
        # `time` is the loop-relative start time; the logger receives the
        # elapsed duration instead.
        if self.access_logger is not None and self.access_logger.enabled:
            self.access_logger.log(request, response, self._loop.time() - time)
+
+ def log_debug(self, *args: Any, **kw: Any) -> None:
+ if self.debug:
+ self.logger.debug(*args, **kw)
+
    def log_exception(self, *args: Any, **kw: Any) -> None:
        """Log an exception via the server logger (always enabled)."""
        self.logger.exception(*args, **kw)
+
    def _process_keepalive(self) -> None:
        """Timer callback: close an idle connection once keep-alive expired."""
        self._keepalive_handle = None
        if self._force_close or not self._keepalive:
            return

        loop = self._loop
        now = loop.time()
        close_time = self._next_keepalive_close_time
        if now < close_time:
            # Keep alive close check fired too early, reschedule
            self._keepalive_handle = loop.call_at(close_time, self._process_keepalive)
            return

        # handler in idle state
        if self._waiter and not self._waiter.done():
            self.force_close()
+
    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        """Run one request through the handler and finish the response.

        Returns the response and a reset flag that is True when the client
        disconnected before the response could be completed.
        """
        self._request_in_progress = True
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                self._current_request = None
        except HTTPException as exc:
            resp = exc
            resp, reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            resp, reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            resp, reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            resp, reset = await self.finish_response(request, resp, start_time)
        finally:
            self._request_in_progress = False
            if self._handler_waiter is not None:
                # Unblock shutdown() waiting for graceful completion.
                self._handler_waiter.set_result(None)

        return resp, reset
+
    async def start(self) -> None:
        """Process incoming request.

        It reads request line, request headers and request payload, then
        calls handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. Connection is being closed always unless
        keep_alive(True) specified.
        """
        loop = self._loop
        handler = asyncio.current_task(loop)
        assert handler is not None
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            start = loop.time()

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                coro = self._handle_request(request, start, request_handler)
                if sys.version_info >= (3, 12):
                    # eager_start (3.12+) runs the handler synchronously up
                    # to its first suspension point.
                    task = asyncio.Task(coro, loop=loop, eager_start=True)
                else:
                    task = loop.create_task(coro)
                try:
                    resp, reset = await task
                except ConnectionError:
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        # Drain the remaining request body so the client can
                        # finish sending before the connection is closed.
                        try:
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()
                        except (asyncio.CancelledError, asyncio.TimeoutError):
                            if (
                                sys.version_info >= (3, 11)
                                and (t := asyncio.current_task())
                                and t.cancelling()
                            ):
                                raise

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                payload.set_exception(_PAYLOAD_ACCESS_ERROR)

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection")
                raise
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            finally:
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")
                elif not self._force_close:
                    if self._keepalive and not self._close:
                        # start keep-alive timer
                        if keepalive_timeout is not None:
                            now = loop.time()
                            close_time = now + keepalive_timeout
                            self._next_keepalive_close_time = close_time
                            if self._keepalive_handle is None:
                                self._keepalive_handle = loop.call_at(
                                    close_time, self._process_keepalive
                                )
                    else:
                        break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()
+
async def finish_response(
    self, request: BaseRequest, resp: StreamResponse, start_time: float
) -> Tuple[StreamResponse, bool]:
    """Prepare the response and write_eof, then log access.

    This has to
    be called within the context of any exception so the access logger
    can get exception information. Returns True if the client disconnects
    prematurely.
    """
    request._finish()
    if self._request_parser is not None:
        # The connection is no longer considered upgraded: resume normal
        # HTTP parsing and replay any bytes buffered past the upgrade point.
        self._request_parser.set_upgraded(False)
        self._upgrade = False
        if self._message_tail:
            self._request_parser.feed_data(self._message_tail)
            self._message_tail = b""
    try:
        prepare_meth = resp.prepare
    except AttributeError:
        # Handler returned something that is not a response object.
        # None most likely means a missing return statement; anything
        # else is a wrong return type. Either way, answer with a 500.
        if resp is None:
            self.log_exception("Missing return statement on request handler")
        else:
            self.log_exception(
                "Web-handler should return a response instance, "
                "got {!r}".format(resp)
            )
        exc = HTTPInternalServerError()
        resp = Response(
            status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers
        )
        prepare_meth = resp.prepare
    try:
        await prepare_meth(request)
        await resp.write_eof()
    except ConnectionError:
        # Client went away mid-write: still log the access and report
        # the premature disconnect to the caller via the bool flag.
        self.log_access(request, resp, start_time)
        return resp, True

    self.log_access(request, resp, start_time)
    return resp, False
+
def handle_error(
    self,
    request: BaseRequest,
    status: int = 500,
    exc: Optional[BaseException] = None,
    message: Optional[str] = None,
) -> StreamResponse:
    """Handle errors.

    Returns HTTP response with specific status code. Logs additional
    information. It always closes current connection.
    """
    if self._request_count == 1 and isinstance(exc, BadHttpMethod):
        # BadHttpMethod is common when a client sends non-HTTP
        # or encrypted traffic to an HTTP port. This is expected
        # to happen when connected to the public internet so we log
        # it at the debug level as to not fill logs with noise.
        self.logger.debug(
            "Error handling request from %s", request.remote, exc_info=exc
        )
    else:
        self.log_exception(
            "Error handling request from %s", request.remote, exc_info=exc
        )

    # some data already got sent, connection is broken
    if request.writer.output_size > 0:
        raise ConnectionError(
            "Response is sent already, cannot send another response "
            "with the error message"
        )

    ct = "text/plain"
    if status == HTTPStatus.INTERNAL_SERVER_ERROR:
        title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
        msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
        tb = None
        if self.debug:
            with suppress(Exception):
                tb = traceback.format_exc()

        if "text/html" in request.headers.get("Accept", ""):
            if tb:
                # Escape the traceback so it renders as text, not markup.
                tb = html_escape(tb)
                msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
            # NOTE: restored HTML template — the literals were corrupted in
            # this copy and produced invalid syntax/broken markup.
            message = (
                "<html><head>"
                "<title>{title}</title>"
                "</head><body>\n<h1>{title}</h1>"
                "\n{msg}\n</body></html>\n"
            ).format(title=title, msg=msg)
            ct = "text/html"
        else:
            if tb:
                msg = tb
            message = title + "\n\n" + msg

    resp = Response(status=status, text=message, content_type=ct)
    # An error response always terminates the connection.
    resp.force_close()

    return resp
+
def _make_error_handler(
    self, err_info: _ErrInfo
) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
    """Build a request handler that replies with the captured parse error."""

    async def _error_responder(request: BaseRequest) -> StreamResponse:
        # Delegate to handle_error with the stored status/exception/message.
        return self.handle_error(
            request, err_info.status, err_info.exc, err_info.message
        )

    return _error_responder
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/web_routedef.py b/.venv/lib/python3.11/site-packages/aiohttp/web_routedef.py
new file mode 100644
index 0000000000000000000000000000000000000000..f51b6cd00815a4daeabf7ef269a3225b2b764503
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/web_routedef.py
@@ -0,0 +1,214 @@
+import abc
+import os # noqa
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ Dict,
+ Iterator,
+ List,
+ Optional,
+ Sequence,
+ Type,
+ Union,
+ overload,
+)
+
+import attr
+
+from . import hdrs
+from .abc import AbstractView
+from .typedefs import Handler, PathLike
+
+if TYPE_CHECKING:
+ from .web_request import Request
+ from .web_response import StreamResponse
+ from .web_urldispatcher import AbstractRoute, UrlDispatcher
+else:
+ Request = StreamResponse = UrlDispatcher = AbstractRoute = None
+
+
+__all__ = (
+ "AbstractRouteDef",
+ "RouteDef",
+ "StaticDef",
+ "RouteTableDef",
+ "head",
+ "options",
+ "get",
+ "post",
+ "patch",
+ "put",
+ "delete",
+ "route",
+ "view",
+ "static",
+)
+
+
class AbstractRouteDef(abc.ABC):
    """Base class for route definitions that can register themselves on a router."""

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Register this definition on *router* and return the created routes."""
        pass  # pragma: no cover


# A handler is either a class-based view type or a plain request handler.
_HandlerType = Union[Type[AbstractView], Handler]
+
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """Immutable definition of a single route: method, path, handler, extras."""

    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        # Fixed: the format string was truncated in this copy and produced
        # a malformed repr; restored the canonical "<RouteDef ...>" form.
        return "<RouteDef {method} {path} -> {handler.__name__!r}{info}>".format(
            method=self.method, path=self.path, handler=self.handler, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add the route to *router*, preferring the method-specific helper."""
        if self.method in hdrs.METH_ALL:
            # e.g. "GET" -> router.add_get(...)
            reg = getattr(router, "add_" + self.method.lower())
            return [reg(self.path, self.handler, **self.kwargs)]
        else:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]
+
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """Immutable definition of a static-files route: prefix, path, extras."""

    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        info = []
        for name, value in sorted(self.kwargs.items()):
            info.append(f", {name}={value!r}")
        # Fixed: the format string was truncated in this copy; restored the
        # canonical "<StaticDef ...>" form.
        return "<StaticDef {prefix} -> {path}{info}>".format(
            prefix=self.prefix, path=self.path, info="".join(info)
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add the static resource to *router* and return its routes."""
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        routes = resource.get_info().get("routes", {})
        return list(routes.values())
+
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Create a RouteDef for *method*/*path* served by *handler*."""
    return RouteDef(method, path, handler, kwargs)


def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a HEAD route."""
    return route(hdrs.METH_HEAD, path, handler, **kwargs)


def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for an OPTIONS route."""
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)


def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """Shortcut for a GET route; *allow_head* also serves HEAD by default."""
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )


def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a POST route."""
    return route(hdrs.METH_POST, path, handler, **kwargs)


def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a PUT route."""
    return route(hdrs.METH_PUT, path, handler, **kwargs)


def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a PATCH route."""
    return route(hdrs.METH_PATCH, path, handler, **kwargs)


def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a DELETE route."""
    return route(hdrs.METH_DELETE, path, handler, **kwargs)


def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Route all HTTP methods for *path* to a class-based view."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)


def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Create a StaticDef serving files under *path* at URL *prefix*."""
    return StaticDef(prefix, path, kwargs)
+
+
# Decorator type returned by the RouteTableDef helper methods below.
_Deco = Callable[[_HandlerType], _HandlerType]


class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table"""

    def __init__(self) -> None:
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        # Fixed: the f-string was truncated in this copy (empty repr);
        # restored the canonical count-bearing form.
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef: ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Return a decorator that registers its target for *method* *path*."""

        def inner(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return inner

    def head(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a HEAD route."""
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a GET route."""
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a POST route."""
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a PUT route."""
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a PATCH route."""
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for a DELETE route."""
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def options(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut for an OPTIONS route."""
        return self.route(hdrs.METH_OPTIONS, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator shortcut routing all methods to a class-based view."""
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Register a static-files route (not a decorator; returns None)."""
        self._items.append(StaticDef(prefix, path, kwargs))
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/web_runner.py b/.venv/lib/python3.11/site-packages/aiohttp/web_runner.py
new file mode 100644
index 0000000000000000000000000000000000000000..bcfec727c8419bbc6518085ecedde1f7de8992c9
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/web_runner.py
@@ -0,0 +1,399 @@
+import asyncio
+import signal
+import socket
+import warnings
+from abc import ABC, abstractmethod
+from typing import TYPE_CHECKING, Any, List, Optional, Set
+
+from yarl import URL
+
+from .typedefs import PathLike
+from .web_app import Application
+from .web_server import Server
+
+if TYPE_CHECKING:
+ from ssl import SSLContext
+else:
+ try:
+ from ssl import SSLContext
+ except ImportError: # pragma: no cover
+ SSLContext = object # type: ignore[misc,assignment]
+
+__all__ = (
+ "BaseSite",
+ "TCPSite",
+ "UnixSite",
+ "NamedPipeSite",
+ "SockSite",
+ "BaseRunner",
+ "AppRunner",
+ "ServerRunner",
+ "GracefulExit",
+)
+
+
+class GracefulExit(SystemExit):
+ code = 1
+
+
+def _raise_graceful_exit() -> None:
+ raise GracefulExit()
+
+
class BaseSite(ABC):
    """Base class for a single listening endpoint attached to a runner."""

    __slots__ = ("_runner", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        if shutdown_timeout != 60.0:
            # Deprecated: the timeout now lives on the runner; forward it there.
            msg = "shutdown_timeout should be set on BaseRunner"
            warnings.warn(msg, DeprecationWarning, stacklevel=2)
            runner._shutdown_timeout = shutdown_timeout
        self._runner = runner
        self._ssl_context = ssl_context
        self._backlog = backlog
        # The asyncio server; set by subclasses in start().
        self._server: Optional[asyncio.AbstractServer] = None

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable, URL-like identifier of the endpoint."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        # Subclasses must call super().start() first to register with the runner.
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Close the listening server (if started) and unregister the site."""
        self._runner._check_site(self)
        if self._server is not None:  # Maybe not started yet
            self._server.close()

        self._runner._unreg_site(self)
+
+
class TCPSite(BaseSite):
    """Site serving the runner's server on a TCP host/port."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        if port is None:
            # Default port depends on whether TLS is enabled.
            port = 8443 if self._ssl_context else 8080
        self._port = port
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        """URL of the endpoint, e.g. ``http://0.0.0.0:8080``."""
        scheme = "https" if self._ssl_context else "http"
        host = "0.0.0.0" if not self._host else self._host
        return str(URL.build(scheme=scheme, host=host, port=self._port))

    async def start(self) -> None:
        """Register with the runner and begin listening on the TCP socket."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )
+
+
class UnixSite(BaseSite):
    """Site serving the runner's server on a Unix domain socket."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: PathLike,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        """URL-like identifier; built by hand since yarl can't express unix sockets."""
        prefix = "https" if self._ssl_context else "http"
        return f"{prefix}://unix:{self._path}:"

    async def start(self) -> None:
        """Register with the runner and begin listening on the unix socket."""
        await super().start()
        srv = self._runner.server
        assert srv is not None
        event_loop = asyncio.get_event_loop()
        self._server = await event_loop.create_unix_server(
            srv,
            self._path,
            ssl=self._ssl_context,
            backlog=self._backlog,
        )
+
+
class NamedPipeSite(BaseSite):
    """Site serving the runner's server on a Windows named pipe (proactor loop only)."""

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        """The pipe path is used directly as the site name."""
        return self._path

    async def start(self) -> None:
        """Register with the runner and begin serving on the named pipe."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe returns a list of servers; keep the first.
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]
+
+
class SockSite(BaseSite):
    """Site serving the runner's server on a pre-created socket object."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        scheme = "https" if self._ssl_context else "http"
        # Pre-compute the display name from the socket family/address.
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            # Take only (host, port); IPv6 getsockname() returns a 4-tuple.
            host, port = sock.getsockname()[:2]
            name = str(URL.build(scheme=scheme, host=host, port=port))
        self._name = name

    @property
    def name(self) -> str:
        """URL-like identifier derived from the socket at construction time."""
        return self._name

    async def start(self) -> None:
        """Register with the runner and begin serving on the given socket."""
        await super().start()
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        self._server = await loop.create_server(
            server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )
+
+
class BaseRunner(ABC):
    """Base class owning a Server instance and the sites that expose it."""

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites", "_shutdown_timeout")

    def __init__(
        self,
        *,
        handle_signals: bool = False,
        shutdown_timeout: float = 60.0,
        **kwargs: Any,
    ) -> None:
        self._handle_signals = handle_signals
        # Extra kwargs are forwarded to _make_server() by subclasses.
        self._kwargs = kwargs
        self._server: Optional[Server] = None
        self._sites: List[BaseSite] = []
        self._shutdown_timeout = shutdown_timeout

    @property
    def server(self) -> Optional[Server]:
        """The Server created by setup(), or None before setup()."""
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """Bound addresses of all started sites (as returned by getsockname())."""
        ret: List[Any] = []
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets  # type: ignore[attr-defined]
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        """A snapshot of the registered sites."""
        return set(self._sites)

    async def setup(self) -> None:
        """Install signal handlers (optionally) and build the Server."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Call any shutdown hooks to help server close gracefully."""

    async def cleanup(self) -> None:
        """Stop all sites, shut the server down, and remove signal handlers."""
        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guaranties that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()

        if self._server:  # If setup succeeded
            # Yield to event loop to ensure incoming requests prior to stopping the sites
            # have all started to be handled before we proceed to close idle connections.
            await asyncio.sleep(0)
            self._server.pre_shutdown()
            await self.shutdown()
            await self._server.shutdown(self._shutdown_timeout)
        await self._cleanup_server()

        self._server = None
        if self._handle_signals:
            loop = asyncio.get_running_loop()
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        """Build and return the Server instance exposed by this runner."""
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        """Release resources created by _make_server()."""
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        # Called by BaseSite.start(); duplicate registration is a caller bug.
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        # Sanity check used by BaseSite.stop().
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)
+
+
class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        """A bare Server has no application-level shutdown hooks."""

    async def _make_server(self) -> Server:
        # The server instance was supplied up front; nothing to build.
        return self._web_server

    async def _cleanup_server(self) -> None:
        """A bare Server has nothing to clean up."""
+
+
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        """The wrapped Application instance."""
        return self._app

    async def shutdown(self) -> None:
        """Run the application's on_shutdown hooks."""
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        """Bind the app to the loop, run startup hooks, freeze it, build the handler."""
        loop = asyncio.get_event_loop()
        self._app._set_loop(loop)
        self._app.on_startup.freeze()
        await self._app.startup()
        # Freeze after startup so on_startup callbacks may still mutate the app.
        self._app.freeze()

        return self._app._make_handler(loop=loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        """Run the application's cleanup hooks."""
        await self._app.cleanup()
diff --git a/.venv/lib/python3.11/site-packages/aiohttp/worker.py b/.venv/lib/python3.11/site-packages/aiohttp/worker.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ed121ac95535903280f364f7fb9ba693193d6ec
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/aiohttp/worker.py
@@ -0,0 +1,252 @@
+"""Async gunicorn worker for aiohttp.web"""
+
+import asyncio
+import os
+import re
+import signal
+import sys
+from types import FrameType
+from typing import TYPE_CHECKING, Any, Optional
+
+from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
+from gunicorn.workers import base
+
+from aiohttp import web
+
+from .helpers import set_result
+from .web_app import Application
+from .web_log import AccessLogger
+
+if TYPE_CHECKING:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+else:
+ try:
+ import ssl
+
+ SSLContext = ssl.SSLContext
+ except ImportError: # pragma: no cover
+ ssl = None # type: ignore[assignment]
+ SSLContext = object # type: ignore[misc,assignment]
+
+
+__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker")
+
+
class GunicornWebWorker(base.Worker):  # type: ignore[misc,no-any-unimported]
    """Gunicorn worker that serves an aiohttp app on a fresh asyncio loop."""

    DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
    DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default

    def __init__(self, *args: Any, **kw: Any) -> None:  # pragma: no cover
        super().__init__(*args, **kw)

        self._task: Optional[asyncio.Task[None]] = None
        self.exit_code = 0
        self._notify_waiter: Optional[asyncio.Future[bool]] = None

    def init_process(self) -> None:
        """Replace the inherited event loop with a fresh one after fork."""
        # create new event_loop after fork
        asyncio.get_event_loop().close()

        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        super().init_process()

    def run(self) -> None:
        """Drive _run() to completion, then finalize the loop and exit."""
        self._task = self.loop.create_task(self._run())

        try:  # ignore all finalization problems
            self.loop.run_until_complete(self._task)
        except Exception:
            self.log.exception("Exception in gunicorn worker")
        self.loop.run_until_complete(self.loop.shutdown_asyncgens())
        self.loop.close()

        sys.exit(self.exit_code)

    async def _run(self) -> None:
        """Set up the AppRunner and sites, then notify gunicorn until shutdown.

        Accepts either an Application, or an async factory returning an
        Application or a pre-built AppRunner.
        """
        runner = None
        if isinstance(self.wsgi, Application):
            app = self.wsgi
        elif asyncio.iscoroutinefunction(self.wsgi):
            wsgi = await self.wsgi()
            if isinstance(wsgi, web.AppRunner):
                runner = wsgi
                app = runner.app
            else:
                app = wsgi
        else:
            raise RuntimeError(
                "wsgi app should be either Application or "
                "async function returning Application, got {}".format(self.wsgi)
            )

        if runner is None:
            access_log = self.log.access_log if self.cfg.accesslog else None
            runner = web.AppRunner(
                app,
                logger=self.log,
                keepalive_timeout=self.cfg.keepalive,
                access_log=access_log,
                access_log_format=self._get_valid_log_format(
                    self.cfg.access_log_format
                ),
                # Leave 5% headroom so cleanup beats gunicorn's kill timeout.
                shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
            )
        await runner.setup()

        ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None

        # Fixed: removed a no-op self-assignment (`runner = runner`).
        server = runner.server
        assert server is not None
        for sock in self.sockets:
            site = web.SockSite(
                runner,
                sock,
                ssl_context=ctx,
            )
            await site.start()

        # If our parent changed then we shut down.
        pid = os.getpid()
        try:
            while self.alive:  # type: ignore[has-type]
                self.notify()

                cnt = server.requests_count
                if self.max_requests and cnt > self.max_requests:
                    self.alive = False
                    self.log.info("Max requests, shutting down: %s", self)

                elif pid == os.getpid() and self.ppid != os.getppid():
                    self.alive = False
                    self.log.info("Parent changed, shutting down: %s", self)
                else:
                    await self._wait_next_notify()
        except BaseException:
            pass

        await runner.cleanup()

    def _wait_next_notify(self) -> "asyncio.Future[bool]":
        """Return a future resolved after ~1s or by a shutdown signal."""
        self._notify_waiter_done()

        loop = self.loop
        assert loop is not None
        self._notify_waiter = waiter = loop.create_future()
        self.loop.call_later(1.0, self._notify_waiter_done, waiter)

        return waiter

    def _notify_waiter_done(
        self, waiter: Optional["asyncio.Future[bool]"] = None
    ) -> None:
        """Resolve *waiter* (or the current one) and clear the reference."""
        if waiter is None:
            waiter = self._notify_waiter
        if waiter is not None:
            set_result(waiter, True)

        if waiter is self._notify_waiter:
            self._notify_waiter = None

    def init_signals(self) -> None:
        """Install signal handlers through the event loop API."""
        self.loop.add_signal_handler(
            signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
        )

        self.loop.add_signal_handler(
            signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
        )

        self.loop.add_signal_handler(
            signal.SIGINT, self.handle_quit, signal.SIGINT, None
        )

        self.loop.add_signal_handler(
            signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
        )

        self.loop.add_signal_handler(
            signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
        )

        self.loop.add_signal_handler(
            signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
        )

        # Don't let SIGTERM and SIGUSR1 disturb active requests
        # by interrupting system calls
        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)
        # Reset signals so Gunicorn doesn't swallow subprocess return codes
        # See: https://github.com/aio-libs/aiohttp/issues/6130

    def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None:
        """Graceful stop: mark not-alive and wake the notify loop."""
        self.alive = False

        # worker_int callback
        self.cfg.worker_int(self)

        # wakeup closing process
        self._notify_waiter_done()

    def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None:
        """Hard stop: run worker_abort callback and exit with code 1."""
        self.alive = False
        self.exit_code = 1
        self.cfg.worker_abort(self)
        sys.exit(1)

    @staticmethod
    def _create_ssl_context(cfg: Any) -> "SSLContext":
        """Creates SSLContext instance for usage in asyncio.create_server.

        See ssl.SSLSocket.__init__ for more details.
        """
        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")

        ctx = ssl.SSLContext(cfg.ssl_version)
        ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
        ctx.verify_mode = cfg.cert_reqs
        if cfg.ca_certs:
            ctx.load_verify_locations(cfg.ca_certs)
        if cfg.ciphers:
            ctx.set_ciphers(cfg.ciphers)
        return ctx

    def _get_valid_log_format(self, source_format: str) -> str:
        """Map gunicorn's default access-log format to aiohttp's; reject %()s style."""
        if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
            return self.DEFAULT_AIOHTTP_LOG_FORMAT
        elif re.search(r"%\([^\)]+\)", source_format):
            raise ValueError(
                "Gunicorn's style options in form of `%(name)s` are not "
                "supported for the log formatting. Please use aiohttp's "
                "format specification to configure access log formatting: "
                "http://docs.aiohttp.org/en/stable/logging.html"
                "#format-specification"
            )
        else:
            return source_format
+
+
class GunicornUVLoopWebWorker(GunicornWebWorker):
    """Worker variant that installs uvloop as the event-loop policy."""

    def init_process(self) -> None:
        import uvloop

        # Close any existing event loop before setting a
        # new policy.
        asyncio.get_event_loop().close()

        # Setup uvloop policy, so that every
        # asyncio.get_event_loop() will create an instance
        # of uvloop event loop.
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

        super().init_process()
diff --git a/.venv/lib/python3.11/site-packages/filelock/__init__.py b/.venv/lib/python3.11/site-packages/filelock/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..c9d8c5b8ebe565a652b3671b3dfa066f7346af45
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/__init__.py
@@ -0,0 +1,70 @@
+"""
+A platform independent file lock that supports the with-statement.
+
+.. autodata:: filelock.__version__
+ :no-value:
+
+"""
+
+from __future__ import annotations
+
+import sys
+import warnings
+from typing import TYPE_CHECKING
+
+from ._api import AcquireReturnProxy, BaseFileLock
+from ._error import Timeout
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock, has_fcntl
+from ._windows import WindowsFileLock
+from .asyncio import (
+ AsyncAcquireReturnProxy,
+ AsyncSoftFileLock,
+ AsyncUnixFileLock,
+ AsyncWindowsFileLock,
+ BaseAsyncFileLock,
+)
+from .version import version
+
#: version of the project as a string
__version__: str = version


# Select the concrete lock implementation for the current platform.
if sys.platform == "win32":  # pragma: win32 cover
    _FileLock: type[BaseFileLock] = WindowsFileLock
    _AsyncFileLock: type[BaseAsyncFileLock] = AsyncWindowsFileLock
else:  # pragma: win32 no cover  # noqa: PLR5501
    if has_fcntl:
        _FileLock: type[BaseFileLock] = UnixFileLock
        _AsyncFileLock: type[BaseAsyncFileLock] = AsyncUnixFileLock
    else:
        # No fcntl available: fall back to the advisory soft lock and warn.
        _FileLock = SoftFileLock
        _AsyncFileLock = AsyncSoftFileLock
        # warnings may be None during interpreter shutdown.
        if warnings is not None:
            warnings.warn("only soft file lock is available", stacklevel=2)

if TYPE_CHECKING:
    # Give type checkers a single concrete class instead of a platform union.
    FileLock = SoftFileLock
    AsyncFileLock = AsyncSoftFileLock
else:
    #: Alias for the lock, which should be used for the current platform.
    FileLock = _FileLock
    AsyncFileLock = _AsyncFileLock


__all__ = [
    "AcquireReturnProxy",
    "AsyncAcquireReturnProxy",
    "AsyncFileLock",
    "AsyncSoftFileLock",
    "AsyncUnixFileLock",
    "AsyncWindowsFileLock",
    "BaseAsyncFileLock",
    "BaseFileLock",
    "FileLock",
    "SoftFileLock",
    "Timeout",
    "UnixFileLock",
    "WindowsFileLock",
    "__version__",
]
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e85b34b36df508cbc2d51087a81825d942c66a63
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/__init__.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/_api.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_api.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..26721b542e778088763663eca168d0bc3d6753de
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_api.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/_error.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_error.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..60462a19994111959212aab69c853bd7f11d1fa9
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_error.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/_soft.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_soft.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3adecc63eca3331c252981b98adb09ea1c30375b
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_soft.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/_unix.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_unix.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ec4ebedea953dfd47bf4fea13e60922f628b9113
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_unix.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/_util.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_util.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9095910c37a269c7f2ef422e1ef8e91babe4eef5
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_util.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/_windows.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_windows.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8bcb1a5b8cab8db4baf5c7e29e0a16522c246f76
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/_windows.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/asyncio.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/asyncio.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2c335a7cda8298653eb4840ec2216e05539ee0e5
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/asyncio.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/__pycache__/version.cpython-311.pyc b/.venv/lib/python3.11/site-packages/filelock/__pycache__/version.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..72085cbed3aa9c916c6fecd7391a1b54a6e22dad
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/filelock/__pycache__/version.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/filelock/_api.py b/.venv/lib/python3.11/site-packages/filelock/_api.py
new file mode 100644
index 0000000000000000000000000000000000000000..8fde69a0fef7badcc123d17735cd784a99baed52
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/_api.py
@@ -0,0 +1,403 @@
+from __future__ import annotations
+
+import contextlib
+import inspect
+import logging
+import os
+import time
+import warnings
+from abc import ABCMeta, abstractmethod
+from dataclasses import dataclass
+from threading import local
+from typing import TYPE_CHECKING, Any, cast
+from weakref import WeakValueDictionary
+
+from ._error import Timeout
+
+if TYPE_CHECKING:
+ import sys
+ from types import TracebackType
+
+ if sys.version_info >= (3, 11): # pragma: no cover (py311+)
+ from typing import Self
+ else: # pragma: no cover ( None:
+ self.lock = lock
+
+ def __enter__(self) -> BaseFileLock:
+ return self.lock
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ self.lock.release()
+
+
+@dataclass
+class FileLockContext:
+ """A dataclass which holds the context for a ``BaseFileLock`` object."""
+
+ # The context is held in a separate class to allow optional use of thread local storage via the
+ # ThreadLocalFileContext class.
+
+ #: The path to the lock file.
+ lock_file: str
+
+ #: The default timeout value.
+ timeout: float
+
+ #: The mode for the lock files
+ mode: int
+
+ #: Whether the lock should be blocking or not
+ blocking: bool
+
+ #: The file descriptor for the *_lock_file* as it is returned by the os.open() function, not None when lock held
+ lock_file_fd: int | None = None
+
+ #: The lock counter is used for implementing the nested locking mechanism.
+ lock_counter: int = 0 # When the lock is acquired is increased and the lock is only released, when this value is 0
+
+
+class ThreadLocalFileContext(FileLockContext, local):
+ """A thread local version of the ``FileLockContext`` class."""
+
+
+class FileLockMeta(ABCMeta):
+ def __call__( # noqa: PLR0913
+ cls,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = 0o644,
+ thread_local: bool = True, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ **kwargs: Any, # capture remaining kwargs for subclasses # noqa: ANN401
+ ) -> BaseFileLock:
+ if is_singleton:
+ instance = cls._instances.get(str(lock_file)) # type: ignore[attr-defined]
+ if instance:
+ params_to_check = {
+ "thread_local": (thread_local, instance.is_thread_local()),
+ "timeout": (timeout, instance.timeout),
+ "mode": (mode, instance.mode),
+ "blocking": (blocking, instance.blocking),
+ }
+
+ non_matching_params = {
+ name: (passed_param, set_param)
+ for name, (passed_param, set_param) in params_to_check.items()
+ if passed_param != set_param
+ }
+ if not non_matching_params:
+ return cast("BaseFileLock", instance)
+
+ # parameters do not match; raise error
+ msg = "Singleton lock instances cannot be initialized with differing arguments"
+ msg += "\nNon-matching arguments: "
+ for param_name, (passed_param, set_param) in non_matching_params.items():
+ msg += f"\n\t{param_name} (existing lock has {set_param} but {passed_param} was passed)"
+ raise ValueError(msg)
+
+ # Workaround to make `__init__`'s params optional in subclasses
+ # E.g. virtualenv changes the signature of the `__init__` method in the `BaseFileLock` class descendant
+ # (https://github.com/tox-dev/filelock/pull/340)
+
+ all_params = {
+ "timeout": timeout,
+ "mode": mode,
+ "thread_local": thread_local,
+ "blocking": blocking,
+ "is_singleton": is_singleton,
+ **kwargs,
+ }
+
+ present_params = inspect.signature(cls.__init__).parameters # type: ignore[misc]
+ init_params = {key: value for key, value in all_params.items() if key in present_params}
+
+ instance = super().__call__(lock_file, **init_params)
+
+ if is_singleton:
+ cls._instances[str(lock_file)] = instance # type: ignore[attr-defined]
+
+ return cast("BaseFileLock", instance)
+
+
+class BaseFileLock(contextlib.ContextDecorator, metaclass=FileLockMeta):
+ """Abstract base class for a file lock object."""
+
+ _instances: WeakValueDictionary[str, BaseFileLock]
+
+ def __init_subclass__(cls, **kwargs: dict[str, Any]) -> None:
+ """Setup unique state for lock subclasses."""
+ super().__init_subclass__(**kwargs)
+ cls._instances = WeakValueDictionary()
+
+ def __init__( # noqa: PLR0913
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = 0o644,
+ thread_local: bool = True, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ ) -> None:
+ """
+ Create a new lock object.
+
+ :param lock_file: path to the file
+ :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
+ the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
+ to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
+ :param mode: file permissions for the lockfile
+ :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
+ ``False`` then the lock will be reentrant across threads.
+ :param blocking: whether the lock should be blocking or not
+ :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
+ per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
+ to pass the same object around.
+
+ """
+ self._is_thread_local = thread_local
+ self._is_singleton = is_singleton
+
+ # Create the context. Note that external code should not work with the context directly and should instead use
+ # properties of this class.
+ kwargs: dict[str, Any] = {
+ "lock_file": os.fspath(lock_file),
+ "timeout": timeout,
+ "mode": mode,
+ "blocking": blocking,
+ }
+ self._context: FileLockContext = (ThreadLocalFileContext if thread_local else FileLockContext)(**kwargs)
+
+ def is_thread_local(self) -> bool:
+ """:return: a flag indicating if this lock is thread local or not"""
+ return self._is_thread_local
+
+ @property
+ def is_singleton(self) -> bool:
+ """:return: a flag indicating if this lock is singleton or not"""
+ return self._is_singleton
+
+ @property
+ def lock_file(self) -> str:
+ """:return: path to the lock file"""
+ return self._context.lock_file
+
+ @property
+ def timeout(self) -> float:
+ """
+ :return: the default timeout value, in seconds
+
+ .. versionadded:: 2.0.0
+ """
+ return self._context.timeout
+
+ @timeout.setter
+ def timeout(self, value: float | str) -> None:
+ """
+ Change the default timeout value.
+
+ :param value: the new value, in seconds
+
+ """
+ self._context.timeout = float(value)
+
+ @property
+ def blocking(self) -> bool:
+ """:return: whether the locking is blocking or not"""
+ return self._context.blocking
+
+ @blocking.setter
+ def blocking(self, value: bool) -> None:
+ """
+ Change the default blocking value.
+
+ :param value: the new value as bool
+
+ """
+ self._context.blocking = value
+
+ @property
+ def mode(self) -> int:
+ """:return: the file permissions for the lockfile"""
+ return self._context.mode
+
+ @abstractmethod
+ def _acquire(self) -> None:
+ """If the file lock could be acquired, self._context.lock_file_fd holds the file descriptor of the lock file."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def _release(self) -> None:
+ """Releases the lock and sets self._context.lock_file_fd to None."""
+ raise NotImplementedError
+
+ @property
+ def is_locked(self) -> bool:
+ """
+
+ :return: A boolean indicating if the lock file is holding the lock currently.
+
+ .. versionchanged:: 2.0.0
+
+ This was previously a method and is now a property.
+ """
+ return self._context.lock_file_fd is not None
+
+ @property
+ def lock_counter(self) -> int:
+ """:return: The number of times this lock has been acquired (but not yet released)."""
+ return self._context.lock_counter
+
+ def acquire(
+ self,
+ timeout: float | None = None,
+ poll_interval: float = 0.05,
+ *,
+ poll_intervall: float | None = None,
+ blocking: bool | None = None,
+ ) -> AcquireReturnProxy:
+ """
+ Try to acquire the file lock.
+
+ :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default :attr:`~timeout` is and
+ if ``timeout < 0``, there is no timeout and this method will block until the lock could be acquired
+ :param poll_interval: interval of trying to acquire the lock file
+ :param poll_intervall: deprecated, kept for backwards compatibility, use ``poll_interval`` instead
+ :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
+ first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
+ :raises Timeout: if fails to acquire lock within the timeout period
+ :return: a context object that will unlock the file when the context is exited
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ .. versionchanged:: 2.0.0
+
+ This method returns now a *proxy* object instead of *self*,
+ so that it can be used in a with statement without side effects.
+
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self._context.timeout
+
+ if blocking is None:
+ blocking = self._context.blocking
+
+ if poll_intervall is not None:
+ msg = "use poll_interval instead of poll_intervall"
+ warnings.warn(msg, DeprecationWarning, stacklevel=2)
+ poll_interval = poll_intervall
+
+ # Increment the number right at the beginning. We can still undo it, if something fails.
+ self._context.lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self.lock_file
+ start_time = time.perf_counter()
+ try:
+ while True:
+ if not self.is_locked:
+ _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
+ self._acquire()
+ if self.is_locked:
+ _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
+ break
+ if blocking is False:
+ _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
+ raise Timeout(lock_filename) # noqa: TRY301
+ if 0 <= timeout < time.perf_counter() - start_time:
+ _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
+ raise Timeout(lock_filename) # noqa: TRY301
+ msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+ _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+ time.sleep(poll_interval)
+ except BaseException: # Something did go wrong, so decrement the counter.
+ self._context.lock_counter = max(0, self._context.lock_counter - 1)
+ raise
+ return AcquireReturnProxy(lock=self)
+
+ def release(self, force: bool = False) -> None: # noqa: FBT001, FBT002
+ """
+ Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
+ Also note, that the lock file itself is not automatically deleted.
+
+ :param force: If true, the lock counter is ignored and the lock is released in every case/
+
+ """
+ if self.is_locked:
+ self._context.lock_counter -= 1
+
+ if self._context.lock_counter == 0 or force:
+ lock_id, lock_filename = id(self), self.lock_file
+
+ _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
+ self._release()
+ self._context.lock_counter = 0
+ _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+ def __enter__(self) -> Self:
+ """
+ Acquire the lock.
+
+ :return: the lock object
+
+ """
+ self.acquire()
+ return self
+
+ def __exit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ """
+ Release the lock.
+
+ :param exc_type: the exception type if raised
+ :param exc_value: the exception value if raised
+ :param traceback: the exception traceback if raised
+
+ """
+ self.release()
+
+ def __del__(self) -> None:
+ """Called when the lock object is deleted."""
+ self.release(force=True)
+
+
+__all__ = [
+ "AcquireReturnProxy",
+ "BaseFileLock",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/_error.py b/.venv/lib/python3.11/site-packages/filelock/_error.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7ff08c0f508ad7077eb6ed1990898840c952b3a
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/_error.py
@@ -0,0 +1,30 @@
+from __future__ import annotations
+
+from typing import Any
+
+
+class Timeout(TimeoutError): # noqa: N818
+ """Raised when the lock could not be acquired in *timeout* seconds."""
+
+ def __init__(self, lock_file: str) -> None:
+ super().__init__()
+ self._lock_file = lock_file
+
+ def __reduce__(self) -> str | tuple[Any, ...]:
+ return self.__class__, (self._lock_file,) # Properly pickle the exception
+
+ def __str__(self) -> str:
+ return f"The file lock '{self._lock_file}' could not be acquired."
+
+ def __repr__(self) -> str:
+ return f"{self.__class__.__name__}({self.lock_file!r})"
+
+ @property
+ def lock_file(self) -> str:
+ """:return: The path of the file lock."""
+ return self._lock_file
+
+
+__all__ = [
+ "Timeout",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/_soft.py b/.venv/lib/python3.11/site-packages/filelock/_soft.py
new file mode 100644
index 0000000000000000000000000000000000000000..28c67f74cc82b8f55e47afd6a71972cc1fb95eb6
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/_soft.py
@@ -0,0 +1,47 @@
+from __future__ import annotations
+
+import os
+import sys
+from contextlib import suppress
+from errno import EACCES, EEXIST
+from pathlib import Path
+
+from ._api import BaseFileLock
+from ._util import ensure_directory_exists, raise_on_not_writable_file
+
+
+class SoftFileLock(BaseFileLock):
+ """Simply watches the existence of the lock file."""
+
+ def _acquire(self) -> None:
+ raise_on_not_writable_file(self.lock_file)
+ ensure_directory_exists(self.lock_file)
+ # first check for exists and read-only mode as the open will mask this case as EEXIST
+ flags = (
+ os.O_WRONLY # open for writing only
+ | os.O_CREAT
+ | os.O_EXCL # together with above raise EEXIST if the file specified by filename exists
+ | os.O_TRUNC # truncate the file to zero byte
+ )
+ try:
+ file_handler = os.open(self.lock_file, flags, self._context.mode)
+ except OSError as exception: # re-raise unless expected exception
+ if not (
+ exception.errno == EEXIST # lock already exist
+ or (exception.errno == EACCES and sys.platform == "win32") # has no access to this lock
+ ): # pragma: win32 no cover
+ raise
+ else:
+ self._context.lock_file_fd = file_handler
+
+ def _release(self) -> None:
+ assert self._context.lock_file_fd is not None # noqa: S101
+ os.close(self._context.lock_file_fd) # the lock file is definitely not None
+ self._context.lock_file_fd = None
+ with suppress(OSError): # the file is already deleted and that's what we want
+ Path(self.lock_file).unlink()
+
+
+__all__ = [
+ "SoftFileLock",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/_unix.py b/.venv/lib/python3.11/site-packages/filelock/_unix.py
new file mode 100644
index 0000000000000000000000000000000000000000..04f590daa99c7c689aa9fc5d7dc064d1ee027485
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/_unix.py
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+import os
+import sys
+from contextlib import suppress
+from errno import ENOSYS
+from pathlib import Path
+from typing import cast
+
+from ._api import BaseFileLock
+from ._util import ensure_directory_exists
+
+#: a flag to indicate if the fcntl API is available
+has_fcntl = False
+if sys.platform == "win32": # pragma: win32 cover
+
+ class UnixFileLock(BaseFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+ def _acquire(self) -> None:
+ raise NotImplementedError
+
+ def _release(self) -> None:
+ raise NotImplementedError
+
+else: # pragma: win32 no cover
+ try:
+ import fcntl
+ except ImportError:
+ pass
+ else:
+ has_fcntl = True
+
+ class UnixFileLock(BaseFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+ def _acquire(self) -> None:
+ ensure_directory_exists(self.lock_file)
+ open_flags = os.O_RDWR | os.O_TRUNC
+ if not Path(self.lock_file).exists():
+ open_flags |= os.O_CREAT
+ fd = os.open(self.lock_file, open_flags, self._context.mode)
+ with suppress(PermissionError): # This locked is not owned by this UID
+ os.fchmod(fd, self._context.mode)
+ try:
+ fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
+ except OSError as exception:
+ os.close(fd)
+ if exception.errno == ENOSYS: # NotImplemented error
+ msg = "FileSystem does not appear to support flock; use SoftFileLock instead"
+ raise NotImplementedError(msg) from exception
+ else:
+ self._context.lock_file_fd = fd
+
+ def _release(self) -> None:
+ # Do not remove the lockfile:
+ # https://github.com/tox-dev/py-filelock/issues/31
+ # https://stackoverflow.com/questions/17708885/flock-removing-locked-file-without-race-condition
+ fd = cast("int", self._context.lock_file_fd)
+ self._context.lock_file_fd = None
+ fcntl.flock(fd, fcntl.LOCK_UN)
+ os.close(fd)
+
+
+__all__ = [
+ "UnixFileLock",
+ "has_fcntl",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/_util.py b/.venv/lib/python3.11/site-packages/filelock/_util.py
new file mode 100644
index 0000000000000000000000000000000000000000..c671e8533873948f0e1b5575ff952c722019f067
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/_util.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+import os
+import stat
+import sys
+from errno import EACCES, EISDIR
+from pathlib import Path
+
+
+def raise_on_not_writable_file(filename: str) -> None:
+ """
+ Raise an exception if attempting to open the file for writing would fail.
+
+ This is done so files that will never be writable can be separated from files that are writable but currently
+ locked.
+
+ :param filename: file to check
+ :raises OSError: as if the file was opened for writing.
+
+ """
+ try: # use stat to do exists + can write to check without race condition
+ file_stat = os.stat(filename) # noqa: PTH116
+ except OSError:
+ return # swallow does not exist or other errors
+
+ if file_stat.st_mtime != 0: # if os.stat returns but modification is zero that's an invalid os.stat - ignore it
+ if not (file_stat.st_mode & stat.S_IWUSR):
+ raise PermissionError(EACCES, "Permission denied", filename)
+
+ if stat.S_ISDIR(file_stat.st_mode):
+ if sys.platform == "win32": # pragma: win32 cover
+ # On Windows, this is PermissionError
+ raise PermissionError(EACCES, "Permission denied", filename)
+ else: # pragma: win32 no cover # noqa: RET506
+ # On linux / macOS, this is IsADirectoryError
+ raise IsADirectoryError(EISDIR, "Is a directory", filename)
+
+
+def ensure_directory_exists(filename: Path | str) -> None:
+ """
+ Ensure the directory containing the file exists (create it if necessary).
+
+ :param filename: file.
+
+ """
+ Path(filename).parent.mkdir(parents=True, exist_ok=True)
+
+
+__all__ = [
+ "ensure_directory_exists",
+ "raise_on_not_writable_file",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/_windows.py b/.venv/lib/python3.11/site-packages/filelock/_windows.py
new file mode 100644
index 0000000000000000000000000000000000000000..348251d1067c28c55a6a267f8d11337abfae837f
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/_windows.py
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+import os
+import sys
+from contextlib import suppress
+from errno import EACCES
+from pathlib import Path
+from typing import cast
+
+from ._api import BaseFileLock
+from ._util import ensure_directory_exists, raise_on_not_writable_file
+
+if sys.platform == "win32": # pragma: win32 cover
+ import msvcrt
+
+ class WindowsFileLock(BaseFileLock):
+ """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""
+
+ def _acquire(self) -> None:
+ raise_on_not_writable_file(self.lock_file)
+ ensure_directory_exists(self.lock_file)
+ flags = (
+ os.O_RDWR # open for read and write
+ | os.O_CREAT # create file if not exists
+ | os.O_TRUNC # truncate file if not empty
+ )
+ try:
+ fd = os.open(self.lock_file, flags, self._context.mode)
+ except OSError as exception:
+ if exception.errno != EACCES: # has no access to this lock
+ raise
+ else:
+ try:
+ msvcrt.locking(fd, msvcrt.LK_NBLCK, 1)
+ except OSError as exception:
+ os.close(fd) # close file first
+ if exception.errno != EACCES: # file is already locked
+ raise
+ else:
+ self._context.lock_file_fd = fd
+
+ def _release(self) -> None:
+ fd = cast("int", self._context.lock_file_fd)
+ self._context.lock_file_fd = None
+ msvcrt.locking(fd, msvcrt.LK_UNLCK, 1)
+ os.close(fd)
+
+ with suppress(OSError): # Probably another instance of the application hat acquired the file lock.
+ Path(self.lock_file).unlink()
+
+else: # pragma: win32 no cover
+
+ class WindowsFileLock(BaseFileLock):
+ """Uses the :func:`msvcrt.locking` function to hard lock the lock file on Windows systems."""
+
+ def _acquire(self) -> None:
+ raise NotImplementedError
+
+ def _release(self) -> None:
+ raise NotImplementedError
+
+
+__all__ = [
+ "WindowsFileLock",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/asyncio.py b/.venv/lib/python3.11/site-packages/filelock/asyncio.py
new file mode 100644
index 0000000000000000000000000000000000000000..252de203078677c86345c6ad99230317197b9d28
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/asyncio.py
@@ -0,0 +1,342 @@
+"""An asyncio-based implementation of the file lock.""" # noqa: A005
+
+from __future__ import annotations
+
+import asyncio
+import contextlib
+import logging
+import os
+import time
+from dataclasses import dataclass
+from threading import local
+from typing import TYPE_CHECKING, Any, Callable, NoReturn, cast
+
+from ._api import BaseFileLock, FileLockContext, FileLockMeta
+from ._error import Timeout
+from ._soft import SoftFileLock
+from ._unix import UnixFileLock
+from ._windows import WindowsFileLock
+
+if TYPE_CHECKING:
+ import sys
+ from concurrent import futures
+ from types import TracebackType
+
+ if sys.version_info >= (3, 11): # pragma: no cover (py311+)
+ from typing import Self
+ else: # pragma: no cover ( None: # noqa: D107
+ self.lock = lock
+
+ async def __aenter__(self) -> BaseAsyncFileLock: # noqa: D105
+ return self.lock
+
+ async def __aexit__( # noqa: D105
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ await self.lock.release()
+
+
+class AsyncFileLockMeta(FileLockMeta):
+ def __call__( # type: ignore[override] # noqa: PLR0913
+ cls, # noqa: N805
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = 0o644,
+ thread_local: bool = False, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ loop: asyncio.AbstractEventLoop | None = None,
+ run_in_executor: bool = True,
+ executor: futures.Executor | None = None,
+ ) -> BaseAsyncFileLock:
+ if thread_local and run_in_executor:
+ msg = "run_in_executor is not supported when thread_local is True"
+ raise ValueError(msg)
+ instance = super().__call__(
+ lock_file=lock_file,
+ timeout=timeout,
+ mode=mode,
+ thread_local=thread_local,
+ blocking=blocking,
+ is_singleton=is_singleton,
+ loop=loop,
+ run_in_executor=run_in_executor,
+ executor=executor,
+ )
+ return cast("BaseAsyncFileLock", instance)
+
+
+class BaseAsyncFileLock(BaseFileLock, metaclass=AsyncFileLockMeta):
+ """Base class for asynchronous file locks."""
+
+ def __init__( # noqa: PLR0913
+ self,
+ lock_file: str | os.PathLike[str],
+ timeout: float = -1,
+ mode: int = 0o644,
+ thread_local: bool = False, # noqa: FBT001, FBT002
+ *,
+ blocking: bool = True,
+ is_singleton: bool = False,
+ loop: asyncio.AbstractEventLoop | None = None,
+ run_in_executor: bool = True,
+ executor: futures.Executor | None = None,
+ ) -> None:
+ """
+ Create a new lock object.
+
+ :param lock_file: path to the file
+ :param timeout: default timeout when acquiring the lock, in seconds. It will be used as fallback value in \
+ the acquire method, if no timeout value (``None``) is given. If you want to disable the timeout, set it \
+ to a negative value. A timeout of 0 means that there is exactly one attempt to acquire the file lock.
+ :param mode: file permissions for the lockfile
+ :param thread_local: Whether this object's internal context should be thread local or not. If this is set to \
+ ``False`` then the lock will be reentrant across threads.
+ :param blocking: whether the lock should be blocking or not
+ :param is_singleton: If this is set to ``True`` then only one instance of this class will be created \
+ per lock file. This is useful if you want to use the lock object for reentrant locking without needing \
+ to pass the same object around.
+ :param loop: The event loop to use. If not specified, the running event loop will be used.
+ :param run_in_executor: If this is set to ``True`` then the lock will be acquired in an executor.
+ :param executor: The executor to use. If not specified, the default executor will be used.
+
+ """
+ self._is_thread_local = thread_local
+ self._is_singleton = is_singleton
+
+ # Create the context. Note that external code should not work with the context directly and should instead use
+ # properties of this class.
+ kwargs: dict[str, Any] = {
+ "lock_file": os.fspath(lock_file),
+ "timeout": timeout,
+ "mode": mode,
+ "blocking": blocking,
+ "loop": loop,
+ "run_in_executor": run_in_executor,
+ "executor": executor,
+ }
+ self._context: AsyncFileLockContext = (AsyncThreadLocalFileContext if thread_local else AsyncFileLockContext)(
+ **kwargs
+ )
+
+ @property
+ def run_in_executor(self) -> bool:
+ """::return: whether run in executor."""
+ return self._context.run_in_executor
+
+ @property
+ def executor(self) -> futures.Executor | None:
+ """::return: the executor."""
+ return self._context.executor
+
+ @executor.setter
+ def executor(self, value: futures.Executor | None) -> None: # pragma: no cover
+ """
+ Change the executor.
+
+ :param value: the new executor or ``None``
+ :type value: futures.Executor | None
+
+ """
+ self._context.executor = value
+
+ @property
+ def loop(self) -> asyncio.AbstractEventLoop | None:
+ """::return: the event loop."""
+ return self._context.loop
+
+ async def acquire( # type: ignore[override]
+ self,
+ timeout: float | None = None,
+ poll_interval: float = 0.05,
+ *,
+ blocking: bool | None = None,
+ ) -> AsyncAcquireReturnProxy:
+ """
+ Try to acquire the file lock.
+
+ :param timeout: maximum wait time for acquiring the lock, ``None`` means use the default
+ :attr:`~BaseFileLock.timeout` is and if ``timeout < 0``, there is no timeout and
+ this method will block until the lock could be acquired
+ :param poll_interval: interval of trying to acquire the lock file
+ :param blocking: defaults to True. If False, function will return immediately if it cannot obtain a lock on the
+ first attempt. Otherwise, this method will block until the timeout expires or the lock is acquired.
+ :raises Timeout: if fails to acquire lock within the timeout period
+ :return: a context object that will unlock the file when the context is exited
+
+ .. code-block:: python
+
+ # You can use this method in the context manager (recommended)
+ with lock.acquire():
+ pass
+
+ # Or use an equivalent try-finally construct:
+ lock.acquire()
+ try:
+ pass
+ finally:
+ lock.release()
+
+ """
+ # Use the default timeout, if no timeout is provided.
+ if timeout is None:
+ timeout = self._context.timeout
+
+ if blocking is None:
+ blocking = self._context.blocking
+
+ # Increment the number right at the beginning. We can still undo it, if something fails.
+ self._context.lock_counter += 1
+
+ lock_id = id(self)
+ lock_filename = self.lock_file
+ start_time = time.perf_counter()
+ try:
+ while True:
+ if not self.is_locked:
+ _LOGGER.debug("Attempting to acquire lock %s on %s", lock_id, lock_filename)
+ await self._run_internal_method(self._acquire)
+ if self.is_locked:
+ _LOGGER.debug("Lock %s acquired on %s", lock_id, lock_filename)
+ break
+ if blocking is False:
+ _LOGGER.debug("Failed to immediately acquire lock %s on %s", lock_id, lock_filename)
+ raise Timeout(lock_filename) # noqa: TRY301
+ if 0 <= timeout < time.perf_counter() - start_time:
+ _LOGGER.debug("Timeout on acquiring lock %s on %s", lock_id, lock_filename)
+ raise Timeout(lock_filename) # noqa: TRY301
+ msg = "Lock %s not acquired on %s, waiting %s seconds ..."
+ _LOGGER.debug(msg, lock_id, lock_filename, poll_interval)
+ await asyncio.sleep(poll_interval)
+ except BaseException: # Something did go wrong, so decrement the counter.
+ self._context.lock_counter = max(0, self._context.lock_counter - 1)
+ raise
+ return AsyncAcquireReturnProxy(lock=self)
+
+ async def release(self, force: bool = False) -> None: # type: ignore[override] # noqa: FBT001, FBT002
+ """
+ Releases the file lock. Please note, that the lock is only completely released, if the lock counter is 0.
+ Also note, that the lock file itself is not automatically deleted.
+
+ :param force: If true, the lock counter is ignored and the lock is released in every case/
+
+ """
+ if self.is_locked:
+ self._context.lock_counter -= 1
+
+ if self._context.lock_counter == 0 or force:
+ lock_id, lock_filename = id(self), self.lock_file
+
+ _LOGGER.debug("Attempting to release lock %s on %s", lock_id, lock_filename)
+ await self._run_internal_method(self._release)
+ self._context.lock_counter = 0
+ _LOGGER.debug("Lock %s released on %s", lock_id, lock_filename)
+
+ async def _run_internal_method(self, method: Callable[[], Any]) -> None:
+ if asyncio.iscoroutinefunction(method):
+ await method()
+ elif self.run_in_executor:
+ loop = self.loop or asyncio.get_running_loop()
+ await loop.run_in_executor(self.executor, method)
+ else:
+ method()
+
+ def __enter__(self) -> NoReturn:
+ """
+ Replace old __enter__ method to avoid using it.
+
+ NOTE: DO NOT USE `with` FOR ASYNCIO LOCKS, USE `async with` INSTEAD.
+
+ :return: none
+ :rtype: NoReturn
+ """
+ msg = "Do not use `with` for asyncio locks, use `async with` instead."
+ raise NotImplementedError(msg)
+
+ async def __aenter__(self) -> Self:
+ """
+ Acquire the lock.
+
+ :return: the lock object
+
+ """
+ await self.acquire()
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: type[BaseException] | None,
+ exc_value: BaseException | None,
+ traceback: TracebackType | None,
+ ) -> None:
+ """
+ Release the lock.
+
+ :param exc_type: the exception type if raised
+ :param exc_value: the exception value if raised
+ :param traceback: the exception traceback if raised
+
+ """
+ await self.release()
+
+ def __del__(self) -> None:
+ """Called when the lock object is deleted."""
+ with contextlib.suppress(RuntimeError):
+ loop = self.loop or asyncio.get_running_loop()
+ if not loop.is_running(): # pragma: no cover
+ loop.run_until_complete(self.release(force=True))
+ else:
+ loop.create_task(self.release(force=True))
+
+
+class AsyncSoftFileLock(SoftFileLock, BaseAsyncFileLock):
+ """Simply watches the existence of the lock file."""
+
+
+class AsyncUnixFileLock(UnixFileLock, BaseAsyncFileLock):
+ """Uses the :func:`fcntl.flock` to hard lock the lock file on unix systems."""
+
+
+class AsyncWindowsFileLock(WindowsFileLock, BaseAsyncFileLock):
+ """Uses the :func:`msvcrt.locking` to hard lock the lock file on windows systems."""
+
+
+__all__ = [
+ "AsyncAcquireReturnProxy",
+ "AsyncSoftFileLock",
+ "AsyncUnixFileLock",
+ "AsyncWindowsFileLock",
+ "BaseAsyncFileLock",
+]
diff --git a/.venv/lib/python3.11/site-packages/filelock/py.typed b/.venv/lib/python3.11/site-packages/filelock/py.typed
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/.venv/lib/python3.11/site-packages/filelock/version.py b/.venv/lib/python3.11/site-packages/filelock/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..4ef14cbdc1fcdd89aa0d799785c9d1285c9b0c23
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/filelock/version.py
@@ -0,0 +1,16 @@
+# file generated by setuptools_scm
+# don't change, don't track in version control
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Tuple, Union
+ VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+ VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '3.17.0'
+__version_tuple__ = version_tuple = (3, 17, 0)
diff --git a/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/INSTALLER b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/METADATA b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..a0741754f40b3b71cb52ef731a7b604ee697b130
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/METADATA
@@ -0,0 +1,145 @@
+Metadata-Version: 2.4
+Name: jiter
+Version: 0.8.2
+Classifier: Development Status :: 4 - Beta
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Information Technology
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: Unix
+Classifier: Operating System :: POSIX :: Linux
+Classifier: Environment :: Console
+Classifier: Environment :: MacOS X
+Classifier: Topic :: File Formats :: JSON
+Classifier: Framework :: Pydantic :: 2
+Summary: Fast iterable JSON parser.
+Keywords: JSON,parsing,deserialization,iter
+Home-Page: https://github.com/pydantic/jiter/
+Author: Samuel Colvin
+Author-email: Samuel Colvin
+License: MIT
+Requires-Python: >=3.8
+Description-Content-Type: text/markdown; charset=UTF-8; variant=GFM
+Project-URL: Source Code, https://github.com/pydantic/jiter/
+
+# jiter
+
+[](https://github.com/pydantic/jiter/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
+[](https://pypi.python.org/pypi/jiter)
+[](https://github.com/pydantic/jiter)
+[](https://github.com/pydantic/jiter/blob/main/LICENSE)
+
+This is a standalone version of the JSON parser used in `pydantic-core`. The recommendation is to only use this package directly if you do not use `pydantic`.
+
+The API is extremely minimal:
+
+```python
+def from_json(
+ json_data: bytes,
+ /,
+ *,
+ allow_inf_nan: bool = True,
+ cache_mode: Literal[True, False, "all", "keys", "none"] = "all",
+ partial_mode: Literal[True, False, "off", "on", "trailing-strings"] = False,
+ catch_duplicate_keys: bool = False,
+ float_mode: Literal["float", "decimal", "lossless-float"] = False,
+) -> Any:
+ """
+ Parse input bytes into a JSON object.
+
+ Arguments:
+ json_data: The JSON data to parse
+ allow_inf_nan: Whether to allow infinity (`Infinity` an `-Infinity`) and `NaN` values to float fields.
+ Defaults to True.
+ cache_mode: cache Python strings to improve performance at the cost of some memory usage
+ - True / 'all' - cache all strings
+ - 'keys' - cache only object keys
+ - False / 'none' - cache nothing
+ partial_mode: How to handle incomplete strings:
+ - False / 'off' - raise an exception if the input is incomplete
+ - True / 'on' - allow incomplete JSON but discard the last string if it is incomplete
+ - 'trailing-strings' - allow incomplete JSON, and include the last incomplete string in the output
+ catch_duplicate_keys: if True, raise an exception if objects contain the same key multiple times
+ float_mode: How to return floats: as a `float`, `Decimal` or `LosslessFloat`
+
+ Returns:
+ Python object built from the JSON input.
+ """
+
+def cache_clear() -> None:
+ """
+ Reset the string cache.
+ """
+
+def cache_usage() -> int:
+ """
+ get the size of the string cache.
+
+ Returns:
+ Size of the string cache in bytes.
+ """
+```
+## Examples
+
+The main function provided by Jiter is `from_json()`, which accepts a bytes object containing JSON and returns a Python dictionary, list or other value.
+
+```python
+import jiter
+
+json_data = b'{"name": "John", "age": 30}'
+parsed_data = jiter.from_json(json_data)
+print(parsed_data) # Output: {'name': 'John', 'age': 30}
+```
+
+### Handling Partial JSON
+
+Incomplete JSON objects can be parsed using the `partial_mode=` parameter.
+
+```python
+import jiter
+
+partial_json = b'{"name": "John", "age": 30, "city": "New Yor'
+
+# Raise error on incomplete JSON
+try:
+ jiter.from_json(partial_json, partial_mode=False)
+except ValueError as e:
+ print(f"Error: {e}")
+
+# Parse incomplete JSON, discarding incomplete last field
+result = jiter.from_json(partial_json, partial_mode=True)
+print(result) # Output: {'name': 'John', 'age': 30}
+
+# Parse incomplete JSON, including incomplete last field
+result = jiter.from_json(partial_json, partial_mode='trailing-strings')
+print(result) # Output: {'name': 'John', 'age': 30, 'city': 'New Yor'}
+```
+
+### Catching Duplicate Keys
+
+The `catch_duplicate_keys=True` option can be used to raise a `ValueError` if an object contains duplicate keys.
+
+```python
+import jiter
+
+json_with_dupes = b'{"foo": 1, "foo": 2}'
+
+# Default behavior (last value wins)
+result = jiter.from_json(json_with_dupes)
+print(result) # Output: {'foo': 2}
+
+# Catch duplicate keys
+try:
+ jiter.from_json(json_with_dupes, catch_duplicate_keys=True)
+except ValueError as e:
+ print(f"Error: {e}")
+```
+
diff --git a/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/RECORD b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..ef5d8fcd036c3b2d6a886fa5795d4986fce1c0bf
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/RECORD
@@ -0,0 +1,9 @@
+jiter-0.8.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+jiter-0.8.2.dist-info/METADATA,sha256=VdOT_6YbBf3ENlJDsdCNVUJ61N14uIaTHyVcHScgGb8,5177
+jiter-0.8.2.dist-info/RECORD,,
+jiter-0.8.2.dist-info/WHEEL,sha256=qfXqQP1Fc7f0pAAAyf6-qTIv7nE_-wkHw_y9EwRAQFw,129
+jiter/__init__.py,sha256=Fp9HkOixiYYDSiC_80vmiJ_sCoCGT8OAh48yltm0lP0,103
+jiter/__init__.pyi,sha256=AEs-Zbzf7c2r5vUTpTjxkLBuN7KnfFTURrWrZJAZnQY,2363
+jiter/__pycache__/__init__.cpython-311.pyc,,
+jiter/jiter.cpython-311-x86_64-linux-gnu.so,sha256=InrLB6omAldF_KvC5g9Dbq1pCwv5BQg15z84hzzhh5Q,812104
+jiter/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/WHEEL b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..5c6e5591d06124e9b15eb8759850c5d06124a401
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/jiter-0.8.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: maturin (1.7.7)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64
diff --git a/.venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc b/.venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d19ee91a6b025c17ea9e309c991ebb18b0da2f02
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/mpmath/__pycache__/function_docs.cpython-311.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5e6d7050e58b5bab524e6e6be4ec4c1ff728845920bddd8e8acd823b43b5980
+size 284937
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/INSTALLER b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..6f62d44e4ef733c0e713afcd2371fed7f2b3de67
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE
@@ -0,0 +1,3 @@
+This software is made available under the terms of *either* of the licenses
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made
+under the terms of *both* these licenses.
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.APACHE b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.APACHE
new file mode 100644
index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.APACHE
@@ -0,0 +1,177 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.BSD b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.BSD
new file mode 100644
index 0000000000000000000000000000000000000000..42ce7b75c92fb01a3f6ed17eea363f756b7da582
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/LICENSE.BSD
@@ -0,0 +1,23 @@
+Copyright (c) Donald Stufft and individual contributors.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+ 1. Redistributions of source code must retain the above copyright notice,
+ this list of conditions and the following disclaimer.
+
+ 2. Redistributions in binary form must reproduce the above copyright
+ notice, this list of conditions and the following disclaimer in the
+ documentation and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/METADATA b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..1479c8694bfbd583a896dbe9bd33cdb6d7e7371e
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/METADATA
@@ -0,0 +1,102 @@
+Metadata-Version: 2.3
+Name: packaging
+Version: 24.2
+Summary: Core utilities for Python packages
+Author-email: Donald Stufft
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Typing :: Typed
+Project-URL: Documentation, https://packaging.pypa.io/
+Project-URL: Source, https://github.com/pypa/packaging
+
+packaging
+=========
+
+.. start-intro
+
+Reusable core utilities for various Python Packaging
+`interoperability specifications `_.
+
+This library provides utilities that implement the interoperability
+specifications which have clearly one correct behaviour (eg: :pep:`440`)
+or benefit greatly from having a single shared implementation (eg: :pep:`425`).
+
+.. end-intro
+
+The ``packaging`` project includes the following: version handling, specifiers,
+markers, requirements, tags, utilities.
+
+Documentation
+-------------
+
+The `documentation`_ provides information and the API for the following:
+
+- Version Handling
+- Specifiers
+- Markers
+- Requirements
+- Tags
+- Utilities
+
+Installation
+------------
+
+Use ``pip`` to install these utilities::
+
+ pip install packaging
+
+The ``packaging`` library uses calendar-based versioning (``YY.N``).
+
+Discussion
+----------
+
+If you run into bugs, you can file them in our `issue tracker`_.
+
+You can also join ``#pypa`` on Freenode to ask questions or get involved.
+
+
+.. _`documentation`: https://packaging.pypa.io/
+.. _`issue tracker`: https://github.com/pypa/packaging/issues
+
+
+Code of Conduct
+---------------
+
+Everyone interacting in the packaging project's codebases, issue trackers, chat
+rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_.
+
+.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md
+
+Contributing
+------------
+
+The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as
+well as how to report a potential security issue. The documentation for this
+project also covers information about `project development`_ and `security`_.
+
+.. _`project development`: https://packaging.pypa.io/en/latest/development/
+.. _`security`: https://packaging.pypa.io/en/latest/security/
+
+Project History
+---------------
+
+Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for
+recent changes and project history.
+
+.. _`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/
+
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/RECORD b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..0fc71b5458a740453b5e8e0e18fd3be595d55791
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/RECORD
@@ -0,0 +1,40 @@
+packaging-24.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197
+packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174
+packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344
+packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204
+packaging-24.2.dist-info/RECORD,,
+packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82
+packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494
+packaging/__pycache__/__init__.cpython-311.pyc,,
+packaging/__pycache__/_elffile.cpython-311.pyc,,
+packaging/__pycache__/_manylinux.cpython-311.pyc,,
+packaging/__pycache__/_musllinux.cpython-311.pyc,,
+packaging/__pycache__/_parser.cpython-311.pyc,,
+packaging/__pycache__/_structures.cpython-311.pyc,,
+packaging/__pycache__/_tokenizer.cpython-311.pyc,,
+packaging/__pycache__/markers.cpython-311.pyc,,
+packaging/__pycache__/metadata.cpython-311.pyc,,
+packaging/__pycache__/requirements.cpython-311.pyc,,
+packaging/__pycache__/specifiers.cpython-311.pyc,,
+packaging/__pycache__/tags.cpython-311.pyc,,
+packaging/__pycache__/utils.cpython-311.pyc,,
+packaging/__pycache__/version.cpython-311.pyc,,
+packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306
+packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612
+packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694
+packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236
+packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431
+packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273
+packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715
+packaging/licenses/__pycache__/__init__.cpython-311.pyc,,
+packaging/licenses/__pycache__/_spdx.cpython-311.pyc,,
+packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398
+packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561
+packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762
+packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947
+packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074
+packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014
+packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050
+packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676
diff --git a/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/WHEEL b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..e3c6feefa22927866e3fd5575379ea972b432aaf
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/packaging-24.2.dist-info/WHEEL
@@ -0,0 +1,4 @@
+Wheel-Version: 1.0
+Generator: flit 3.10.1
+Root-Is-Purelib: true
+Tag: py3-none-any
diff --git a/.venv/lib/python3.11/site-packages/pybind11/__pycache__/__init__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0ed1ae7680380b1f6506e1438c3096ef7ec54249
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/__init__.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/pybind11/__pycache__/__main__.cpython-311.pyc b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/__main__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..4f13956b5cbde42f56690b7a59f18ecbd375699f
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/__main__.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/pybind11/__pycache__/_version.cpython-311.pyc b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/_version.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..8130874c701b9e677105b6a18b27df275f223bdd
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/_version.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/pybind11/__pycache__/commands.cpython-311.pyc b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/commands.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..abf4b22a20f12f7fad958d97ef50638352d902e8
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/commands.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/pybind11/__pycache__/setup_helpers.cpython-311.pyc b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/setup_helpers.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e962b89083556d110ff3bd3e606b5cd2cfc172d7
Binary files /dev/null and b/.venv/lib/python3.11/site-packages/pybind11/__pycache__/setup_helpers.cpython-311.pyc differ
diff --git a/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/value_and_holder.h b/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/value_and_holder.h
new file mode 100644
index 0000000000000000000000000000000000000000..ca37d70ad2e79d050bf8734bc97a825ec4d9c024
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/include/pybind11/detail/value_and_holder.h
@@ -0,0 +1,77 @@
+// Copyright (c) 2016-2024 The Pybind Development Team.
+// All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#pragma once
+
+#include "common.h"
+
+#include
+#include
+
+PYBIND11_NAMESPACE_BEGIN(PYBIND11_NAMESPACE)
+PYBIND11_NAMESPACE_BEGIN(detail)
+
+struct value_and_holder {
+ instance *inst = nullptr;
+ size_t index = 0u;
+ const detail::type_info *type = nullptr;
+ void **vh = nullptr;
+
+ // Main constructor for a found value/holder:
+ value_and_holder(instance *i, const detail::type_info *type, size_t vpos, size_t index)
+ : inst{i}, index{index}, type{type},
+ vh{inst->simple_layout ? inst->simple_value_holder
+ : &inst->nonsimple.values_and_holders[vpos]} {}
+
+ // Default constructor (used to signal a value-and-holder not found by get_value_and_holder())
+ value_and_holder() = default;
+
+ // Used for past-the-end iterator
+ explicit value_and_holder(size_t index) : index{index} {}
+
+ template
+ V *&value_ptr() const {
+ return reinterpret_cast(vh[0]);
+ }
+ // True if this `value_and_holder` has a non-null value pointer
+ explicit operator bool() const { return value_ptr() != nullptr; }
+
+ template
+ H &holder() const {
+ return reinterpret_cast(vh[1]);
+ }
+ bool holder_constructed() const {
+ return inst->simple_layout
+ ? inst->simple_holder_constructed
+ : (inst->nonsimple.status[index] & instance::status_holder_constructed) != 0u;
+ }
+ // NOLINTNEXTLINE(readability-make-member-function-const)
+ void set_holder_constructed(bool v = true) {
+ if (inst->simple_layout) {
+ inst->simple_holder_constructed = v;
+ } else if (v) {
+ inst->nonsimple.status[index] |= instance::status_holder_constructed;
+ } else {
+ inst->nonsimple.status[index] &= (std::uint8_t) ~instance::status_holder_constructed;
+ }
+ }
+ bool instance_registered() const {
+ return inst->simple_layout
+ ? inst->simple_instance_registered
+ : ((inst->nonsimple.status[index] & instance::status_instance_registered) != 0);
+ }
+ // NOLINTNEXTLINE(readability-make-member-function-const)
+ void set_instance_registered(bool v = true) {
+ if (inst->simple_layout) {
+ inst->simple_instance_registered = v;
+ } else if (v) {
+ inst->nonsimple.status[index] |= instance::status_instance_registered;
+ } else {
+ inst->nonsimple.status[index] &= (std::uint8_t) ~instance::status_instance_registered;
+ }
+ }
+};
+
+PYBIND11_NAMESPACE_END(detail)
+PYBIND11_NAMESPACE_END(PYBIND11_NAMESPACE)
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/FindPythonLibsNew.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/FindPythonLibsNew.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..283b4e2980134b38317c797ec8333fb0ba123eb5
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/FindPythonLibsNew.cmake
@@ -0,0 +1,310 @@
+# - Find python libraries
+# This module finds the libraries corresponding to the Python interpreter
+# FindPythonInterp provides.
+# This code sets the following variables:
+#
+# PYTHONLIBS_FOUND - have the Python libs been found
+# PYTHON_PREFIX - path to the Python installation
+# PYTHON_LIBRARIES - path to the python library
+# PYTHON_INCLUDE_DIRS - path to where Python.h is found
+# PYTHON_MODULE_EXTENSION - lib extension, e.g. '.so' or '.pyd'
+# PYTHON_MODULE_PREFIX - lib name prefix: usually an empty string
+# PYTHON_SITE_PACKAGES - path to installation site-packages
+# PYTHON_IS_DEBUG - whether the Python interpreter is a debug build
+#
+# Thanks to talljimbo for the patch adding the 'LDVERSION' config
+# variable usage.
+
+#=============================================================================
+# Copyright 2001-2009 Kitware, Inc.
+# Copyright 2012 Continuum Analytics, Inc.
+#
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# * Neither the names of Kitware, Inc., the Insight Software Consortium,
+# nor the names of their contributors may be used to endorse or promote
+# products derived from this software without specific prior written
+# permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#=============================================================================
+
+# Checking for the extension makes sure that `LibsNew` was found and not just `Libs`.
+if(PYTHONLIBS_FOUND AND PYTHON_MODULE_EXTENSION)
+ return()
+endif()
+
+if(PythonLibsNew_FIND_QUIETLY)
+ set(_pythonlibs_quiet QUIET)
+else()
+ set(_pythonlibs_quiet "")
+endif()
+
+if(PythonLibsNew_FIND_REQUIRED)
+ set(_pythonlibs_required REQUIRED)
+endif()
+
+# Check to see if the `python` command is present and from a virtual
+# environment, conda, or GHA activation - if it is, try to use that.
+
+if(NOT DEFINED PYTHON_EXECUTABLE)
+ if(DEFINED ENV{VIRTUAL_ENV})
+ find_program(
+ PYTHON_EXECUTABLE python
+ PATHS "$ENV{VIRTUAL_ENV}" "$ENV{VIRTUAL_ENV}/bin"
+ NO_DEFAULT_PATH)
+ elseif(DEFINED ENV{CONDA_PREFIX})
+ find_program(
+ PYTHON_EXECUTABLE python
+ PATHS "$ENV{CONDA_PREFIX}" "$ENV{CONDA_PREFIX}/bin"
+ NO_DEFAULT_PATH)
+ elseif(DEFINED ENV{pythonLocation})
+ find_program(
+ PYTHON_EXECUTABLE python
+ PATHS "$ENV{pythonLocation}" "$ENV{pythonLocation}/bin"
+ NO_DEFAULT_PATH)
+ endif()
+ if(NOT PYTHON_EXECUTABLE)
+ unset(PYTHON_EXECUTABLE)
+ endif()
+endif()
+
+# Use the Python interpreter to find the libs.
+if(NOT PythonLibsNew_FIND_VERSION)
+ set(PythonLibsNew_FIND_VERSION "3.7")
+endif()
+
+if(NOT CMAKE_VERSION VERSION_LESS "3.27")
+ cmake_policy(GET CMP0148 _pybind11_cmp0148)
+ if(NOT _pybind11_cmp0148)
+ message(
+ AUTHOR_WARNING
+ "Policy CMP0148 is not set: The FindPythonInterp and FindPythonLibs "
+ "modules are removed. Run \"cmake --help-policy CMP0148\" for policy "
+ "details. Use the cmake_policy command to set the policy and suppress "
+ "this warning, or preferably upgrade to using FindPython, either by "
+ "calling it explicitly before pybind11, or by setting "
+ "PYBIND11_FINDPYTHON ON before pybind11.")
+ endif()
+ cmake_policy(SET CMP0148 OLD)
+ unset(_pybind11_cmp0148)
+endif()
+
+find_package(PythonInterp ${PythonLibsNew_FIND_VERSION} ${_pythonlibs_required}
+ ${_pythonlibs_quiet})
+
+if(NOT PYTHONINTERP_FOUND)
+ set(PYTHONLIBS_FOUND FALSE)
+ set(PythonLibsNew_FOUND FALSE)
+ return()
+endif()
+
+# According to https://stackoverflow.com/questions/646518/python-how-to-detect-debug-interpreter
+# testing whether sys has the gettotalrefcount function is a reliable, cross-platform
+# way to detect a CPython debug interpreter.
+#
+# The library suffix is from the config var LDVERSION sometimes, otherwise
+# VERSION. VERSION will typically be like "2.7" on unix, and "27" on windows.
+execute_process(
+ COMMAND
+ "${PYTHON_EXECUTABLE}" "-c" "
+import sys;import struct;
+import sysconfig as s
+USE_SYSCONFIG = sys.version_info >= (3, 10)
+if not USE_SYSCONFIG:
+ from distutils import sysconfig as ds
+print('.'.join(str(v) for v in sys.version_info));
+print(sys.prefix);
+if USE_SYSCONFIG:
+ scheme = s.get_default_scheme()
+ if scheme == 'posix_local':
+ # Debian's default scheme installs to /usr/local/ but we want to find headers in /usr/
+ scheme = 'posix_prefix'
+ print(s.get_path('platinclude', scheme))
+ print(s.get_path('platlib'))
+ print(s.get_config_var('EXT_SUFFIX') or s.get_config_var('SO'))
+else:
+ print(ds.get_python_inc(plat_specific=True));
+ print(ds.get_python_lib(plat_specific=True));
+ print(ds.get_config_var('EXT_SUFFIX') or ds.get_config_var('SO'));
+print(hasattr(sys, 'gettotalrefcount')+0);
+print(struct.calcsize('@P'));
+print(s.get_config_var('LDVERSION') or s.get_config_var('VERSION'));
+print(s.get_config_var('LIBDIR') or '');
+print(s.get_config_var('MULTIARCH') or '');
+"
+ RESULT_VARIABLE _PYTHON_SUCCESS
+ OUTPUT_VARIABLE _PYTHON_VALUES
+ ERROR_VARIABLE _PYTHON_ERROR_VALUE)
+
+if(NOT _PYTHON_SUCCESS MATCHES 0)
+ if(PythonLibsNew_FIND_REQUIRED)
+ message(FATAL_ERROR "Python config failure:\n${_PYTHON_ERROR_VALUE}")
+ endif()
+ set(PYTHONLIBS_FOUND FALSE)
+ set(PythonLibsNew_FOUND FALSE)
+ return()
+endif()
+
+option(
+ PYBIND11_PYTHONLIBS_OVERWRITE
+ "Overwrite cached values read from Python library (classic search). Turn off if cross-compiling and manually setting these values."
+ ON)
+# Can manually set values when cross-compiling
+macro(_PYBIND11_GET_IF_UNDEF lst index name) # sets ${name} to element ${index} of list ${lst}
+  if(PYBIND11_PYTHONLIBS_OVERWRITE OR NOT DEFINED "${name}") # keep a user-provided value when overwrite is off
+    list(GET "${lst}" "${index}" "${name}")
+  endif()
+endmacro()
+
+# Convert the process output into a list
+if(WIN32)
+ string(REGEX REPLACE "\\\\" "/" _PYTHON_VALUES ${_PYTHON_VALUES})
+endif()
+string(REGEX REPLACE ";" "\\\\;" _PYTHON_VALUES ${_PYTHON_VALUES})
+string(REGEX REPLACE "\n" ";" _PYTHON_VALUES ${_PYTHON_VALUES})
+_pybind11_get_if_undef(_PYTHON_VALUES 0 _PYTHON_VERSION_LIST)
+_pybind11_get_if_undef(_PYTHON_VALUES 1 PYTHON_PREFIX)
+_pybind11_get_if_undef(_PYTHON_VALUES 2 PYTHON_INCLUDE_DIR)
+_pybind11_get_if_undef(_PYTHON_VALUES 3 PYTHON_SITE_PACKAGES)
+_pybind11_get_if_undef(_PYTHON_VALUES 5 PYTHON_IS_DEBUG)
+_pybind11_get_if_undef(_PYTHON_VALUES 6 PYTHON_SIZEOF_VOID_P)
+_pybind11_get_if_undef(_PYTHON_VALUES 7 PYTHON_LIBRARY_SUFFIX)
+_pybind11_get_if_undef(_PYTHON_VALUES 8 PYTHON_LIBDIR)
+_pybind11_get_if_undef(_PYTHON_VALUES 9 PYTHON_MULTIARCH)
+
+list(GET _PYTHON_VALUES 4 _PYTHON_MODULE_EXT_SUFFIX)
+if(PYBIND11_PYTHONLIBS_OVERWRITE OR NOT DEFINED PYTHON_MODULE_DEBUG_POSTFIX)
+ get_filename_component(PYTHON_MODULE_DEBUG_POSTFIX "${_PYTHON_MODULE_EXT_SUFFIX}" NAME_WE)
+endif()
+if(PYBIND11_PYTHONLIBS_OVERWRITE OR NOT DEFINED PYTHON_MODULE_EXTENSION)
+ get_filename_component(PYTHON_MODULE_EXTENSION "${_PYTHON_MODULE_EXT_SUFFIX}" EXT)
+endif()
+
+# Make sure the Python has the same pointer-size as the chosen compiler
+# Skip if CMAKE_SIZEOF_VOID_P is not defined
+# This should be skipped for (non-Apple) cross-compiles (like EMSCRIPTEN)
+if(NOT _PYBIND11_CROSSCOMPILING
+ AND CMAKE_SIZEOF_VOID_P
+ AND (NOT "${PYTHON_SIZEOF_VOID_P}" STREQUAL "${CMAKE_SIZEOF_VOID_P}"))
+ if(PythonLibsNew_FIND_REQUIRED)
+ math(EXPR _PYTHON_BITS "${PYTHON_SIZEOF_VOID_P} * 8")
+ math(EXPR _CMAKE_BITS "${CMAKE_SIZEOF_VOID_P} * 8")
+ message(FATAL_ERROR "Python config failure: Python is ${_PYTHON_BITS}-bit, "
+ "chosen compiler is ${_CMAKE_BITS}-bit")
+ endif()
+ set(PYTHONLIBS_FOUND FALSE)
+ set(PythonLibsNew_FOUND FALSE)
+ return()
+endif()
+
+# The built-in FindPython didn't always give the version numbers
+string(REGEX REPLACE "\\." ";" _PYTHON_VERSION_LIST ${_PYTHON_VERSION_LIST})
+list(GET _PYTHON_VERSION_LIST 0 PYTHON_VERSION_MAJOR)
+list(GET _PYTHON_VERSION_LIST 1 PYTHON_VERSION_MINOR)
+list(GET _PYTHON_VERSION_LIST 2 PYTHON_VERSION_PATCH)
+set(PYTHON_VERSION "${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}.${PYTHON_VERSION_PATCH}")
+
+# Make sure all directory separators are '/'
+string(REGEX REPLACE "\\\\" "/" PYTHON_PREFIX "${PYTHON_PREFIX}")
+string(REGEX REPLACE "\\\\" "/" PYTHON_INCLUDE_DIR "${PYTHON_INCLUDE_DIR}")
+string(REGEX REPLACE "\\\\" "/" PYTHON_SITE_PACKAGES "${PYTHON_SITE_PACKAGES}")
+
+if(DEFINED PYTHON_LIBRARY)
+ # Don't write to PYTHON_LIBRARY if it's already set
+elseif(CMAKE_HOST_WIN32)
+ set(PYTHON_LIBRARY "${PYTHON_PREFIX}/libs/python${PYTHON_LIBRARY_SUFFIX}.lib")
+
+ # when run in a venv, PYTHON_PREFIX points to it. But the libraries remain in the
+ # original python installation. They may be found relative to PYTHON_INCLUDE_DIR.
+ if(NOT EXISTS "${PYTHON_LIBRARY}")
+ get_filename_component(_PYTHON_ROOT ${PYTHON_INCLUDE_DIR} DIRECTORY)
+ set(PYTHON_LIBRARY "${_PYTHON_ROOT}/libs/python${PYTHON_LIBRARY_SUFFIX}.lib")
+ endif()
+
+ # if we are in MSYS & MINGW, and we didn't find windows python lib, look for system python lib
+ if(DEFINED ENV{MSYSTEM}
+ AND MINGW
+ AND NOT EXISTS "${PYTHON_LIBRARY}")
+ if(PYTHON_MULTIARCH)
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}")
+ else()
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}")
+ endif()
+ unset(PYTHON_LIBRARY)
+ find_library(
+ PYTHON_LIBRARY
+ NAMES "python${PYTHON_LIBRARY_SUFFIX}"
+ PATHS ${_PYTHON_LIBS_SEARCH}
+ NO_DEFAULT_PATH)
+ endif()
+
+ # raise an error if the python libs are still not found.
+ if(NOT EXISTS "${PYTHON_LIBRARY}")
+ message(FATAL_ERROR "Python libraries not found")
+ endif()
+
+else()
+ if(PYTHON_MULTIARCH)
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}/${PYTHON_MULTIARCH}" "${PYTHON_LIBDIR}")
+ else()
+ set(_PYTHON_LIBS_SEARCH "${PYTHON_LIBDIR}")
+ endif()
+ #message(STATUS "Searching for Python libs in ${_PYTHON_LIBS_SEARCH}")
+ # Probably this needs to be more involved. It would be nice if the config
+ # information the python interpreter itself gave us were more complete.
+ find_library(
+ PYTHON_LIBRARY
+ NAMES "python${PYTHON_LIBRARY_SUFFIX}"
+ PATHS ${_PYTHON_LIBS_SEARCH}
+ NO_DEFAULT_PATH)
+
+ # If all else fails, just set the name/version and let the linker figure out the path.
+ if(NOT PYTHON_LIBRARY)
+ set(PYTHON_LIBRARY python${PYTHON_LIBRARY_SUFFIX})
+ endif()
+endif()
+
+mark_as_advanced(PYTHON_LIBRARY PYTHON_INCLUDE_DIR)
+
+# We use PYTHON_INCLUDE_DIR, PYTHON_LIBRARY and PYTHON_DEBUG_LIBRARY for the
+# cache entries because they are meant to specify the location of a single
+# library. We now set the variables listed by the documentation for this
+# module.
+set(PYTHON_INCLUDE_DIRS "${PYTHON_INCLUDE_DIR}")
+set(PYTHON_LIBRARIES "${PYTHON_LIBRARY}")
+if(NOT PYTHON_DEBUG_LIBRARY)
+ set(PYTHON_DEBUG_LIBRARY "")
+endif()
+set(PYTHON_DEBUG_LIBRARIES "${PYTHON_DEBUG_LIBRARY}")
+
+find_package_message(PYTHON "Found PythonLibs: ${PYTHON_LIBRARIES}"
+ "${PYTHON_EXECUTABLE}${PYTHON_VERSION_STRING}")
+
+set(PYTHONLIBS_FOUND TRUE)
+set(PythonLibsNew_FOUND TRUE)
+
+if(NOT PYTHON_MODULE_PREFIX)
+ set(PYTHON_MODULE_PREFIX "")
+endif()
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Common.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Common.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..7d8d94b11d1d627a604dace61a8b0bb546f77d7b
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Common.cmake
@@ -0,0 +1,455 @@
+#[======================================================[.rst
+
+Adds the following targets::
+
+ pybind11::pybind11 - link to Python headers and pybind11::headers
+ pybind11::module - Adds module links
+ pybind11::embed - Adds embed links
+ pybind11::lto - Link time optimizations (only if CMAKE_INTERPROCEDURAL_OPTIMIZATION is not set)
+ pybind11::thin_lto - Link time optimizations (only if CMAKE_INTERPROCEDURAL_OPTIMIZATION is not set)
+ pybind11::python_link_helper - Adds link to Python libraries
+ pybind11::windows_extras - MSVC bigobj and mp for building multithreaded
+ pybind11::opt_size - avoid optimizations that increase code size
+
+Adds the following functions::
+
+ pybind11_strip(target) - strip target after building on linux/macOS
+ pybind11_find_import(module) - See if a module is installed.
+
+#]======================================================]
+
+# CMake 3.10 has an include_guard command, but we can't use that yet
+# include_guard(global) (pre-CMake 3.10)
+if(TARGET pybind11::pybind11)
+ return()
+endif()
+
+# If we are in subdirectory mode, all IMPORTED targets must be GLOBAL. If we
+# are in CONFIG mode, they should be "normal" targets instead.
+# In CMake 3.11+ you can promote a target to global after you create it,
+# which might be simpler than this check.
+get_property(
+ is_config
+ TARGET pybind11::headers
+ PROPERTY IMPORTED)
+if(NOT is_config)
+ set(optional_global GLOBAL)
+endif()
+
+# If not run in Python mode, we still would like this to at least
+# include pybind11's include directory:
+set(pybind11_INCLUDE_DIRS
+ "${pybind11_INCLUDE_DIR}"
+ CACHE INTERNAL "Include directory for pybind11 (Python not requested)")
+
+if(CMAKE_CROSSCOMPILING AND PYBIND11_USE_CROSSCOMPILING)
+ set(_PYBIND11_CROSSCOMPILING
+ ON
+ CACHE INTERNAL "")
+else()
+ set(_PYBIND11_CROSSCOMPILING
+ OFF
+ CACHE INTERNAL "")
+endif()
+
+# --------------------- Shared targets ----------------------------
+
+# Build an interface library target:
+add_library(pybind11::pybind11 IMPORTED INTERFACE ${optional_global})
+set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::headers)
+
+# Build a module target:
+add_library(pybind11::module IMPORTED INTERFACE ${optional_global})
+set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11)
+
+# Build an embed library target:
+add_library(pybind11::embed IMPORTED INTERFACE ${optional_global})
+set_property(
+ TARGET pybind11::embed
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11)
+
+# -------------- emscripten requires exceptions enabled -------------
+# _pybind11_no_exceptions is a private mechanism to disable this addition.
+# Please open an issue if you need to use it; it will be removed if no one
+# needs it.
+if(CMAKE_SYSTEM_NAME MATCHES Emscripten AND NOT _pybind11_no_exceptions)
+ if(CMAKE_VERSION VERSION_LESS 3.13)
+ message(WARNING "CMake 3.13+ is required to build for Emscripten. Some flags will be missing")
+ else()
+ if(is_config)
+ set(_tmp_config_target pybind11::pybind11_headers)
+ else()
+ set(_tmp_config_target pybind11_headers)
+ endif()
+
+ set_property(
+ TARGET ${_tmp_config_target}
+ APPEND
+ PROPERTY INTERFACE_LINK_OPTIONS -fexceptions)
+ set_property(
+ TARGET ${_tmp_config_target}
+ APPEND
+ PROPERTY INTERFACE_COMPILE_OPTIONS -fexceptions)
+ unset(_tmp_config_target)
+ endif()
+endif()
+
+# --------------------------- link helper ---------------------------
+
+add_library(pybind11::python_link_helper IMPORTED INTERFACE ${optional_global})
+
+if(CMAKE_VERSION VERSION_LESS 3.13)
+  # In CMake 3.11+, you can set INTERFACE properties via the normal methods, and
+  # this would be simpler.
+  set_property(
+    TARGET pybind11::python_link_helper
+    APPEND
+    PROPERTY INTERFACE_LINK_LIBRARIES "$<$<PLATFORM_ID:Darwin>:-undefined dynamic_lookup>")
+else()
+  # link_options was added in 3.13+
+  # This is safer, because you are ensured the deduplication pass in CMake will not consider
+  # these separate and remove one but not the other.
+  set_property(
+    TARGET pybind11::python_link_helper
+    APPEND
+    PROPERTY INTERFACE_LINK_OPTIONS "$<$<PLATFORM_ID:Darwin>:LINKER:-undefined,dynamic_lookup>")
+endif()
+
+# ------------------------ Windows extras -------------------------
+
+add_library(pybind11::windows_extras IMPORTED INTERFACE ${optional_global})
+
+if(MSVC) # That's also clang-cl
+  # /bigobj is needed for bigger binding projects due to the limit to 64k
+  # addressable sections
+  set_property(
+    TARGET pybind11::windows_extras
+    APPEND
+    PROPERTY INTERFACE_COMPILE_OPTIONS $<$<COMPILE_LANGUAGE:CXX>:/bigobj>)
+
+  # /MP enables multithreaded builds (relevant when there are many files) for MSVC
+  if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "MSVC") # no Clang no Intel
+    if(CMAKE_VERSION VERSION_LESS 3.11)
+      set_property(
+        TARGET pybind11::windows_extras
+        APPEND
+        PROPERTY INTERFACE_COMPILE_OPTIONS $<$<NOT:$<CONFIG:Debug>>:/MP>)
+    else()
+      # Only set these options for C++ files. This is important so that, for
+      # instance, projects that include other types of source files like CUDA
+      # .cu files don't get these options propagated to nvcc since that would
+      # cause the build to fail.
+      set_property(
+        TARGET pybind11::windows_extras
+        APPEND
+        PROPERTY INTERFACE_COMPILE_OPTIONS
+                 $<$<NOT:$<CONFIG:Debug>>:$<$<COMPILE_LANGUAGE:CXX>:/MP>>)
+    endif()
+  endif()
+endif()
+
+# ----------------------- Optimize binary size --------------------------
+
+add_library(pybind11::opt_size IMPORTED INTERFACE ${optional_global})
+
+if(MSVC)
+  set(PYBIND11_OPT_SIZE /Os)
+else()
+  set(PYBIND11_OPT_SIZE -Os)
+endif()
+
+set_property(
+  TARGET pybind11::opt_size
+  APPEND
+  PROPERTY INTERFACE_COMPILE_OPTIONS $<$<CONFIG:Release>:${PYBIND11_OPT_SIZE}>
+           $<$<CONFIG:MinSizeRel>:${PYBIND11_OPT_SIZE}>
+           $<$<CONFIG:RelWithDebInfo>:${PYBIND11_OPT_SIZE}>)
+
+# ----------------------- Legacy option --------------------------
+
+# Warn or error if old variable name used
+if(PYBIND11_CPP_STANDARD)
+ string(REGEX MATCH [[..$]] VAL "${PYBIND11_CPP_STANDARD}")
+ if(CMAKE_CXX_STANDARD)
+ if(NOT CMAKE_CXX_STANDARD STREQUAL VAL)
+ message(WARNING "CMAKE_CXX_STANDARD=${CMAKE_CXX_STANDARD} does not match "
+ "PYBIND11_CPP_STANDARD=${PYBIND11_CPP_STANDARD}, "
+ "please remove PYBIND11_CPP_STANDARD from your cache")
+ endif()
+ else()
+ set(supported_standards 11 14 17 20)
+ if("${VAL}" IN_LIST supported_standards)
+ message(WARNING "USE -DCMAKE_CXX_STANDARD=${VAL} instead of PYBIND11_CPP_STANDARD")
+ set(CMAKE_CXX_STANDARD
+ ${VAL}
+ CACHE STRING "From PYBIND11_CPP_STANDARD")
+ else()
+ message(FATAL_ERROR "PYBIND11_CPP_STANDARD should be replaced with CMAKE_CXX_STANDARD "
+ "(last two chars: ${VAL} not understood as a valid CXX std)")
+ endif()
+ endif()
+endif()
+
+# --------------------- Python specifics -------------------------
+
+# CMake 3.27 removes the classic FindPythonInterp if CMP0148 is NEW
+if(CMAKE_VERSION VERSION_LESS "3.27")
+ set(_pybind11_missing_old_python "OLD")
+else()
+ cmake_policy(GET CMP0148 _pybind11_missing_old_python)
+endif()
+
+# Check to see which Python mode we are in, new, old, or no python
+if(PYBIND11_NOPYTHON)
+ set(_pybind11_nopython ON)
+ # We won't use new FindPython if PYBIND11_FINDPYTHON is defined and falselike
+ # Otherwise, we use if FindPythonLibs is missing or if FindPython was already used
+elseif(
+ (NOT DEFINED PYBIND11_FINDPYTHON OR PYBIND11_FINDPYTHON)
+ AND (_pybind11_missing_old_python STREQUAL "NEW"
+ OR PYBIND11_FINDPYTHON
+ OR Python_FOUND
+ OR Python3_FOUND
+ ))
+
+ # New mode
+ include("${CMAKE_CURRENT_LIST_DIR}/pybind11NewTools.cmake")
+
+else()
+
+ # Classic mode
+ include("${CMAKE_CURRENT_LIST_DIR}/pybind11Tools.cmake")
+
+endif()
+
+# --------------------- pybind11_find_import -------------------------------
+
+if(NOT _pybind11_nopython AND NOT _PYBIND11_CROSSCOMPILING)
+ # Check to see if modules are importable. Use REQUIRED to force an error if
+ # one of the modules is not found. _FOUND will be set if the
+ # package was found (underscores replace dashes if present). QUIET will hide
+ # the found message, and VERSION will require a minimum version. A successful
+ # find will cache the result.
+ function(pybind11_find_import PYPI_NAME)
+ # CMake variables need underscores (PyPI doesn't care)
+ string(REPLACE "-" "_" NORM_PYPI_NAME "${PYPI_NAME}")
+
+ # Return if found previously
+ if(${NORM_PYPI_NAME}_FOUND)
+ return()
+ endif()
+
+ set(options "REQUIRED;QUIET")
+ set(oneValueArgs "VERSION")
+ cmake_parse_arguments(ARG "${options}" "${oneValueArgs}" "" ${ARGN})
+
+ if(ARG_REQUIRED)
+ set(status_level FATAL_ERROR)
+ else()
+ set(status_level WARNING)
+ endif()
+
+ execute_process(
+ COMMAND
+ ${${_Python}_EXECUTABLE} -c "
+try:
+ from importlib.metadata import version
+except ImportError:
+ from pkg_resources import get_distribution
+ def version(s):
+ return get_distribution(s).version
+print(version('${PYPI_NAME}'))
+ "
+ RESULT_VARIABLE RESULT_PRESENT
+ OUTPUT_VARIABLE PKG_VERSION
+ ERROR_QUIET)
+
+ string(STRIP "${PKG_VERSION}" PKG_VERSION)
+
+ # If a result is present, this failed
+ if(RESULT_PRESENT)
+ set(${NORM_PYPI_NAME}_FOUND
+ ${NORM_PYPI_NAME}-NOTFOUND
+ CACHE INTERNAL "")
+ # Always warn or error
+ message(
+ ${status_level}
+ "Missing: ${PYPI_NAME} ${ARG_VERSION}\nTry: ${${_Python}_EXECUTABLE} -m pip install ${PYPI_NAME}"
+ )
+ else()
+ if(ARG_VERSION AND PKG_VERSION VERSION_LESS ARG_VERSION)
+ message(
+ ${status_level}
+ "Version incorrect: ${PYPI_NAME} ${PKG_VERSION} found, ${ARG_VERSION} required - try upgrading"
+ )
+ else()
+ set(${NORM_PYPI_NAME}_FOUND
+ YES
+ CACHE INTERNAL "")
+ set(${NORM_PYPI_NAME}_VERSION
+ ${PKG_VERSION}
+ CACHE INTERNAL "")
+ endif()
+ if(NOT ARG_QUIET)
+ message(STATUS "Found ${PYPI_NAME} ${PKG_VERSION}")
+ endif()
+ endif()
+ if(NOT ARG_VERSION OR (NOT PKG_VERSION VERSION_LESS ARG_VERSION))
+ # We have successfully found a good version, cache to avoid calling again.
+ endif()
+ endfunction()
+endif()
+
+# --------------------- LTO -------------------------------
+
+include(CheckCXXCompilerFlag)
+
+# Checks whether the given CXX/linker flags can compile and link a cxx file.
+# cxxflags and linkerflags are lists of flags to use. The result variable is a
+# unique variable name for each set of flags: the compilation result will be
+# cached base on the result variable. If the flags work, sets them in
+# cxxflags_out/linkerflags_out internal cache variables (in addition to
+# ${result}).
+function(_pybind11_return_if_cxx_and_linker_flags_work result cxxflags linkerflags cxxflags_out
+                                                       linkerflags_out)
+  set(CMAKE_REQUIRED_LIBRARIES ${linkerflags}) # make check_cxx_compiler_flag also link with the flags
+  check_cxx_compiler_flag("${cxxflags}" ${result}) # result is cached per unique ${result} name
+  if(${result})
+    set(${cxxflags_out}
+        "${cxxflags}"
+        PARENT_SCOPE)
+    set(${linkerflags_out}
+        "${linkerflags}"
+        PARENT_SCOPE)
+  endif()
+endfunction()
+
+function(_pybind11_generate_lto target prefer_thin_lto)
+  if(MINGW)
+    message(STATUS "${target} disabled (problems with undefined symbols for MinGW for now)")
+    return()
+  endif()
+
+  if(CMAKE_CXX_COMPILER_ID MATCHES "GNU|Clang")
+    set(cxx_append "")
+    set(linker_append "")
+    if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND NOT APPLE)
+      # Clang Gold plugin does not support -Os; append -O3 to MinSizeRel builds to override it
+      set(linker_append ";$<$<CONFIG:MinSizeRel>:-O3>")
+    elseif(CMAKE_CXX_COMPILER_ID MATCHES "GNU" AND NOT MINGW)
+      set(cxx_append ";-fno-fat-lto-objects")
+    endif()
+
+    if(prefer_thin_lto)
+      set(thin "=thin")
+    else()
+      set(thin "")
+    endif()
+
+    if(CMAKE_SYSTEM_PROCESSOR MATCHES "ppc64le" OR CMAKE_SYSTEM_PROCESSOR MATCHES "mips64")
+      # Do nothing
+    elseif(CMAKE_SYSTEM_NAME MATCHES Emscripten)
+      # This compile is very costly when cross-compiling, so set this without checking
+      set(PYBIND11_LTO_CXX_FLAGS "-flto${thin}${cxx_append}")
+      set(PYBIND11_LTO_LINKER_FLAGS "-flto${thin}${linker_append}")
+    elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+      _pybind11_return_if_cxx_and_linker_flags_work(
+        HAS_FLTO_THIN "-flto${thin}${cxx_append}" "-flto${thin}${linker_append}"
+        PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+    endif()
+    if(NOT HAS_FLTO_THIN)
+      _pybind11_return_if_cxx_and_linker_flags_work(
+        HAS_FLTO "-flto${cxx_append}" "-flto${linker_append}" PYBIND11_LTO_CXX_FLAGS
+        PYBIND11_LTO_LINKER_FLAGS)
+    endif()
+  elseif(CMAKE_CXX_COMPILER_ID MATCHES "IntelLLVM")
+    # IntelLLVM equivalent to LTO is called IPO; also IntelLLVM is WIN32/UNIX
+    # WARNING/HELP WANTED: This block of code is currently not covered by pybind11 GitHub Actions!
+    if(WIN32)
+      _pybind11_return_if_cxx_and_linker_flags_work(
+        HAS_INTEL_IPO "-Qipo" "-Qipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+    else()
+      _pybind11_return_if_cxx_and_linker_flags_work(
+        HAS_INTEL_IPO "-ipo" "-ipo" PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+    endif()
+  elseif(CMAKE_CXX_COMPILER_ID MATCHES "Intel")
+    # Intel equivalent to LTO is called IPO
+    _pybind11_return_if_cxx_and_linker_flags_work(HAS_INTEL_IPO "-ipo" "-ipo"
+                                                  PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+  elseif(MSVC)
+    # cmake only interprets libraries as linker flags when they start with a - (otherwise it
+    # converts /LTCG to \LTCG as if it was a Windows path). Luckily MSVC supports passing flags
+    # with - instead of /, even if it is a bit non-standard:
+    _pybind11_return_if_cxx_and_linker_flags_work(HAS_MSVC_GL_LTCG "/GL" "-LTCG"
+                                                  PYBIND11_LTO_CXX_FLAGS PYBIND11_LTO_LINKER_FLAGS)
+  endif()
+
+  # Enable LTO flags if found, except for Debug builds
+  if(PYBIND11_LTO_CXX_FLAGS)
+    # CONFIG takes multiple values in CMake 3.19+, until then we have to use OR
+    set(is_debug "$<OR:$<CONFIG:Debug>,$<CONFIG:RelWithDebInfo>>")
+    set(not_debug "$<NOT:${is_debug}>")
+    set(cxx_lang "$<COMPILE_LANGUAGE:CXX>")
+    if(MSVC AND CMAKE_VERSION VERSION_LESS 3.11)
+      set(genex "${not_debug}")
+    else()
+      set(genex "$<AND:${not_debug},${cxx_lang}>")
+    endif()
+    set_property(
+      TARGET ${target}
+      APPEND
+      PROPERTY INTERFACE_COMPILE_OPTIONS "$<${genex}:${PYBIND11_LTO_CXX_FLAGS}>")
+    if(CMAKE_PROJECT_NAME STREQUAL "pybind11")
+      message(STATUS "${target} enabled")
+    endif()
+  else()
+    if(CMAKE_PROJECT_NAME STREQUAL "pybind11")
+      message(STATUS "${target} disabled (not supported by the compiler and/or linker)")
+    endif()
+  endif()
+
+  if(PYBIND11_LTO_LINKER_FLAGS)
+    if(CMAKE_VERSION VERSION_LESS 3.11)
+      set_property(
+        TARGET ${target}
+        APPEND
+        PROPERTY INTERFACE_LINK_LIBRARIES "$<${not_debug}:${PYBIND11_LTO_LINKER_FLAGS}>")
+    else()
+      set_property(
+        TARGET ${target}
+        APPEND
+        PROPERTY INTERFACE_LINK_OPTIONS "$<${not_debug}:${PYBIND11_LTO_LINKER_FLAGS}>")
+    endif()
+  endif()
+endfunction()
+
+if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION)
+ add_library(pybind11::lto IMPORTED INTERFACE ${optional_global})
+ _pybind11_generate_lto(pybind11::lto FALSE)
+
+ add_library(pybind11::thin_lto IMPORTED INTERFACE ${optional_global})
+ _pybind11_generate_lto(pybind11::thin_lto TRUE)
+endif()
+
+# ---------------------- pybind11_strip -----------------------------
+
+function(pybind11_strip target_name)
+  # Strip unnecessary sections of the binary on Linux/macOS
+  if(CMAKE_STRIP)
+    if(APPLE)
+      set(x_opt -x)
+    endif()
+
+    add_custom_command(
+      TARGET ${target_name}
+      POST_BUILD
+      COMMAND ${CMAKE_STRIP} ${x_opt} $<TARGET_FILE:${target_name}>)
+  endif()
+endfunction()
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Config.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Config.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..79d2a8a6ecfd2345c6cd4fa6d457fdb87631eb1f
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Config.cmake
@@ -0,0 +1,257 @@
+#[=============================================================================[.rst:
+
+pybind11Config.cmake
+####################
+
+Exported variables
+==================
+
+This module sets the following variables in your project:
+
+``pybind11_FOUND``
+ true if pybind11 and all required components found on the system
+``pybind11_VERSION``
+ pybind11 version in format Major.Minor.Release
+``pybind11_VERSION_TYPE``
+ pybind11 version type (``dev*`` or empty for a release)
+``pybind11_INCLUDE_DIRS``
+ Directories where pybind11 and python headers are located.
+``pybind11_INCLUDE_DIR``
+ Directory where pybind11 headers are located.
+``pybind11_DEFINITIONS``
+ Definitions necessary to use pybind11, namely USING_pybind11.
+``pybind11_LIBRARIES``
+ Compile flags and python libraries (as needed) to link against.
+``pybind11_LIBRARY``
+ Empty.
+
+Available components: None
+
+
+Exported targets
+================
+
+If pybind11 is found, this module defines the following ``IMPORTED``
+interface library targets:
+
+``pybind11::module``
+ for extension modules.
+``pybind11::embed``
+ for embedding the Python interpreter.
+
+Python headers, libraries (as needed by platform), and the C++ standard
+are attached to the target.
+
+Advanced targets are also supplied - these are primary for users building
+complex applications, and they are available in all modes:
+
+``pybind11::headers``
+ Just the pybind11 headers and minimum compile requirements.
+``pybind11::pybind11``
+ Python headers too.
+``pybind11::python_link_helper``
+ Just the "linking" part of ``pybind11:module``, for CMake < 3.15.
+``pybind11::thin_lto``
+ An alternative to ``INTERPROCEDURAL_OPTIMIZATION``.
+``pybind11::lto``
+ An alternative to ``INTERPROCEDURAL_OPTIMIZATION`` (also avoids thin LTO on clang).
+``pybind11::windows_extras``
+ Adds bigobj and mp for MSVC.
+
+Modes
+=====
+
+There are two modes provided; classic, which is built on the old Python
+discovery packages in CMake, or the new FindPython mode, which uses FindPython
+from 3.12+ forward (3.15+ _highly_ recommended). If you set the minimum or
+maximum version of CMake to 3.27+, then FindPython is the default (since
+FindPythonInterp/FindPythonLibs has been removed via policy `CMP0148`).
+
+New FindPython mode
+^^^^^^^^^^^^^^^^^^^
+
+To activate this mode, either call ``find_package(Python COMPONENTS Interpreter Development)``
+before finding this package, or set the ``PYBIND11_FINDPYTHON`` variable to ON. In this mode,
+you can either use the basic targets, or use the FindPython tools:
+
+.. code-block:: cmake
+
+ find_package(Python COMPONENTS Interpreter Development)
+ find_package(pybind11 CONFIG)
+
+ # pybind11 method:
+ pybind11_add_module(MyModule1 src1.cpp)
+
+ # Python method:
+ Python_add_library(MyModule2 src2.cpp)
+ target_link_libraries(MyModule2 PUBLIC pybind11::headers)
+ set_target_properties(MyModule2 PROPERTIES
+ INTERPROCEDURAL_OPTIMIZATION ON
+ CXX_VISIBILITY_PRESET ON
+ VISIBILITY_INLINES_HIDDEN ON)
+
+If you build targets yourself, you may be interested in stripping the output
+for reduced size; this is the one other feature that the helper function gives you.
+
+Classic mode
+^^^^^^^^^^^^
+
+Set PythonLibsNew variables to influence python detection and
+CMAKE_CXX_STANDARD to influence standard setting.
+
+.. code-block:: cmake
+
+ find_package(pybind11 CONFIG REQUIRED)
+
+ # Create an extension module
+ add_library(mylib MODULE main.cpp)
+ target_link_libraries(mylib PUBLIC pybind11::module)
+
+ # Or embed the Python interpreter into an executable
+ add_executable(myexe main.cpp)
+ target_link_libraries(myexe PUBLIC pybind11::embed)
+
+
+Hints
+=====
+
+The following variables can be set to guide the search for this package:
+
+``pybind11_DIR``
+ CMake variable, set to directory containing this Config file.
+``CMAKE_PREFIX_PATH``
+ CMake variable, set to root directory of this package.
+``PATH``
+ Environment variable, set to bin directory of this package.
+``CMAKE_DISABLE_FIND_PACKAGE_pybind11``
+ CMake variable, disables ``find_package(pybind11)`` when not ``REQUIRED``,
+ perhaps to force internal build.
+
+Commands
+========
+
+pybind11_add_module
+^^^^^^^^^^^^^^^^^^^
+
+This module defines the following commands to assist with creating Python modules:
+
+.. code-block:: cmake
+
+ pybind11_add_module(<name>
+ [STATIC|SHARED|MODULE]
+ [THIN_LTO] [OPT_SIZE] [NO_EXTRAS] [WITHOUT_SOABI]
+ <files>...
+ )
+
+Add a module and setup all helpers. You can select the type of the library; the
+default is ``MODULE``. There are several options:
+
+``OPT_SIZE``
+ Optimize for size, even if the ``CMAKE_BUILD_TYPE`` is not ``MinSizeRel``.
+``THIN_LTO``
+ Use thin LTO instead of regular if there's a choice (pybind11's selection
+ is disabled if ``CMAKE_INTERPROCEDURAL_OPTIMIZATIONS`` is set).
+``WITHOUT_SOABI``
+ Disable the SOABI component (``PYBIND11_NEWPYTHON`` mode only).
+``NO_EXTRAS``
+ Disable all extras, exit immediately after making the module.
+
+pybind11_strip
+^^^^^^^^^^^^^^
+
+.. code-block:: cmake
+
+ pybind11_strip(<target>)
+
+Strip a target after building it (linux/macOS), called by ``pybind11_add_module``.
+
+pybind11_extension
+^^^^^^^^^^^^^^^^^^
+
+.. code-block:: cmake
+
+ pybind11_extension(<name>)
+
+Sets the Python extension name correctly for Python on your platform, called by
+``pybind11_add_module``.
+
+pybind11_find_import(module)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: cmake
+
+ pybind11_find_import(<module> [VERSION <version>] [REQUIRED] [QUIET])
+
+See if a module is installed. Use the registered name (the one on PyPI). You
+can specify a ``VERSION``, and you can specify ``REQUIRED`` or ``QUIET``. Only available if
+``NOPYTHON`` mode is not active. Sets ``module_VERSION`` and ``module_FOUND``. Caches the
+result once a valid install is found.
+
+Suggested usage
+===============
+
+Using ``find_package`` with version info is not recommended except for release versions.
+
+.. code-block:: cmake
+
+ find_package(pybind11 CONFIG)
+ find_package(pybind11 2.9 EXACT CONFIG REQUIRED)
+
+#]=============================================================================]
+
+####### Expanded from @PACKAGE_INIT@ by configure_package_config_file() #######
+####### Any changes to this file will be overwritten by the next CMake run ####
+####### The input file was pybind11Config.cmake.in ########
+
+get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/../../../" ABSOLUTE)
+
+macro(set_and_check _var _file)
+ set(${_var} "${_file}")
+ if(NOT EXISTS "${_file}")
+ message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
+ endif()
+endmacro()
+
+macro(check_required_components _NAME)
+ foreach(comp ${${_NAME}_FIND_COMPONENTS})
+ if(NOT ${_NAME}_${comp}_FOUND)
+ if(${_NAME}_FIND_REQUIRED_${comp})
+ set(${_NAME}_FOUND FALSE)
+ endif()
+ endif()
+ endforeach()
+endmacro()
+
+####################################################################################
+
+# Location of pybind11/pybind11.h
+# This will be relative unless explicitly set as absolute
+set(pybind11_INCLUDE_DIR "${PACKAGE_PREFIX_DIR}/include")
+
+set(pybind11_LIBRARY "")
+set(pybind11_DEFINITIONS USING_pybind11)
+set(pybind11_VERSION_TYPE "")
+
+check_required_components(pybind11)
+
+if(TARGET pybind11::python_link_helper)
+ # This has already been setup elsewhere, such as with a previous call or
+ # add_subdirectory
+ return()
+endif()
+
+include("${CMAKE_CURRENT_LIST_DIR}/pybind11Targets.cmake")
+
+# Easier to use / remember
+add_library(pybind11::headers IMPORTED INTERFACE)
+set_target_properties(pybind11::headers PROPERTIES INTERFACE_LINK_LIBRARIES
+ pybind11::pybind11_headers)
+
+include("${CMAKE_CURRENT_LIST_DIR}/pybind11Common.cmake")
+
+if(NOT pybind11_FIND_QUIETLY)
+ message(
+ STATUS
+ "Found pybind11: ${pybind11_INCLUDE_DIR} (found version \"${pybind11_VERSION}${pybind11_VERSION_TYPE}\")"
+ )
+endif()
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11ConfigVersion.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11ConfigVersion.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..253f73be7355dabd9ba3dbd025b2890c807821ed
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11ConfigVersion.cmake
@@ -0,0 +1,32 @@
+# This is a basic version file for the Config-mode of find_package().
+# It is used by write_basic_package_version_file() as input file for configure_file()
+# to create a version-file which can be installed along a config.cmake file.
+#
+# The created file sets PACKAGE_VERSION_EXACT if the current version string and
+# the requested version string are exactly the same and it sets
+# PACKAGE_VERSION_COMPATIBLE if the current version is >= requested version.
+# The variable CVF_VERSION must be set before calling configure_file().
+
+set(PACKAGE_VERSION "2.13.6")
+
+if (PACKAGE_FIND_VERSION_RANGE)
+ # Package version must be in the requested version range
+ if ((PACKAGE_FIND_VERSION_RANGE_MIN STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION_MIN)
+ OR ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_GREATER PACKAGE_FIND_VERSION_MAX)
+ OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND PACKAGE_VERSION VERSION_GREATER_EQUAL PACKAGE_FIND_VERSION_MAX)))
+ set(PACKAGE_VERSION_COMPATIBLE FALSE)
+ else()
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ endif()
+else()
+ if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION)
+ set(PACKAGE_VERSION_COMPATIBLE FALSE)
+ else()
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION)
+ set(PACKAGE_VERSION_EXACT TRUE)
+ endif()
+ endif()
+endif()
+
+
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11GuessPythonExtSuffix.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11GuessPythonExtSuffix.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..c5fb3b42c9706df3d3318dd7502002e3a382850f
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11GuessPythonExtSuffix.cmake
@@ -0,0 +1,86 @@
+cmake_minimum_required(VERSION 3.5)
+
+function(pybind11_guess_python_module_extension python)
+
+ # The SETUPTOOLS_EXT_SUFFIX environment variable takes precedence:
+ if(NOT DEFINED PYTHON_MODULE_EXT_SUFFIX AND DEFINED ENV{SETUPTOOLS_EXT_SUFFIX})
+ message(
+ STATUS
+ "Getting Python extension suffix from ENV{SETUPTOOLS_EXT_SUFFIX}: $ENV{SETUPTOOLS_EXT_SUFFIX}"
+ )
+ set(PYTHON_MODULE_EXT_SUFFIX
+ "$ENV{SETUPTOOLS_EXT_SUFFIX}"
+ CACHE
+ STRING
+ "Extension suffix for Python extension modules (Initialized from SETUPTOOLS_EXT_SUFFIX)")
+ endif()
+ # If that didn't work, use the Python_SOABI variable:
+ if(NOT DEFINED PYTHON_MODULE_EXT_SUFFIX AND DEFINED ${python}_SOABI)
+ message(
+ STATUS "Determining Python extension suffix based on ${python}_SOABI: ${${python}_SOABI}")
+ # The final extension depends on the system
+ set(_PY_BUILD_EXTENSION "${CMAKE_SHARED_MODULE_SUFFIX}")
+ if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
+ set(_PY_BUILD_EXTENSION ".pyd")
+ endif()
+ # If the SOABI already has an extension, use it as the full suffix
+ # (used for debug versions of Python on Windows)
+ if(${python}_SOABI MATCHES "\\.")
+ set(PYTHON_MODULE_EXT_SUFFIX "${${python}_SOABI}")
+ # If the SOABI is empty, this is usually a bug, but we generate a
+ # correct extension anyway, which is the best we can do
+ elseif("${${python}_SOABI}" STREQUAL "")
+ message(
+ WARNING
+ "${python}_SOABI is defined but empty. You may want to set PYTHON_MODULE_EXT_SUFFIX explicitly."
+ )
+ set(PYTHON_MODULE_EXT_SUFFIX "${_PY_BUILD_EXTENSION}")
+ # Otherwise, add the system-dependent extension to it
+ else()
+ set(PYTHON_MODULE_EXT_SUFFIX ".${${python}_SOABI}${_PY_BUILD_EXTENSION}")
+ endif()
+ endif()
+
+ # If we could not deduce the extension suffix, unset the results:
+ if(NOT DEFINED PYTHON_MODULE_EXT_SUFFIX)
+ unset(PYTHON_MODULE_DEBUG_POSTFIX PARENT_SCOPE)
+ unset(PYTHON_MODULE_EXTENSION PARENT_SCOPE)
+ unset(PYTHON_IS_DEBUG PARENT_SCOPE)
+ return()
+ endif()
+
+ # Sanity checks:
+ if(${python}_SOABI AND NOT (PYTHON_MODULE_EXT_SUFFIX STREQUAL ${python}_SOABI
+ OR PYTHON_MODULE_EXT_SUFFIX MATCHES "\\.${${python}_SOABI}\\."))
+ message(
+ WARNING
+ "Python extension suffix (${PYTHON_MODULE_EXT_SUFFIX}) does not match ${python}_SOABI (${${python}_SOABI})."
+ )
+ endif()
+
+ # Separate file name postfix from extension: (https://github.com/pybind/pybind11/issues/4699)
+ get_filename_component(_PYTHON_MODULE_DEBUG_POSTFIX "${PYTHON_MODULE_EXT_SUFFIX}" NAME_WE)
+ get_filename_component(_PYTHON_MODULE_EXTENSION "${PYTHON_MODULE_EXT_SUFFIX}" EXT)
+
+ # Try to deduce the debug ABI from the extension suffix:
+ if(NOT DEFINED _PYTHON_IS_DEBUG)
+ if(_PYTHON_MODULE_EXTENSION MATCHES "^\\.(cpython-|cp|pypy)[0-9]+dm?-"
+ OR _PYTHON_MODULE_DEBUG_POSTFIX MATCHES "^_d")
+ set(_PYTHON_IS_DEBUG On)
+ else()
+ set(_PYTHON_IS_DEBUG Off)
+ endif()
+ endif()
+
+ # Return results
+ set(PYTHON_MODULE_DEBUG_POSTFIX
+ "${_PYTHON_MODULE_DEBUG_POSTFIX}"
+ PARENT_SCOPE)
+ set(PYTHON_MODULE_EXTENSION
+ "${_PYTHON_MODULE_EXTENSION}"
+ PARENT_SCOPE)
+ set(PYTHON_IS_DEBUG
+ "${_PYTHON_IS_DEBUG}"
+ PARENT_SCOPE)
+
+endfunction()
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11NewTools.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11NewTools.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..a8b0800bb8e5c12db3cdf8e498cca612a777b114
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11NewTools.cmake
@@ -0,0 +1,341 @@
+# tools/pybind11NewTools.cmake -- Build system for the pybind11 modules
+#
+# Copyright (c) 2020 Wenzel Jakob and Henry Schreiner
+#
+# All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+if(CMAKE_VERSION VERSION_LESS 3.12)
+ message(FATAL_ERROR "You cannot use the new FindPython module with CMake < 3.12")
+endif()
+
+include_guard(DIRECTORY)
+
+get_property(
+ is_config
+ TARGET pybind11::headers
+ PROPERTY IMPORTED)
+
+if(pybind11_FIND_QUIETLY)
+ set(_pybind11_quiet QUIET)
+else()
+ set(_pybind11_quiet "")
+endif()
+
+if(NOT Python_FOUND AND NOT Python3_FOUND)
+ if(NOT DEFINED Python_FIND_IMPLEMENTATIONS)
+ set(Python_FIND_IMPLEMENTATIONS CPython PyPy)
+ endif()
+
+ # GitHub Actions like activation
+ if(NOT DEFINED Python_ROOT_DIR AND DEFINED ENV{pythonLocation})
+ set(Python_ROOT_DIR "$ENV{pythonLocation}")
+ endif()
+
+ # Interpreter should not be found when cross-compiling
+ if(_PYBIND11_CROSSCOMPILING)
+ set(_pybind11_interp_component "")
+ else()
+ set(_pybind11_interp_component Interpreter)
+ endif()
+
+ # Development.Module support (required for manylinux) started in 3.18
+ if(CMAKE_VERSION VERSION_LESS 3.18)
+ set(_pybind11_dev_component Development)
+ else()
+ set(_pybind11_dev_component Development.Module OPTIONAL_COMPONENTS Development.Embed)
+ endif()
+
+ # Callers need to be able to access Python_EXECUTABLE
+ set(_pybind11_global_keyword "")
+ if(NOT is_config AND NOT DEFINED Python_ARTIFACTS_INTERACTIVE)
+ set(Python_ARTIFACTS_INTERACTIVE TRUE)
+ if(NOT CMAKE_VERSION VERSION_LESS 3.24)
+ set(_pybind11_global_keyword "GLOBAL")
+ endif()
+ endif()
+
+ find_package(
+ Python 3.7 REQUIRED COMPONENTS ${_pybind11_interp_component} ${_pybind11_dev_component}
+ ${_pybind11_quiet} ${_pybind11_global_keyword})
+
+ # If we are in submodule mode, export the Python targets to global targets.
+ # If this behavior is not desired, FindPython _before_ pybind11.
+ if(NOT is_config
+ AND Python_ARTIFACTS_INTERACTIVE
+ AND _pybind11_global_keyword STREQUAL "")
+ if(TARGET Python::Python)
+ set_property(TARGET Python::Python PROPERTY IMPORTED_GLOBAL TRUE)
+ endif()
+ if(TARGET Python::Interpreter)
+ set_property(TARGET Python::Interpreter PROPERTY IMPORTED_GLOBAL TRUE)
+ endif()
+ if(TARGET Python::Module)
+ set_property(TARGET Python::Module PROPERTY IMPORTED_GLOBAL TRUE)
+ endif()
+ endif()
+
+ # Explicitly export version for callers (including our own functions)
+ if(NOT is_config AND Python_ARTIFACTS_INTERACTIVE)
+ set(Python_VERSION
+ "${Python_VERSION}"
+ CACHE INTERNAL "")
+ set(Python_VERSION_MAJOR
+ "${Python_VERSION_MAJOR}"
+ CACHE INTERNAL "")
+ set(Python_VERSION_MINOR
+ "${Python_VERSION_MINOR}"
+ CACHE INTERNAL "")
+ set(Python_VERSION_PATCH
+ "${Python_VERSION_PATCH}"
+ CACHE INTERNAL "")
+ endif()
+endif()
+
+if(Python_FOUND)
+ set(_Python
+ Python
+ CACHE INTERNAL "" FORCE)
+elseif(Python3_FOUND)
+ set(_Python
+ Python3
+ CACHE INTERNAL "" FORCE)
+endif()
+
+if(PYBIND11_MASTER_PROJECT)
+ if(${_Python}_INTERPRETER_ID MATCHES "PyPy")
+ message(STATUS "PyPy ${${_Python}_PyPy_VERSION} (Py ${${_Python}_VERSION})")
+ else()
+ message(STATUS "${_Python} ${${_Python}_VERSION}")
+ endif()
+endif()
+
+if(NOT _PYBIND11_CROSSCOMPILING)
+ # If a user finds Python, they may forget to include the Interpreter component
+ # and the following two steps require it. It is highly recommended by CMake
+ # when finding development libraries anyway, so we will require it.
+ if(NOT DEFINED ${_Python}_EXECUTABLE)
+ message(
+ FATAL_ERROR
+ "${_Python} was found without the Interpreter component. Pybind11 requires this component."
+ )
+
+ endif()
+
+ if(DEFINED PYBIND11_PYTHON_EXECUTABLE_LAST AND NOT ${_Python}_EXECUTABLE STREQUAL
+ PYBIND11_PYTHON_EXECUTABLE_LAST)
+ # Detect changes to the Python version/binary in subsequent CMake runs, and refresh config if needed
+ unset(PYTHON_IS_DEBUG CACHE)
+ unset(PYTHON_MODULE_EXTENSION CACHE)
+ endif()
+
+ set(PYBIND11_PYTHON_EXECUTABLE_LAST
+ "${${_Python}_EXECUTABLE}"
+ CACHE INTERNAL "Python executable during the last CMake run")
+
+ if(NOT DEFINED PYTHON_IS_DEBUG)
+ # Debug check - see https://stackoverflow.com/questions/646518/python-how-to-detect-debug-Interpreter
+ execute_process(
+ COMMAND "${${_Python}_EXECUTABLE}" "-c"
+ "import sys; sys.exit(hasattr(sys, 'gettotalrefcount'))"
+ RESULT_VARIABLE _PYTHON_IS_DEBUG)
+ set(PYTHON_IS_DEBUG
+ "${_PYTHON_IS_DEBUG}"
+ CACHE INTERNAL "Python debug status")
+ endif()
+
+ # Get the suffix - SO is deprecated, should use EXT_SUFFIX, but this is
+ # required for PyPy3 (as of 7.3.1)
+ if(NOT DEFINED PYTHON_MODULE_EXTENSION OR NOT DEFINED PYTHON_MODULE_DEBUG_POSTFIX)
+ execute_process(
+ COMMAND
+ "${${_Python}_EXECUTABLE}" "-c"
+ "import sys, importlib; s = importlib.import_module('distutils.sysconfig' if sys.version_info < (3, 10) else 'sysconfig'); print(s.get_config_var('EXT_SUFFIX') or s.get_config_var('SO'))"
+ OUTPUT_VARIABLE _PYTHON_MODULE_EXT_SUFFIX
+ ERROR_VARIABLE _PYTHON_MODULE_EXT_SUFFIX_ERR
+ OUTPUT_STRIP_TRAILING_WHITESPACE)
+
+ if(_PYTHON_MODULE_EXT_SUFFIX STREQUAL "")
+ message(
+ FATAL_ERROR
+ "pybind11 could not query the module file extension, likely the 'distutils'"
+ "package is not installed. Full error message:\n${_PYTHON_MODULE_EXT_SUFFIX_ERR}")
+ endif()
+
+ # This needs to be available for the pybind11_extension function
+ if(NOT DEFINED PYTHON_MODULE_DEBUG_POSTFIX)
+ get_filename_component(_PYTHON_MODULE_DEBUG_POSTFIX "${_PYTHON_MODULE_EXT_SUFFIX}" NAME_WE)
+ set(PYTHON_MODULE_DEBUG_POSTFIX
+ "${_PYTHON_MODULE_DEBUG_POSTFIX}"
+ CACHE INTERNAL "")
+ endif()
+
+ if(NOT DEFINED PYTHON_MODULE_EXTENSION)
+ get_filename_component(_PYTHON_MODULE_EXTENSION "${_PYTHON_MODULE_EXT_SUFFIX}" EXT)
+ set(PYTHON_MODULE_EXTENSION
+ "${_PYTHON_MODULE_EXTENSION}"
+ CACHE INTERNAL "")
+ endif()
+ endif()
+else()
+ if(NOT DEFINED PYTHON_IS_DEBUG
+ OR NOT DEFINED PYTHON_MODULE_EXTENSION
+ OR NOT DEFINED PYTHON_MODULE_DEBUG_POSTFIX)
+ include("${CMAKE_CURRENT_LIST_DIR}/pybind11GuessPythonExtSuffix.cmake")
+ pybind11_guess_python_module_extension("${_Python}")
+ endif()
+ # When cross-compiling, we cannot query the Python interpreter, so we require
+ # the user to set these variables explicitly.
+ if(NOT DEFINED PYTHON_IS_DEBUG
+ OR NOT DEFINED PYTHON_MODULE_EXTENSION
+ OR NOT DEFINED PYTHON_MODULE_DEBUG_POSTFIX)
+ message(
+ FATAL_ERROR
+ "When cross-compiling, you should set the PYTHON_IS_DEBUG, PYTHON_MODULE_EXTENSION and PYTHON_MODULE_DEBUG_POSTFIX \
+ variables appropriately before loading pybind11 (e.g. in your CMake toolchain file)")
+ endif()
+endif()
+
+# Python debug libraries expose slightly different objects before 3.8
+# https://docs.python.org/3.6/c-api/intro.html#debugging-builds
+# https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib
+if(PYTHON_IS_DEBUG)
+ set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_COMPILE_DEFINITIONS Py_DEBUG)
+endif()
+
+# Check on every access - since Python can change - do nothing in that case.
+
+if(DEFINED ${_Python}_INCLUDE_DIRS)
+ # Only add Python for build - must be added during the import for config
+ # since it has to be re-discovered.
+ #
+ # This needs to be a target to be included after the local pybind11
+ # directory, just in case there there is an installed pybind11 sitting
+ # next to Python's includes. It also ensures Python is a SYSTEM library.
+ add_library(pybind11::python_headers INTERFACE IMPORTED)
+ set_property(
+ TARGET pybind11::python_headers PROPERTY INTERFACE_INCLUDE_DIRECTORIES
+ "$<BUILD_INTERFACE:${${_Python}_INCLUDE_DIRS}>")
+ set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_headers)
+ set(pybind11_INCLUDE_DIRS
+ "${pybind11_INCLUDE_DIR}" "${${_Python}_INCLUDE_DIRS}"
+ CACHE INTERNAL "Directories where pybind11 and possibly Python headers are located")
+endif()
+
+# In CMake 3.18+, you can find these separately, so include an if
+if(TARGET ${_Python}::Python)
+ set_property(
+ TARGET pybind11::embed
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES ${_Python}::Python)
+endif()
+
+# CMake 3.15+ has this
+if(TARGET ${_Python}::Module)
+ set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES ${_Python}::Module)
+else()
+ set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_link_helper)
+endif()
+
+# WITHOUT_SOABI and WITH_SOABI will disable the custom extension handling used by pybind11.
+# WITH_SOABI is passed on to python_add_library.
+function(pybind11_add_module target_name)
+ cmake_parse_arguments(PARSE_ARGV 1 ARG
+ "STATIC;SHARED;MODULE;THIN_LTO;OPT_SIZE;NO_EXTRAS;WITHOUT_SOABI" "" "")
+
+ if(ARG_STATIC)
+ set(lib_type STATIC)
+ elseif(ARG_SHARED)
+ set(lib_type SHARED)
+ else()
+ set(lib_type MODULE)
+ endif()
+
+ if("${_Python}" STREQUAL "Python")
+ python_add_library(${target_name} ${lib_type} ${ARG_UNPARSED_ARGUMENTS})
+ elseif("${_Python}" STREQUAL "Python3")
+ python3_add_library(${target_name} ${lib_type} ${ARG_UNPARSED_ARGUMENTS})
+ else()
+ message(FATAL_ERROR "Cannot detect FindPython version: ${_Python}")
+ endif()
+
+ target_link_libraries(${target_name} PRIVATE pybind11::headers)
+
+ if(lib_type STREQUAL "MODULE")
+ target_link_libraries(${target_name} PRIVATE pybind11::module)
+ else()
+ target_link_libraries(${target_name} PRIVATE pybind11::embed)
+ endif()
+
+ if(MSVC)
+ target_link_libraries(${target_name} PRIVATE pybind11::windows_extras)
+ endif()
+
+ # -fvisibility=hidden is required to allow multiple modules compiled against
+ # different pybind versions to work properly, and for some features (e.g.
+ # py::module_local). We force it on everything inside the `pybind11`
+ # namespace; also turning it on for a pybind module compilation here avoids
+ # potential warnings or issues from having mixed hidden/non-hidden types.
+ if(NOT DEFINED CMAKE_CXX_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden")
+ endif()
+
+ if(NOT DEFINED CMAKE_CUDA_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CUDA_VISIBILITY_PRESET "hidden")
+ endif()
+
+ # If we don't pass a WITH_SOABI or WITHOUT_SOABI, use our own default handling of extensions
+ if(NOT ARG_WITHOUT_SOABI AND NOT "WITH_SOABI" IN_LIST ARG_UNPARSED_ARGUMENTS)
+ pybind11_extension(${target_name})
+ endif()
+
+ if(ARG_NO_EXTRAS)
+ return()
+ endif()
+
+ if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION)
+ if(ARG_THIN_LTO)
+ target_link_libraries(${target_name} PRIVATE pybind11::thin_lto)
+ else()
+ target_link_libraries(${target_name} PRIVATE pybind11::lto)
+ endif()
+ endif()
+
+ if(DEFINED CMAKE_BUILD_TYPE) # see https://github.com/pybind/pybind11/issues/4454
+ # Use case-insensitive comparison to match the result of $<CONFIG:cfgs>
+ string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
+ if(NOT MSVC AND NOT "${uppercase_CMAKE_BUILD_TYPE}" MATCHES DEBUG|RELWITHDEBINFO)
+ # Strip unnecessary sections of the binary on Linux/macOS
+ pybind11_strip(${target_name})
+ endif()
+ endif()
+
+ if(MSVC)
+ target_link_libraries(${target_name} PRIVATE pybind11::windows_extras)
+ endif()
+
+ if(ARG_OPT_SIZE)
+ target_link_libraries(${target_name} PRIVATE pybind11::opt_size)
+ endif()
+endfunction()
+
+function(pybind11_extension name)
+ # The extension is precomputed
+ set_target_properties(
+ ${name}
+ PROPERTIES PREFIX ""
+ DEBUG_POSTFIX "${PYTHON_MODULE_DEBUG_POSTFIX}"
+ SUFFIX "${PYTHON_MODULE_EXTENSION}")
+endfunction()
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Targets.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Targets.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..bfc69e43f9168171abe05de325f15816e7ebac1c
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Targets.cmake
@@ -0,0 +1,107 @@
+# Generated by CMake
+
+if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8)
+ message(FATAL_ERROR "CMake >= 2.8.0 required")
+endif()
+if(CMAKE_VERSION VERSION_LESS "3.0.0")
+ message(FATAL_ERROR "CMake >= 3.0.0 required")
+endif()
+cmake_policy(PUSH)
+cmake_policy(VERSION 3.0.0...3.28)
+#----------------------------------------------------------------
+# Generated CMake target import file.
+#----------------------------------------------------------------
+
+# Commands may need to know the format version.
+set(CMAKE_IMPORT_FILE_VERSION 1)
+
+# Protect against multiple inclusion, which would fail when already imported targets are added once more.
+set(_cmake_targets_defined "")
+set(_cmake_targets_not_defined "")
+set(_cmake_expected_targets "")
+foreach(_cmake_expected_target IN ITEMS pybind11::pybind11_headers)
+ list(APPEND _cmake_expected_targets "${_cmake_expected_target}")
+ if(TARGET "${_cmake_expected_target}")
+ list(APPEND _cmake_targets_defined "${_cmake_expected_target}")
+ else()
+ list(APPEND _cmake_targets_not_defined "${_cmake_expected_target}")
+ endif()
+endforeach()
+unset(_cmake_expected_target)
+if(_cmake_targets_defined STREQUAL _cmake_expected_targets)
+ unset(_cmake_targets_defined)
+ unset(_cmake_targets_not_defined)
+ unset(_cmake_expected_targets)
+ unset(CMAKE_IMPORT_FILE_VERSION)
+ cmake_policy(POP)
+ return()
+endif()
+if(NOT _cmake_targets_defined STREQUAL "")
+ string(REPLACE ";" ", " _cmake_targets_defined_text "${_cmake_targets_defined}")
+ string(REPLACE ";" ", " _cmake_targets_not_defined_text "${_cmake_targets_not_defined}")
+ message(FATAL_ERROR "Some (but not all) targets in this export set were already defined.\nTargets Defined: ${_cmake_targets_defined_text}\nTargets not yet defined: ${_cmake_targets_not_defined_text}\n")
+endif()
+unset(_cmake_targets_defined)
+unset(_cmake_targets_not_defined)
+unset(_cmake_expected_targets)
+
+
+# Compute the installation prefix relative to this file.
+get_filename_component(_IMPORT_PREFIX "${CMAKE_CURRENT_LIST_FILE}" PATH)
+get_filename_component(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH)
+get_filename_component(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH)
+get_filename_component(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH)
+if(_IMPORT_PREFIX STREQUAL "/")
+ set(_IMPORT_PREFIX "")
+endif()
+
+# Create imported target pybind11::pybind11_headers
+add_library(pybind11::pybind11_headers INTERFACE IMPORTED)
+
+set_target_properties(pybind11::pybind11_headers PROPERTIES
+ INTERFACE_COMPILE_FEATURES "cxx_inheriting_constructors;cxx_user_literals;cxx_right_angle_brackets"
+ INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include"
+)
+
+# Load information for each installed configuration.
+file(GLOB _cmake_config_files "${CMAKE_CURRENT_LIST_DIR}/pybind11Targets-*.cmake")
+foreach(_cmake_config_file IN LISTS _cmake_config_files)
+ include("${_cmake_config_file}")
+endforeach()
+unset(_cmake_config_file)
+unset(_cmake_config_files)
+
+# Cleanup temporary variables.
+set(_IMPORT_PREFIX)
+
+# Loop over all imported files and verify that they actually exist
+foreach(_cmake_target IN LISTS _cmake_import_check_targets)
+ if(CMAKE_VERSION VERSION_LESS "3.28"
+ OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target}
+ OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}")
+ foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}")
+ if(NOT EXISTS "${_cmake_file}")
+ message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file
+ \"${_cmake_file}\"
+but this file does not exist. Possible reasons include:
+* The file was deleted, renamed, or moved to another location.
+* An install or uninstall procedure did not complete successfully.
+* The installation package was faulty and contained
+ \"${CMAKE_CURRENT_LIST_FILE}\"
+but not all the files it references.
+")
+ endif()
+ endforeach()
+ endif()
+ unset(_cmake_file)
+ unset("_cmake_import_check_files_for_${_cmake_target}")
+endforeach()
+unset(_cmake_target)
+unset(_cmake_import_check_targets)
+
+# This file does not depend on other imported targets which have
+# been exported from the same project but in a separate export set.
+
+# Commands beyond this point should not need to know the version.
+set(CMAKE_IMPORT_FILE_VERSION)
+cmake_policy(POP)
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Tools.cmake b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Tools.cmake
new file mode 100644
index 0000000000000000000000000000000000000000..bed5e08039abfffb59f1861573a6b835fc4d5a99
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/cmake/pybind11/pybind11Tools.cmake
@@ -0,0 +1,239 @@
+# tools/pybind11Tools.cmake -- Build system for the pybind11 modules
+#
+# Copyright (c) 2020 Wenzel Jakob
+#
+# All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+# include_guard(global) (pre-CMake 3.10)
+if(TARGET pybind11::python_headers)
+ return()
+endif()
+
+# Built-in in CMake 3.5+
+include(CMakeParseArguments)
+
+if(pybind11_FIND_QUIETLY)
+ set(_pybind11_quiet QUIET)
+else()
+ set(_pybind11_quiet "")
+endif()
+
+# If this is the first run, PYTHON_VERSION can stand in for PYBIND11_PYTHON_VERSION
+if(NOT DEFINED PYBIND11_PYTHON_VERSION AND DEFINED PYTHON_VERSION)
+ message(WARNING "Set PYBIND11_PYTHON_VERSION to search for a specific version, not "
+ "PYTHON_VERSION (which is an output). Assuming that is what you "
+ "meant to do and continuing anyway.")
+ set(PYBIND11_PYTHON_VERSION
+ "${PYTHON_VERSION}"
+ CACHE STRING "Python version to use for compiling modules")
+ unset(PYTHON_VERSION)
+ unset(PYTHON_VERSION CACHE)
+elseif(DEFINED PYBIND11_PYTHON_VERSION)
+ # If this is set as a normal variable, promote it
+ set(PYBIND11_PYTHON_VERSION
+ "${PYBIND11_PYTHON_VERSION}"
+ CACHE STRING "Python version to use for compiling modules")
+else()
+ # Make an empty cache variable.
+ set(PYBIND11_PYTHON_VERSION
+ ""
+ CACHE STRING "Python version to use for compiling modules")
+endif()
+
+# A user can set versions manually too
+set(Python_ADDITIONAL_VERSIONS
+ "3.12;3.11;3.10;3.9;3.8;3.7"
+ CACHE INTERNAL "")
+
+list(APPEND CMAKE_MODULE_PATH "${CMAKE_CURRENT_LIST_DIR}")
+find_package(PythonLibsNew ${PYBIND11_PYTHON_VERSION} MODULE REQUIRED ${_pybind11_quiet})
+list(REMOVE_AT CMAKE_MODULE_PATH -1)
+
+# Makes a normal variable a cached variable
+macro(_PYBIND11_PROMOTE_TO_CACHE NAME)
+ set(_tmp_ptc "${${NAME}}")
+ # CMake 3.21 complains if a cached variable is shadowed by a normal one
+ unset(${NAME})
+ set(${NAME}
+ "${_tmp_ptc}"
+ CACHE INTERNAL "")
+endmacro()
+
+# Cache variables so pybind11_add_module can be used in parent projects
+_pybind11_promote_to_cache(PYTHON_INCLUDE_DIRS)
+_pybind11_promote_to_cache(PYTHON_LIBRARIES)
+_pybind11_promote_to_cache(PYTHON_MODULE_PREFIX)
+_pybind11_promote_to_cache(PYTHON_MODULE_EXTENSION)
+_pybind11_promote_to_cache(PYTHON_MODULE_DEBUG_POSTFIX)
+_pybind11_promote_to_cache(PYTHON_VERSION_MAJOR)
+_pybind11_promote_to_cache(PYTHON_VERSION_MINOR)
+_pybind11_promote_to_cache(PYTHON_VERSION)
+_pybind11_promote_to_cache(PYTHON_IS_DEBUG)
+
+if(PYBIND11_MASTER_PROJECT)
+ if(PYTHON_MODULE_EXTENSION MATCHES "pypy")
+ if(NOT DEFINED PYPY_VERSION)
+ execute_process(
+ COMMAND ${PYTHON_EXECUTABLE} -c
+ [=[import sys; sys.stdout.write(".".join(map(str, sys.pypy_version_info[:3])))]=]
+ OUTPUT_VARIABLE pypy_version)
+ set(PYPY_VERSION
+ ${pypy_version}
+ CACHE INTERNAL "")
+ endif()
+ message(STATUS "PYPY ${PYPY_VERSION} (Py ${PYTHON_VERSION})")
+ else()
+ message(STATUS "PYTHON ${PYTHON_VERSION}")
+ endif()
+endif()
+
+# Only add Python for build - must be added during the import for config since
+# it has to be re-discovered.
+#
+# This needs to be an target to it is included after the local pybind11
+# directory, just in case there are multiple versions of pybind11, we want the
+# one we expect.
+add_library(pybind11::python_headers INTERFACE IMPORTED)
+set_property(TARGET pybind11::python_headers PROPERTY INTERFACE_INCLUDE_DIRECTORIES
+ "$")
+set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::python_headers)
+
+set(pybind11_INCLUDE_DIRS
+ "${pybind11_INCLUDE_DIR}" "${PYTHON_INCLUDE_DIRS}"
+ CACHE INTERNAL "Directories where pybind11 and possibly Python headers are located")
+
+# Python debug libraries expose slightly different objects before 3.8
+# https://docs.python.org/3.6/c-api/intro.html#debugging-builds
+# https://stackoverflow.com/questions/39161202/how-to-work-around-missing-pymodule-create2-in-amd64-win-python35-d-lib
+if(PYTHON_IS_DEBUG)
+ set_property(
+ TARGET pybind11::pybind11
+ APPEND
+ PROPERTY INTERFACE_COMPILE_DEFINITIONS Py_DEBUG)
+endif()
+
+# The <3.11 code here does not support release/debug builds at the same time, like on vcpkg
+if(CMAKE_VERSION VERSION_LESS 3.11)
+ set_property(
+ TARGET pybind11::module
+ APPEND
+ PROPERTY
+ INTERFACE_LINK_LIBRARIES
+ pybind11::python_link_helper
+ "$<$,$>:$>"
+ )
+
+ set_property(
+ TARGET pybind11::embed
+ APPEND
+ PROPERTY INTERFACE_LINK_LIBRARIES pybind11::pybind11 $)
+else()
+ # The IMPORTED INTERFACE library here is to ensure that "debug" and "release" get processed outside
+ # of a generator expression - https://gitlab.kitware.com/cmake/cmake/-/issues/18424, as they are
+ # target_link_library keywords rather than real libraries.
+ add_library(pybind11::_ClassicPythonLibraries IMPORTED INTERFACE)
+ target_link_libraries(pybind11::_ClassicPythonLibraries INTERFACE ${PYTHON_LIBRARIES})
+ target_link_libraries(
+ pybind11::module
+ INTERFACE
+ pybind11::python_link_helper
+ "$<$,$>:pybind11::_ClassicPythonLibraries>")
+
+ target_link_libraries(pybind11::embed INTERFACE pybind11::pybind11
+ pybind11::_ClassicPythonLibraries)
+endif()
+
+function(pybind11_extension name)
+ # The prefix and extension are provided by FindPythonLibsNew.cmake
+ set_target_properties(
+ ${name}
+ PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}"
+ DEBUG_POSTFIX "${PYTHON_MODULE_DEBUG_POSTFIX}"
+ SUFFIX "${PYTHON_MODULE_EXTENSION}")
+endfunction()
+
+# Build a Python extension module:
+# pybind11_add_module( [MODULE | SHARED] [EXCLUDE_FROM_ALL]
+# [NO_EXTRAS] [THIN_LTO] [OPT_SIZE] source1 [source2 ...])
+#
+function(pybind11_add_module target_name)
+ set(options "MODULE;SHARED;EXCLUDE_FROM_ALL;NO_EXTRAS;SYSTEM;THIN_LTO;OPT_SIZE")
+ cmake_parse_arguments(ARG "${options}" "" "" ${ARGN})
+
+ if(ARG_MODULE AND ARG_SHARED)
+ message(FATAL_ERROR "Can't be both MODULE and SHARED")
+ elseif(ARG_SHARED)
+ set(lib_type SHARED)
+ else()
+ set(lib_type MODULE)
+ endif()
+
+ if(ARG_EXCLUDE_FROM_ALL)
+ set(exclude_from_all EXCLUDE_FROM_ALL)
+ else()
+ set(exclude_from_all "")
+ endif()
+
+ add_library(${target_name} ${lib_type} ${exclude_from_all} ${ARG_UNPARSED_ARGUMENTS})
+
+ target_link_libraries(${target_name} PRIVATE pybind11::module)
+
+ if(ARG_SYSTEM)
+ message(
+ STATUS
+ "Warning: this does not have an effect - use NO_SYSTEM_FROM_IMPORTED if using imported targets"
+ )
+ endif()
+
+ pybind11_extension(${target_name})
+
+ # -fvisibility=hidden is required to allow multiple modules compiled against
+ # different pybind versions to work properly, and for some features (e.g.
+ # py::module_local). We force it on everything inside the `pybind11`
+ # namespace; also turning it on for a pybind module compilation here avoids
+ # potential warnings or issues from having mixed hidden/non-hidden types.
+ if(NOT DEFINED CMAKE_CXX_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CXX_VISIBILITY_PRESET "hidden")
+ endif()
+
+ if(NOT DEFINED CMAKE_CUDA_VISIBILITY_PRESET)
+ set_target_properties(${target_name} PROPERTIES CUDA_VISIBILITY_PRESET "hidden")
+ endif()
+
+ if(ARG_NO_EXTRAS)
+ return()
+ endif()
+
+ if(NOT DEFINED CMAKE_INTERPROCEDURAL_OPTIMIZATION)
+ if(ARG_THIN_LTO)
+ target_link_libraries(${target_name} PRIVATE pybind11::thin_lto)
+ else()
+ target_link_libraries(${target_name} PRIVATE pybind11::lto)
+ endif()
+ endif()
+
+ if(DEFINED CMAKE_BUILD_TYPE) # see https://github.com/pybind/pybind11/issues/4454
+ # Use case-insensitive comparison to match the result of $
+ string(TOUPPER "${CMAKE_BUILD_TYPE}" uppercase_CMAKE_BUILD_TYPE)
+ if(NOT MSVC AND NOT "${uppercase_CMAKE_BUILD_TYPE}" MATCHES DEBUG|RELWITHDEBINFO)
+ pybind11_strip(${target_name})
+ endif()
+ endif()
+
+ if(MSVC)
+ target_link_libraries(${target_name} PRIVATE pybind11::windows_extras)
+ endif()
+
+ if(ARG_OPT_SIZE)
+ target_link_libraries(${target_name} PRIVATE pybind11::opt_size)
+ endif()
+endfunction()
+
+# Provide general way to call common Python commands in "common" file.
+set(_Python
+ PYTHON
+ CACHE INTERNAL "" FORCE)
diff --git a/.venv/lib/python3.11/site-packages/pybind11/share/pkgconfig/pybind11.pc b/.venv/lib/python3.11/site-packages/pybind11/share/pkgconfig/pybind11.pc
new file mode 100644
index 0000000000000000000000000000000000000000..d3504bf5d785b704407dcc866fe47c21b82fb3ae
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/pybind11/share/pkgconfig/pybind11.pc
@@ -0,0 +1,7 @@
+prefix=${pcfiledir}/../../
+includedir=${prefix}/include
+
+Name: pybind11
+Description: Seamless operability between C++11 and Python
+Version: 2.13.6
+Cflags: -I${includedir}
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/INSTALLER b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/LICENSE-APACHE b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/LICENSE-APACHE
new file mode 100644
index 0000000000000000000000000000000000000000..5f66d4ee6e85dc68fc36a027dddb2368e3f71fb4
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/LICENSE-APACHE
@@ -0,0 +1,203 @@
+Copyright (C) 2016-present the uvloop authors and contributors.
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright (c) 2015-present MagicStack Inc. http://magic.io
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/LICENSE-MIT b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/LICENSE-MIT
new file mode 100644
index 0000000000000000000000000000000000000000..40fd0230d7010bf08f207bf92874e3166d10f647
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/LICENSE-MIT
@@ -0,0 +1,21 @@
+The MIT License
+
+Copyright (C) 2016-present the uvloop authors and contributors.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/METADATA b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..49012e493220ab4158fd4c8549ceaf8ca334b078
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/METADATA
@@ -0,0 +1,175 @@
+Metadata-Version: 2.1
+Name: uvloop
+Version: 0.21.0
+Summary: Fast implementation of asyncio event loop on top of libuv
+Author-email: Yury Selivanov
+License: MIT License
+Project-URL: github, https://github.com/MagicStack/uvloop
+Keywords: asyncio,networking
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Framework :: AsyncIO
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Topic :: System :: Networking
+Requires-Python: >=3.8.0
+Description-Content-Type: text/x-rst
+License-File: LICENSE-APACHE
+License-File: LICENSE-MIT
+Provides-Extra: dev
+Requires-Dist: setuptools>=60; extra == "dev"
+Requires-Dist: Cython~=3.0; extra == "dev"
+Provides-Extra: docs
+Requires-Dist: Sphinx~=4.1.2; extra == "docs"
+Requires-Dist: sphinxcontrib-asyncio~=0.3.0; extra == "docs"
+Requires-Dist: sphinx-rtd-theme~=0.5.2; extra == "docs"
+Provides-Extra: test
+Requires-Dist: aiohttp>=3.10.5; extra == "test"
+Requires-Dist: flake8~=5.0; extra == "test"
+Requires-Dist: psutil; extra == "test"
+Requires-Dist: pycodestyle~=2.9.0; extra == "test"
+Requires-Dist: pyOpenSSL~=23.0.0; extra == "test"
+Requires-Dist: mypy>=0.800; extra == "test"
+
+.. image:: https://img.shields.io/github/actions/workflow/status/MagicStack/uvloop/tests.yml?branch=master
+ :target: https://github.com/MagicStack/uvloop/actions/workflows/tests.yml?query=branch%3Amaster
+
+.. image:: https://img.shields.io/pypi/v/uvloop.svg
+ :target: https://pypi.python.org/pypi/uvloop
+
+.. image:: https://pepy.tech/badge/uvloop
+ :target: https://pepy.tech/project/uvloop
+ :alt: PyPI - Downloads
+
+
+uvloop is a fast, drop-in replacement of the built-in asyncio
+event loop. uvloop is implemented in Cython and uses libuv
+under the hood.
+
+The project documentation can be found
+`here `_. Please also check out the
+`wiki `_.
+
+
+Performance
+-----------
+
+uvloop makes asyncio 2-4x faster.
+
+.. image:: https://raw.githubusercontent.com/MagicStack/uvloop/master/performance.png
+ :target: http://magic.io/blog/uvloop-blazing-fast-python-networking/
+
+The above chart shows the performance of an echo server with different
+message sizes. The *sockets* benchmark uses ``loop.sock_recv()`` and
+``loop.sock_sendall()`` methods; the *streams* benchmark uses asyncio
+high-level streams, created by the ``asyncio.start_server()`` function;
+and the *protocol* benchmark uses ``loop.create_server()`` with a simple
+echo protocol. Read more about uvloop in a
+`blog post `_
+about it.
+
+
+Installation
+------------
+
+uvloop requires Python 3.8 or greater and is available on PyPI.
+Use pip to install it::
+
+ $ pip install uvloop
+
+Note that it is highly recommended to **upgrade pip before** installing
+uvloop with::
+
+ $ pip install -U pip
+
+
+Using uvloop
+------------
+
+As of uvloop 0.18, the preferred way of using it is via the
+``uvloop.run()`` helper function:
+
+
+.. code:: python
+
+ import uvloop
+
+ async def main():
+ # Main entry-point.
+ ...
+
+ uvloop.run(main())
+
+``uvloop.run()`` works by simply configuring ``asyncio.run()``
+to use uvloop, passing all of the arguments to it, such as ``debug``,
+e.g. ``uvloop.run(main(), debug=True)``.
+
+With Python 3.11 and earlier the following alternative
+snippet can be used:
+
+.. code:: python
+
+ import asyncio
+ import sys
+
+ import uvloop
+
+ async def main():
+ # Main entry-point.
+ ...
+
+ if sys.version_info >= (3, 11):
+ with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
+ runner.run(main())
+ else:
+ uvloop.install()
+ asyncio.run(main())
+
+
+Building From Source
+--------------------
+
+To build uvloop, you'll need Python 3.8 or greater:
+
+1. Clone the repository:
+
+ .. code::
+
+ $ git clone --recursive git@github.com:MagicStack/uvloop.git
+ $ cd uvloop
+
+2. Create a virtual environment and activate it:
+
+ .. code::
+
+ $ python3 -m venv uvloop-dev
+ $ source uvloop-dev/bin/activate
+
+3. Install development dependencies:
+
+ .. code::
+
+ $ pip install -e .[dev]
+
+4. Build and run tests:
+
+ .. code::
+
+ $ make
+ $ make test
+
+
+License
+-------
+
+uvloop is dual-licensed under MIT and Apache 2.0 licenses.
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/RECORD b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..fbc92d4950b4f45cd447a2afc2a03c062b3b1649
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/RECORD
@@ -0,0 +1,69 @@
+uvloop-0.21.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+uvloop-0.21.0.dist-info/LICENSE-APACHE,sha256=N3AlKHeY-dzYGeH4JvpfxeLzglKGkasFKMXPjIwoLCc,11439
+uvloop-0.21.0.dist-info/LICENSE-MIT,sha256=bdTDmfJt4EPXeirX4x20y1vwjqg2iwpC1uFYY1zIq2I,1105
+uvloop-0.21.0.dist-info/METADATA,sha256=UnacVCAjauzcbHHE4UVtxI84a9-D1B5zy0Fi_T6NGu0,4899
+uvloop-0.21.0.dist-info/RECORD,,
+uvloop-0.21.0.dist-info/WHEEL,sha256=XihS4yPLFu_eB7R4sl7jUHiEAA7zQ3q0-_CuIzkpFkk,151
+uvloop-0.21.0.dist-info/top_level.txt,sha256=2cDaltyemYfQErB19s2jFmumeJRnbsZPJ7Lj9A78c6Y,7
+uvloop/__init__.py,sha256=CuY_C2LjdsJTwxAgU0tqRAU6Bb-XC0F5EUjJc70OZFc,5228
+uvloop/__pycache__/__init__.cpython-311.pyc,,
+uvloop/__pycache__/_noop.cpython-311.pyc,,
+uvloop/__pycache__/_testbase.cpython-311.pyc,,
+uvloop/__pycache__/_version.cpython-311.pyc,,
+uvloop/_noop.py,sha256=SDAJTiWhE7g3KyttbjPdliv-Uheuas-tKX4_y_nvO_Q,86
+uvloop/_testbase.py,sha256=sRZjHR-nMHv4UZ23AkSCtgEsvgo8uOqDchFNOFViiRg,15570
+uvloop/_version.py,sha256=pRhsSEFabYnSrcbRCuOkm0vrAr6wBs5E2NLUAzk-OqY,576
+uvloop/cbhandles.pxd,sha256=gW0spS84wbfuEHuYEbRSsHiKRmb5pfDHkYZvxhTC-Vo,752
+uvloop/cbhandles.pyx,sha256=PTQjEEN4yGloNP6lIHddNzDOFqowvGm_CvS9M6yHvc4,12298
+uvloop/dns.pyx,sha256=oHTr36ic6u9F-VFAAv0G92KY44O3-0x3ytcOAVvGmTs,14562
+uvloop/errors.pyx,sha256=2etYn89Th3tIsNMLl33Quc-1WkKKY7umPOVvilTzi9k,2774
+uvloop/handles/async_.pxd,sha256=xtsWSi0A67joJU4iFp5JWzQxwNj4LCq_KMDyDDMxdec,252
+uvloop/handles/async_.pyx,sha256=Hd_Bgi8I9uJZ20_2qUsHYYQtwq4LKtjTr3THQYKp-Sk,1516
+uvloop/handles/basetransport.pxd,sha256=SiDD77NPthTfjXVg12gJJGM1YYKZXw4AEK9tv22jJeE,1322
+uvloop/handles/basetransport.pyx,sha256=GtN3vdp6DDkh1g0RRPemj0r4x-Exskw-m16p_vY_E9g,9553
+uvloop/handles/check.pxd,sha256=IufFrzdMhLRc5zAjh7Lb0lAqw-UclrYVo-UgqIs6eJ0,276
+uvloop/handles/check.pyx,sha256=70d5oylnFnZjEJo_HBg5JYw2hE3PvkU3rhzALDEUOK8,1881
+uvloop/handles/fsevent.pxd,sha256=YfklQ9TeikRV2QRLNPAtkEwu_3vwrsOq9cMJxFV8VgI,325
+uvloop/handles/fsevent.pyx,sha256=RUV2-WhBo2OjXFn0N49l4th1DFZ0kdC-7YgsIZkUBoI,2823
+uvloop/handles/handle.pxd,sha256=QPjUCObkDwvjRAZFlolF1tNXFV9-jAf22V0KweiLdOA,1189
+uvloop/handles/handle.pyx,sha256=YOaN1fSPzo_IJA3IbG7E10pc-dbAN7y8DyGZoLgho-M,13248
+uvloop/handles/idle.pxd,sha256=L3Gr2tuzKHWEB2NnykwjbNyexNUlckBdGFKPufn5AZU,274
+uvloop/handles/idle.pyx,sha256=BXi_PQrgbPN2n3-QybHo0CLhW2m9N7benwSb4q7u87I,1859
+uvloop/handles/pipe.pxd,sha256=LzsEOwptkqNa52O1Iyqhxq2d4ppzmHr0x8cMwJIZZfk,933
+uvloop/handles/pipe.pyx,sha256=9xINAS1xZuPM87gS-QYVGwUn_4JhcqKwqJobjpHHGkM,7688
+uvloop/handles/poll.pxd,sha256=afAR6gAx52OnmPqaHa3y41xxtIYxam1w9XoNZRxNMwU,575
+uvloop/handles/poll.pyx,sha256=kjlhSrRyOHnH2tJJLmBtE0ePltUWTKphJ6ml8RP0Qhg,6511
+uvloop/handles/process.pxd,sha256=FKCuQWWzDL8r0N1phlwPJ_pGGY3TZsOl5rBQP4AlgYo,2314
+uvloop/handles/process.pyx,sha256=x89gE5JCApGshWqln-2qxYI_I262r5udmLCnBAyW--w,26919
+uvloop/handles/stream.pxd,sha256=1BASyhG8z9HDf4ZikWPqd-hldQgGSdHl3ta-nNEnChE,1535
+uvloop/handles/stream.pyx,sha256=bizhF7PRNmy3Zcd7anORwZRAsQx4tV31dhzqNf5_fAc,31856
+uvloop/handles/streamserver.pxd,sha256=hIDDhB2RK0lnMUscDWcGl2NRkclb6AYfche77YEdaes,786
+uvloop/handles/streamserver.pyx,sha256=quWwKo_rz4Jzq-YNLZQ7lmcBNLSzQBpf31nS64jhbrU,4632
+uvloop/handles/tcp.pxd,sha256=xNYy-df1tK5ywK3V7a0wWno9tAA7JH-FiIQ5F0296ZM,892
+uvloop/handles/tcp.pyx,sha256=22isLLJ9__U7Bx2ZQwWP3Mozt0DZ66aOLREW7adKGLs,7291
+uvloop/handles/timer.pxd,sha256=VcLZBfzd9ixuxmJrE9O3YmyVO4LfMDwcG7UNpJbTu40,440
+uvloop/handles/timer.pyx,sha256=zT35AW9Wv9H_zWa6sw7GOi4SB7HavGUobFezTFfSq6E,2416
+uvloop/handles/udp.pxd,sha256=gQn9FH4rAiXDR_kZNqaYcNMGMzFL-T1V1G8JI6JOHU8,671
+uvloop/handles/udp.pyx,sha256=_doWmjAsh3vPES_CLQ7j309f71qK_6YIBGKtimpjAO8,12039
+uvloop/includes/__init__.py,sha256=-OUZ6zr6Opdw78PKsHYi1AuP74Ep7XByxyoRYOuRtgI,361
+uvloop/includes/__pycache__/__init__.cpython-311.pyc,,
+uvloop/includes/consts.pxi,sha256=m6K9HIUl8G3D9iOIzK0C3_chXKwIfsiq88j3VOvUuU4,843
+uvloop/includes/debug.pxd,sha256=cCnlyp6HkhQgVF7lAQPA31wIa1n1pn6eUY_wARYh3uA,64
+uvloop/includes/flowcontrol.pxd,sha256=7PuZtEgp4TS1Y3iNqZZInkDKI5iCylERrcLqe2ls3EY,458
+uvloop/includes/python.pxd,sha256=SSB2FPEsEt_Aif66l-SQvFpJ3I7TrgbL4lsiu_Kyu9k,846
+uvloop/includes/stdlib.pxi,sha256=k49jKoHwvBhVho5W95yQrPMKskonEhQpqi95GZe6RHM,6361
+uvloop/includes/system.pxd,sha256=pbXOeZeXaDZ0b3CIFOgObE5C-cr6vhi6io-F8wLIaNQ,2186
+uvloop/includes/uv.pxd,sha256=wkayMxCaI9RyxTb1sqkP6DdU6l_w9ql18SYAoEYSNiA,16080
+uvloop/loop.cpython-311-x86_64-linux-gnu.so,sha256=OygAxxMB_plaDqS2sQ637_Emm38r6p2eivy9PHcQvt4,13712536
+uvloop/loop.pxd,sha256=1C4lOQV6MTWmvAnL67W3CvEyBdnDNYLEtCMPTZD40s8,6224
+uvloop/loop.pyi,sha256=xLLboc-tuzlu68RcUhghA-jjSy-mMNixiVDNY6TZueU,10504
+uvloop/loop.pyx,sha256=C2jMCvqkhswEcq9rjg0lbieAIXeksLiFyXQAz9tRI6g,118619
+uvloop/lru.pyx,sha256=nBZ4zuy4XjsdLorq-JhNS7WObcLpZWMr1OjyRvv8FaI,2279
+uvloop/pseudosock.pyx,sha256=M3H7qMGFXE9ZZLvYwOgBl3ZcNA5OKSnZ7NUGLJA7AlA,5383
+uvloop/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+uvloop/request.pxd,sha256=7yx8JlG0Hu2cv_i2QCZ_WdLlsGjI0z5eM_ueOOOgK6w,143
+uvloop/request.pyx,sha256=6-8Dme6LoT88B5-MzvmpuLn3hGt1eZlekvQxG0x2y8s,2259
+uvloop/server.pxd,sha256=_zRDiZMjsmlxJRo0KDzSM0xyfg2k-TzlGln54wvXC-Y,394
+uvloop/server.pyx,sha256=6wC5vUhAHnnUs7qHOJXvRkgov38IeY8xp6w45-rCRFc,3623
+uvloop/sslproto.pxd,sha256=fCM5XWu5ZSTDpf5_-wF2jvj77Y403yk40QOiWc0wo1s,3534
+uvloop/sslproto.pyx,sha256=EL1fckxojYK42OCAIJ-geUoKc0uncPH1hXg50roBQ-0,35381
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/WHEEL b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..d9c3682338210698a6106de06c2fa2666600d6c2
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: setuptools (75.1.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/top_level.txt b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..99d47169df5668e3ecc365e7792595e5333d7eb5
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/uvloop-0.21.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+uvloop
diff --git a/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/INSTALLER b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/INSTALLER
new file mode 100644
index 0000000000000000000000000000000000000000..a1b589e38a32041e49332e5e81c2d363dc418d68
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/LICENSE b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..bd8c7124a7caf7b73e65e2137835ea4da8578072
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/LICENSE
@@ -0,0 +1,24 @@
+Copyright (c) 2013-2023, Graham Dumpleton
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/METADATA b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/METADATA
new file mode 100644
index 0000000000000000000000000000000000000000..d2c9f2cf3df22aa6a85e033d5e788c1d62f938ff
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/METADATA
@@ -0,0 +1,167 @@
+Metadata-Version: 2.1
+Name: wrapt
+Version: 1.17.2
+Summary: Module for decorators, wrappers and monkey patching.
+Home-page: https://github.com/GrahamDumpleton/wrapt
+Author: Graham Dumpleton
+Author-email: Graham.Dumpleton@gmail.com
+License: BSD
+Project-URL: Bug Tracker, https://github.com/GrahamDumpleton/wrapt/issues/
+Project-URL: Changelog, https://wrapt.readthedocs.io/en/latest/changes.html
+Project-URL: Documentation, https://wrapt.readthedocs.io/
+Keywords: wrapper,proxy,decorator
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Requires-Python: >=3.8
+Description-Content-Type: text/x-rst
+License-File: LICENSE
+
+wrapt
+=====
+
+|PyPI|
+
+The aim of the **wrapt** module is to provide a transparent object proxy
+for Python, which can be used as the basis for the construction of function
+wrappers and decorator functions.
+
+The **wrapt** module focuses very much on correctness. It therefore goes
+way beyond existing mechanisms such as ``functools.wraps()`` to ensure that
+decorators preserve introspectability, signatures, type checking abilities
+etc. The decorators that can be constructed using this module will work in
+far more scenarios than typical decorators and provide more predictable and
+consistent behaviour.
+
+To ensure that the overhead is as minimal as possible, a C extension module
+is used for performance critical components. An automatic fallback to a
+pure Python implementation is also provided where a target system does not
+have a compiler to allow the C extension to be compiled.
+
+Documentation
+-------------
+
+For further information on the **wrapt** module see:
+
+* http://wrapt.readthedocs.org/
+
+Quick Start
+-----------
+
+To implement your decorator you need to first define a wrapper function.
+This will be called each time a decorated function is called. The wrapper
+function needs to take four positional arguments:
+
+* ``wrapped`` - The wrapped function which in turns needs to be called by your wrapper function.
+* ``instance`` - The object to which the wrapped function was bound when it was called.
+* ``args`` - The list of positional arguments supplied when the decorated function was called.
+* ``kwargs`` - The dictionary of keyword arguments supplied when the decorated function was called.
+
+The wrapper function would do whatever it needs to, but would usually in
+turn call the wrapped function that is passed in via the ``wrapped``
+argument.
+
+The decorator ``@wrapt.decorator`` then needs to be applied to the wrapper
+function to convert it into a decorator which can in turn be applied to
+other functions.
+
+.. code-block:: python
+
+ import wrapt
+
+ @wrapt.decorator
+ def pass_through(wrapped, instance, args, kwargs):
+ return wrapped(*args, **kwargs)
+
+ @pass_through
+ def function():
+ pass
+
+If you wish to implement a decorator which accepts arguments, then wrap the
+definition of the decorator in a function closure. Any arguments supplied
+to the outer function when the decorator is applied, will be available to
+the inner wrapper when the wrapped function is called.
+
+.. code-block:: python
+
+ import wrapt
+
+ def with_arguments(myarg1, myarg2):
+ @wrapt.decorator
+ def wrapper(wrapped, instance, args, kwargs):
+ return wrapped(*args, **kwargs)
+ return wrapper
+
+ @with_arguments(1, 2)
+ def function():
+ pass
+
+When applied to a normal function or static method, the wrapper function
+when called will be passed ``None`` as the ``instance`` argument.
+
+When applied to an instance method, the wrapper function when called will
+be passed the instance of the class the method is being called on as the
+``instance`` argument. This will be the case even when the instance method
+was called explicitly via the class and the instance passed as the first
+argument. That is, the instance will never be passed as part of ``args``.
+
+When applied to a class method, the wrapper function when called will be
+passed the class type as the ``instance`` argument.
+
+When applied to a class, the wrapper function when called will be passed
+``None`` as the ``instance`` argument. The ``wrapped`` argument in this
+case will be the class.
+
+The above rules can be summarised with the following example.
+
+.. code-block:: python
+
+ import inspect
+
+ @wrapt.decorator
+ def universal(wrapped, instance, args, kwargs):
+ if instance is None:
+ if inspect.isclass(wrapped):
+ # Decorator was applied to a class.
+ return wrapped(*args, **kwargs)
+ else:
+ # Decorator was applied to a function or staticmethod.
+ return wrapped(*args, **kwargs)
+ else:
+ if inspect.isclass(instance):
+ # Decorator was applied to a classmethod.
+ return wrapped(*args, **kwargs)
+ else:
+ # Decorator was applied to an instancemethod.
+ return wrapped(*args, **kwargs)
+
+Using these checks it is therefore possible to create a universal decorator
+that can be applied in all situations. It is no longer necessary to create
+different variants of decorators for normal functions and instance methods,
+or use additional wrappers to convert a function decorator into one that
+will work for instance methods.
+
+In all cases, the wrapped function passed to the wrapper function is called
+in the same way, with ``args`` and ``kwargs`` being passed. The
+``instance`` argument doesn't need to be used in calling the wrapped
+function.
+
+Repository
+----------
+
+Full source code for the **wrapt** module, including documentation files
+and unit tests, can be obtained from github.
+
+* https://github.com/GrahamDumpleton/wrapt
+
+.. |PyPI| image:: https://img.shields.io/pypi/v/wrapt.svg?logo=python&cacheSeconds=3600
+ :target: https://pypi.python.org/pypi/wrapt
diff --git a/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/RECORD b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/RECORD
new file mode 100644
index 0000000000000000000000000000000000000000..ef36e27ce89efc64ab70405b8e8ca9305fcb6535
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/RECORD
@@ -0,0 +1,23 @@
+wrapt-1.17.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+wrapt-1.17.2.dist-info/LICENSE,sha256=WXTvu8i2JrIFCBDWPDqQfuIckr9ks3QPxiOtBes0SKs,1304
+wrapt-1.17.2.dist-info/METADATA,sha256=VmLirO1lTUJzSk_JMz4EwBxs7_a_yiYdTMYnPxfKSe4,6351
+wrapt-1.17.2.dist-info/RECORD,,
+wrapt-1.17.2.dist-info/WHEEL,sha256=uBEQ2oBDgJ6sDf60H0dqr9_l_bXYSImmz5OB-1x6FQs,224
+wrapt-1.17.2.dist-info/top_level.txt,sha256=Jf7kcuXtwjUJMwOL0QzALDg2WiSiXiH9ThKMjN64DW0,6
+wrapt/__init__.py,sha256=EFHFSe8ZPZ3s57Z_rLq7BTHcbkh1v8Qq8U_ZbdgZ0us,1238
+wrapt/__pycache__/__init__.cpython-311.pyc,,
+wrapt/__pycache__/__wrapt__.cpython-311.pyc,,
+wrapt/__pycache__/arguments.cpython-311.pyc,,
+wrapt/__pycache__/decorators.cpython-311.pyc,,
+wrapt/__pycache__/importer.cpython-311.pyc,,
+wrapt/__pycache__/patches.cpython-311.pyc,,
+wrapt/__pycache__/weakrefs.cpython-311.pyc,,
+wrapt/__pycache__/wrappers.cpython-311.pyc,,
+wrapt/__wrapt__.py,sha256=KgXZdYY5cIzq_hqzGuue38IK-SOoya8Kx4zkAr6Ztuo,443
+wrapt/_wrappers.cpython-311-x86_64-linux-gnu.so,sha256=xgRtJMG5uaENQsIcpG0a-PaltPLBNMqZUvY-wKPp1_s,197248
+wrapt/arguments.py,sha256=RF0nTEdPzPIewJ-jnSY42i4JSzK3ctjPABV1SJxLymg,1746
+wrapt/decorators.py,sha256=M0pDLB-SioOTIDczYWX3UpEorZMijp7s17FvpHdXf2Y,21333
+wrapt/importer.py,sha256=qxK5bfhm52uhYXgdJn3AwReOXdWE9gY32fwlBUybz64,10997
+wrapt/patches.py,sha256=08gt_aVAuNvXyOVn8o8_AkkUD9dPh0G5oUxnoBwd0Cs,5204
+wrapt/weakrefs.py,sha256=gKWTMwRqAQTUhjQ4Fo0MkxgjeE8w-fzTaEkBdcBMb6c,3881
+wrapt/wrappers.py,sha256=IQGTBWs2JfzftHQ643rd-EKpATH-OP31N--D5T2vSr0,28687
diff --git a/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/WHEEL b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/WHEEL
new file mode 100644
index 0000000000000000000000000000000000000000..cb445f3e102394e3176e552359b8b8db8fe4d828
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/WHEEL
@@ -0,0 +1,8 @@
+Wheel-Version: 1.0
+Generator: setuptools (75.5.0)
+Root-Is-Purelib: false
+Tag: cp311-cp311-manylinux_2_5_x86_64
+Tag: cp311-cp311-manylinux1_x86_64
+Tag: cp311-cp311-manylinux_2_17_x86_64
+Tag: cp311-cp311-manylinux2014_x86_64
+
diff --git a/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/top_level.txt b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..ba11553ab9e90bd2fc2366e2d157f5bf947d80d5
--- /dev/null
+++ b/.venv/lib/python3.11/site-packages/wrapt-1.17.2.dist-info/top_level.txt
@@ -0,0 +1 @@
+wrapt