diff --git a/.gitattributes b/.gitattributes
index bed0738c7eeb449bca98b5d2f33c89a1ee56349a..7a20a1c7a8267f6d35d2395b7cbf84387e1205c1 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -58,3 +58,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 # Video files - compressed
 *.mp4 filter=lfs diff=lfs merge=lfs -text
 *.webm filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
+venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-312.pyc filter=lfs diff=lfs merge=lfs -text
diff --git a/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..d99323a9965f146d5b0888c4ca1bf0727e12b04f
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2022 the contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f428711515142282320a23f78097cd1d2ed81333
Binary files /dev/null and b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e40fdff930f46048bd700cfc6ab624c8792cb44f
Binary files /dev/null and b/venv/lib/python3.12/site-packages/annotated_types/__pycache__/test_cases.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..404e7459a0ac4a1bdba514eea06442e9b59cbb41
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c5e7e56e69288b9ba6d7e5ae42d06952020dd3f9
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/from_thread.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/functools.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/functools.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2cf2bc38e71c1fc83f96ad0816d3463c3b19d16c
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/functools.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..bead648bc3811964be3ec3d90571fba3a5131492
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/lowlevel.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ace0d6bed82b949d3530e1fbf0f9dd3806e4b595
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/pytest_plugin.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3463539e3d1eb26ec11ad4c5248e68bc63175e0f
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_interpreter.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f24a668bb6d4d3643377c58fa34bcfffdf42bbe1
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_process.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5f88d6aa52f87ac3f25d22a8189eccd201f8b859
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/__pycache__/to_thread.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py b/venv/lib/python3.12/site-packages/anyio/_backends/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..c0ad9801f5f52f62235b8f975e15057840868dac
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/__init__.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3b501e5327344865550ceab690b84691df1f6c31
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-312.pyc
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:207f88ed7c7a858551554262a2511f907dd4261f7486cc700d8fba613d242f7f
+size 138967
diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..02fb39753a727e5baa4a42f02e68709eca0e217b
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_backends/__pycache__/_trio.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py b/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py
new file mode 100644
index 0000000000000000000000000000000000000000..9f1ddc242c778588bfc48c0d3ec8a2db49b666c7
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_backends/_asyncio.py
@@ -0,0 +1,2996 @@
+from __future__ import annotations
+
+import array
+import asyncio
+import concurrent.futures
+import contextvars
+import math
+import os
+import socket
+import sys
+import threading
+import weakref
+from asyncio import (
+    AbstractEventLoop,
+    CancelledError,
+    all_tasks,
+    create_task,
+    current_task,
+    get_running_loop,
+    sleep,
+)
+from asyncio.base_events import _run_until_complete_cb  # type: ignore[attr-defined]
+from collections import OrderedDict, deque
+from collections.abc import (
+    AsyncGenerator,
+    AsyncIterator,
+    Awaitable,
+    Callable,
+    Collection,
+    Coroutine,
+    Iterable,
+    Sequence,
+)
+from concurrent.futures import Future
+from contextlib import AbstractContextManager, suppress
+from contextvars import Context, copy_context
+from dataclasses import dataclass, field
+from functools import partial, wraps
+from inspect import (
+    CORO_RUNNING,
+    CORO_SUSPENDED,
+    getcoroutinestate,
+    iscoroutine,
+)
+from io import IOBase
+from os import PathLike
+from queue import Queue
+from signal import Signals
+from socket import AddressFamily, SocketKind
+from threading import Thread +from types import CodeType, TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + ParamSpec, + TypeVar, + cast, +) +from weakref import WeakKeyDictionary + +from .. import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + TaskInfo, + abc, +) +from .._core._eventloop import ( + claim_worker_thread, + set_current_async_library, + threadlocals, +) +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, + RunFinishedError, + WouldBlock, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import ( + AsyncBackend, + IPSockAddrType, + SocketListener, + UDPPacketType, + UNIXDatagramPacketType, +) +from ..abc._eventloop import StrOrBytesPath +from ..lowlevel import RunVar +from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike +else: + FileDescriptorLike = object + +if sys.version_info >= (3, 11): + from asyncio import Runner + from typing import TypeVarTuple, Unpack +else: + import contextvars + import enum + import signal + from asyncio import coroutines, events, exceptions, tasks + + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + + class _State(enum.Enum): + CREATED = "created" + INITIALIZED = "initialized" + CLOSED = "closed" + + class Runner: + # Copied from CPython 3.11 + def __init__( + self, + *, + debug: bool | None = None, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ): + self._state = _State.CREATED + self._debug = debug + self._loop_factory = loop_factory + self._loop: AbstractEventLoop | None = None + self._context = None + self._interrupt_count = 0 + self._set_event_loop = False + + def __enter__(self) -> Runner: + self._lazy_init() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def close(self) -> None: + """Shutdown and close event loop.""" + loop = self._loop + if self._state is not _State.INITIALIZED or loop is None: + return + try: + _cancel_all_tasks(loop) + loop.run_until_complete(loop.shutdown_asyncgens()) + if hasattr(loop, "shutdown_default_executor"): + loop.run_until_complete(loop.shutdown_default_executor()) + else: + loop.run_until_complete(_shutdown_default_executor(loop)) + finally: + if self._set_event_loop: + events.set_event_loop(None) + loop.close() + self._loop = None + self._state = _State.CLOSED + + def get_loop(self) -> AbstractEventLoop: + """Return embedded event loop.""" + self._lazy_init() + return self._loop + + def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: + """Run a coroutine inside the embedded event loop.""" + if not coroutines.iscoroutine(coro): + raise ValueError(f"a coroutine was expected, got {coro!r}") + + if events._get_running_loop() is not None: + # fail fast with short traceback + raise RuntimeError( + "Runner.run() cannot be called from a 
running event loop" + ) + + self._lazy_init() + + if context is None: + context = self._context + task = context.run(self._loop.create_task, coro) + + if ( + threading.current_thread() is threading.main_thread() + and signal.getsignal(signal.SIGINT) is signal.default_int_handler + ): + sigint_handler = partial(self._on_sigint, main_task=task) + try: + signal.signal(signal.SIGINT, sigint_handler) + except ValueError: + # `signal.signal` may throw if `threading.main_thread` does + # not support signals (e.g. embedded interpreter with signals + # not registered - see gh-91880) + sigint_handler = None + else: + sigint_handler = None + + self._interrupt_count = 0 + try: + return self._loop.run_until_complete(task) + except exceptions.CancelledError: + if self._interrupt_count > 0: + uncancel = getattr(task, "uncancel", None) + if uncancel is not None and uncancel() == 0: + raise KeyboardInterrupt # noqa: B904 + raise # CancelledError + finally: + if ( + sigint_handler is not None + and signal.getsignal(signal.SIGINT) is sigint_handler + ): + signal.signal(signal.SIGINT, signal.default_int_handler) + + def _lazy_init(self) -> None: + if self._state is _State.CLOSED: + raise RuntimeError("Runner is closed") + if self._state is _State.INITIALIZED: + return + if self._loop_factory is None: + self._loop = events.new_event_loop() + if not self._set_event_loop: + # Call set_event_loop only once to avoid calling + # attach_loop multiple times on child watchers + events.set_event_loop(self._loop) + self._set_event_loop = True + else: + self._loop = self._loop_factory() + if self._debug is not None: + self._loop.set_debug(self._debug) + self._context = contextvars.copy_context() + self._state = _State.INITIALIZED + + def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: + self._interrupt_count += 1 + if self._interrupt_count == 1 and not main_task.done(): + main_task.cancel() + # wakeup loop if it is blocked by select() with long timeout + self._loop.call_soon_threadsafe(lambda: None) + return + raise KeyboardInterrupt() + + def _cancel_all_tasks(loop: AbstractEventLoop) -> None: + to_cancel = tasks.all_tasks(loop) + if not to_cancel: + return + + for task in to_cancel: + task.cancel() + + loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) + + for task in to_cancel: + if task.cancelled(): + continue + if task.exception() is not None: + loop.call_exception_handler( + { + "message": "unhandled exception during asyncio.run() shutdown", + "exception": task.exception(), + "task": task, + } + ) + + async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: + """Schedule the shutdown of the default executor.""" + + def _do_shutdown(future: asyncio.futures.Future) -> None: + try: + loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] + loop.call_soon_threadsafe(future.set_result, None) + except Exception as ex: + loop.call_soon_threadsafe(future.set_exception, ex) + + loop._executor_shutdown_called = True + if loop._default_executor is None: + return + future = loop.create_future() + thread = threading.Thread(target=_do_shutdown, args=(future,)) + thread.start() + try: + await future + finally: + thread.join() + + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + +_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") + + +def find_root_task() -> asyncio.Task: + root_task = _root_task.get(None) + if root_task is not None and not 
root_task.done(): + return root_task + + # Look for a task that has been started via run_until_complete() + for task in all_tasks(): + if task._callbacks and not task.done(): + callbacks = [cb for cb, context in task._callbacks] + for cb in callbacks: + if ( + cb is _run_until_complete_cb + or getattr(cb, "__module__", None) == "uvloop.loop" + ): + _root_task.set(task) + return task + + # Look up the topmost task in the AnyIO task tree, if possible + task = cast(asyncio.Task, current_task()) + state = _task_states.get(task) + if state: + cancel_scope = state.cancel_scope + while cancel_scope and cancel_scope._parent_scope is not None: + cancel_scope = cancel_scope._parent_scope + + if cancel_scope is not None: + return cast(asyncio.Task, cancel_scope._host_task) + + return task + + +def get_callable_name(func: Callable) -> str: + module = getattr(func, "__module__", None) + qualname = getattr(func, "__qualname__", None) + return ".".join([x for x in (module, qualname) if x]) + + +# +# Event loop +# + +_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() + + +def _task_started(task: asyncio.Task) -> bool: + """Return ``True`` if the task has been started and has not finished.""" + # The task coro should never be None here, as we never add finished tasks to the + # task list + coro = task.get_coro() + assert coro is not None + try: + return getcoroutinestate(coro) in (CORO_RUNNING, CORO_SUSPENDED) + except AttributeError: + # task coro is async_genenerator_asend https://bugs.python.org/issue37771 + raise Exception(f"Cannot determine if task {task} has started or not") from None + + +# +# Timeouts and cancellation +# + + +def is_anyio_cancellation(exc: CancelledError) -> bool: + # Sometimes third party frameworks catch a CancelledError and raise a new one, so as + # a workaround we have to look at the previous ones in __context__ too for a + # matching cancel message + while True: + if ( + exc.args + and isinstance(exc.args[0], str) + and exc.args[0].startswith("Cancelled via cancel scope ") + ): + return True + + if isinstance(exc.__context__, CancelledError): + exc = exc.__context__ + continue + + return False + + +class CancelScope(BaseCancelScope): + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, deadline: float = math.inf, shield: bool = False): + self._deadline = deadline + self._shield = shield + self._parent_scope: CancelScope | None = None + self._child_scopes: set[CancelScope] = set() + self._cancel_called = False + self._cancel_reason: str | None = None + self._cancelled_caught = False + self._active = False + self._timeout_handle: asyncio.TimerHandle | None = None + self._cancel_handle: asyncio.Handle | None = None + self._tasks: set[asyncio.Task] = set() + self._host_task: asyncio.Task | None = None + if sys.version_info >= (3, 11): + self._pending_uncancellations: int | None = 0 + else: + self._pending_uncancellations = None + + def __enter__(self) -> CancelScope: + if self._active: + raise RuntimeError( + "Each CancelScope may only be used for a single 'with' block" + ) + + self._host_task = host_task = cast(asyncio.Task, current_task()) + self._tasks.add(host_task) + try: + task_state = _task_states[host_task] + except KeyError: + task_state = TaskState(None, self) + _task_states[host_task] = task_state + else: + self._parent_scope = task_state.cancel_scope + task_state.cancel_scope = self + if self._parent_scope is not None: + # If using an eager task 
factory, the parent scope may not even contain + # the host task + self._parent_scope._child_scopes.add(self) + self._parent_scope._tasks.discard(host_task) + + self._timeout() + self._active = True + + # Start cancelling the host task if the scope was cancelled before entering + if self._cancel_called: + self._deliver_cancellation(self) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + del exc_tb + + if not self._active: + raise RuntimeError("This cancel scope is not active") + if current_task() is not self._host_task: + raise RuntimeError( + "Attempted to exit cancel scope in a different task than it was " + "entered in" + ) + + assert self._host_task is not None + host_task_state = _task_states.get(self._host_task) + if host_task_state is None or host_task_state.cancel_scope is not self: + raise RuntimeError( + "Attempted to exit a cancel scope that isn't the current tasks's " + "current cancel scope" + ) + + try: + self._active = False + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._tasks.remove(self._host_task) + if self._parent_scope is not None: + self._parent_scope._child_scopes.remove(self) + self._parent_scope._tasks.add(self._host_task) + + host_task_state.cancel_scope = self._parent_scope + + # Restart the cancellation effort in the closest visible, cancelled parent + # scope if necessary + self._restart_cancellation_in_parent() + + # We only swallow the exception iff it was an AnyIO CancelledError, either + # directly as exc_val or inside an exception group and there are no cancelled + # parent cancel scopes visible to us here + if self._cancel_called and not self._parent_cancellation_is_visible_to_us: + # For each level-cancel() call made on the host task, call uncancel() + while self._pending_uncancellations: + self._host_task.uncancel() + self._pending_uncancellations -= 1 + + # Update cancelled_caught and check for exceptions we must not swallow + if isinstance(exc_val, BaseExceptionGroup): + cancelleds_caught, remaining = exc_val.split( + lambda exc: ( + isinstance(exc, CancelledError) + and is_anyio_cancellation(exc) + ) + ) + + if cancelleds_caught is None: + return False + + self._cancelled_caught = True + + if remaining is None: + return True + + context = remaining.__context__ + try: + # Preserve __cause__ and __suppress_context__ by avoiding `raise + # ... 
from ...` + raise remaining + finally: + # Preserve __context__ + remaining.__context__ = context + del context + else: + if isinstance(exc_val, CancelledError) and is_anyio_cancellation( + exc_val + ): + self._cancelled_caught = True + return True + else: + return False + else: + if self._pending_uncancellations: + assert self._parent_scope is not None + assert self._parent_scope._pending_uncancellations is not None + self._parent_scope._pending_uncancellations += ( + self._pending_uncancellations + ) + self._pending_uncancellations = 0 + + return False + finally: + self._host_task = None + del exc_val + + @property + def _effectively_cancelled(self) -> bool: + cancel_scope: CancelScope | None = self + while cancel_scope is not None: + if cancel_scope._cancel_called: + return True + + if cancel_scope.shield: + return False + + cancel_scope = cancel_scope._parent_scope + + return False + + @property + def _parent_cancellation_is_visible_to_us(self) -> bool: + return ( + self._parent_scope is not None + and not self.shield + and self._parent_scope._effectively_cancelled + ) + + def _timeout(self) -> None: + if self._deadline != math.inf: + loop = get_running_loop() + if loop.time() >= self._deadline: + self.cancel("deadline exceeded") + else: + self._timeout_handle = loop.call_at(self._deadline, self._timeout) + + def _deliver_cancellation(self, origin: CancelScope) -> bool: + """ + Deliver cancellation to directly contained tasks and nested cancel scopes. + + Schedule another run at the end if we still have tasks eligible for + cancellation. + + :param origin: the cancel scope that originated the cancellation + :return: ``True`` if the delivery needs to be retried on the next cycle + + """ + should_retry = False + current = current_task() + for task in self._tasks: + should_retry = True + if task._must_cancel: # type: ignore[attr-defined] + continue + + # The task is eligible for cancellation if it has started + if task is not current and (task is self._host_task or _task_started(task)): + waiter = task._fut_waiter # type: ignore[attr-defined] + if not isinstance(waiter, asyncio.Future) or not waiter.done(): + task.cancel(origin._cancel_reason) + if ( + task is origin._host_task + and origin._pending_uncancellations is not None + ): + origin._pending_uncancellations += 1 + + # Deliver cancellation to child scopes that aren't shielded or running their own + # cancellation callbacks + for scope in self._child_scopes: + if not scope._shield and not scope.cancel_called: + should_retry = scope._deliver_cancellation(origin) or should_retry + + # Schedule another callback if there are still tasks left + if origin is self: + if should_retry: + self._cancel_handle = get_running_loop().call_soon( + self._deliver_cancellation, origin + ) + else: + self._cancel_handle = None + + return should_retry + + def _restart_cancellation_in_parent(self) -> None: + """ + Restart the cancellation effort in the closest directly cancelled parent scope. 
+ + """ + scope = self._parent_scope + while scope is not None: + if scope._cancel_called: + if scope._cancel_handle is None: + scope._deliver_cancellation(scope) + + break + + # No point in looking beyond any shielded scope + if scope._shield: + break + + scope = scope._parent_scope + + def cancel(self, reason: str | None = None) -> None: + if not self._cancel_called: + if self._timeout_handle: + self._timeout_handle.cancel() + self._timeout_handle = None + + self._cancel_called = True + self._cancel_reason = f"Cancelled via cancel scope {id(self):x}" + if task := current_task(): + self._cancel_reason += f" by {task}" + + if reason: + self._cancel_reason += f"; reason: {reason}" + + if self._host_task is not None: + self._deliver_cancellation(self) + + @property + def deadline(self) -> float: + return self._deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self._deadline = float(value) + if self._timeout_handle is not None: + self._timeout_handle.cancel() + self._timeout_handle = None + + if self._active and not self._cancel_called: + self._timeout() + + @property + def cancel_called(self) -> bool: + return self._cancel_called + + @property + def cancelled_caught(self) -> bool: + return self._cancelled_caught + + @property + def shield(self) -> bool: + return self._shield + + @shield.setter + def shield(self, value: bool) -> None: + if self._shield != value: + self._shield = value + if not value: + self._restart_cancellation_in_parent() + + +# +# Task states +# + + +class TaskState: + """ + Encapsulates auxiliary task information that cannot be added to the Task instance + itself because there are no guarantees about its implementation. + """ + + __slots__ = "parent_id", "cancel_scope", "__weakref__" + + def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): + self.parent_id = parent_id + self.cancel_scope = cancel_scope + + +_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() + + +# +# Task groups +# + + +class _AsyncioTaskStatus(abc.TaskStatus): + def __init__(self, future: asyncio.Future, parent_id: int): + self._future = future + self._parent_id = parent_id + + def started(self, value: T_contra | None = None) -> None: + try: + self._future.set_result(value) + except asyncio.InvalidStateError: + if not self._future.cancelled(): + raise RuntimeError( + "called 'started' twice on the same task status" + ) from None + + task = cast(asyncio.Task, current_task()) + _task_states[task].parent_id = self._parent_id + + +if sys.version_info >= (3, 12): + _eager_task_factory_code: CodeType | None = asyncio.eager_task_factory.__code__ +else: + _eager_task_factory_code = None + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self.cancel_scope: CancelScope = CancelScope() + self._active = False + self._exceptions: list[BaseException] = [] + self._tasks: set[asyncio.Task] = set() + self._on_completed_fut: asyncio.Future[None] | None = None + + async def __aenter__(self) -> TaskGroup: + self.cancel_scope.__enter__() + self._active = True + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + if exc_val is not None: + self.cancel_scope.cancel() + if not isinstance(exc_val, CancelledError): + self._exceptions.append(exc_val) + + loop = get_running_loop() + try: + if self._tasks: + with CancelScope() as wait_scope: + while self._tasks: + self._on_completed_fut = loop.create_future() + + try: + 
await self._on_completed_fut + except CancelledError as exc: + # Shield the scope against further cancellation attempts, + # as they're not productive (#695) + wait_scope.shield = True + self.cancel_scope.cancel() + + # Set exc_val from the cancellation exception if it was + # previously unset. However, we should not replace a native + # cancellation exception with one raise by a cancel scope. + if exc_val is None or ( + isinstance(exc_val, CancelledError) + and not is_anyio_cancellation(exc) + ): + exc_val = exc + + self._on_completed_fut = None + else: + # If there are no child tasks to wait on, run at least one checkpoint + # anyway + await AsyncIOBackend.cancel_shielded_checkpoint() + + self._active = False + if self._exceptions: + # The exception that got us here should already have been + # added to self._exceptions so it's ok to break exception + # chaining and avoid adding a "During handling of above..." + # for each nesting level. + raise BaseExceptionGroup( + "unhandled errors in a TaskGroup", self._exceptions + ) from None + elif exc_val: + raise exc_val + except BaseException as exc: + if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): + return True + + raise + + return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) + finally: + del exc_val, exc_tb, self._exceptions + + def _spawn( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + args: tuple[Unpack[PosArgsT]], + name: object, + task_status_future: asyncio.Future | None = None, + ) -> asyncio.Task: + def task_done(_task: asyncio.Task) -> None: + if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None: + asyncio.future_discard_from_awaited_by( + _task, self.cancel_scope._host_task + ) + + task_state = _task_states[_task] + assert task_state.cancel_scope is not None + assert _task in task_state.cancel_scope._tasks + task_state.cancel_scope._tasks.remove(_task) + self._tasks.remove(task) + del _task_states[_task] + + if self._on_completed_fut is not None and not self._tasks: + try: + self._on_completed_fut.set_result(None) + except asyncio.InvalidStateError: + pass + + try: + exc = _task.exception() + except CancelledError as e: + while isinstance(e.__context__, CancelledError): + e = e.__context__ + + exc = e + + if exc is not None: + # The future can only be in the cancelled state if the host task was + # cancelled, so return immediately instead of adding one more + # CancelledError to the exceptions list + if task_status_future is not None and task_status_future.cancelled(): + return + + if task_status_future is None or task_status_future.done(): + if not isinstance(exc, CancelledError): + self._exceptions.append(exc) + + if not self.cancel_scope._effectively_cancelled: + self.cancel_scope.cancel() + else: + task_status_future.set_exception(exc) + elif task_status_future is not None and not task_status_future.done(): + task_status_future.set_exception( + RuntimeError("Child exited without calling task_status.started()") + ) + + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + kwargs = {} + if task_status_future: + parent_id = id(current_task()) + kwargs["task_status"] = _AsyncioTaskStatus( + task_status_future, id(self.cancel_scope._host_task) + ) + else: + parent_id = id(self.cancel_scope._host_task) + + coro = func(*args, **kwargs) + if not iscoroutine(coro): + prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" + raise TypeError( + f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " + f"the return value ({coro!r}) is not a coroutine object" + ) + + name = get_callable_name(func) if name is None else str(name) + loop = asyncio.get_running_loop() + if ( + (factory := loop.get_task_factory()) + and getattr(factory, "__code__", None) is _eager_task_factory_code + and (closure := getattr(factory, "__closure__", None)) + ): + custom_task_constructor = closure[0].cell_contents + task = custom_task_constructor(coro, loop=loop, name=name) + else: + task = create_task(coro, name=name) + + # Make the spawned task inherit the task group's cancel scope + _task_states[task] = TaskState( + parent_id=parent_id, cancel_scope=self.cancel_scope + ) + self.cancel_scope._tasks.add(task) + self._tasks.add(task) + if sys.version_info >= (3, 14) and self.cancel_scope._host_task is not None: + asyncio.future_add_to_awaited_by(task, self.cancel_scope._host_task) + + task.add_done_callback(task_done) + return task + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + self._spawn(func, args, name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + future: asyncio.Future = asyncio.Future() + task = self._spawn(func, args, name, future) + + # If the task raises an exception after sending a start value without a switch + # point between, the task group is cancelled and this method never proceeds to + # process the completed future. That's why we have to have a shielded cancel + # scope here. + try: + return await future + except CancelledError: + # Cancel the task and wait for it to exit before returning + task.cancel() + with CancelScope(shield=True), suppress(CancelledError): + await task + + raise + + +# +# Threads +# + +_Retval_Queue_Type = tuple[T_Retval | None, BaseException | None] + + +class WorkerThread(Thread): + MAX_IDLE_TIME = 10 # seconds + + def __init__( + self, + root_task: asyncio.Task, + workers: set[WorkerThread], + idle_workers: deque[WorkerThread], + ): + super().__init__(name="AnyIO worker thread") + self.root_task = root_task + self.workers = workers + self.idle_workers = idle_workers + self.loop = root_task._loop + self.queue: Queue[ + tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None + ] = Queue(2) + self.idle_since = AsyncIOBackend.current_time() + self.stopping = False + + def _report_result( + self, future: asyncio.Future, result: Any, exc: BaseException | None + ) -> None: + self.idle_since = AsyncIOBackend.current_time() + if not self.stopping: + self.idle_workers.append(self) + + if not future.cancelled(): + if exc is not None: + if isinstance(exc, StopIteration): + new_exc = RuntimeError("coroutine raised StopIteration") + new_exc.__cause__ = exc + exc = new_exc + + future.set_exception(exc) + else: + future.set_result(result) + + def run(self) -> None: + with claim_worker_thread(AsyncIOBackend, self.loop): + while True: + item = self.queue.get() + if item is None: + # Shutdown command received + return + + context, func, args, future, cancel_scope = item + if not future.cancelled(): + result = None + exception: BaseException | None = None + threadlocals.current_cancel_scope = cancel_scope + try: + result = context.run(func, *args) + except BaseException as exc: + exception = exc + finally: + del threadlocals.current_cancel_scope + + if not self.loop.is_closed(): + 
self.loop.call_soon_threadsafe( + self._report_result, future, result, exception + ) + + del result, exception + + self.queue.task_done() + del item, context, func, args, future, cancel_scope + + def stop(self, f: asyncio.Task | None = None) -> None: + self.stopping = True + self.queue.put_nowait(None) + self.workers.discard(self) + try: + self.idle_workers.remove(self) + except ValueError: + pass + + +_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( + "_threadpool_idle_workers" +) +_threadpool_workers: RunVar[set[WorkerThread]] = RunVar("_threadpool_workers") + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class StreamReaderWrapper(abc.ByteReceiveStream): + _stream: asyncio.StreamReader + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._stream.read(max_bytes) + if data: + return data + else: + raise EndOfStream + + async def aclose(self) -> None: + self._stream.set_exception(ClosedResourceError()) + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class StreamWriterWrapper(abc.ByteSendStream): + _stream: asyncio.StreamWriter + _closed: bool = field(init=False, default=False) + + async def send(self, item: bytes) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + stream_paused = self._stream._protocol._paused # type: ignore[attr-defined] + try: + self._stream.write(item) + await self._stream.drain() + except (ConnectionResetError, BrokenPipeError, RuntimeError) as exc: + # If closed by us and/or the peer: + # * on stdlib, drain() raises ConnectionResetError or BrokenPipeError + # * on uvloop and Winloop, write() eventually starts raising RuntimeError + if self._closed: + raise ClosedResourceError from exc + elif self._stream.is_closing(): + raise BrokenResourceError from exc + + raise + + if not stream_paused: + await AsyncIOBackend.cancel_shielded_checkpoint() + + async def aclose(self) -> None: + self._closed = True + self._stream.close() + await AsyncIOBackend.checkpoint() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: asyncio.subprocess.Process + _stdin: StreamWriterWrapper | None + _stdout: StreamReaderWrapper | None + _stderr: StreamReaderWrapper | None + + async def aclose(self) -> None: + with CancelScope(shield=True) as scope: + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + scope.shield = False + try: + await self.wait() + except BaseException: + scope.shield = True + self.kill() + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: int) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +def _forcibly_shutdown_process_pool_on_exit( + workers: set[Process], _task: object +) -> None: + """ + Forcibly shuts down worker processes belonging to this event loop.""" + child_watcher: asyncio.AbstractChildWatcher | None = None # type: ignore[name-defined] + if sys.version_info < (3, 12): + try: + child_watcher = 
asyncio.get_event_loop_policy().get_child_watcher() + except NotImplementedError: + pass + + # Close as much as possible (w/o async/await) to avoid warnings + for process in workers.copy(): + if process.returncode is not None: + continue + + process._stdin._stream._transport.close() # type: ignore[union-attr] + process._stdout._stream._transport.close() # type: ignore[union-attr] + process._stderr._stream._transport.close() # type: ignore[union-attr] + process.kill() + if child_watcher: + child_watcher.remove_child_handler(process.pid) + + +async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: + """ + Shuts down worker processes belonging to this event loop. + + NOTE: this only works when the event loop was started using asyncio.run() or + anyio.run(). + + """ + process: abc.Process + try: + await sleep(math.inf) + except asyncio.CancelledError: + workers = workers.copy() + for process in workers: + if process.returncode is None: + process.kill() + + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class StreamProtocol(asyncio.Protocol): + read_queue: deque[bytes] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + is_at_eof: bool = False + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque() + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + cast(asyncio.Transport, transport).set_write_buffer_limits(0) + + def connection_lost(self, exc: Exception | None) -> None: + if exc: + self.exception = exc + + self.read_event.set() + self.write_event.set() + + def data_received(self, data: bytes) -> None: + # ProactorEventloop sometimes sends bytearray instead of bytes + self.read_queue.append(bytes(data)) + self.read_event.set() + + def eof_received(self) -> bool | None: + self.is_at_eof = True + self.read_event.set() + return True + + def pause_writing(self) -> None: + self.write_event = asyncio.Event() + + def resume_writing(self) -> None: + self.write_event.set() + + +class DatagramProtocol(asyncio.DatagramProtocol): + read_queue: deque[tuple[bytes, IPSockAddrType]] + read_event: asyncio.Event + write_event: asyncio.Event + exception: Exception | None = None + + def connection_made(self, transport: asyncio.BaseTransport) -> None: + self.read_queue = deque(maxlen=100) # arbitrary value + self.read_event = asyncio.Event() + self.write_event = asyncio.Event() + self.write_event.set() + + def connection_lost(self, exc: Exception | None) -> None: + self.read_event.set() + self.write_event.set() + + def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: + addr = convert_ipv6_sockaddr(addr) + self.read_queue.append((data, addr)) + self.read_event.set() + + def error_received(self, exc: Exception) -> None: + self.exception = exc + + def pause_writing(self) -> None: + self.write_event.clear() + + def resume_writing(self) -> None: + self.write_event.set() + + +class SocketStream(abc.SocketStream): + def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + if ( + not 
self._protocol.read_event.is_set() + and not self._transport.is_closing() + and not self._protocol.is_at_eof + ): + self._transport.resume_reading() + await self._protocol.read_event.wait() + self._transport.pause_reading() + else: + await AsyncIOBackend.checkpoint() + + try: + chunk = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + elif self._protocol.exception: + raise BrokenResourceError from self._protocol.exception + else: + raise EndOfStream from None + + if len(chunk) > max_bytes: + # Split the oversized chunk + chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] + self._protocol.read_queue.appendleft(leftover) + + # If the read queue is empty, clear the flag so that the next call will + # block until data is available + if not self._protocol.read_queue: + self._protocol.read_event.clear() + + return chunk + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + + if self._closed: + raise ClosedResourceError + elif self._protocol.exception is not None: + raise BrokenResourceError from self._protocol.exception + + try: + self._transport.write(item) + except RuntimeError as exc: + if self._transport.is_closing(): + raise BrokenResourceError from exc + else: + raise + + await self._protocol.write_event.wait() + + async def send_eof(self) -> None: + try: + self._transport.write_eof() + except OSError: + pass + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + try: + self._transport.write_eof() + except OSError: + pass + + self._transport.close() + await sleep(0) + self._transport.abort() + + +class _RawSocketMixin: + _receive_future: asyncio.Future | None = None + _send_future: asyncio.Future | None = None + _closing = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._receive_future + loop.remove_reader(self.__raw_socket) + + f = self._receive_future = asyncio.Future() + loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: + def callback(f: object) -> None: + del self._send_future + loop.remove_writer(self.__raw_socket) + + f = self._send_future = asyncio.Future() + loop.add_writer(self.__raw_socket, f.set_result, None) + f.add_done_callback(callback) + return f + + async def aclose(self) -> None: + if not self._closing: + self._closing = True + if self.__raw_socket.fileno() != -1: + self.__raw_socket.close() + + if self._receive_future: + self._receive_future.set_result(None) + if self._send_future: + self._send_future.set_result(None) + + +class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): + async def send_eof(self) -> None: + with self._send_guard: + self._raw_socket.shutdown(socket.SHUT_WR) + + async def receive(self, max_bytes: int = 65536) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(max_bytes) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise 
ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = self._raw_socket.send(view) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + view = view[bytes_sent:] + + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + loop = get_running_loop() + fds = array.array("i") + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = self._raw_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + loop = get_running_loop() + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + # The ignore can be removed after mypy picks up + # https://github.com/python/typeshed/pull/5545 + self._raw_socket.sendmsg( + [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] + ) + break + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + +class TCPSocketListener(abc.SocketListener): + _accept_scope: CancelScope | None = None + _closed = False + + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) + self._accept_guard = ResourceGuard("accepting connections from") + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + async def accept(self) -> abc.SocketStream: + if self._closed: + raise ClosedResourceError + + with self._accept_guard: + await AsyncIOBackend.checkpoint() + with CancelScope() as self._accept_scope: + try: + client_sock, _addr = await self._loop.sock_accept(self._raw_socket) + except asyncio.CancelledError: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, 
NotImplementedError): + pass + + if self._closed: + raise ClosedResourceError from None + + raise + finally: + self._accept_scope = None + + client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + transport, protocol = await self._loop.connect_accepted_socket( + StreamProtocol, client_sock + ) + return SocketStream(transport, protocol) + + async def aclose(self) -> None: + if self._closed: + return + + self._closed = True + if self._accept_scope: + # Workaround for https://bugs.python.org/issue41317 + try: + self._loop.remove_reader(self._raw_socket) + except (ValueError, NotImplementedError): + pass + + self._accept_scope.cancel() + await sleep(0) + + self._raw_socket.close() + + +class UNIXSocketListener(abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + self.__raw_socket = raw_socket + self._loop = get_running_loop() + self._accept_guard = ResourceGuard("accepting connections from") + self._closed = False + + async def accept(self) -> abc.SocketStream: + await AsyncIOBackend.checkpoint() + with self._accept_guard: + while True: + try: + client_sock, _ = self.__raw_socket.accept() + client_sock.setblocking(False) + return UNIXSocketStream(client_sock) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + self._loop.add_reader(self.__raw_socket, f.set_result, None) + f.add_done_callback( + lambda _: self._loop.remove_reader(self.__raw_socket) + ) + await f + except OSError as exc: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + + async def aclose(self) -> None: + self._closed = True + self.__raw_socket.close() + + @property + def _raw_socket(self) -> socket.socket: + return self.__raw_socket + + +class UDPSocket(abc.UDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + self._closed = True + if not self._transport.is_closing(): + self._transport.close() + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + return self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(*item) + + +class ConnectedUDPSocket(abc.ConnectedUDPSocket): + def __init__( + self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol + ): + self._transport = transport + self._protocol = protocol + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + self._closed = False + + @property + def _raw_socket(self) -> socket.socket: + return self._transport.get_extra_info("socket") + + async def aclose(self) -> None: + 
self._closed = True + if not self._transport.is_closing(): + self._transport.close() + + async def receive(self) -> bytes: + with self._receive_guard: + await AsyncIOBackend.checkpoint() + + # If the buffer is empty, ask for more data + if not self._protocol.read_queue and not self._transport.is_closing(): + self._protocol.read_event.clear() + await self._protocol.read_event.wait() + + try: + packet = self._protocol.read_queue.popleft() + except IndexError: + if self._closed: + raise ClosedResourceError from None + else: + raise BrokenResourceError from None + + return packet[0] + + async def send(self, item: bytes) -> None: + with self._send_guard: + await AsyncIOBackend.checkpoint() + await self._protocol.write_event.wait() + if self._closed: + raise ClosedResourceError + elif self._transport.is_closing(): + raise BrokenResourceError + else: + self._transport.sendto(item) + + +class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): + async def receive(self) -> UNIXDatagramPacketType: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recvfrom(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: UNIXDatagramPacketType) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.sendto(*item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): + async def receive(self) -> bytes: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._receive_guard: + while True: + try: + data = self._raw_socket.recv(65536) + except BlockingIOError: + await self._wait_until_readable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return data + + async def send(self, item: bytes) -> None: + loop = get_running_loop() + await AsyncIOBackend.checkpoint() + with self._send_guard: + while True: + try: + self._raw_socket.send(item) + except BlockingIOError: + await self._wait_until_writable(loop) + except OSError as exc: + if self._closing: + raise ClosedResourceError from None + else: + raise BrokenResourceError from exc + else: + return + + +_read_events: RunVar[dict[int, asyncio.Future[bool]]] = RunVar("read_events") +_write_events: RunVar[dict[int, asyncio.Future[bool]]] = RunVar("write_events") + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self._event = asyncio.Event() + + def set(self) -> None: + self._event.set() + + def is_set(self) -> bool: + return self._event.is_set() + + async def wait(self) -> None: + if self.is_set(): + await AsyncIOBackend.checkpoint() + else: + await self._event.wait() + + def statistics(self) -> EventStatistics: + return EventStatistics(len(self._event._waiters)) + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + 
self._fast_acquire = fast_acquire + self._owner_task: asyncio.Task | None = None + self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() + + async def acquire(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._owner_task = task + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + if self._owner_task == task: + raise RuntimeError("Attempted to acquire an already held Lock") + + fut: asyncio.Future[None] = asyncio.Future() + item = task, fut + self._waiters.append(item) + try: + await fut + except CancelledError: + self._waiters.remove(item) + if self._owner_task is task: + self.release() + + raise + + self._waiters.remove(item) + + def acquire_nowait(self) -> None: + task = cast(asyncio.Task, current_task()) + if self._owner_task is None and not self._waiters: + self._owner_task = task + return + + if self._owner_task is task: + raise RuntimeError("Attempted to acquire an already held Lock") + + raise WouldBlock + + def locked(self) -> bool: + return self._owner_task is not None + + def release(self) -> None: + if self._owner_task != current_task(): + raise RuntimeError("The current task is not holding this lock") + + for task, fut in self._waiters: + if not fut.cancelled(): + self._owner_task = task + fut.set_result(None) + return + + self._owner_task = None + + def statistics(self) -> LockStatistics: + task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None + return LockStatistics(self.locked(), task_info, len(self._waiters)) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ): + super().__init__(initial_value, max_value=max_value) + self._value = initial_value + self._max_value = max_value + self._fast_acquire = fast_acquire + self._waiters: deque[asyncio.Future[None]] = deque() + + async def acquire(self) -> None: + if self._value > 0 and not self._waiters: + await AsyncIOBackend.checkpoint_if_cancelled() + self._value -= 1 + + # Unless on the "fast path", yield control of the event loop so that other + # tasks can run too + if not self._fast_acquire: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except CancelledError: + self.release() + raise + + return + + fut: asyncio.Future[None] = asyncio.Future() + self._waiters.append(fut) + try: + await fut + except CancelledError: + try: + self._waiters.remove(fut) + except ValueError: + self.release() + + raise + + def acquire_nowait(self) -> None: + if self._value == 0: + raise WouldBlock + + self._value -= 1 + + def release(self) -> None: + if self._max_value is not None and self._value == self._max_value: + raise ValueError("semaphore released too many times") + + for fut in self._waiters: + if not fut.cancelled(): + fut.set_result(None) + self._waiters.remove(fut) + return + + self._value += 1 + + @property + def value(self) -> int: + return self._value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + return 
SemaphoreStatistics(len(self._waiters)) + + +class CapacityLimiter(BaseCapacityLimiter): + _total_tokens: float = 0 + + def __new__(cls, total_tokens: float) -> CapacityLimiter: + return object.__new__(cls) + + def __init__(self, total_tokens: float): + self._borrowers: set[Any] = set() + self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() + self.total_tokens = total_tokens + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + @property + def total_tokens(self) -> float: + return self._total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and not math.isinf(value): + raise TypeError("total_tokens must be an int or math.inf") + + if value < 0: + raise ValueError("total_tokens must be >= 0") + + waiters_to_notify = max(value - self._total_tokens, 0) + self._total_tokens = value + + # Notify waiting tasks that they have acquired the limiter + while self._wait_queue and waiters_to_notify: + event = self._wait_queue.popitem(last=False)[1] + event.set() + waiters_to_notify -= 1 + + @property + def borrowed_tokens(self) -> int: + return len(self._borrowers) + + @property + def available_tokens(self) -> float: + return self._total_tokens - len(self._borrowers) + + def _notify_next_waiter(self) -> None: + """Notify the next task in line if this limiter has free capacity now.""" + if self._wait_queue and len(self._borrowers) < self._total_tokens: + event = self._wait_queue.popitem(last=False)[1] + event.set() + + def acquire_nowait(self) -> None: + self.acquire_on_behalf_of_nowait(current_task()) + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + if borrower in self._borrowers: + raise RuntimeError( + "this borrower is already holding one of this CapacityLimiter's tokens" + ) + + if self._wait_queue or len(self._borrowers) >= self._total_tokens: + raise WouldBlock + + self._borrowers.add(borrower) + + async def acquire(self) -> None: + return await self.acquire_on_behalf_of(current_task()) + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await AsyncIOBackend.checkpoint_if_cancelled() + try: + self.acquire_on_behalf_of_nowait(borrower) + except WouldBlock: + event = asyncio.Event() + self._wait_queue[borrower] = event + try: + await event.wait() + except BaseException: + self._wait_queue.pop(borrower, None) + if event.is_set(): + self._notify_next_waiter() + + raise + + self._borrowers.add(borrower) + else: + try: + await AsyncIOBackend.cancel_shielded_checkpoint() + except BaseException: + self.release() + raise + + def release(self) -> None: + self.release_on_behalf_of(current_task()) + + def release_on_behalf_of(self, borrower: object) -> None: + try: + self._borrowers.remove(borrower) + except KeyError: + raise RuntimeError( + "this borrower isn't holding any of this CapacityLimiter's tokens" + ) from None + + self._notify_next_waiter() + + def statistics(self) -> CapacityLimiterStatistics: + return CapacityLimiterStatistics( + self.borrowed_tokens, + self.total_tokens, + tuple(self._borrowers), + len(self._wait_queue), + ) + + +_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") + + +# +# Operating system signals +# + + +class _SignalReceiver: + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + self._loop = get_running_loop() + 
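# NOTE: CapacityLimiter above tracks borrowers by identity (defaulting to the
# current task), which is why a second acquire from the same borrower raises
# RuntimeError instead of deadlocking. A sketch of bounding concurrency with
# it (the token count and worker body are illustrative):

import anyio

async def bounded_workers() -> None:
    limiter = anyio.CapacityLimiter(4)

    async def worker(n: int) -> None:
        async with limiter:  # at most 4 workers run this block at once
            await anyio.sleep(0.1)

    async with anyio.create_task_group() as tg:
        for n in range(16):
            tg.start_soon(worker, n)

anyio.run(bounded_workers)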
self._signal_queue: deque[Signals] = deque() + self._future: asyncio.Future = asyncio.Future() + self._handled_signals: set[Signals] = set() + + def _deliver(self, signum: Signals) -> None: + self._signal_queue.append(signum) + if not self._future.done(): + self._future.set_result(None) + + def __enter__(self) -> _SignalReceiver: + for sig in set(self._signals): + self._loop.add_signal_handler(sig, self._deliver, sig) + self._handled_signals.add(sig) + + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + for sig in self._handled_signals: + self._loop.remove_signal_handler(sig) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + await AsyncIOBackend.checkpoint() + if not self._signal_queue: + self._future = asyncio.Future() + await self._future + + return self._signal_queue.popleft() + + +# +# Testing and debugging +# + + +class AsyncIOTaskInfo(TaskInfo): + def __init__(self, task: asyncio.Task): + task_state = _task_states.get(task) + if task_state is None: + parent_id = None + else: + parent_id = task_state.parent_id + + coro = task.get_coro() + assert coro is not None, "created TaskInfo from a completed Task" + super().__init__(id(task), parent_id, task.get_name(), coro) + self._task = weakref.ref(task) + + def has_pending_cancellation(self) -> bool: + if not (task := self._task()): + # If the task isn't around anymore, it won't have a pending cancellation + return False + + if task._must_cancel: # type: ignore[attr-defined] + return True + elif ( + isinstance(task._fut_waiter, asyncio.Future) # type: ignore[attr-defined] + and task._fut_waiter.cancelled() # type: ignore[attr-defined] + ): + return True + + if task_state := _task_states.get(task): + if cancel_scope := task_state.cancel_scope: + return cancel_scope._effectively_cancelled + + return False + + +class TestRunner(abc.TestRunner): + _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] + + def __init__( + self, + *, + debug: bool | None = None, + use_uvloop: bool = False, + loop_factory: Callable[[], AbstractEventLoop] | None = None, + ) -> None: + if use_uvloop and loop_factory is None: + if sys.platform != "win32": + import uvloop + + loop_factory = uvloop.new_event_loop + else: + import winloop + + loop_factory = winloop.new_event_loop + + self._runner = Runner(debug=debug, loop_factory=loop_factory) + self._exceptions: list[BaseException] = [] + self._runner_task: asyncio.Task | None = None + + def __enter__(self) -> TestRunner: + self._runner.__enter__() + self.get_loop().set_exception_handler(self._exception_handler) + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._runner.__exit__(exc_type, exc_val, exc_tb) + + def get_loop(self) -> AbstractEventLoop: + return self._runner.get_loop() + + def _exception_handler( + self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] + ) -> None: + if isinstance(context.get("exception"), Exception): + self._exceptions.append(context["exception"]) + else: + loop.default_exception_handler(context) + + def _raise_async_exceptions(self) -> None: + # Re-raise any exceptions raised in asynchronous callbacks + if self._exceptions: + exceptions, self._exceptions = self._exceptions, [] + if len(exceptions) == 1: + raise exceptions[0] + elif exceptions: + raise BaseExceptionGroup( + "Multiple 
exceptions occurred in asynchronous callbacks", exceptions + ) + + async def _run_tests_and_fixtures( + self, + receive_stream: MemoryObjectReceiveStream[ + tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] + ], + ) -> None: + from _pytest.outcomes import OutcomeException + + with receive_stream, self._send_stream: + async for coro, future in receive_stream: + try: + retval = await coro + except CancelledError as exc: + if not future.cancelled(): + future.cancel(*exc.args) + + raise + except BaseException as exc: + if not future.cancelled(): + future.set_exception(exc) + + if not isinstance(exc, (Exception, OutcomeException)): + raise + else: + if not future.cancelled(): + future.set_result(retval) + + async def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + /, + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if not self._runner_task: + self._send_stream, receive_stream = create_memory_object_stream[ + tuple[Awaitable[Any], asyncio.Future] + ](1) + self._runner_task = self.get_loop().create_task( + self._run_tests_and_fixtures(receive_stream) + ) + + coro = func(*args, **kwargs) + future: asyncio.Future[T_Retval] = self.get_loop().create_future() + self._send_stream.send_nowait((coro, future)) + return await future + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + self._raise_async_exceptions() + + yield fixturevalue + + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(asyncgen.asend, None) + ) + except StopAsyncIteration: + self._raise_async_exceptions() + else: + self.get_loop().run_until_complete(asyncgen.aclose()) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + retval = self.get_loop().run_until_complete( + self._call_in_runner_task(fixture_func, **kwargs) + ) + self._raise_async_exceptions() + return retval + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + try: + self.get_loop().run_until_complete( + self._call_in_runner_task(test_func, **kwargs) + ) + except Exception as exc: + self._exceptions.append(exc) + + self._raise_async_exceptions() + + +class AsyncIOBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + @wraps(func) + async def wrapper() -> T_Retval: + task = cast(asyncio.Task, current_task()) + task.set_name(get_callable_name(func)) + _task_states[task] = TaskState(None, None) + + try: + return await func(*args) + finally: + del _task_states[task] + + debug = options.get("debug", None) + loop_factory = options.get("loop_factory", None) + if loop_factory is None and options.get("use_uvloop", False): + if sys.platform != "win32": + import uvloop + + loop_factory = uvloop.new_event_loop + else: + import winloop + + loop_factory = winloop.new_event_loop + + with Runner(debug=debug, loop_factory=loop_factory) as runner: + return runner.run(wrapper()) + + @classmethod + def current_token(cls) -> object: + return get_running_loop() + + @classmethod + def current_time(cls) -> float: + return 
get_running_loop().time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return CancelledError + + @classmethod + async def checkpoint(cls) -> None: + await sleep(0) + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + task = current_task() + if task is None: + return + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return + + while cancel_scope: + if cancel_scope.cancel_called: + await sleep(0) + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + with CancelScope(shield=True): + await sleep(0) + + @classmethod + async def sleep(cls, delay: float) -> None: + await sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + if (task := current_task()) is None: + return math.inf + + try: + cancel_scope = _task_states[task].cancel_scope + except KeyError: + return math.inf + + deadline = math.inf + while cancel_scope: + deadline = min(deadline, cancel_scope.deadline) + if cancel_scope._cancel_called: + deadline = -math.inf + break + elif cancel_scope.shield: + break + else: + cancel_scope = cancel_scope._parent_scope + + return deadline + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( # type: ignore[return] + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + await cls.checkpoint() + + # If this is the first run in this event loop thread, set up the necessary + # variables + try: + idle_workers = _threadpool_idle_workers.get() + workers = _threadpool_workers.get() + except LookupError: + idle_workers = deque() + workers = set() + _threadpool_idle_workers.set(idle_workers) + _threadpool_workers.set(workers) + + async with limiter or cls.current_default_thread_limiter(): + with CancelScope(shield=not abandon_on_cancel) as scope: + future = asyncio.Future[T_Retval]() + root_task = find_root_task() + if not idle_workers: + worker = WorkerThread(root_task, workers, idle_workers) + worker.start() + workers.add(worker) + root_task.add_done_callback( + worker.stop, context=contextvars.Context() + ) + else: + worker = idle_workers.pop() + + # Prune any other workers that have been idle for MAX_IDLE_TIME + # seconds or longer + now = cls.current_time() + while idle_workers: + if ( + now - idle_workers[0].idle_since + < WorkerThread.MAX_IDLE_TIME + ): + break + + expired_worker = idle_workers.popleft() + expired_worker.root_task.remove_done_callback( + expired_worker.stop + ) + expired_worker.stop() + + context = copy_context() + 
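# NOTE: this worker-thread pool is what anyio.to_thread.run_sync() runs on;
# idle threads are reused and pruned once they exceed MAX_IDLE_TIME. A sketch
# (blocking_io is illustrative; older AnyIO releases spelled the
# abandon_on_cancel parameter `cancellable`):

import time

import anyio
import anyio.to_thread

def blocking_io() -> str:
    time.sleep(1)  # stands in for real blocking work
    return "done"

async def run_blocking() -> None:
    result = await anyio.to_thread.run_sync(blocking_io)
    print(result)

anyio.run(run_blocking)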
context.run(set_current_async_library, None) + if abandon_on_cancel or scope._parent_scope is None: + worker_scope = scope + else: + worker_scope = scope._parent_scope + + worker.queue.put_nowait((context, func, args, future, worker_scope)) + return await future + + @classmethod + def check_cancelled(cls) -> None: + scope: CancelScope | None = threadlocals.current_cancel_scope + while scope is not None: + if scope.cancel_called: + raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") + + if scope.shield: + return + + scope = scope._parent_scope + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + async def task_wrapper() -> T_Retval: + __tracebackhide__ = True + if scope is not None: + task = cast(asyncio.Task, current_task()) + _task_states[task] = TaskState(None, scope) + scope._tasks.add(task) + try: + return await func(*args) + except CancelledError as exc: + raise concurrent.futures.CancelledError(str(exc)) from None + finally: + if scope is not None: + scope._tasks.discard(task) + + loop = cast( + "AbstractEventLoop", token or threadlocals.current_token.native_token + ) + if loop.is_closed(): + raise RunFinishedError + + context = copy_context() + context.run(set_current_async_library, "asyncio") + scope = getattr(threadlocals, "current_cancel_scope", None) + f: concurrent.futures.Future[T_Retval] = context.run( + asyncio.run_coroutine_threadsafe, task_wrapper(), loop=loop + ) + return f.result() + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + @wraps(func) + def wrapper() -> None: + try: + set_current_async_library("asyncio") + f.set_result(func(*args)) + except BaseException as exc: + f.set_exception(exc) + if not isinstance(exc, Exception): + raise + + loop = cast( + "AbstractEventLoop", token or threadlocals.current_token.native_token + ) + if loop.is_closed(): + raise RunFinishedError + + f: concurrent.futures.Future[T_Retval] = Future() + loop.call_soon_threadsafe(wrapper) + return f.result() + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + await cls.checkpoint() + if isinstance(command, PathLike): + command = os.fspath(command) + + if isinstance(command, (str, bytes)): + process = await asyncio.create_subprocess_shell( + command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + else: + process = await asyncio.create_subprocess_exec( + *command, + stdin=stdin, + stdout=stdout, + stderr=stderr, + **kwargs, + ) + + stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None + stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None + stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + create_task( + _shutdown_process_pool_on_exit(workers), + name="AnyIO process pool shutdown task", + ) + find_root_task().add_done_callback( + partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] + ) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, 
local_address: IPSockAddrType | None = None + ) -> abc.SocketStream: + transport, protocol = cast( + tuple[asyncio.Transport, StreamProtocol], + await get_running_loop().create_connection( + StreamProtocol, host, port, local_addr=local_address + ), + ) + transport.pause_reading() + return SocketStream(transport, protocol) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + await cls.checkpoint() + loop = get_running_loop() + raw_socket = socket.socket(socket.AF_UNIX) + raw_socket.setblocking(False) + while True: + try: + raw_socket.connect(path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return UNIXSocketStream(raw_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, + local_addr=local_address, + remote_addr=remote_address, + family=family, + reuse_port=reuse_port, + ) + if protocol.exception: + transport.close() + raise protocol.exception + + if not remote_address: + return UDPSocket(transport, protocol) + else: + return ConnectedUDPSocket(transport, protocol) + + @classmethod + async def create_unix_datagram_socket( # type: ignore[override] + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + await cls.checkpoint() + loop = get_running_loop() + + if remote_path: + while True: + try: + raw_socket.connect(remote_path) + except BlockingIOError: + f: asyncio.Future = asyncio.Future() + loop.add_writer(raw_socket, f.set_result, None) + f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) + await f + except BaseException: + raw_socket.close() + raise + else: + return ConnectedUNIXDatagramSocket(raw_socket) + else: + return UNIXDatagramSocket(raw_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await get_running_loop().getaddrinfo( + host, port, family=family, type=type, proto=proto, flags=flags + ) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await get_running_loop().getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + try: + read_events = _read_events.get() + except LookupError: + read_events = {} + _read_events.set(read_events) + + fd = obj if isinstance(obj, int) else obj.fileno() + if read_events.get(fd): + raise BusyResourceError("reading from") + + loop = get_running_loop() + fut: asyncio.Future[bool] = loop.create_future() + + def cb() -> None: + try: + del read_events[fd] + except KeyError: + 
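# NOTE: wait_readable()/wait_writable() here fall back to a shared selector
# thread when the running loop (e.g. the Windows proactor loop) raises
# NotImplementedError from add_reader()/add_writer(). A hedged sketch,
# assuming the top-level anyio.wait_readable() of recent AnyIO releases:

import socket

import anyio

async def read_when_ready(sock: socket.socket) -> bytes:
    sock.setblocking(False)
    await anyio.wait_readable(sock)  # resumes once recv() won't block
    return sock.recv(4096)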
pass + else: + remove_reader(fd) + + try: + fut.set_result(True) + except asyncio.InvalidStateError: + pass + + try: + loop.add_reader(fd, cb) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_reader(fd, cb) + remove_reader = selector.remove_reader + else: + remove_reader = loop.remove_reader + + read_events[fd] = fut + try: + success = await fut + finally: + try: + del read_events[fd] + except KeyError: + pass + else: + remove_reader(fd) + + if not success: + raise ClosedResourceError + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + try: + write_events = _write_events.get() + except LookupError: + write_events = {} + _write_events.set(write_events) + + fd = obj if isinstance(obj, int) else obj.fileno() + if write_events.get(fd): + raise BusyResourceError("writing to") + + loop = get_running_loop() + fut: asyncio.Future[bool] = loop.create_future() + + def cb() -> None: + try: + del write_events[fd] + except KeyError: + pass + else: + remove_writer(fd) + + try: + fut.set_result(True) + except asyncio.InvalidStateError: + pass + + try: + loop.add_writer(fd, cb) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + selector = get_selector() + selector.add_writer(fd, cb) + remove_writer = selector.remove_writer + else: + remove_writer = loop.remove_writer + + write_events[fd] = fut + try: + success = await fut + finally: + try: + del write_events[fd] + except KeyError: + pass + else: + remove_writer(fd) + + if not success: + raise ClosedResourceError + + @classmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + fd = obj if isinstance(obj, int) else obj.fileno() + loop = get_running_loop() + + try: + write_events = _write_events.get() + except LookupError: + pass + else: + try: + fut = write_events.pop(fd) + except KeyError: + pass + else: + try: + fut.set_result(False) + except asyncio.InvalidStateError: + pass + + try: + loop.remove_writer(fd) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + get_selector().remove_writer(fd) + + try: + read_events = _read_events.get() + except LookupError: + pass + else: + try: + fut = read_events.pop(fd) + except KeyError: + pass + else: + try: + fut.set_result(False) + except asyncio.InvalidStateError: + pass + + try: + loop.remove_reader(fd) + except NotImplementedError: + from anyio._core._asyncio_selector_thread import get_selector + + get_selector().remove_reader(fd) + + @classmethod + async def wrap_listener_socket(cls, sock: socket.socket) -> SocketListener: + return TCPSocketListener(sock) + + @classmethod + async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream: + transport, protocol = await get_running_loop().create_connection( + StreamProtocol, sock=sock + ) + return SocketStream(transport, protocol) + + @classmethod + async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream: + return UNIXSocketStream(sock) + + @classmethod + async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, sock=sock + ) + return UDPSocket(transport, protocol) + + @classmethod + async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket: + transport, protocol = await get_running_loop().create_datagram_endpoint( + DatagramProtocol, sock=sock + ) + return 
ConnectedUDPSocket(transport, protocol) + + @classmethod + async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket: + return UNIXDatagramSocket(sock) + + @classmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket.socket + ) -> ConnectedUNIXDatagramSocket: + return ConnectedUNIXDatagramSocket(sock) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _default_thread_limiter.get() + except LookupError: + limiter = CapacityLimiter(40) + _default_thread_limiter.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + await cls.checkpoint() + this_task = current_task() + while True: + for task in all_tasks(): + if task is this_task: + continue + + waiter = task._fut_waiter # type: ignore[attr-defined] + if waiter is None or waiter.done(): + await sleep(0.1) + break + else: + return + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = AsyncIOBackend diff --git a/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py b/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py new file mode 100644 index 0000000000000000000000000000000000000000..b85a10a13a0149e12718eea95f9269aed75449b6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_backends/_trio.py @@ -0,0 +1,1343 @@ +from __future__ import annotations + +import array +import math +import os +import socket +import sys +import types +import weakref +from collections.abc import ( + AsyncGenerator, + AsyncIterator, + Awaitable, + Callable, + Collection, + Coroutine, + Iterable, + Sequence, +) +from contextlib import AbstractContextManager +from dataclasses import dataclass +from io import IOBase +from os import PathLike +from signal import Signals +from socket import AddressFamily, SocketKind +from types import TracebackType +from typing import ( + IO, + TYPE_CHECKING, + Any, + Generic, + NoReturn, + ParamSpec, + TypeVar, + cast, + overload, +) + +import trio.from_thread +import trio.lowlevel +from outcome import Error, Outcome, Value +from trio.lowlevel import ( + current_root_task, + current_task, + notify_closing, + wait_readable, + wait_writable, +) +from trio.socket import SocketType as TrioSocketType +from trio.to_thread import run_sync + +from .. 
import ( + CapacityLimiterStatistics, + EventStatistics, + LockStatistics, + RunFinishedError, + TaskInfo, + WouldBlock, + abc, +) +from .._core._eventloop import claim_worker_thread +from .._core._exceptions import ( + BrokenResourceError, + BusyResourceError, + ClosedResourceError, + EndOfStream, +) +from .._core._sockets import convert_ipv6_sockaddr +from .._core._streams import create_memory_object_stream +from .._core._synchronization import ( + CapacityLimiter as BaseCapacityLimiter, +) +from .._core._synchronization import Event as BaseEvent +from .._core._synchronization import Lock as BaseLock +from .._core._synchronization import ( + ResourceGuard, + SemaphoreStatistics, +) +from .._core._synchronization import Semaphore as BaseSemaphore +from .._core._tasks import CancelScope as BaseCancelScope +from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType +from ..abc._eventloop import AsyncBackend, StrOrBytesPath +from ..streams.memory import MemoryObjectSendStream + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from exceptiongroup import BaseExceptionGroup + from typing_extensions import TypeVarTuple, Unpack + +T = TypeVar("T") +T_Retval = TypeVar("T_Retval") +T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) +PosArgsT = TypeVarTuple("PosArgsT") +P = ParamSpec("P") + + +# +# Event loop +# + +RunVar = trio.lowlevel.RunVar + + +# +# Timeouts and cancellation +# + + +class CancelScope(BaseCancelScope): + def __new__( + cls, original: trio.CancelScope | None = None, **kwargs: object + ) -> CancelScope: + return object.__new__(cls) + + def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: + self.__original = original or trio.CancelScope(**kwargs) + + def __enter__(self) -> CancelScope: + self.__original.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + return self.__original.__exit__(exc_type, exc_val, exc_tb) + + def cancel(self, reason: str | None = None) -> None: + self.__original.cancel(reason) + + @property + def deadline(self) -> float: + return self.__original.deadline + + @deadline.setter + def deadline(self, value: float) -> None: + self.__original.deadline = value + + @property + def cancel_called(self) -> bool: + return self.__original.cancel_called + + @property + def cancelled_caught(self) -> bool: + return self.__original.cancelled_caught + + @property + def shield(self) -> bool: + return self.__original.shield + + @shield.setter + def shield(self, value: bool) -> None: + self.__original.shield = value + + +# +# Task groups +# + + +class TaskGroup(abc.TaskGroup): + def __init__(self) -> None: + self._active = False + self._nursery_manager = trio.open_nursery(strict_exception_groups=True) + self.cancel_scope = None # type: ignore[assignment] + + async def __aenter__(self) -> TaskGroup: + self._active = True + self._nursery = await self._nursery_manager.__aenter__() + self.cancel_scope = CancelScope(self._nursery.cancel_scope) + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + try: + # trio.Nursery.__exit__ returns bool; .open_nursery has wrong type + return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: ignore[return-value] + except BaseExceptionGroup as exc: + if 
not exc.split(trio.Cancelled)[1]: + raise trio.Cancelled._create() from exc + + raise + finally: + del exc_val, exc_tb + self._active = False + + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + self._nursery.start_soon(func, *args, name=name) + + async def start( + self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None + ) -> Any: + if not self._active: + raise RuntimeError( + "This task group is not active; no new tasks can be started." + ) + + return await self._nursery.start(func, *args, name=name) + + +# +# Subprocesses +# + + +@dataclass(eq=False) +class ReceiveStreamWrapper(abc.ByteReceiveStream): + _stream: trio.abc.ReceiveStream + + async def receive(self, max_bytes: int | None = None) -> bytes: + try: + data = await self._stream.receive_some(max_bytes) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + if data: + return bytes(data) + else: + raise EndOfStream + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class SendStreamWrapper(abc.ByteSendStream): + _stream: trio.abc.SendStream + + async def send(self, item: bytes) -> None: + try: + await self._stream.send_all(item) + except trio.ClosedResourceError as exc: + raise ClosedResourceError from exc.__cause__ + except trio.BrokenResourceError as exc: + raise BrokenResourceError from exc.__cause__ + + async def aclose(self) -> None: + await self._stream.aclose() + + +@dataclass(eq=False) +class Process(abc.Process): + _process: trio.Process + _stdin: abc.ByteSendStream | None + _stdout: abc.ByteReceiveStream | None + _stderr: abc.ByteReceiveStream | None + + async def aclose(self) -> None: + with CancelScope(shield=True): + if self._stdin: + await self._stdin.aclose() + if self._stdout: + await self._stdout.aclose() + if self._stderr: + await self._stderr.aclose() + + try: + await self.wait() + except BaseException: + self.kill() + with CancelScope(shield=True): + await self.wait() + raise + + async def wait(self) -> int: + return await self._process.wait() + + def terminate(self) -> None: + self._process.terminate() + + def kill(self) -> None: + self._process.kill() + + def send_signal(self, signal: Signals) -> None: + self._process.send_signal(signal) + + @property + def pid(self) -> int: + return self._process.pid + + @property + def returncode(self) -> int | None: + return self._process.returncode + + @property + def stdin(self) -> abc.ByteSendStream | None: + return self._stdin + + @property + def stdout(self) -> abc.ByteReceiveStream | None: + return self._stdout + + @property + def stderr(self) -> abc.ByteReceiveStream | None: + return self._stderr + + +class _ProcessPoolShutdownInstrument(trio.abc.Instrument): + def after_run(self) -> None: + super().after_run() + + +current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( + "current_default_worker_process_limiter" +) + + +async def _shutdown_process_pool(workers: set[abc.Process]) -> None: + try: + await trio.sleep(math.inf) + except trio.Cancelled: + for process in workers: + if process.returncode is None: + process.kill() + + with CancelScope(shield=True): + for process in workers: + await process.aclose() + + +# +# Sockets and networking +# + + +class 
_TrioSocketMixin(Generic[T_SockAddr]): + def __init__(self, trio_socket: TrioSocketType) -> None: + self._trio_socket = trio_socket + self._closed = False + + def _check_closed(self) -> None: + if self._closed: + raise ClosedResourceError + if self._trio_socket.fileno() < 0: + raise BrokenResourceError + + @property + def _raw_socket(self) -> socket.socket: + return self._trio_socket._sock # type: ignore[attr-defined] + + async def aclose(self) -> None: + if self._trio_socket.fileno() >= 0: + self._closed = True + self._trio_socket.close() + + def _convert_socket_error(self, exc: BaseException) -> NoReturn: + if isinstance(exc, trio.ClosedResourceError): + raise ClosedResourceError from exc + elif self._trio_socket.fileno() < 0 and self._closed: + raise ClosedResourceError from None + elif isinstance(exc, OSError): + raise BrokenResourceError from exc + else: + raise exc + + +class SocketStream(_TrioSocketMixin, abc.SocketStream): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self, max_bytes: int = 65536) -> bytes: + with self._receive_guard: + try: + data = await self._trio_socket.recv(max_bytes) + except BaseException as exc: + self._convert_socket_error(exc) + + if data: + return data + else: + raise EndOfStream + + async def send(self, item: bytes) -> None: + with self._send_guard: + view = memoryview(item) + while view: + try: + bytes_sent = await self._trio_socket.send(view) + except BaseException as exc: + self._convert_socket_error(exc) + + view = view[bytes_sent:] + + async def send_eof(self) -> None: + self._trio_socket.shutdown(socket.SHUT_WR) + + +class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): + async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: + if not isinstance(msglen, int) or msglen < 0: + raise ValueError("msglen must be a non-negative integer") + if not isinstance(maxfds, int) or maxfds < 1: + raise ValueError("maxfds must be a positive integer") + + fds = array.array("i") + await trio.lowlevel.checkpoint() + with self._receive_guard: + while True: + try: + message, ancdata, flags, addr = await self._trio_socket.recvmsg( + msglen, socket.CMSG_LEN(maxfds * fds.itemsize) + ) + except BaseException as exc: + self._convert_socket_error(exc) + else: + if not message and not ancdata: + raise EndOfStream + + break + + for cmsg_level, cmsg_type, cmsg_data in ancdata: + if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: + raise RuntimeError( + f"Received unexpected ancillary data; message = {message!r}, " + f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" + ) + + fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) + + return message, list(fds) + + async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: + if not message: + raise ValueError("message must not be empty") + if not fds: + raise ValueError("fds must not be empty") + + filenos: list[int] = [] + for fd in fds: + if isinstance(fd, int): + filenos.append(fd) + elif isinstance(fd, IOBase): + filenos.append(fd.fileno()) + + fdarray = array.array("i", filenos) + await trio.lowlevel.checkpoint() + with self._send_guard: + while True: + try: + await self._trio_socket.sendmsg( + [message], + [ + ( + socket.SOL_SOCKET, + socket.SCM_RIGHTS, + fdarray, + ) + ], + ) + break + except BaseException as exc: + self._convert_socket_error(exc) + + +class 
TCPSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> SocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SocketStream(trio_socket) + + +class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): + def __init__(self, raw_socket: socket.socket): + super().__init__(trio.socket.from_stdlib_socket(raw_socket)) + self._accept_guard = ResourceGuard("accepting connections from") + + async def accept(self) -> UNIXSocketStream: + with self._accept_guard: + try: + trio_socket, _addr = await self._trio_socket.accept() + except BaseException as exc: + self._convert_socket_error(exc) + + return UNIXSocketStream(trio_socket) + + +class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> tuple[bytes, IPSockAddrType]: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, convert_ipv6_sockaddr(addr) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UDPPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> UNIXDatagramPacketType: + with self._receive_guard: + try: + data, addr = await self._trio_socket.recvfrom(65536) + return data, addr + except BaseException as exc: + self._convert_socket_error(exc) + + async def send(self, item: UNIXDatagramPacketType) -> None: + with self._send_guard: + try: + await self._trio_socket.sendto(*item) + except BaseException as exc: + self._convert_socket_error(exc) + + +class ConnectedUNIXDatagramSocket( + _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket +): + def __init__(self, trio_socket: TrioSocketType) -> None: + super().__init__(trio_socket) + self._receive_guard = ResourceGuard("reading from") + self._send_guard = ResourceGuard("writing to") + + async def receive(self) -> bytes: + with self._receive_guard: + try: + return await self._trio_socket.recv(65536) + except BaseException as exc: + 
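# NOTE: the TCP listener above enables TCP_NODELAY on every accepted socket
# before wrapping it in a SocketStream. Serving connections through the
# public API (the echo handler and port are illustrative):

import anyio
from anyio.abc import SocketStream

async def echo(client: SocketStream) -> None:
    async with client:
        data = await client.receive()
        await client.send(data)

async def serve() -> None:
    listener = await anyio.create_tcp_listener(local_port=8000)
    await listener.serve(echo)

anyio.run(serve)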
self._convert_socket_error(exc) + + async def send(self, item: bytes) -> None: + with self._send_guard: + try: + await self._trio_socket.send(item) + except BaseException as exc: + self._convert_socket_error(exc) + + +# +# Synchronization +# + + +class Event(BaseEvent): + def __new__(cls) -> Event: + return object.__new__(cls) + + def __init__(self) -> None: + self.__original = trio.Event() + + def is_set(self) -> bool: + return self.__original.is_set() + + async def wait(self) -> None: + return await self.__original.wait() + + def statistics(self) -> EventStatistics: + orig_statistics = self.__original.statistics() + return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) + + def set(self) -> None: + self.__original.set() + + +class Lock(BaseLock): + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False) -> None: + self._fast_acquire = fast_acquire + self.__original = trio.Lock() + + @staticmethod + def _convert_runtime_error_msg(exc: RuntimeError) -> None: + if exc.args == ("attempt to re-acquire an already held Lock",): + exc.args = ("Attempted to acquire an already held Lock",) + + async def acquire(self) -> None: + if not self._fast_acquire: + try: + await self.__original.acquire() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + except RuntimeError as exc: + self._convert_runtime_error_msg(exc) + raise + + def locked(self) -> bool: + return self.__original.locked() + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> LockStatistics: + orig_statistics = self.__original.statistics() + owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None + return LockStatistics( + orig_statistics.locked, owner, orig_statistics.tasks_waiting + ) + + +class Semaphore(BaseSemaphore): + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self.__original = trio.Semaphore(initial_value, max_value=max_value) + + async def acquire(self) -> None: + if not self._fast_acquire: + await self.__original.acquire() + return + + # This is the "fast path" where we don't let other tasks run + await trio.lowlevel.checkpoint_if_cancelled() + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + await self.__original._lot.park() + + def acquire_nowait(self) -> None: + try: + self.__original.acquire_nowait() + except trio.WouldBlock: + raise WouldBlock from None + + @property + def max_value(self) -> int | None: + return self.__original.max_value + + @property + def value(self) -> int: + return self.__original.value + + def release(self) -> None: + self.__original.release() + + def statistics(self) -> SemaphoreStatistics: + orig_statistics = self.__original.statistics() + return 
SemaphoreStatistics(orig_statistics.tasks_waiting) + + +class CapacityLimiter(BaseCapacityLimiter): + def __new__( + cls, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> CapacityLimiter: + return object.__new__(cls) + + def __init__( + self, + total_tokens: float | None = None, + *, + original: trio.CapacityLimiter | None = None, + ) -> None: + if original is not None: + self.__original = original + else: + assert total_tokens is not None + self.__original = trio.CapacityLimiter(total_tokens) + + async def __aenter__(self) -> None: + return await self.__original.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.__original.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + return self.__original.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + self.__original.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + return self.__original.borrowed_tokens + + @property + def available_tokens(self) -> float: + return self.__original.available_tokens + + def acquire_nowait(self) -> None: + self.__original.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self.__original.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self.__original.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self.__original.acquire_on_behalf_of(borrower) + + def release(self) -> None: + return self.__original.release() + + def release_on_behalf_of(self, borrower: object) -> None: + return self.__original.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + orig = self.__original.statistics() + return CapacityLimiterStatistics( + borrowed_tokens=orig.borrowed_tokens, + total_tokens=orig.total_tokens, + borrowers=tuple(orig.borrowers), + tasks_waiting=orig.tasks_waiting, + ) + + +_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") + + +# +# Signal handling +# + + +class _SignalReceiver: + _iterator: AsyncIterator[int] + + def __init__(self, signals: tuple[Signals, ...]): + self._signals = signals + + def __enter__(self) -> _SignalReceiver: + self._cm = trio.open_signal_receiver(*self._signals) + self._iterator = self._cm.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + return self._cm.__exit__(exc_type, exc_val, exc_tb) + + def __aiter__(self) -> _SignalReceiver: + return self + + async def __anext__(self) -> Signals: + signum = await self._iterator.__anext__() + return Signals(signum) + + +# +# Testing and debugging +# + + +class TestRunner(abc.TestRunner): + def __init__(self, **options: Any) -> None: + from queue import Queue + + self._call_queue: Queue[Callable[[], object]] = Queue() + self._send_stream: MemoryObjectSendStream | None = None + self._options = options + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> None: + if self._send_stream: + self._send_stream.close() + while self._send_stream is not None: + self._call_queue.get()() + + async def _run_tests_and_fixtures(self) -> None: + self._send_stream, receive_stream = create_memory_object_stream(1) + 
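# NOTE: the trio TestRunner ferries each test coroutine into the guest run
# over a memory object stream; the same primitive is public API. A minimal
# producer/consumer sketch:

import anyio
from anyio.streams.memory import MemoryObjectSendStream

async def produce(send: MemoryObjectSendStream[int]) -> None:
    async with send:
        for i in range(3):
            await send.send(i)

async def stream_demo() -> None:
    send, receive = anyio.create_memory_object_stream[int](1)
    async with anyio.create_task_group() as tg:
        tg.start_soon(produce, send)
        async with receive:
            async for item in receive:
                print(item)

anyio.run(stream_demo)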
with receive_stream: + async for coro, outcome_holder in receive_stream: + try: + retval = await coro + except BaseException as exc: + outcome_holder.append(Error(exc)) + else: + outcome_holder.append(Value(retval)) + + def _main_task_finished(self, outcome: object) -> None: + self._send_stream = None + + def _call_in_runner_task( + self, + func: Callable[P, Awaitable[T_Retval]], + /, + *args: P.args, + **kwargs: P.kwargs, + ) -> T_Retval: + if self._send_stream is None: + trio.lowlevel.start_guest_run( + self._run_tests_and_fixtures, + run_sync_soon_threadsafe=self._call_queue.put, + done_callback=self._main_task_finished, + **self._options, + ) + while self._send_stream is None: + self._call_queue.get()() + + outcome_holder: list[Outcome] = [] + self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) + while not outcome_holder: + self._call_queue.get()() + + return outcome_holder[0].unwrap() + + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], + kwargs: dict[str, Any], + ) -> Iterable[T_Retval]: + asyncgen = fixture_func(**kwargs) + fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) + + yield fixturevalue + + try: + self._call_in_runner_task(asyncgen.asend, None) + except StopAsyncIteration: + pass + else: + self._call_in_runner_task(asyncgen.aclose) + raise RuntimeError("Async generator fixture did not stop") + + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], + kwargs: dict[str, Any], + ) -> T_Retval: + return self._call_in_runner_task(fixture_func, **kwargs) + + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + self._call_in_runner_task(test_func, **kwargs) + + +class TrioTaskInfo(TaskInfo): + def __init__(self, task: trio.lowlevel.Task): + parent_id = None + if task.parent_nursery and task.parent_nursery.parent_task: + parent_id = id(task.parent_nursery.parent_task) + + super().__init__(id(task), parent_id, task.name, task.coro) + self._task = weakref.proxy(task) + + def has_pending_cancellation(self) -> bool: + try: + return self._task._cancel_status.effectively_cancelled + except ReferenceError: + # If the task is no longer around, it surely doesn't have a cancellation + # pending + return False + + +class TrioBackend(AsyncBackend): + @classmethod + def run( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + kwargs: dict[str, Any], + options: dict[str, Any], + ) -> T_Retval: + return trio.run(func, *args) + + @classmethod + def current_token(cls) -> object: + return trio.lowlevel.current_trio_token() + + @classmethod + def current_time(cls) -> float: + return trio.current_time() + + @classmethod + def cancelled_exception_class(cls) -> type[BaseException]: + return trio.Cancelled + + @classmethod + async def checkpoint(cls) -> None: + await trio.lowlevel.checkpoint() + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + await trio.lowlevel.checkpoint_if_cancelled() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + await trio.lowlevel.cancel_shielded_checkpoint() + + @classmethod + async def sleep(cls, delay: float) -> None: + await trio.sleep(delay) + + @classmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> abc.CancelScope: + return CancelScope(deadline=deadline, shield=shield) + + @classmethod + def current_effective_deadline(cls) -> float: + return 
trio.current_effective_deadline() + + @classmethod + def create_task_group(cls) -> abc.TaskGroup: + return TaskGroup() + + @classmethod + def create_event(cls) -> abc.Event: + return Event() + + @classmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + return Lock(fast_acquire=fast_acquire) + + @classmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> abc.Semaphore: + return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) + + @classmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + return CapacityLimiter(total_tokens) + + @classmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: abc.CapacityLimiter | None = None, + ) -> T_Retval: + def wrapper() -> T_Retval: + with claim_worker_thread(TrioBackend, token): + return func(*args) + + token = TrioBackend.current_token() + return await run_sync( + wrapper, + abandon_on_cancel=abandon_on_cancel, + limiter=cast(trio.CapacityLimiter, limiter), + ) + + @classmethod + def check_cancelled(cls) -> None: + trio.from_thread.check_cancelled() + + @classmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + trio_token = cast("trio.lowlevel.TrioToken | None", token) + try: + return trio.from_thread.run(func, *args, trio_token=trio_token) + except trio.RunFinishedError: + raise RunFinishedError from None + + @classmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + trio_token = cast("trio.lowlevel.TrioToken | None", token) + try: + return trio.from_thread.run_sync(func, *args, trio_token=trio_token) + except trio.RunFinishedError: + raise RunFinishedError from None + + @classmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + def convert_item(item: StrOrBytesPath) -> str: + str_or_bytes = os.fspath(item) + if isinstance(str_or_bytes, str): + return str_or_bytes + else: + return os.fsdecode(str_or_bytes) + + if isinstance(command, (str, bytes, PathLike)): + process = await trio.lowlevel.open_process( + convert_item(command), + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=True, + **kwargs, + ) + else: + process = await trio.lowlevel.open_process( + [convert_item(item) for item in command], + stdin=stdin, + stdout=stdout, + stderr=stderr, + shell=False, + **kwargs, + ) + + stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None + stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None + stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None + return Process(process, stdin_stream, stdout_stream, stderr_stream) + + @classmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: + trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) + + @classmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + family = socket.AF_INET6 if ":" in host else socket.AF_INET + trio_socket = trio.socket.socket(family) + 
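# NOTE: connect_tcp() here infers the address family from the host string and
# turns on TCP_NODELAY before connecting. Public-API usage (host and request
# bytes are illustrative):

import anyio

async def http_head() -> None:
    stream = await anyio.connect_tcp("example.com", 80)
    async with stream:
        await stream.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
        print(await stream.receive())

anyio.run(http_head)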
trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + if local_address: + await trio_socket.bind(local_address) + + try: + await trio_socket.connect((host, port)) + except BaseException: + trio_socket.close() + raise + + return SocketStream(trio_socket) + + @classmethod + async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: + trio_socket = trio.socket.socket(socket.AF_UNIX) + try: + await trio_socket.connect(path) + except BaseException: + trio_socket.close() + raise + + return UNIXSocketStream(trio_socket) + + @classmethod + def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: + return UNIXSocketListener(sock) + + @classmethod + async def create_udp_socket( + cls, + family: socket.AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) + + if reuse_port: + trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + + if local_address: + await trio_socket.bind(local_address) + + if remote_address: + await trio_socket.connect(remote_address) + return ConnectedUDPSocket(trio_socket) + else: + return UDPSocket(trio_socket) + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: None + ) -> abc.UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes + ) -> abc.ConnectedUNIXDatagramSocket: ... + + @classmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket.socket, remote_path: str | bytes | None + ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: + trio_socket = trio.socket.from_stdlib_socket(raw_socket) + + if remote_path: + await trio_socket.connect(remote_path) + return ConnectedUNIXDatagramSocket(trio_socket) + else: + return UNIXDatagramSocket(trio_socket) + + @classmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) + + @classmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + return await trio.socket.getnameinfo(sockaddr, flags) + + @classmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + try: + await wait_readable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("reading from") from None + + @classmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + try: + await wait_writable(obj) + except trio.ClosedResourceError as exc: + raise ClosedResourceError().with_traceback(exc.__traceback__) from None + except trio.BusyResourceError: + raise BusyResourceError("writing to") from None + + @classmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + notify_closing(obj) + + @classmethod + async def wrap_listener_socket(cls, sock: socket.socket) -> 
abc.SocketListener: + return TCPSocketListener(sock) + + @classmethod + async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream: + trio_sock = trio.socket.from_stdlib_socket(sock) + return SocketStream(trio_sock) + + @classmethod + async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UNIXSocketStream(trio_sock) + + @classmethod + async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UDPSocket(trio_sock) + + @classmethod + async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return ConnectedUDPSocket(trio_sock) + + @classmethod + async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return UNIXDatagramSocket(trio_sock) + + @classmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket.socket + ) -> ConnectedUNIXDatagramSocket: + trio_sock = trio.socket.from_stdlib_socket(sock) + return ConnectedUNIXDatagramSocket(trio_sock) + + @classmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + try: + return _capacity_limiter_wrapper.get() + except LookupError: + limiter = CapacityLimiter( + original=trio.to_thread.current_default_thread_limiter() + ) + _capacity_limiter_wrapper.set(limiter) + return limiter + + @classmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + return _SignalReceiver(signals) + + @classmethod + def get_current_task(cls) -> TaskInfo: + task = current_task() + return TrioTaskInfo(task) + + @classmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + root_task = current_root_task() + assert root_task + task_infos = [TrioTaskInfo(root_task)] + nurseries = root_task.child_nurseries + while nurseries: + new_nurseries: list[trio.Nursery] = [] + for nursery in nurseries: + for task in nursery.child_tasks: + task_infos.append(TrioTaskInfo(task)) + new_nurseries.extend(task.child_nurseries) + + nurseries = new_nurseries + + return task_infos + + @classmethod + async def wait_all_tasks_blocked(cls) -> None: + from trio.testing import wait_all_tasks_blocked + + await wait_all_tasks_blocked() + + @classmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + return TestRunner(**options) + + +backend_class = TrioBackend diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__init__.py b/venv/lib/python3.12/site-packages/anyio/_core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cebb857e7c84d2ccab51d30f6fc94e57056d2906 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..851eb183bf9ebe5abfaf662ca88dd10d0cd3df0c Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd9ac3336f6a50350f4864e547ecbccc062540ec Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..452548821a8de8eccd1aea229c12b8401f5880c1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_eventloop.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..55d2dc1fdae869e920d48211b38a1032b1f143fe Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea67dd0d1f9abc0ec1ff9405e7af37ebccb33591 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_fileio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2317e80a44b6e20d312d6124cb0baa6a0b28ce2f Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_resources.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4fd9f3d92fd4686610815c1d84e5f6db8368fa72 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_signals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..057fb94b19e318eb8755ea9d1d849539640ca74c Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_sockets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..749a07b80a43f83d603bfaca84c81f5bb9cf66ca Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_streams.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6f9af27382fb94f5ddfe63db416b1f0bd77bb1c Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da3f7e91e0b6ef223d1e0af19d3be2ae242d3a20 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d180b3ceab6bd743ae84c9dcb08ced3487e05b5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tasks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4c59ae6358fe9f4765f9b61456c2a0e6920ea7d Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_tempfile.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c3ab63c1379395155a1aaedeaa2e81375505362 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_testing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b9500abbd4fe9bf0dd993d9bd456ae0c2743fd43 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/_core/__pycache__/_typedattr.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py b/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py new file mode 100644 index 0000000000000000000000000000000000000000..9f35bae568e33e6a9e1219761c83cc8350fa0532 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_asyncio_selector_thread.py @@ -0,0 +1,167 @@ +from __future__ import annotations + +import asyncio +import socket +import threading +from collections.abc import Callable +from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector +from typing import TYPE_CHECKING, Any + +if TYPE_CHECKING: + from _typeshed import FileDescriptorLike + +_selector_lock = threading.Lock() +_selector: Selector | None = None + + +class Selector: + def __init__(self) -> None: + self._thread = threading.Thread(target=self.run, name="AnyIO socket selector") + self._selector = DefaultSelector() + self._send, self._receive = socket.socketpair() + self._send.setblocking(False) + self._receive.setblocking(False) + # This somewhat reduces the amount of memory wasted queueing up data + # for wakeups. 
With these settings, maximum number of 1-byte sends + # before getting BlockingIOError: + # Linux 4.8: 6 + # macOS (darwin 15.5): 1 + # Windows 10: 525347 + # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send + # blocking, even on non-blocking sockets, so don't do that.) + self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1) + self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1) + # On Windows this is a TCP socket so this might matter. On other + # platforms this fails b/c AF_UNIX sockets aren't actually TCP. + try: + self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + except OSError: + pass + + self._selector.register(self._receive, EVENT_READ) + self._closed = False + + def start(self) -> None: + self._thread.start() + threading._register_atexit(self._stop) # type: ignore[attr-defined] + + def _stop(self) -> None: + global _selector + self._closed = True + self._notify_self() + self._send.close() + self._thread.join() + self._selector.unregister(self._receive) + self._receive.close() + self._selector.close() + _selector = None + assert not self._selector.get_map(), ( + "selector still has registered file descriptors after shutdown" + ) + + def _notify_self(self) -> None: + try: + self._send.send(b"\x00") + except BlockingIOError: + pass + + def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)}) + else: + if EVENT_READ in key.data: + raise ValueError( + "this file descriptor is already registered for reading" + ) + + key.data[EVENT_READ] = loop, callback + self._selector.modify(fd, key.events | EVENT_READ, key.data) + + self._notify_self() + + def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None: + loop = asyncio.get_running_loop() + try: + key = self._selector.get_key(fd) + except KeyError: + self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)}) + else: + if EVENT_WRITE in key.data: + raise ValueError( + "this file descriptor is already registered for writing" + ) + + key.data[EVENT_WRITE] = loop, callback + self._selector.modify(fd, key.events | EVENT_WRITE, key.data) + + self._notify_self() + + def remove_reader(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_READ: + del key.data[EVENT_READ] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def remove_writer(self, fd: FileDescriptorLike) -> bool: + try: + key = self._selector.get_key(fd) + except KeyError: + return False + + if new_events := key.events ^ EVENT_WRITE: + del key.data[EVENT_WRITE] + self._selector.modify(fd, new_events, key.data) + else: + self._selector.unregister(fd) + + return True + + def run(self) -> None: + while not self._closed: + for key, events in self._selector.select(): + if key.fileobj is self._receive: + try: + while self._receive.recv(4096): + pass + except BlockingIOError: + pass + + continue + + if events & EVENT_READ: + loop, callback = key.data[EVENT_READ] + self.remove_reader(key.fd) + try: + loop.call_soon_threadsafe(callback) + except RuntimeError: + pass # the loop was already closed + + if events & EVENT_WRITE: + loop, callback = key.data[EVENT_WRITE] + self.remove_writer(key.fd) + try: + loop.call_soon_threadsafe(callback) + except 
RuntimeError: + pass # the loop was already closed + + +def get_selector() -> Selector: + global _selector + + with _selector_lock: + if _selector is None: + _selector = Selector() + _selector.start() + + return _selector diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py b/venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py new file mode 100644 index 0000000000000000000000000000000000000000..302f32b0c78a7071605b195c55054cfdb0b55f37 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_contextmanagers.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +from abc import abstractmethod +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from inspect import isasyncgen, iscoroutine, isgenerator +from types import TracebackType +from typing import Protocol, TypeVar, cast, final + +_T_co = TypeVar("_T_co", covariant=True) +_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None") + + +class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]): + def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ... + + +class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]): + def __asynccontextmanager__( + self, + ) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ... + + +class ContextManagerMixin: + """ + Mixin class providing context manager functionality via a generator-based + implementation. + + This class allows you to implement a context manager via :meth:`__contextmanager__` + which should return a generator. The mechanics are meant to mirror those of + :func:`@contextmanager `. + + .. note:: Classes using this mix-in are not reentrant as context managers, meaning + that once you enter it, you can't re-enter before first exiting it. + + .. seealso:: :doc:`contextmanagers` + """ + + __cm: AbstractContextManager[object, bool | None] | None = None + + @final + def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, ContextManagerMixin) + if self.__cm is not None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has already been entered" + ) + + cm = self.__contextmanager__() + if not isinstance(cm, AbstractContextManager): + if isgenerator(cm): + raise TypeError( + "__contextmanager__() returned a generator object instead of " + "a context manager. Did you forget to add the @contextmanager " + "decorator?" + ) + + raise TypeError( + f"__contextmanager__() did not return a context manager object, " + f"but {cm.__class__!r}" + ) + + if cm is self: + raise TypeError( + f"{self.__class__.__qualname__}.__contextmanager__() returned " + f"self. Did you forget to add the @contextmanager decorator and a " + f"'yield' statement?" + ) + + value = cm.__enter__() + self.__cm = cm + return value + + @final + def __exit__( + self: _SupportsCtxMgr[object, _ExitT_co], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> _ExitT_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, ContextManagerMixin) + if self.__cm is None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has not been entered yet" + ) + + # Prevent circular references + cm = self.__cm + del self.__cm + + return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb)) + + @abstractmethod + def __contextmanager__(self) -> AbstractContextManager[object, bool | None]: + """ + Implement your context manager logic here. 
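For example (an illustrative sketch; the class body shown here is made
up)::

    from contextlib import contextmanager

    class Resource(ContextManagerMixin):
        @contextmanager
        def __contextmanager__(self):
            print("acquired")
            try:
                yield self
            finally:
                print("released")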
+ + This method **must** be decorated with + :func:`@contextmanager `. + + .. note:: Remember that the ``yield`` will raise any exception raised in the + enclosed context block, so use a ``finally:`` block to clean up resources! + + :return: a context manager object + """ + + +class AsyncContextManagerMixin: + """ + Mixin class providing async context manager functionality via a generator-based + implementation. + + This class allows you to implement a context manager via + :meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of + :func:`@asynccontextmanager `. + + .. note:: Classes using this mix-in are not reentrant as context managers, meaning + that once you enter it, you can't re-enter before first exiting it. + + .. seealso:: :doc:`contextmanagers` + """ + + __cm: AbstractAsyncContextManager[object, bool | None] | None = None + + @final + async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co: + # Needed for mypy to assume self still has the __cm member + assert isinstance(self, AsyncContextManagerMixin) + if self.__cm is not None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has already been entered" + ) + + cm = self.__asynccontextmanager__() + if not isinstance(cm, AbstractAsyncContextManager): + if isasyncgen(cm): + raise TypeError( + "__asynccontextmanager__() returned an async generator instead of " + "an async context manager. Did you forget to add the " + "@asynccontextmanager decorator?" + ) + elif iscoroutine(cm): + cm.close() + raise TypeError( + "__asynccontextmanager__() returned a coroutine object instead of " + "an async context manager. Did you forget to add the " + "@asynccontextmanager decorator and a 'yield' statement?" + ) + + raise TypeError( + f"__asynccontextmanager__() did not return an async context manager, " + f"but {cm.__class__!r}" + ) + + if cm is self: + raise TypeError( + f"{self.__class__.__qualname__}.__asynccontextmanager__() returned " + f"self. Did you forget to add the @asynccontextmanager decorator and a " + f"'yield' statement?" + ) + + value = await cm.__aenter__() + self.__cm = cm + return value + + @final + async def __aexit__( + self: _SupportsAsyncCtxMgr[object, _ExitT_co], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> _ExitT_co: + assert isinstance(self, AsyncContextManagerMixin) + if self.__cm is None: + raise RuntimeError( + f"this {self.__class__.__qualname__} has not been entered yet" + ) + + # Prevent circular references + cm = self.__cm + del self.__cm + + return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb)) + + @abstractmethod + def __asynccontextmanager__( + self, + ) -> AbstractAsyncContextManager[object, bool | None]: + """ + Implement your async context manager logic here. + + This method **must** be decorated with + :func:`@asynccontextmanager `. + + .. note:: Remember that the ``yield`` will raise any exception raised in the + enclosed context block, so use a ``finally:`` block to clean up resources! 
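For example (an illustrative sketch; the ``_connect()``/``_disconnect()``
helpers are made up)::

    from contextlib import asynccontextmanager

    class Connection(AsyncContextManagerMixin):
        @asynccontextmanager
        async def __asynccontextmanager__(self):
            await self._connect()
            try:
                yield self
            finally:
                await self._disconnect()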
+ + :return: an async context manager object + """ diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py b/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py new file mode 100644 index 0000000000000000000000000000000000000000..59a69ccdf02c2989fb522bcc9af5a23f64e1f3e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_eventloop.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import math +import sys +import threading +from collections.abc import Awaitable, Callable, Generator +from contextlib import contextmanager +from contextvars import Token +from importlib import import_module +from typing import TYPE_CHECKING, Any, TypeVar + +from ._exceptions import NoEventLoopError + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +sniffio: Any +try: + import sniffio +except ModuleNotFoundError: + sniffio = None + +if TYPE_CHECKING: + from ..abc import AsyncBackend + +# This must be updated when new backends are introduced +BACKENDS = "asyncio", "trio" + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") + +threadlocals = threading.local() +loaded_backends: dict[str, type[AsyncBackend]] = {} + + +def run( + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + *args: Unpack[PosArgsT], + backend: str = "asyncio", + backend_options: dict[str, Any] | None = None, +) -> T_Retval: + """ + Run the given coroutine function in an asynchronous event loop. + + The current thread must not be already running an event loop. + + :param func: a coroutine function + :param args: positional arguments to ``func`` + :param backend: name of the asynchronous event loop implementation – currently + either ``asyncio`` or ``trio`` + :param backend_options: keyword arguments to call the backend ``run()`` + implementation with (documented :ref:`here `) + :return: the return value of the coroutine function + :raises RuntimeError: if an asynchronous event loop is already running in this + thread + :raises LookupError: if the named backend is not found + + """ + if asynclib_name := current_async_library(): + raise RuntimeError(f"Already running {asynclib_name} in this thread") + + try: + async_backend = get_async_backend(backend) + except ImportError as exc: + raise LookupError(f"No such backend: {backend}") from exc + + token = None + if asynclib_name is None: + # Since we're in control of the event loop, we can cache the name of the async + # library + token = set_current_async_library(backend) + + try: + backend_options = backend_options or {} + return async_backend.run(func, args, {}, backend_options) + finally: + reset_current_async_library(token) + + +async def sleep(delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + + """ + return await get_async_backend().sleep(delay) + + +async def sleep_forever() -> None: + """ + Pause the current task until it's cancelled. + + This is a shortcut for ``sleep(math.inf)``. + + .. versionadded:: 3.1 + + """ + await sleep(math.inf) + + +async def sleep_until(deadline: float) -> None: + """ + Pause the current task until the given time. + + :param deadline: the absolute time to wake up at (according to the internal + monotonic clock of the event loop) + + .. versionadded:: 3.1 + + """ + now = current_time() + await sleep(max(deadline - now, 0)) + + +def current_time() -> float: + """ + Return the current value of the event loop's internal clock. 
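For example, to compute an absolute deadline five seconds from now
(sketch)::

    deadline = anyio.current_time() + 5
    await anyio.sleep_until(deadline)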
+ + :return: the clock value (seconds) + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().current_time() + + +def get_all_backends() -> tuple[str, ...]: + """Return a tuple of the names of all built-in backends.""" + return BACKENDS + + +def get_available_backends() -> tuple[str, ...]: + """ + Test for the availability of built-in backends. + + :return a tuple of the built-in backend names that were successfully imported + + .. versionadded:: 4.12 + + """ + available_backends: list[str] = [] + for backend_name in get_all_backends(): + try: + get_async_backend(backend_name) + except ImportError: + continue + + available_backends.append(backend_name) + + return tuple(available_backends) + + +def get_cancelled_exc_class() -> type[BaseException]: + """ + Return the current async library's cancellation exception class. + + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().cancelled_exception_class() + + +# +# Private API +# + + +@contextmanager +def claim_worker_thread( + backend_class: type[AsyncBackend], token: object +) -> Generator[Any, None, None]: + from ..lowlevel import EventLoopToken + + threadlocals.current_token = EventLoopToken(backend_class, token) + try: + yield + finally: + del threadlocals.current_token + + +def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: + if asynclib_name is None: + asynclib_name = current_async_library() + if not asynclib_name: + raise NoEventLoopError( + f"Not currently running on any asynchronous event loop. " + f"Available async backends: {', '.join(get_all_backends())}" + ) + + # We use our own dict instead of sys.modules to get the already imported back-end + # class because the appropriate modules in sys.modules could potentially be only + # partially initialized + try: + return loaded_backends[asynclib_name] + except KeyError: + module = import_module(f"anyio._backends._{asynclib_name}") + loaded_backends[asynclib_name] = module.backend_class + return module.backend_class + + +def current_async_library() -> str | None: + if sniffio is None: + # If sniffio is not installed, we assume we're either running asyncio or nothing + import asyncio + + try: + asyncio.get_running_loop() + return "asyncio" + except RuntimeError: + pass + else: + try: + return sniffio.current_async_library() + except sniffio.AsyncLibraryNotFoundError: + pass + + return None + + +def set_current_async_library(asynclib_name: str | None) -> Token | None: + # no-op if sniffio is not installed + if sniffio is None: + return None + + return sniffio.current_async_library_cvar.set(asynclib_name) + + +def reset_current_async_library(token: Token | None) -> None: + if token is not None: + sniffio.current_async_library_cvar.reset(token) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py b/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..3776bedcd339913d609e41e2e396f3f2fd16ae9d --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_exceptions.py @@ -0,0 +1,156 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator +from textwrap import dedent +from typing import Any + +if sys.version_info < (3, 11): + from exceptiongroup import BaseExceptionGroup + + +class BrokenResourceError(Exception): + """ + Raised when trying to use a resource that 
has been rendered unusable due to external + causes (e.g. a send stream whose peer has disconnected). + """ + + +class BrokenWorkerProcess(Exception): + """ + Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or + otherwise misbehaves. + """ + + +class BrokenWorkerInterpreter(Exception): + """ + Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is + raised in the subinterpreter. + """ + + def __init__(self, excinfo: Any): + # This was adapted from concurrent.futures.interpreter.ExecutionFailed + msg = excinfo.formatted + if not msg: + if excinfo.type and excinfo.msg: + msg = f"{excinfo.type.__name__}: {excinfo.msg}" + else: + msg = excinfo.type.__name__ or excinfo.msg + + super().__init__(msg) + self.excinfo = excinfo + + def __str__(self) -> str: + try: + formatted = self.excinfo.errdisplay + except Exception: + return super().__str__() + else: + return dedent( + f""" + {super().__str__()} + + Uncaught in the interpreter: + + {formatted} + """.strip() + ) + + +class BusyResourceError(Exception): + """ + Raised when two tasks are trying to read from or write to the same resource + concurrently. + """ + + def __init__(self, action: str): + super().__init__(f"Another task is already {action} this resource") + + +class ClosedResourceError(Exception): + """Raised when trying to use a resource that has been closed.""" + + +class ConnectionFailed(OSError): + """ + Raised when a connection attempt fails. + + .. note:: This class inherits from :exc:`OSError` for backwards compatibility. + """ + + +def iterate_exceptions( + exception: BaseException, +) -> Generator[BaseException, None, None]: + if isinstance(exception, BaseExceptionGroup): + for exc in exception.exceptions: + yield from iterate_exceptions(exc) + else: + yield exception + + +class DelimiterNotFound(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + maximum number of bytes has been read without the delimiter being found. + """ + + def __init__(self, max_bytes: int) -> None: + super().__init__( + f"The delimiter was not found among the first {max_bytes} bytes" + ) + + +class EndOfStream(Exception): + """ + Raised when trying to read from a stream that has been closed from the other end. + """ + + +class IncompleteRead(Exception): + """ + Raised during + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or + :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the + connection is closed before the requested amount of bytes has been read. + """ + + def __init__(self) -> None: + super().__init__( + "The stream was closed before the read operation could be completed" + ) + + +class TypedAttributeLookupError(LookupError): + """ + Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute + is not found and no default value has been given. + """ + + +class WouldBlock(Exception): + """Raised by ``X_nowait`` functions if ``X()`` would block.""" + + +class NoEventLoopError(RuntimeError): + """ + Raised by several functions that require an event loop to be running in the current + thread when there is no running event loop. + + This is also raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` + if not calling from an AnyIO worker thread, and no ``token`` was passed. 
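For example (an illustrative sketch; the thread function is made up)::

    def plain_thread() -> None:
        # No AnyIO event loop is running in this thread and no token is
        # given, so this raises NoEventLoopError:
        anyio.from_thread.run_sync(print, "hello")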
+ """ + + +class RunFinishedError(RuntimeError): + """ + Raised by :func:`.from_thread.run` and :func:`.from_thread.run_sync` if the event + loop associated with the explicitly passed token has already finished. + """ + + def __init__(self) -> None: + super().__init__( + "The event loop associated with the given token has already finished" + ) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py b/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py new file mode 100644 index 0000000000000000000000000000000000000000..3bb8c845690818fbc92de6a3da31997a8da69244 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_fileio.py @@ -0,0 +1,799 @@ +from __future__ import annotations + +import os +import pathlib +import sys +from collections.abc import ( + AsyncIterator, + Callable, + Iterable, + Iterator, + Sequence, +) +from dataclasses import dataclass +from functools import partial +from os import PathLike +from typing import ( + IO, + TYPE_CHECKING, + Any, + AnyStr, + ClassVar, + Final, + Generic, + overload, +) + +from .. import to_thread +from ..abc import AsyncResource + +if TYPE_CHECKING: + from types import ModuleType + + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer +else: + ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object + + +class AsyncFile(AsyncResource, Generic[AnyStr]): + """ + An asynchronous file object. + + This class wraps a standard file object and provides async friendly versions of the + following blocking methods (where available on the original file object): + + * read + * read1 + * readline + * readlines + * readinto + * readinto1 + * write + * writelines + * truncate + * seek + * tell + * flush + + All other methods are directly passed through. + + This class supports the asynchronous context manager protocol which closes the + underlying file at the end of the context block. + + This class also supports asynchronous iteration:: + + async with await open_file(...) as f: + async for line in f: + print(line) + """ + + def __init__(self, fp: IO[AnyStr]) -> None: + self._fp: Any = fp + + def __getattr__(self, name: str) -> object: + return getattr(self._fp, name) + + @property + def wrapped(self) -> IO[AnyStr]: + """The wrapped file object.""" + return self._fp + + async def __aiter__(self) -> AsyncIterator[AnyStr]: + while True: + line = await self.readline() + if line: + yield line + else: + break + + async def aclose(self) -> None: + return await to_thread.run_sync(self._fp.close) + + async def read(self, size: int = -1) -> AnyStr: + return await to_thread.run_sync(self._fp.read, size) + + async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: + return await to_thread.run_sync(self._fp.read1, size) + + async def readline(self) -> AnyStr: + return await to_thread.run_sync(self._fp.readline) + + async def readlines(self) -> list[AnyStr]: + return await to_thread.run_sync(self._fp.readlines) + + async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto, b) + + async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int: + return await to_thread.run_sync(self._fp.readinto1, b) + + @overload + async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... + + @overload + async def write(self: AsyncFile[str], b: str) -> int: ... 
+ + async def write(self, b: ReadableBuffer | str) -> int: + return await to_thread.run_sync(self._fp.write, b) + + @overload + async def writelines( + self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] + ) -> None: ... + + @overload + async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... + + async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: + return await to_thread.run_sync(self._fp.writelines, lines) + + async def truncate(self, size: int | None = None) -> int: + return await to_thread.run_sync(self._fp.truncate, size) + + async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: + return await to_thread.run_sync(self._fp.seek, offset, whence) + + async def tell(self) -> int: + return await to_thread.run_sync(self._fp.tell) + + async def flush(self) -> None: + return await to_thread.run_sync(self._fp.flush) + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[bytes]: ... + + +@overload +async def open_file( + file: str | PathLike[str] | int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: Callable[[str, int], int] | None = ..., +) -> AsyncFile[str]: ... + + +async def open_file( + file: str | PathLike[str] | int, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + closefd: bool = True, + opener: Callable[[str, int], int] | None = None, +) -> AsyncFile[Any]: + """ + Open a file asynchronously. + + The arguments are exactly the same as for the builtin :func:`open`. + + :return: an asynchronous file object + + """ + fp = await to_thread.run_sync( + open, file, mode, buffering, encoding, errors, newline, closefd, opener + ) + return AsyncFile(fp) + + +def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: + """ + Wrap an existing file as an asynchronous file. + + :param file: an existing file-like object + :return: an asynchronous file object + + """ + return AsyncFile(file) + + +@dataclass(eq=False) +class _PathIterator(AsyncIterator["Path"]): + iterator: Iterator[PathLike[str]] + + async def __anext__(self) -> Path: + nextval = await to_thread.run_sync( + next, self.iterator, None, abandon_on_cancel=True + ) + if nextval is None: + raise StopAsyncIteration from None + + return Path(nextval) + + +class Path: + """ + An asynchronous version of :class:`pathlib.Path`. + + This class cannot be substituted for :class:`pathlib.Path` or + :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` + interface. + + It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for + the deprecated :meth:`~pathlib.Path.link_to` method. 
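A quick usage sketch (the file name is made up)::

    from anyio import Path

    async def main() -> None:
        config = Path("config.toml")
        if await config.exists():
            print(await config.read_text())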
+ + Some methods may be unavailable or have limited functionality, based on the Python + version: + + * :meth:`~pathlib.Path.copy` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later) + * :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later) + * :attr:`~pathlib.Path.info` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later) + * :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only + available on Python 3.13 or later) + * :meth:`~pathlib.Path.move` (available on Python 3.14 or later) + * :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later) + * :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available + on Python 3.12 or later) + * :meth:`~pathlib.Path.walk` (available on Python 3.12 or later) + + Any methods that do disk I/O need to be awaited on. These methods are: + + * :meth:`~pathlib.Path.absolute` + * :meth:`~pathlib.Path.chmod` + * :meth:`~pathlib.Path.cwd` + * :meth:`~pathlib.Path.exists` + * :meth:`~pathlib.Path.expanduser` + * :meth:`~pathlib.Path.group` + * :meth:`~pathlib.Path.hardlink_to` + * :meth:`~pathlib.Path.home` + * :meth:`~pathlib.Path.is_block_device` + * :meth:`~pathlib.Path.is_char_device` + * :meth:`~pathlib.Path.is_dir` + * :meth:`~pathlib.Path.is_fifo` + * :meth:`~pathlib.Path.is_file` + * :meth:`~pathlib.Path.is_junction` + * :meth:`~pathlib.Path.is_mount` + * :meth:`~pathlib.Path.is_socket` + * :meth:`~pathlib.Path.is_symlink` + * :meth:`~pathlib.Path.lchmod` + * :meth:`~pathlib.Path.lstat` + * :meth:`~pathlib.Path.mkdir` + * :meth:`~pathlib.Path.open` + * :meth:`~pathlib.Path.owner` + * :meth:`~pathlib.Path.read_bytes` + * :meth:`~pathlib.Path.read_text` + * :meth:`~pathlib.Path.readlink` + * :meth:`~pathlib.Path.rename` + * :meth:`~pathlib.Path.replace` + * :meth:`~pathlib.Path.resolve` + * :meth:`~pathlib.Path.rmdir` + * :meth:`~pathlib.Path.samefile` + * :meth:`~pathlib.Path.stat` + * :meth:`~pathlib.Path.symlink_to` + * :meth:`~pathlib.Path.touch` + * :meth:`~pathlib.Path.unlink` + * :meth:`~pathlib.Path.walk` + * :meth:`~pathlib.Path.write_bytes` + * :meth:`~pathlib.Path.write_text` + + Additionally, the following methods return an async iterator yielding + :class:`~.Path` objects: + + * :meth:`~pathlib.Path.glob` + * :meth:`~pathlib.Path.iterdir` + * :meth:`~pathlib.Path.rglob` + """ + + __slots__ = "_path", "__weakref__" + + __weakref__: Any + + def __init__(self, *args: str | PathLike[str]) -> None: + self._path: Final[pathlib.Path] = pathlib.Path(*args) + + def __fspath__(self) -> str: + return self._path.__fspath__() + + if sys.version_info >= (3, 15): + + def __vfspath__(self) -> str: + return self._path.__vfspath__() + + def __str__(self) -> str: + return self._path.__str__() + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.as_posix()!r})" + + def __bytes__(self) -> bytes: + return self._path.__bytes__() + + def __hash__(self) -> int: + return self._path.__hash__() + + def __eq__(self, other: object) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__eq__(target) + + def __lt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__lt__(target) + + def __le__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) 
else other + return self._path.__le__(target) + + def __gt__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__gt__(target) + + def __ge__(self, other: pathlib.PurePath | Path) -> bool: + target = other._path if isinstance(other, Path) else other + return self._path.__ge__(target) + + def __truediv__(self, other: str | PathLike[str]) -> Path: + return Path(self._path / other) + + def __rtruediv__(self, other: str | PathLike[str]) -> Path: + return Path(other) / self + + @property + def parts(self) -> tuple[str, ...]: + return self._path.parts + + @property + def drive(self) -> str: + return self._path.drive + + @property + def root(self) -> str: + return self._path.root + + @property + def anchor(self) -> str: + return self._path.anchor + + @property + def parents(self) -> Sequence[Path]: + return tuple(Path(p) for p in self._path.parents) + + @property + def parent(self) -> Path: + return Path(self._path.parent) + + @property + def name(self) -> str: + return self._path.name + + @property + def suffix(self) -> str: + return self._path.suffix + + @property + def suffixes(self) -> list[str]: + return self._path.suffixes + + @property + def stem(self) -> str: + return self._path.stem + + async def absolute(self) -> Path: + path = await to_thread.run_sync(self._path.absolute) + return Path(path) + + def as_posix(self) -> str: + return self._path.as_posix() + + def as_uri(self) -> str: + return self._path.as_uri() + + if sys.version_info >= (3, 13): + parser: ClassVar[ModuleType] = pathlib.Path.parser + + @classmethod + def from_uri(cls, uri: str) -> Path: + return Path(pathlib.Path.from_uri(uri)) + + def full_match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.full_match(path_pattern, case_sensitive=case_sensitive) + + def match( + self, path_pattern: str, *, case_sensitive: bool | None = None + ) -> bool: + return self._path.match(path_pattern, case_sensitive=case_sensitive) + else: + + def match(self, path_pattern: str) -> bool: + return self._path.match(path_pattern) + + if sys.version_info >= (3, 14): + + @property + def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it + return self._path.info + + async def copy( + self, + target: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy, + follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, pathlib.Path(target))) + + async def copy_into( + self, + target_dir: str | os.PathLike[str], + *, + follow_symlinks: bool = True, + preserve_metadata: bool = False, + ) -> Path: + func = partial( + self._path.copy_into, + follow_symlinks=follow_symlinks, + preserve_metadata=preserve_metadata, + ) + return Path(await to_thread.run_sync(func, pathlib.Path(target_dir))) + + async def move(self, target: str | os.PathLike[str]) -> Path: + # Upstream does not handle anyio.Path properly as a PathLike + target = pathlib.Path(target) + return Path(await to_thread.run_sync(self._path.move, target)) + + async def move_into( + self, + target_dir: str | os.PathLike[str], + ) -> Path: + return Path(await to_thread.run_sync(self._path.move_into, target_dir)) + + def is_relative_to(self, other: str | PathLike[str]) -> bool: + try: + self.relative_to(other) + return True + except ValueError: + return False + + async def chmod(self, mode: int, *, 
follow_symlinks: bool = True) -> None: + func = partial(os.chmod, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, mode) + + @classmethod + async def cwd(cls) -> Path: + path = await to_thread.run_sync(pathlib.Path.cwd) + return cls(path) + + async def exists(self) -> bool: + return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) + + async def expanduser(self) -> Path: + return Path( + await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) + ) + + if sys.version_info < (3, 12): + # Python 3.11 and earlier + def glob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.glob(pattern) + return _PathIterator(gen) + elif (3, 12) <= sys.version_info < (3, 13): + # changed in Python 3.12: + # - The case_sensitive parameter was added. + def glob( + self, + pattern: str, + *, + case_sensitive: bool | None = None, + ) -> AsyncIterator[Path]: + gen = self._path.glob(pattern, case_sensitive=case_sensitive) + return _PathIterator(gen) + elif sys.version_info >= (3, 13): + # Changed in Python 3.13: + # - The recurse_symlinks parameter was added. + # - The pattern parameter accepts a path-like object. + def glob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block + self, + pattern: str | PathLike[str], + *, + case_sensitive: bool | None = None, + recurse_symlinks: bool = False, + ) -> AsyncIterator[Path]: + gen = self._path.glob( + pattern, # type: ignore[arg-type] + case_sensitive=case_sensitive, + recurse_symlinks=recurse_symlinks, + ) + return _PathIterator(gen) + + async def group(self) -> str: + return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) + + async def hardlink_to( + self, target: str | bytes | PathLike[str] | PathLike[bytes] + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(os.link, target, self) + + @classmethod + async def home(cls) -> Path: + home_path = await to_thread.run_sync(pathlib.Path.home) + return cls(home_path) + + def is_absolute(self) -> bool: + return self._path.is_absolute() + + async def is_block_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_block_device, abandon_on_cancel=True + ) + + async def is_char_device(self) -> bool: + return await to_thread.run_sync( + self._path.is_char_device, abandon_on_cancel=True + ) + + async def is_dir(self) -> bool: + return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) + + async def is_fifo(self) -> bool: + return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) + + async def is_file(self) -> bool: + return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) + + if sys.version_info >= (3, 12): + + async def is_junction(self) -> bool: + return await to_thread.run_sync(self._path.is_junction) + + async def is_mount(self) -> bool: + return await to_thread.run_sync( + os.path.ismount, self._path, abandon_on_cancel=True + ) + + if sys.version_info < (3, 15): + + def is_reserved(self) -> bool: + return self._path.is_reserved() + + async def is_socket(self) -> bool: + return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) + + async def is_symlink(self) -> bool: + return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) + + async def iterdir(self) -> AsyncIterator[Path]: + gen = ( + self._path.iterdir() + if sys.version_info < (3, 13) + else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True) + ) + async for path 
in _PathIterator(gen): + yield path + + def joinpath(self, *args: str | PathLike[str]) -> Path: + return Path(self._path.joinpath(*args)) + + async def lchmod(self, mode: int) -> None: + await to_thread.run_sync(self._path.lchmod, mode) + + async def lstat(self) -> os.stat_result: + return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) + + async def mkdir( + self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False + ) -> None: + await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) + + @overload + async def open( + self, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[bytes]: ... + + @overload + async def open( + self, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + ) -> AsyncFile[str]: ... + + async def open( + self, + mode: str = "r", + buffering: int = -1, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> AsyncFile[Any]: + fp = await to_thread.run_sync( + self._path.open, mode, buffering, encoding, errors, newline + ) + return AsyncFile(fp) + + async def owner(self) -> str: + return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) + + async def read_bytes(self) -> bytes: + return await to_thread.run_sync(self._path.read_bytes) + + async def read_text( + self, encoding: str | None = None, errors: str | None = None + ) -> str: + return await to_thread.run_sync(self._path.read_text, encoding, errors) + + if sys.version_info >= (3, 12): + + def relative_to( + self, *other: str | PathLike[str], walk_up: bool = False + ) -> Path: + # relative_to() should work with any PathLike but it doesn't + others = [pathlib.Path(other) for other in other] + return Path(self._path.relative_to(*others, walk_up=walk_up)) + + else: + + def relative_to(self, *other: str | PathLike[str]) -> Path: + return Path(self._path.relative_to(*other)) + + async def readlink(self) -> Path: + target = await to_thread.run_sync(os.readlink, self._path) + return Path(target) + + async def rename(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.rename, target) + return Path(target) + + async def replace(self, target: str | pathlib.PurePath | Path) -> Path: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.replace, target) + return Path(target) + + async def resolve(self, strict: bool = False) -> Path: + func = partial(self._path.resolve, strict=strict) + return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) + + if sys.version_info < (3, 12): + # Pre Python 3.12 + def rglob(self, pattern: str) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern) + return _PathIterator(gen) + elif (3, 12) <= sys.version_info < (3, 13): + # Changed in Python 3.12: + # - The case_sensitive parameter was added. + def rglob( + self, pattern: str, *, case_sensitive: bool | None = None + ) -> AsyncIterator[Path]: + gen = self._path.rglob(pattern, case_sensitive=case_sensitive) + return _PathIterator(gen) + elif sys.version_info >= (3, 13): + # Changed in Python 3.13: + # - The recurse_symlinks parameter was added. + # - The pattern parameter accepts a path-like object. 
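# Editorial sketch (not upstream code): whichever branch is selected at
# import time, call sites look the same:
#
#     async for path in Path(".").rglob("*.py"):
#         print(path)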
+ def rglob( # type: ignore[misc] # mypy doesn't allow for differing signatures in a conditional block + self, + pattern: str | PathLike[str], + *, + case_sensitive: bool | None = None, + recurse_symlinks: bool = False, + ) -> AsyncIterator[Path]: + gen = self._path.rglob( + pattern, # type: ignore[arg-type] + case_sensitive=case_sensitive, + recurse_symlinks=recurse_symlinks, + ) + return _PathIterator(gen) + + async def rmdir(self) -> None: + await to_thread.run_sync(self._path.rmdir) + + async def samefile(self, other_path: str | PathLike[str]) -> bool: + if isinstance(other_path, Path): + other_path = other_path._path + + return await to_thread.run_sync( + self._path.samefile, other_path, abandon_on_cancel=True + ) + + async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: + func = partial(os.stat, follow_symlinks=follow_symlinks) + return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) + + async def symlink_to( + self, + target: str | bytes | PathLike[str] | PathLike[bytes], + target_is_directory: bool = False, + ) -> None: + if isinstance(target, Path): + target = target._path + + await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) + + async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: + await to_thread.run_sync(self._path.touch, mode, exist_ok) + + async def unlink(self, missing_ok: bool = False) -> None: + try: + await to_thread.run_sync(self._path.unlink) + except FileNotFoundError: + if not missing_ok: + raise + + if sys.version_info >= (3, 12): + + async def walk( + self, + top_down: bool = True, + on_error: Callable[[OSError], object] | None = None, + follow_symlinks: bool = False, + ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: + def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: + try: + return next(gen) + except StopIteration: + return None + + gen = self._path.walk(top_down, on_error, follow_symlinks) + while True: + value = await to_thread.run_sync(get_next_value) + if value is None: + return + + root, dirs, paths = value + yield Path(root), dirs, paths + + def with_name(self, name: str) -> Path: + return Path(self._path.with_name(name)) + + def with_stem(self, stem: str) -> Path: + return Path(self._path.with_name(stem + self._path.suffix)) + + def with_suffix(self, suffix: str) -> Path: + return Path(self._path.with_suffix(suffix)) + + def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: + return Path(*pathsegments) + + async def write_bytes(self, data: bytes) -> int: + return await to_thread.run_sync(self._path.write_bytes, data) + + async def write_text( + self, + data: str, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> int: + return await to_thread.run_sync( + self._path.write_text, data, encoding, errors, newline + ) + + +PathLike.register(Path) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_resources.py b/venv/lib/python3.12/site-packages/anyio/_core/_resources.py new file mode 100644 index 0000000000000000000000000000000000000000..b9a5344aef2962670f9b305a02cd0b11f2087d2f --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_resources.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from ..abc import AsyncResource +from ._tasks import CancelScope + + +async def aclose_forcefully(resource: AsyncResource) -> None: + """ + Close an asynchronous resource in a cancelled scope. + + Doing this closes the resource without waiting on anything. 
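For example (sketch; any :class:`~anyio.abc.AsyncResource` works here)::

    stream = await anyio.connect_tcp("example.org", 80)
    await aclose_forcefully(stream)  # skips any graceful shutdown steps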
+ + :param resource: the resource to close + + """ + with CancelScope() as scope: + scope.cancel() + await resource.aclose() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_signals.py b/venv/lib/python3.12/site-packages/anyio/_core/_signals.py new file mode 100644 index 0000000000000000000000000000000000000000..e24c79e10d4b76775679f7dd0dbe3f5860150451 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_signals.py @@ -0,0 +1,29 @@ +from __future__ import annotations + +from collections.abc import AsyncIterator +from contextlib import AbstractContextManager +from signal import Signals + +from ._eventloop import get_async_backend + + +def open_signal_receiver( + *signals: Signals, +) -> AbstractContextManager[AsyncIterator[Signals]]: + """ + Start receiving operating system signals. + + :param signals: signals to receive (e.g. ``signal.SIGINT``) + :return: an asynchronous context manager for an asynchronous iterator which yields + signal numbers + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. warning:: Windows does not support signals natively so it is best to avoid + relying on this in cross-platform applications. + + .. warning:: On asyncio, this permanently replaces any previous signal handler for + the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. + + """ + return get_async_backend().open_signal_receiver(*signals) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py b/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py new file mode 100644 index 0000000000000000000000000000000000000000..6c99b3a1c1c7a5beee07aa5cf053149f8b5b9e2f --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_sockets.py @@ -0,0 +1,1003 @@ +from __future__ import annotations + +import errno +import os +import socket +import ssl +import stat +import sys +from collections.abc import Awaitable +from dataclasses import dataclass +from ipaddress import IPv4Address, IPv6Address, ip_address +from os import PathLike, chmod +from socket import AddressFamily, SocketKind +from typing import TYPE_CHECKING, Any, Literal, cast, overload + +from .. 
import ConnectionFailed, to_thread
+from ..abc import (
+    ByteStreamConnectable,
+    ConnectedUDPSocket,
+    ConnectedUNIXDatagramSocket,
+    IPAddressType,
+    IPSockAddrType,
+    SocketListener,
+    SocketStream,
+    UDPSocket,
+    UNIXDatagramSocket,
+    UNIXSocketStream,
+)
+from ..streams.stapled import MultiListener
+from ..streams.tls import TLSConnectable, TLSStream
+from ._eventloop import get_async_backend
+from ._resources import aclose_forcefully
+from ._synchronization import Event
+from ._tasks import create_task_group, move_on_after
+
+if TYPE_CHECKING:
+    from _typeshed import FileDescriptorLike
+else:
+    FileDescriptorLike = object
+
+if sys.version_info < (3, 11):
+    from exceptiongroup import ExceptionGroup
+
+if sys.version_info >= (3, 12):
+    from typing import override
+else:
+    from typing_extensions import override
+
+if sys.version_info < (3, 13):
+    from typing_extensions import deprecated
+else:
+    from warnings import deprecated
+
+IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41)  # https://bugs.python.org/issue29515
+
+AnyIPAddressFamily = Literal[
+    AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
+]
+IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
+
+
+# tls_hostname given
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    ssl_context: ssl.SSLContext | None = ...,
+    tls_standard_compatible: bool = ...,
+    tls_hostname: str,
+    happy_eyeballs_delay: float = ...,
+) -> TLSStream: ...
+
+
+# ssl_context given
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    ssl_context: ssl.SSLContext,
+    tls_standard_compatible: bool = ...,
+    tls_hostname: str | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> TLSStream: ...
+
+
+# tls=True
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    tls: Literal[True],
+    ssl_context: ssl.SSLContext | None = ...,
+    tls_standard_compatible: bool = ...,
+    tls_hostname: str | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> TLSStream: ...
+
+
+# tls=False
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    tls: Literal[False],
+    ssl_context: ssl.SSLContext | None = ...,
+    tls_standard_compatible: bool = ...,
+    tls_hostname: str | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> SocketStream: ...
+
+
+# No TLS arguments
+@overload
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = ...,
+    happy_eyeballs_delay: float = ...,
+) -> SocketStream: ...
+
+
+async def connect_tcp(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    local_host: IPAddressType | None = None,
+    tls: bool = False,
+    ssl_context: ssl.SSLContext | None = None,
+    tls_standard_compatible: bool = True,
+    tls_hostname: str | None = None,
+    happy_eyeballs_delay: float = 0.25,
+) -> SocketStream | TLSStream:
+    """
+    Connect to a host using the TCP protocol.
+
+    This function implements the stateless version of the Happy Eyeballs algorithm (RFC
+    6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
+    each one is tried until one connection attempt succeeds. If the first attempt does
+    not connect within 250 milliseconds, a second attempt is started using the next
+    address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
+    available) is tried first.
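+
+    A minimal usage sketch (illustrative only; the host and port are made up)::
+
+        import anyio
+
+        async def main() -> None:
+            async with await anyio.connect_tcp("example.com", 80) as stream:
+                await stream.send(b"HEAD / HTTP/1.0\r\n\r\n")
+                print(await stream.receive())
+
+        anyio.run(main)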
+
+    When the connection has been established, a TLS handshake will be done if either
+    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.
+
+    :param remote_host: the IP address or host name to connect to
+    :param remote_port: port on the target host to connect to
+    :param local_host: the interface address or name to bind the socket to before
+        connecting
+    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
+        :class:`~anyio.streams.tls.TLSStream` instead
+    :param ssl_context: the SSL context object to use (if omitted, a default context is
+        created)
+    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake
+        before closing the stream and requires that the server does this as well.
+        Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
+        Some protocols, such as HTTP, require this option to be ``False``.
+        See :meth:`~ssl.SSLContext.wrap_socket` for details.
+    :param tls_hostname: host name to check the server certificate against (defaults to
+        the value of ``remote_host``)
+    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection
+        attempt
+    :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
+    :raises ConnectionFailed: if the connection fails
+
+    """
+    # Placed here due to https://github.com/python/mypy/issues/7057
+    connected_stream: SocketStream | None = None
+
+    async def try_connect(remote_host: str, event: Event) -> None:
+        nonlocal connected_stream
+        try:
+            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
+        except OSError as exc:
+            oserrors.append(exc)
+            return
+        else:
+            if connected_stream is None:
+                connected_stream = stream
+                tg.cancel_scope.cancel()
+            else:
+                await stream.aclose()
+        finally:
+            event.set()
+
+    asynclib = get_async_backend()
+    local_address: IPSockAddrType | None = None
+    family = socket.AF_UNSPEC
+    if local_host:
+        gai_res = await getaddrinfo(str(local_host), None)
+        family, *_, local_address = gai_res[0]
+
+    target_host = str(remote_host)
+    try:
+        addr_obj = ip_address(remote_host)
+    except ValueError:
+        addr_obj = None
+
+    if addr_obj is not None:
+        if isinstance(addr_obj, IPv6Address):
+            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
+        else:
+            target_addrs = [(socket.AF_INET, addr_obj.compressed)]
+    else:
+        # getaddrinfo() will raise an exception if name resolution fails
+        gai_res = await getaddrinfo(
+            target_host, remote_port, family=family, type=socket.SOCK_STREAM
+        )
+
+        # Organize the list so that the first address is an IPv6 address (if available)
+        # and the second one is an IPv4 address. The rest can be in whatever order.
+ v6_found = v4_found = False + target_addrs = [] + for af, *_, sa in gai_res: + if af == socket.AF_INET6 and not v6_found: + v6_found = True + target_addrs.insert(0, (af, sa[0])) + elif af == socket.AF_INET and not v4_found and v6_found: + v4_found = True + target_addrs.insert(1, (af, sa[0])) + else: + target_addrs.append((af, sa[0])) + + oserrors: list[OSError] = [] + try: + async with create_task_group() as tg: + for _af, addr in target_addrs: + event = Event() + tg.start_soon(try_connect, addr, event) + with move_on_after(happy_eyeballs_delay): + await event.wait() + + if connected_stream is None: + cause = ( + oserrors[0] + if len(oserrors) == 1 + else ExceptionGroup("multiple connection attempts failed", oserrors) + ) + raise OSError("All connection attempts failed") from cause + finally: + oserrors.clear() + + if tls or tls_hostname or ssl_context: + try: + return await TLSStream.wrap( + connected_stream, + server_side=False, + hostname=tls_hostname or str(remote_host), + ssl_context=ssl_context, + standard_compatible=tls_standard_compatible, + ) + except BaseException: + await aclose_forcefully(connected_stream) + raise + + return connected_stream + + +async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: + """ + Connect to the given UNIX socket. + + Not available on Windows. + + :param path: path to the socket + :return: a socket stream object + :raises ConnectionFailed: if the connection fails + + """ + path = os.fspath(path) + return await get_async_backend().connect_unix(path) + + +async def create_tcp_listener( + *, + local_host: IPAddressType | None = None, + local_port: int = 0, + family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, + backlog: int = 65536, + reuse_port: bool = False, +) -> MultiListener[SocketStream]: + """ + Create a TCP socket listener. + + :param local_port: port number to listen on + :param local_host: IP address of the interface to listen on. If omitted, listen on + all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address + family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. + :param family: address family (used if ``local_host`` was omitted) + :param backlog: maximum number of queued incoming connections (up to a maximum of + 2**16, or 65536) + :param reuse_port: ``True`` to allow multiple sockets to bind to the same + address/port (not supported on Windows) + :return: a multi-listener object containing one or more socket listeners + :raises OSError: if there's an error creating a socket, or binding to one or more + interfaces failed + + """ + asynclib = get_async_backend() + backlog = min(backlog, 65536) + local_host = str(local_host) if local_host is not None else None + + def setup_raw_socket( + fam: AddressFamily, + bind_addr: tuple[str, int] | tuple[str, int, int, int], + *, + v6only: bool = True, + ) -> socket.socket: + sock = socket.socket(fam) + try: + sock.setblocking(False) + + if fam == AddressFamily.AF_INET6: + sock.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, v6only) + + # For Windows, enable exclusive address use. For others, enable address + # reuse. 
+            if sys.platform == "win32":
+                sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
+            else:
+                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
+            if reuse_port:
+                sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+
+            # Workaround for #554
+            if fam == socket.AF_INET6 and "%" in bind_addr[0]:
+                addr, scope_id = bind_addr[0].split("%", 1)
+                bind_addr = (addr, bind_addr[1], 0, int(scope_id))
+
+            sock.bind(bind_addr)
+            sock.listen(backlog)
+        except BaseException:
+            sock.close()
+            raise
+
+        return sock
+
+    # We pass type=0 on non-Windows platforms as a workaround for a uvloop bug
+    # where we don't get the correct scope ID for IPv6 link-local addresses when passing
+    # type=socket.SOCK_STREAM to getaddrinfo():
+    # https://github.com/MagicStack/uvloop/issues/539
+    gai_res = await getaddrinfo(
+        local_host,
+        local_port,
+        family=family,
+        type=socket.SOCK_STREAM if sys.platform == "win32" else 0,
+        flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+    )
+
+    # The set comprehension is here to work around a glibc bug:
+    # https://sourceware.org/bugzilla/show_bug.cgi?id=14969
+    sockaddrs = sorted({res for res in gai_res if res[1] == SocketKind.SOCK_STREAM})
+
+    # Special case for dual-stack binding on the "any" interface
+    if (
+        local_host is None
+        and family == AddressFamily.AF_UNSPEC
+        and socket.has_dualstack_ipv6()
+        and any(fam == AddressFamily.AF_INET6 for fam, *_ in gai_res)
+    ):
+        raw_socket = setup_raw_socket(
+            AddressFamily.AF_INET6, ("::", local_port), v6only=False
+        )
+        listener = asynclib.create_tcp_listener(raw_socket)
+        return MultiListener([listener])
+
+    errors: list[OSError] = []
+    try:
+        for _ in range(len(sockaddrs)):
+            listeners: list[SocketListener] = []
+            bound_ephemeral_port = local_port
+            try:
+                for fam, *_, sockaddr in sockaddrs:
+                    sockaddr = sockaddr[0], bound_ephemeral_port, *sockaddr[2:]
+                    raw_socket = setup_raw_socket(fam, sockaddr)
+
+                    # Store the assigned port if an ephemeral port was requested, so
+                    # we'll bind to the same port on all interfaces
+                    if local_port == 0 and len(gai_res) > 1:
+                        bound_ephemeral_port = raw_socket.getsockname()[1]
+
+                    listeners.append(asynclib.create_tcp_listener(raw_socket))
+            except BaseException as exc:
+                for listener in listeners:
+                    await listener.aclose()
+
+                # If an ephemeral port was requested but binding the assigned port
+                # failed for another interface, rotate the address list and try again
+                if (
+                    isinstance(exc, OSError)
+                    and exc.errno == errno.EADDRINUSE
+                    and local_port == 0
+                    and bound_ephemeral_port
+                ):
+                    errors.append(exc)
+                    sockaddrs.append(sockaddrs.pop(0))
+                    continue
+
+                raise
+
+            return MultiListener(listeners)
+
+        raise OSError(
+            f"Could not create {len(sockaddrs)} listeners with a consistent port"
+        ) from ExceptionGroup("Several bind attempts failed", errors)
+    finally:
+        del errors  # Prevent reference cycles
+
+
+async def create_unix_listener(
+    path: str | bytes | PathLike[Any],
+    *,
+    mode: int | None = None,
+    backlog: int = 65536,
+) -> SocketListener:
+    """
+    Create a UNIX socket listener.
+
+    Not available on Windows.
+
+    :param path: path of the socket
+    :param mode: permissions to set on the socket
+    :param backlog: maximum number of queued incoming connections (up to a maximum of
+        2**16, or 65536)
+    :return: a listener object
+
+    .. versionchanged:: 3.0
+        If a socket already exists on the file system in the given path, it will be
+        removed first.
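+
+    Example (an illustrative sketch, not from the upstream docs)::
+
+        async def handle(client):
+            async with client:
+                await client.send(b"hello\n")
+
+        listener = await create_unix_listener("/tmp/app.sock")
+        await listener.serve(handle)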
+
+    """
+    backlog = min(backlog, 65536)
+    raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM)
+    try:
+        raw_socket.listen(backlog)
+        return get_async_backend().create_unix_listener(raw_socket)
+    except BaseException:
+        raw_socket.close()
+        raise
+
+
+async def create_udp_socket(
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    *,
+    local_host: IPAddressType | None = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> UDPSocket:
+    """
+    Create a UDP socket.
+
+    If ``local_port`` has been given, the socket will be bound to this port on the local
+    machine, making this socket suitable for providing UDP based services.
+
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a UDP socket
+
+    """
+    if family is AddressFamily.AF_UNSPEC and not local_host:
+        raise ValueError('Either "family" or "local_host" must be given')
+
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+    elif family is AddressFamily.AF_INET6:
+        local_address = ("::", 0)
+    else:
+        local_address = ("0.0.0.0", 0)
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, None, reuse_port
+    )
+    return cast(UDPSocket, sock)
+
+
+async def create_connected_udp_socket(
+    remote_host: IPAddressType,
+    remote_port: int,
+    *,
+    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
+    local_host: IPAddressType | None = None,
+    local_port: int = 0,
+    reuse_port: bool = False,
+) -> ConnectedUDPSocket:
+    """
+    Create a connected UDP socket.
+
+    Connected UDP sockets can only communicate with the specified remote host/port, and
+    any packets sent from other sources are dropped.
+
+    :param remote_host: remote host to set as the default target
+    :param remote_port: port on the remote host to set as the default target
+    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
+        determined from ``local_host`` or ``remote_host`` if omitted
+    :param local_host: IP address or host name of the local interface to bind to
+    :param local_port: local port to bind to
+    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
+        address/port (not supported on Windows)
+    :return: a connected UDP socket
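+
+    Example (an illustrative sketch; the address and port are made up)::
+
+        udp = await create_connected_udp_socket("192.168.1.10", 9999)
+        async with udp:
+            await udp.send(b"ping")
+            reply = await udp.receive()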
+
+    """
+    local_address = None
+    if local_host:
+        gai_res = await getaddrinfo(
+            str(local_host),
+            local_port,
+            family=family,
+            type=socket.SOCK_DGRAM,
+            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
+        )
+        family = cast(AnyIPAddressFamily, gai_res[0][0])
+        local_address = gai_res[0][-1]
+
+    gai_res = await getaddrinfo(
+        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
+    )
+    family = cast(AnyIPAddressFamily, gai_res[0][0])
+    remote_address = gai_res[0][-1]
+
+    sock = await get_async_backend().create_udp_socket(
+        family, local_address, remote_address, reuse_port
+    )
+    return cast(ConnectedUDPSocket, sock)
+
+
+async def create_unix_datagram_socket(
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> UNIXDatagramSocket:
+    """
+    Create a UNIX datagram socket.
+
+    Not available on Windows.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making this
+    socket suitable for receiving datagrams from other processes. Other processes can
+    send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
+
+    :param local_path: the path on which to bind to
+    :param local_mode: permissions to set on the local socket
+    :return: a UNIX datagram socket
+
+    """
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(raw_socket, None)
+
+
+async def create_connected_unix_datagram_socket(
+    remote_path: str | bytes | PathLike[Any],
+    *,
+    local_path: None | str | bytes | PathLike[Any] = None,
+    local_mode: int | None = None,
+) -> ConnectedUNIXDatagramSocket:
+    """
+    Create a connected UNIX datagram socket.
+
+    Connected datagram sockets can only communicate with the specified remote path.
+
+    If ``local_path`` has been given, the socket will be bound to this path, making
+    this socket suitable for receiving datagrams from other processes. Other processes
+    can send datagrams to this socket only if ``local_path`` is set.
+
+    If a socket already exists on the file system in the ``local_path``, it will be
+    removed first.
+
+    :param remote_path: the path to set as the default target
+    :param local_path: the path on which to bind to
+    :param local_mode: permissions to set on the local socket
+    :return: a connected UNIX datagram socket
+
+    """
+    remote_path = os.fspath(remote_path)
+    raw_socket = await setup_unix_local_socket(
+        local_path, local_mode, socket.SOCK_DGRAM
+    )
+    return await get_async_backend().create_unix_datagram_socket(
+        raw_socket, remote_path
+    )
+
+
+async def getaddrinfo(
+    host: bytes | str | None,
+    port: str | int | None,
+    *,
+    family: int | AddressFamily = 0,
+    type: int | SocketKind = 0,
+    proto: int = 0,
+    flags: int = 0,
+) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
+    """
+    Look up a numeric IP address given a host name.
+
+    Internationalized domain names are translated according to the (non-transitional)
+    IDNA 2008 standard.
+
+    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
+        (host, port), unlike what :func:`socket.getaddrinfo` does.
+
+    :param host: host name
+    :param port: port number
+    :param family: socket family (``AF_INET``, ...)
+    :param type: socket type (``SOCK_STREAM``, ...)
+    :param proto: protocol number
+    :param flags: flags to pass to upstream ``getaddrinfo()``
+    :return: list of tuples containing (family, type, proto, canonname, sockaddr)
+
+    .. seealso:: :func:`socket.getaddrinfo`
+
+    """
+    # Handle unicode hostnames
+    if isinstance(host, str):
+        try:
+            encoded_host: bytes | None = host.encode("ascii")
+        except UnicodeEncodeError:
+            import idna
+
+            encoded_host = idna.encode(host, uts46=True)
+    else:
+        encoded_host = host
+
+    gai_res = await get_async_backend().getaddrinfo(
+        encoded_host, port, family=family, type=type, proto=proto, flags=flags
+    )
+    return [
+        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
+        for family, type, proto, canonname, sockaddr in gai_res
+        # filter out IPv6 results when IPv6 is disabled
+        if not isinstance(sockaddr[0], int)
+    ]
+
+
+def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
+    """
+    Look up the host name of an IP address.
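+
+    Example (an illustrative sketch)::
+
+        host, service = await getnameinfo(("127.0.0.1", 80))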
+ + :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) + :param flags: flags to pass to upstream ``getnameinfo()`` + :return: a tuple of (host name, service name) + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. seealso:: :func:`socket.getnameinfo` + + """ + return get_async_backend().getnameinfo(sockaddr, flags) + + +@deprecated("This function is deprecated; use `wait_readable` instead") +def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_readable` instead. + + Wait until the given socket has data to be read. + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become readable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().wait_readable(sock.fileno()) + + +@deprecated("This function is deprecated; use `wait_writable` instead") +def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: + """ + .. deprecated:: 4.7.0 + Use :func:`wait_writable` instead. + + Wait until the given socket can be written to. + + This does **NOT** work on Windows when using the asyncio backend with a proactor + event loop (default on py3.8+). + + .. warning:: Only use this on raw sockets that have not been wrapped by any higher + level constructs like socket streams! + + :param sock: a socket object + :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the + socket to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the socket + to become writable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().wait_writable(sock.fileno()) + + +def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object has data to be read. + + On Unix systems, ``obj`` must either be an integer file descriptor, or else an + object with a ``.fileno()`` method which returns an integer file descriptor. Any + kind of file descriptor can be passed, though the exact semantics will depend on + your kernel. For example, this probably won't do anything useful for on-disk files. + + On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an + object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File + descriptors aren't supported, and neither are handles that refer to anything besides + a ``SOCKET``. + + On backends where this functionality is not natively provided (asyncio + ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread + which is set to shut down when the interpreter shuts down. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! 
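+
+    A usage sketch (illustrative only), with ``sock`` being a raw, unwrapped
+    socket::
+
+        await wait_readable(sock)
+        data = sock.recv(4096)  # should not block at this point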
+ + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become readable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become readable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().wait_readable(obj) + + +def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]: + """ + Wait until the given object can be written to. + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the + object to become writable + :raises ~anyio.BusyResourceError: if another task is already waiting for the object + to become writable + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + .. seealso:: See the documentation of :func:`wait_readable` for the definition of + ``obj`` and notes on backend compatibility. + + .. warning:: Don't use this on raw sockets that have been wrapped by any higher + level constructs like socket streams! + + """ + return get_async_backend().wait_writable(obj) + + +def notify_closing(obj: FileDescriptorLike) -> None: + """ + Call this before closing a file descriptor (on Unix) or socket (on + Windows). This will cause any `wait_readable` or `wait_writable` + calls on the given object to immediately wake up and raise + `~anyio.ClosedResourceError`. + + This doesn't actually close the object – you still have to do that + yourself afterwards. Also, you want to be careful to make sure no + new tasks start waiting on the object in between when you call this + and when it's actually closed. So to close something properly, you + usually want to do these steps in order: + + 1. Explicitly mark the object as closed, so that any new attempts + to use it will abort before they start. + 2. Call `notify_closing` to wake up any already-existing users. + 3. Actually close the object. + + It's also possible to do them in a different order if that's more + convenient, *but only if* you make sure not to have any checkpoints in + between the steps. This way they all happen in a single atomic + step, so other tasks won't be able to tell what order they happened + in anyway. + + :param obj: an object with a ``.fileno()`` method or an integer handle + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + get_async_backend().notify_closing(obj) + + +# +# Private API +# + + +def convert_ipv6_sockaddr( + sockaddr: tuple[str, int, int, int] | tuple[str, int], +) -> tuple[str, int]: + """ + Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. + + If the scope ID is nonzero, it is added to the address, separated with ``%``. + Otherwise the flow id and scope id are simply cut off from the tuple. + Any other kinds of socket addresses are returned as-is. 
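+
+    For example (illustrative)::
+
+        convert_ipv6_sockaddr(("fe80::1", 80, 0, 3))  # -> ("fe80::1%3", 80)
+        convert_ipv6_sockaddr(("127.0.0.1", 80))      # -> ("127.0.0.1", 80)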
+ + :param sockaddr: the result of :meth:`~socket.socket.getsockname` + :return: the converted socket address + + """ + # This is more complicated than it should be because of MyPy + if isinstance(sockaddr, tuple) and len(sockaddr) == 4: + host, port, flowinfo, scope_id = sockaddr + if scope_id: + # PyPy (as of v7.3.11) leaves the interface name in the result, so + # we discard it and only get the scope ID from the end + # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) + host = host.split("%")[0] + + # Add scope_id to the address + return f"{host}%{scope_id}", port + else: + return host, port + else: + return sockaddr + + +async def setup_unix_local_socket( + path: None | str | bytes | PathLike[Any], + mode: int | None, + socktype: int, +) -> socket.socket: + """ + Create a UNIX local socket object, deleting the socket at the given path if it + exists. + + Not available on Windows. + + :param path: path of the socket + :param mode: permissions to set on the socket + :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM + + """ + path_str: str | None + if path is not None: + path_str = os.fsdecode(path) + + # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call + if not path_str.startswith("\0"): + # Copied from pathlib... + try: + stat_result = os.stat(path) + except OSError as e: + if e.errno not in ( + errno.ENOENT, + errno.ENOTDIR, + errno.EBADF, + errno.ELOOP, + ): + raise + else: + if stat.S_ISSOCK(stat_result.st_mode): + os.unlink(path) + else: + path_str = None + + raw_socket = socket.socket(socket.AF_UNIX, socktype) + raw_socket.setblocking(False) + + if path_str is not None: + try: + await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) + if mode is not None: + await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) + except BaseException: + raw_socket.close() + raise + + return raw_socket + + +@dataclass +class TCPConnectable(ByteStreamConnectable): + """ + Connects to a TCP server at the given host and port. + + :param host: host name or IP address of the server + :param port: TCP port number of the server + """ + + host: str | IPv4Address | IPv6Address + port: int + + def __post_init__(self) -> None: + if self.port < 1 or self.port > 65535: + raise ValueError("TCP port number out of range") + + @override + async def connect(self) -> SocketStream: + try: + return await connect_tcp(self.host, self.port) + except OSError as exc: + raise ConnectionFailed( + f"error connecting to {self.host}:{self.port}: {exc}" + ) from exc + + +@dataclass +class UNIXConnectable(ByteStreamConnectable): + """ + Connects to a UNIX domain socket at the given path. + + :param path: the file system path of the socket + """ + + path: str | bytes | PathLike[str] | PathLike[bytes] + + @override + async def connect(self) -> UNIXSocketStream: + try: + return await connect_unix(self.path) + except OSError as exc: + raise ConnectionFailed(f"error connecting to {self.path!r}: {exc}") from exc + + +def as_connectable( + remote: ByteStreamConnectable + | tuple[str | IPv4Address | IPv6Address, int] + | str + | bytes + | PathLike[str], + /, + *, + tls: bool = False, + ssl_context: ssl.SSLContext | None = None, + tls_hostname: str | None = None, + tls_standard_compatible: bool = True, +) -> ByteStreamConnectable: + """ + Return a byte stream connectable from the given object. + + If a bytestream connectable is given, it is returned unchanged. + If a tuple of (host, port) is given, a TCP connectable is returned. 
If a string or bytes path is given, a UNIX connectable is returned.
+
+    If ``tls=True``, the connectable will be wrapped in a
+    :class:`~.streams.tls.TLSConnectable`.
+
+    :param remote: a connectable, a tuple of (host, port) or a path to a UNIX socket
+    :param tls: if ``True``, wrap the plaintext connectable in a
+        :class:`~.streams.tls.TLSConnectable`, using the provided TLS settings
+    :param ssl_context: if ``tls=True``, the SSLContext object to use (if not provided,
+        a secure default will be created)
+    :param tls_hostname: if ``tls=True``, host name of the server to use for checking
+        the server certificate (defaults to the host portion of the address for TCP
+        connectables)
+    :param tls_standard_compatible: if ``False`` and ``tls=True``, makes the TLS stream
+        skip the closing handshake when closing the connection, so it won't raise an
+        exception if the server does the same
+
+    """
+    connectable: TCPConnectable | UNIXConnectable | TLSConnectable
+    if isinstance(remote, ByteStreamConnectable):
+        return remote
+    elif isinstance(remote, tuple) and len(remote) == 2:
+        connectable = TCPConnectable(*remote)
+    elif isinstance(remote, (str, bytes, PathLike)):
+        connectable = UNIXConnectable(remote)
+    else:
+        raise TypeError(f"cannot convert {remote!r} to a connectable")
+
+    if tls:
+        if not tls_hostname and isinstance(connectable, TCPConnectable):
+            tls_hostname = str(connectable.host)
+
+        connectable = TLSConnectable(
+            connectable,
+            ssl_context=ssl_context,
+            hostname=tls_hostname,
+            standard_compatible=tls_standard_compatible,
+        )
+
+    return connectable
diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_streams.py b/venv/lib/python3.12/site-packages/anyio/_core/_streams.py
new file mode 100644
index 0000000000000000000000000000000000000000..2b9c7df200f9520357503c754bcdea1c047bdda3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_core/_streams.py
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+import math
+from typing import TypeVar
+from warnings import warn
+
+from ..streams.memory import (
+    MemoryObjectReceiveStream,
+    MemoryObjectSendStream,
+    _MemoryObjectStreamState,
+)
+
+T_Item = TypeVar("T_Item")
+
+
+class create_memory_object_stream(
+    tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
+):
+    """
+    Create a memory object stream.
+
+    The stream's item type can be annotated like
+    :func:`create_memory_object_stream[T_Item]`.
+
+    :param max_buffer_size: number of items held in the buffer until ``send()`` starts
+        blocking
+    :param item_type: old way of marking the streams with the right generic type for
+        static typing (does nothing on AnyIO 4)
+
+    .. deprecated:: 4.0
+       Use ``create_memory_object_stream[YourItemType](...)`` instead.
+    :return: a tuple of (send stream, receive stream)
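+
+    Example (an illustrative sketch)::
+
+        send, receive = create_memory_object_stream[str](max_buffer_size=10)
+        await send.send("hello")
+        assert await receive.receive() == "hello"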
+
+    """
+
+    def __new__(  # type: ignore[misc]
+        cls, max_buffer_size: float = 0, item_type: object = None
+    ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
+        if max_buffer_size != math.inf and not isinstance(max_buffer_size, int):
+            raise ValueError("max_buffer_size must be either an integer or math.inf")
+        if max_buffer_size < 0:
+            raise ValueError("max_buffer_size cannot be negative")
+        if item_type is not None:
+            warn(
+                "The item_type argument has been deprecated in AnyIO 4.0. "
+                "Use create_memory_object_stream[YourItemType](...) instead.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+
+        state = _MemoryObjectStreamState[T_Item](max_buffer_size)
+        return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state))
diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py b/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py
new file mode 100644
index 0000000000000000000000000000000000000000..9796f8bb99403616fcb0b764a0f8283792599e45
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_core/_subprocesses.py
@@ -0,0 +1,196 @@
+from __future__ import annotations
+
+from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
+from io import BytesIO
+from os import PathLike
+from subprocess import PIPE, CalledProcessError, CompletedProcess
+from typing import IO, Any, TypeAlias, cast
+
+from ..abc import Process
+from ._eventloop import get_async_backend
+from ._tasks import create_task_group
+
+StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]
+
+
+async def run_process(
+    command: StrOrBytesPath | Sequence[StrOrBytesPath],
+    *,
+    input: bytes | None = None,
+    stdin: int | IO[Any] | None = None,
+    stdout: int | IO[Any] | None = PIPE,
+    stderr: int | IO[Any] | None = PIPE,
+    check: bool = True,
+    cwd: StrOrBytesPath | None = None,
+    env: Mapping[str, str] | None = None,
+    startupinfo: Any = None,
+    creationflags: int = 0,
+    start_new_session: bool = False,
+    pass_fds: Sequence[int] = (),
+    user: str | int | None = None,
+    group: str | int | None = None,
+    extra_groups: Iterable[str | int] | None = None,
+    umask: int = -1,
+) -> CompletedProcess[bytes]:
+    """
+    Run an external command in a subprocess and wait until it completes.
+
+    .. seealso:: :func:`subprocess.run`
+
+    :param command: either a string to pass to the shell, or an iterable of strings
+        containing the executable name or path and its arguments
+    :param input: bytes passed to the standard input of the subprocess
+    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or `None`; ``input`` overrides this
+    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or `None`
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        :data:`subprocess.STDOUT`, a file-like object, or `None`
+    :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the
+        process terminates with a return code other than 0
+    :param cwd: If not ``None``, change the working directory to this before running the
+        command
+    :param env: if not ``None``, this mapping replaces the inherited environment
+        variables from the parent process
+    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+        to specify process startup parameters (Windows only)
+    :param creationflags: flags that can be used to control the creation of the
+        subprocess (see :class:`subprocess.Popen` for the specifics)
+    :param start_new_session: if ``true`` the setsid() system call will be made in the
+        child process prior to the execution of the subprocess. (POSIX only)
+    :param pass_fds: sequence of file descriptors to keep open between the parent and
+        child processes.
(POSIX only) + :param user: effective user to run the process as (Python >= 3.9, POSIX only) + :param group: effective group to run the process as (Python >= 3.9, POSIX only) + :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, + POSIX only) + :param umask: if not negative, this umask is applied in the child process before + running the given command (Python >= 3.9, POSIX only) + :return: an object representing the completed process + :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process + exits with a nonzero return code + + """ + + async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: + buffer = BytesIO() + async for chunk in stream: + buffer.write(chunk) + + stream_contents[index] = buffer.getvalue() + + if stdin is not None and input is not None: + raise ValueError("only one of stdin and input is allowed") + + async with await open_process( + command, + stdin=PIPE if input else stdin, + stdout=stdout, + stderr=stderr, + cwd=cwd, + env=env, + startupinfo=startupinfo, + creationflags=creationflags, + start_new_session=start_new_session, + pass_fds=pass_fds, + user=user, + group=group, + extra_groups=extra_groups, + umask=umask, + ) as process: + stream_contents: list[bytes | None] = [None, None] + async with create_task_group() as tg: + if process.stdout: + tg.start_soon(drain_stream, process.stdout, 0) + + if process.stderr: + tg.start_soon(drain_stream, process.stderr, 1) + + if process.stdin and input: + await process.stdin.send(input) + await process.stdin.aclose() + + await process.wait() + + output, errors = stream_contents + if check and process.returncode != 0: + raise CalledProcessError(cast(int, process.returncode), command, output, errors) + + return CompletedProcess(command, cast(int, process.returncode), output, errors) + + +async def open_process( + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None = PIPE, + stdout: int | IO[Any] | None = PIPE, + stderr: int | IO[Any] | None = PIPE, + cwd: StrOrBytesPath | None = None, + env: Mapping[str, str] | None = None, + startupinfo: Any = None, + creationflags: int = 0, + start_new_session: bool = False, + pass_fds: Sequence[int] = (), + user: str | int | None = None, + group: str | int | None = None, + extra_groups: Iterable[str | int] | None = None, + umask: int = -1, +) -> Process: + """ + Start an external command in a subprocess. + + .. 
seealso:: :class:`subprocess.Popen`
+
+    :param command: either a string to pass to the shell, or an iterable of strings
+        containing the executable name or path and its arguments
+    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
+        file-like object, or ``None``
+    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        a file-like object, or ``None``
+    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
+        :data:`subprocess.STDOUT`, a file-like object, or ``None``
+    :param cwd: If not ``None``, the working directory is changed before executing
+    :param env: If env is not ``None``, it must be a mapping that defines the
+        environment variables for the new process
+    :param creationflags: flags that can be used to control the creation of the
+        subprocess (see :class:`subprocess.Popen` for the specifics)
+    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
+        to specify process startup parameters (Windows only)
+    :param start_new_session: if ``true`` the setsid() system call will be made in the
+        child process prior to the execution of the subprocess. (POSIX only)
+    :param pass_fds: sequence of file descriptors to keep open between the parent and
+        child processes. (POSIX only)
+    :param user: effective user to run the process as (POSIX only)
+    :param group: effective group to run the process as (POSIX only)
+    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
+    :param umask: if not negative, this umask is applied in the child process before
+        running the given command (POSIX only)
+    :return: an asynchronous process object
+
+    """
+    kwargs: dict[str, Any] = {}
+    if user is not None:
+        kwargs["user"] = user
+
+    if group is not None:
+        kwargs["group"] = group
+
+    if extra_groups is not None:
+        kwargs["extra_groups"] = extra_groups
+
+    if umask >= 0:
+        kwargs["umask"] = umask
+
+    return await get_async_backend().open_process(
+        command,
+        stdin=stdin,
+        stdout=stdout,
+        stderr=stderr,
+        cwd=cwd,
+        env=env,
+        startupinfo=startupinfo,
+        creationflags=creationflags,
+        start_new_session=start_new_session,
+        pass_fds=pass_fds,
+        **kwargs,
+    )
diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py b/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c6f9a07287044dae830a48735a66ef2b0dd9b0b
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/_core/_synchronization.py
@@ -0,0 +1,757 @@
+from __future__ import annotations
+
+import math
+from collections import deque
+from collections.abc import Callable
+from dataclasses import dataclass
+from types import TracebackType
+from typing import TypeVar
+
+from ..lowlevel import checkpoint_if_cancelled
+from ._eventloop import get_async_backend
+from ._exceptions import BusyResourceError, NoEventLoopError
+from ._tasks import CancelScope
+from ._testing import TaskInfo, get_current_task
+
+T = TypeVar("T")
+
+
+@dataclass(frozen=True)
+class EventStatistics:
+    """
+    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
+    """
+
+    tasks_waiting: int
+
+
+@dataclass(frozen=True)
+class CapacityLimiterStatistics:
+    """
+    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
+    :ivar float total_tokens: total number of available tokens
+    :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from
+        this limiter
+    :ivar int tasks_waiting: number of tasks waiting on
:meth:`~.CapacityLimiter.acquire` or + :meth:`~.CapacityLimiter.acquire_on_behalf_of` + """ + + borrowed_tokens: int + total_tokens: float + borrowers: tuple[object, ...] + tasks_waiting: int + + +@dataclass(frozen=True) +class LockStatistics: + """ + :ivar bool locked: flag indicating if this lock is locked or not + :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the + lock is not held by any task) + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` + """ + + locked: bool + owner: TaskInfo | None + tasks_waiting: int + + +@dataclass(frozen=True) +class ConditionStatistics: + """ + :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` + :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying + :class:`~.Lock` + """ + + tasks_waiting: int + lock_statistics: LockStatistics + + +@dataclass(frozen=True) +class SemaphoreStatistics: + """ + :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` + + """ + + tasks_waiting: int + + +class Event: + def __new__(cls) -> Event: + try: + return get_async_backend().create_event() + except NoEventLoopError: + return EventAdapter() + + def set(self) -> None: + """Set the flag, notifying all listeners.""" + raise NotImplementedError + + def is_set(self) -> bool: + """Return ``True`` if the flag is set, ``False`` if not.""" + raise NotImplementedError + + async def wait(self) -> None: + """ + Wait until the flag has been set. + + If the flag has already been set when this method is called, it returns + immediately. + + """ + raise NotImplementedError + + def statistics(self) -> EventStatistics: + """Return statistics about the current state of this event.""" + raise NotImplementedError + + +class EventAdapter(Event): + _internal_event: Event | None = None + _is_set: bool = False + + def __new__(cls) -> EventAdapter: + return object.__new__(cls) + + @property + def _event(self) -> Event: + if self._internal_event is None: + self._internal_event = get_async_backend().create_event() + if self._is_set: + self._internal_event.set() + + return self._internal_event + + def set(self) -> None: + if self._internal_event is None: + self._is_set = True + else: + self._event.set() + + def is_set(self) -> bool: + if self._internal_event is None: + return self._is_set + + return self._internal_event.is_set() + + async def wait(self) -> None: + await self._event.wait() + + def statistics(self) -> EventStatistics: + if self._internal_event is None: + return EventStatistics(tasks_waiting=0) + + return self._internal_event.statistics() + + +class Lock: + def __new__(cls, *, fast_acquire: bool = False) -> Lock: + try: + return get_async_backend().create_lock(fast_acquire=fast_acquire) + except NoEventLoopError: + return LockAdapter(fast_acquire=fast_acquire) + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. 
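+
+        Example (an illustrative sketch; ``WouldBlock`` is ``anyio.WouldBlock``)::
+
+            lock = Lock()
+            try:
+                lock.acquire_nowait()
+            except WouldBlock:
+                ...  # another task already holds the lock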
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Release the lock.""" + raise NotImplementedError + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + raise NotImplementedError + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class LockAdapter(Lock): + _internal_lock: Lock | None = None + + def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: + return object.__new__(cls) + + def __init__(self, *, fast_acquire: bool = False): + self._fast_acquire = fast_acquire + + @property + def _lock(self) -> Lock: + if self._internal_lock is None: + self._internal_lock = get_async_backend().create_lock( + fast_acquire=self._fast_acquire + ) + + return self._internal_lock + + async def __aenter__(self) -> None: + await self._lock.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self._internal_lock is not None: + self._internal_lock.release() + + async def acquire(self) -> None: + """Acquire the lock.""" + await self._lock.acquire() + + def acquire_nowait(self) -> None: + """ + Acquire the lock, without blocking. + + :raises ~anyio.WouldBlock: if the operation would block + + """ + self._lock.acquire_nowait() + + def release(self) -> None: + """Release the lock.""" + self._lock.release() + + def locked(self) -> bool: + """Return True if the lock is currently held.""" + return self._lock.locked() + + def statistics(self) -> LockStatistics: + """ + Return statistics about the current state of this lock. + + .. versionadded:: 3.0 + + """ + if self._internal_lock is None: + return LockStatistics(False, None, 0) + + return self._internal_lock.statistics() + + +class Condition: + _owner_task: TaskInfo | None = None + + def __init__(self, lock: Lock | None = None): + self._lock = lock or Lock() + self._waiters: deque[Event] = deque() + + async def __aenter__(self) -> None: + await self.acquire() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.release() + + def _check_acquired(self) -> None: + if self._owner_task != get_current_task(): + raise RuntimeError("The current task is not holding the underlying lock") + + async def acquire(self) -> None: + """Acquire the underlying lock.""" + await self._lock.acquire() + self._owner_task = get_current_task() + + def acquire_nowait(self) -> None: + """ + Acquire the underlying lock, without blocking. 
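+
+        Example (an illustrative sketch)::
+
+            condition = Condition()
+            condition.acquire_nowait()  # raises WouldBlock if the lock is held
+            try:
+                condition.notify()
+            finally:
+                condition.release()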
+
+        :raises ~anyio.WouldBlock: if the operation would block
+
+        """
+        self._lock.acquire_nowait()
+        self._owner_task = get_current_task()
+
+    def release(self) -> None:
+        """Release the underlying lock."""
+        self._lock.release()
+
+    def locked(self) -> bool:
+        """Return True if the underlying lock is held."""
+        return self._lock.locked()
+
+    def notify(self, n: int = 1) -> None:
+        """Notify exactly n listeners."""
+        self._check_acquired()
+        for _ in range(n):
+            try:
+                event = self._waiters.popleft()
+            except IndexError:
+                break
+
+            event.set()
+
+    def notify_all(self) -> None:
+        """Notify all the listeners."""
+        self._check_acquired()
+        for event in self._waiters:
+            event.set()
+
+        self._waiters.clear()
+
+    async def wait(self) -> None:
+        """Wait for a notification."""
+        await checkpoint_if_cancelled()
+        self._check_acquired()
+        event = Event()
+        self._waiters.append(event)
+        self.release()
+        try:
+            await event.wait()
+        except BaseException:
+            if not event.is_set():
+                self._waiters.remove(event)
+            elif self._waiters:
+                # This task was notified but could not act on it, so pass
+                # it on to the next task
+                self._waiters.popleft().set()
+
+            raise
+        finally:
+            with CancelScope(shield=True):
+                await self.acquire()
+
+    async def wait_for(self, predicate: Callable[[], T]) -> T:
+        """
+        Wait until a predicate becomes true.
+
+        :param predicate: a callable that returns a truthy value when the condition is
+            met
+        :return: the result of the predicate
+
+        .. versionadded:: 4.11.0
+
+        """
+        while not (result := predicate()):
+            await self.wait()
+
+        return result
+
+    def statistics(self) -> ConditionStatistics:
+        """
+        Return statistics about the current state of this condition.
+
+        .. versionadded:: 3.0
+        """
+        return ConditionStatistics(len(self._waiters), self._lock.statistics())
+
+
+class Semaphore:
+    def __new__(
+        cls,
+        initial_value: int,
+        *,
+        max_value: int | None = None,
+        fast_acquire: bool = False,
+    ) -> Semaphore:
+        try:
+            return get_async_backend().create_semaphore(
+                initial_value, max_value=max_value, fast_acquire=fast_acquire
+            )
+        except NoEventLoopError:
+            return SemaphoreAdapter(initial_value, max_value=max_value)
+
+    def __init__(
+        self,
+        initial_value: int,
+        *,
+        max_value: int | None = None,
+        fast_acquire: bool = False,
+    ):
+        if not isinstance(initial_value, int):
+            raise TypeError("initial_value must be an integer")
+        if initial_value < 0:
+            raise ValueError("initial_value must be >= 0")
+        if max_value is not None:
+            if not isinstance(max_value, int):
+                raise TypeError("max_value must be an integer or None")
+            if max_value < initial_value:
+                raise ValueError(
+                    "max_value must be equal to or higher than initial_value"
+                )
+
+        self._fast_acquire = fast_acquire
+
+    async def __aenter__(self) -> Semaphore:
+        await self.acquire()
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
+        self.release()
+
+    async def acquire(self) -> None:
+        """Decrement the semaphore value, blocking if necessary."""
+        raise NotImplementedError
+
+    def acquire_nowait(self) -> None:
+        """
+        Acquire the semaphore, without blocking.
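+
+        Example (an illustrative sketch)::
+
+            semaphore = Semaphore(2)
+            semaphore.acquire_nowait()  # takes one of the two slots
+            semaphore.release()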
+ + :raises ~anyio.WouldBlock: if the operation would block + + """ + raise NotImplementedError + + def release(self) -> None: + """Increment the semaphore value.""" + raise NotImplementedError + + @property + def value(self) -> int: + """The current value of the semaphore.""" + raise NotImplementedError + + @property + def max_value(self) -> int | None: + """The maximum value of the semaphore.""" + raise NotImplementedError + + def statistics(self) -> SemaphoreStatistics: + """ + Return statistics about the current state of this semaphore. + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + +class SemaphoreAdapter(Semaphore): + _internal_semaphore: Semaphore | None = None + + def __new__( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> SemaphoreAdapter: + return object.__new__(cls) + + def __init__( + self, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> None: + super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) + self._initial_value = initial_value + self._max_value = max_value + + @property + def _semaphore(self) -> Semaphore: + if self._internal_semaphore is None: + self._internal_semaphore = get_async_backend().create_semaphore( + self._initial_value, max_value=self._max_value + ) + + return self._internal_semaphore + + async def acquire(self) -> None: + await self._semaphore.acquire() + + def acquire_nowait(self) -> None: + self._semaphore.acquire_nowait() + + def release(self) -> None: + self._semaphore.release() + + @property + def value(self) -> int: + if self._internal_semaphore is None: + return self._initial_value + + return self._semaphore.value + + @property + def max_value(self) -> int | None: + return self._max_value + + def statistics(self) -> SemaphoreStatistics: + if self._internal_semaphore is None: + return SemaphoreStatistics(tasks_waiting=0) + + return self._semaphore.statistics() + + +class CapacityLimiter: + def __new__(cls, total_tokens: float) -> CapacityLimiter: + try: + return get_async_backend().create_capacity_limiter(total_tokens) + except NoEventLoopError: + return CapacityLimiterAdapter(total_tokens) + + async def __aenter__(self) -> None: + raise NotImplementedError + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + raise NotImplementedError + + @property + def total_tokens(self) -> float: + """ + The total number of tokens available for borrowing. + + This is a read-write property. If the total number of tokens is increased, the + proportionate number of tasks waiting on this limiter will be granted their + tokens. + + .. versionchanged:: 3.0 + The property is now writable. + .. versionchanged:: 4.12 + The value can now be set to 0. + + """ + raise NotImplementedError + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + raise NotImplementedError + + @property + def borrowed_tokens(self) -> int: + """The number of tokens that have currently been borrowed.""" + raise NotImplementedError + + @property + def available_tokens(self) -> float: + """The number of tokens currently available to be borrowed""" + raise NotImplementedError + + def acquire_nowait(self) -> None: + """ + Acquire a token for the current task without waiting for one to become + available. 
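+
+        Example (an illustrative sketch; ``WouldBlock`` is ``anyio.WouldBlock``)::
+
+            limiter = CapacityLimiter(4)
+            try:
+                limiter.acquire_nowait()
+            except WouldBlock:
+                ...  # all four tokens are currently borrowed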
+ + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + """ + Acquire a token without waiting for one to become available. + + :param borrower: the entity borrowing a token + :raises ~anyio.WouldBlock: if there are no tokens available for borrowing + + """ + raise NotImplementedError + + async def acquire(self) -> None: + """ + Acquire a token for the current task, waiting if necessary for one to become + available. + + """ + raise NotImplementedError + + async def acquire_on_behalf_of(self, borrower: object) -> None: + """ + Acquire a token, waiting if necessary for one to become available. + + :param borrower: the entity borrowing a token + + """ + raise NotImplementedError + + def release(self) -> None: + """ + Release the token held by the current task. + + :raises RuntimeError: if the current task has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def release_on_behalf_of(self, borrower: object) -> None: + """ + Release the token held by the given borrower. + + :raises RuntimeError: if the borrower has not borrowed a token from this + limiter. + + """ + raise NotImplementedError + + def statistics(self) -> CapacityLimiterStatistics: + """ + Return statistics about the current state of this limiter. + + .. versionadded:: 3.0 + + """ + raise NotImplementedError + + +class CapacityLimiterAdapter(CapacityLimiter): + _internal_limiter: CapacityLimiter | None = None + + def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: + return object.__new__(cls) + + def __init__(self, total_tokens: float) -> None: + self.total_tokens = total_tokens + + @property + def _limiter(self) -> CapacityLimiter: + if self._internal_limiter is None: + self._internal_limiter = get_async_backend().create_capacity_limiter( + self._total_tokens + ) + + return self._internal_limiter + + async def __aenter__(self) -> None: + await self._limiter.__aenter__() + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) + + @property + def total_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.total_tokens + + @total_tokens.setter + def total_tokens(self, value: float) -> None: + if not isinstance(value, int) and value is not math.inf: + raise TypeError("total_tokens must be an int or math.inf") + elif value < 1: + raise ValueError("total_tokens must be >= 1") + + if self._internal_limiter is None: + self._total_tokens = value + return + + self._limiter.total_tokens = value + + @property + def borrowed_tokens(self) -> int: + if self._internal_limiter is None: + return 0 + + return self._internal_limiter.borrowed_tokens + + @property + def available_tokens(self) -> float: + if self._internal_limiter is None: + return self._total_tokens + + return self._internal_limiter.available_tokens + + def acquire_nowait(self) -> None: + self._limiter.acquire_nowait() + + def acquire_on_behalf_of_nowait(self, borrower: object) -> None: + self._limiter.acquire_on_behalf_of_nowait(borrower) + + async def acquire(self) -> None: + await self._limiter.acquire() + + async def acquire_on_behalf_of(self, borrower: object) -> None: + await self._limiter.acquire_on_behalf_of(borrower) + + def release(self) -> None: + self._limiter.release() + + def 
release_on_behalf_of(self, borrower: object) -> None: + self._limiter.release_on_behalf_of(borrower) + + def statistics(self) -> CapacityLimiterStatistics: + if self._internal_limiter is None: + return CapacityLimiterStatistics( + borrowed_tokens=0, + total_tokens=self.total_tokens, + borrowers=(), + tasks_waiting=0, + ) + + return self._internal_limiter.statistics() + + +class ResourceGuard: + """ + A context manager for ensuring that a resource is only used by a single task at a + time. + + Entering this context manager while the previous has not exited it yet will trigger + :exc:`BusyResourceError`. + + :param action: the action to guard against (visible in the :exc:`BusyResourceError` + when triggered, e.g. "Another task is already {action} this resource") + + .. versionadded:: 4.1 + """ + + __slots__ = "action", "_guarded" + + def __init__(self, action: str = "using"): + self.action: str = action + self._guarded = False + + def __enter__(self) -> None: + if self._guarded: + raise BusyResourceError(self.action) + + self._guarded = True + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self._guarded = False diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py b/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py new file mode 100644 index 0000000000000000000000000000000000000000..0688bfe960cf9747373c93e482a64d1369befa11 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_tasks.py @@ -0,0 +1,173 @@ +from __future__ import annotations + +import math +from collections.abc import Generator +from contextlib import contextmanager +from types import TracebackType + +from ..abc._tasks import TaskGroup, TaskStatus +from ._eventloop import get_async_backend + + +class _IgnoredTaskStatus(TaskStatus[object]): + def started(self, value: object = None) -> None: + pass + + +TASK_STATUS_IGNORED = _IgnoredTaskStatus() + + +class CancelScope: + """ + Wraps a unit of work that can be made separately cancellable. + + :param deadline: The time (clock value) when this scope is cancelled automatically + :param shield: ``True`` to shield the cancel scope from external cancellation + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + """ + + def __new__( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) + + def cancel(self, reason: str | None = None) -> None: + """ + Cancel this scope immediately. + + :param reason: a message describing the reason for the cancellation + + """ + raise NotImplementedError + + @property + def deadline(self) -> float: + """ + The time (clock value) when this scope is cancelled automatically. + + Will be ``float('inf')`` if no timeout has been set. + + """ + raise NotImplementedError + + @deadline.setter + def deadline(self, value: float) -> None: + raise NotImplementedError + + @property + def cancel_called(self) -> bool: + """``True`` if :meth:`cancel` has been called.""" + raise NotImplementedError + + @property + def cancelled_caught(self) -> bool: + """ + ``True`` if this scope suppressed a cancellation exception it itself raised. + + This is typically used to check if any work was interrupted, or to see if the + scope was cancelled due to its deadline being reached. 
The value will, however,
+        only be ``True`` if the cancellation was triggered by the scope itself (and not
+        an outer scope).
+
+        """
+        raise NotImplementedError
+
+    @property
+    def shield(self) -> bool:
+        """
+        ``True`` if this scope is shielded from external cancellation.
+
+        While a scope is shielded, it will not receive cancellations from outside.
+
+        """
+        raise NotImplementedError
+
+    @shield.setter
+    def shield(self, value: bool) -> None:
+        raise NotImplementedError
+
+    def __enter__(self) -> CancelScope:
+        raise NotImplementedError
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> bool:
+        raise NotImplementedError
+
+
+@contextmanager
+def fail_after(
+    delay: float | None, shield: bool = False
+) -> Generator[CancelScope, None, None]:
+    """
+    Create a context manager which raises a :class:`TimeoutError` if the enclosed
+    block does not finish in time.
+
+    :param delay: maximum allowed time (in seconds) before raising the exception, or
+        ``None`` to disable the timeout
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a context manager that yields a cancel scope
+    :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]
+    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
+        current thread
+
+    """
+    current_time = get_async_backend().current_time
+    deadline = (current_time() + delay) if delay is not None else math.inf
+    with get_async_backend().create_cancel_scope(
+        deadline=deadline, shield=shield
+    ) as cancel_scope:
+        yield cancel_scope
+
+    if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline:
+        raise TimeoutError
+
+
+def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
+    """
+    Create a cancel scope with a deadline that expires after the given delay.
+
+    :param delay: maximum allowed time (in seconds) before exiting the context block, or
+        ``None`` to disable the timeout
+    :param shield: ``True`` to shield the cancel scope from external cancellation
+    :return: a cancel scope
+    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
+        current thread
+
+    """
+    deadline = (
+        (get_async_backend().current_time() + delay) if delay is not None else math.inf
+    )
+    return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield)
+
+
+def current_effective_deadline() -> float:
+    """
+    Return the nearest deadline among all the cancel scopes effective for the current
+    task.
+
+    :return: a clock value from the event loop's internal clock (or ``float('inf')`` if
+        there is no deadline in effect, or ``float('-inf')`` if the current scope has
+        been cancelled)
+    :rtype: float
+    :raises NoEventLoopError: if no supported asynchronous event loop is running in the
+        current thread
+
+    """
+    return get_async_backend().current_effective_deadline()
+
+
+def create_task_group() -> TaskGroup:
+    """
+    Create a task group.
+ + :return: a task group + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().create_task_group() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py b/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py new file mode 100644 index 0000000000000000000000000000000000000000..75a09f793744b8e60375ce2efab98307d077bc21 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_tempfile.py @@ -0,0 +1,613 @@ +from __future__ import annotations + +import os +import sys +import tempfile +from collections.abc import Iterable +from io import BytesIO, TextIOWrapper +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AnyStr, + Generic, + overload, +) + +from .. import to_thread +from .._core._fileio import AsyncFile +from ..lowlevel import checkpoint_if_cancelled + +if TYPE_CHECKING: + from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer + + +class TemporaryFile(Generic[AnyStr]): + """ + An asynchronous temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager interface to a temporary file. + The file is created using Python's standard `tempfile.TemporaryFile` function in a + background thread, and is wrapped as an asynchronous file using `AsyncFile`. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: TemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: TemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... 
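+    # A minimal usage sketch (illustrative only; assumes a running event loop
+    # and the default binary mode). Entering the instance with ``async with``
+    # yields an AsyncFile wrapper around the temporary file:
+    #
+    #     async with TemporaryFile() as f:
+    #         await f.write(b"hello")
+    #         await f.seek(0)
+    #         assert await f.read() == b"hello"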
+ + def __init__( + self, + mode: OpenTextMode | OpenBinaryMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + *, + errors: str | None = None, + ) -> None: + self.mode = mode + self.buffering = buffering + self.encoding = encoding + self.newline = newline + self.suffix: str | None = suffix + self.prefix: str | None = prefix + self.dir: str | None = dir + self.errors = errors + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.TemporaryFile( + self.mode, + self.buffering, + self.encoding, + self.newline, + self.suffix, + self.prefix, + self.dir, + errors=self.errors, + ) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class NamedTemporaryFile(Generic[AnyStr]): + """ + An asynchronous named temporary file that is automatically created and cleaned up. + + This class provides an asynchronous context manager for a temporary file with a + visible name in the file system. It uses Python's standard + :func:`~tempfile.NamedTemporaryFile` function and wraps the file object with + :class:`AsyncFile` for asynchronous operations. + + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file. Only applicable in + text mode. + :param newline: Controls how universal newlines mode works (only applicable in text + mode). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param delete: Whether to delete the file when it is closed. + :param errors: The error handling scheme used for encoding/decoding errors. + :param delete_on_close: (Python 3.12+) Whether to delete the file on close. + """ + + _async_file: AsyncFile[AnyStr] + + @overload + def __init__( + self: NamedTemporaryFile[bytes], + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... + @overload + def __init__( + self: NamedTemporaryFile[str], + mode: OpenTextMode, + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + delete: bool = ..., + *, + errors: str | None = ..., + delete_on_close: bool = ..., + ): ... 
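+    # A minimal usage sketch (illustrative only; assumes a running event loop).
+    # The wrapped file has a visible name on the filesystem, reachable through
+    # the ``name`` attribute of the underlying file object:
+    #
+    #     async with NamedTemporaryFile(suffix=".log") as f:
+    #         await f.write(b"entry\n")
+    #         print(f.wrapped.name)  # ``wrapped`` exposes the underlying file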
+ + def __init__( + self, + mode: OpenBinaryMode | OpenTextMode = "w+b", + buffering: int = -1, + encoding: str | None = None, + newline: str | None = None, + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, + delete: bool = True, + *, + errors: str | None = None, + delete_on_close: bool = True, + ) -> None: + self._params: dict[str, Any] = { + "mode": mode, + "buffering": buffering, + "encoding": encoding, + "newline": newline, + "suffix": suffix, + "prefix": prefix, + "dir": dir, + "delete": delete, + "errors": errors, + } + if sys.version_info >= (3, 12): + self._params["delete_on_close"] = delete_on_close + + async def __aenter__(self) -> AsyncFile[AnyStr]: + fp = await to_thread.run_sync( + lambda: tempfile.NamedTemporaryFile(**self._params) + ) + self._async_file = AsyncFile(fp) + return self._async_file + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + await self._async_file.aclose() + + +class SpooledTemporaryFile(AsyncFile[AnyStr]): + """ + An asynchronous spooled temporary file that starts in memory and is spooled to disk. + + This class provides an asynchronous interface to a spooled temporary file, much like + Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous + write operations and provides a method to force a rollover to disk. + + :param max_size: Maximum size in bytes before the file is rolled over to disk. + :param mode: The mode in which the file is opened. Defaults to "w+b". + :param buffering: The buffering policy (-1 means the default buffering). + :param encoding: The encoding used to decode or encode the file (text mode only). + :param newline: Controls how universal newlines mode works (text mode only). + :param suffix: The suffix for the temporary file name. + :param prefix: The prefix for the temporary file name. + :param dir: The directory in which the temporary file is created. + :param errors: The error handling scheme used for encoding/decoding errors. + """ + + _rolled: bool = False + + @overload + def __init__( + self: SpooledTemporaryFile[bytes], + max_size: int = ..., + mode: OpenBinaryMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... + @overload + def __init__( + self: SpooledTemporaryFile[str], + max_size: int = ..., + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + newline: str | None = ..., + suffix: str | None = ..., + prefix: str | None = ..., + dir: str | None = ..., + *, + errors: str | None = ..., + ): ... 
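+    # A minimal usage sketch (illustrative only; assumes a running event loop).
+    # Writes stay in an in-memory buffer until ``max_size`` is exceeded, at
+    # which point the contents are rolled over to an on-disk temporary file:
+    #
+    #     async with SpooledTemporaryFile(max_size=1024) as f:
+    #         await f.write(b"x" * 2048)  # exceeds max_size, triggers rollover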
+
+    def __init__(
+        self,
+        max_size: int = 0,
+        mode: OpenBinaryMode | OpenTextMode = "w+b",
+        buffering: int = -1,
+        encoding: str | None = None,
+        newline: str | None = None,
+        suffix: str | None = None,
+        prefix: str | None = None,
+        dir: str | None = None,
+        *,
+        errors: str | None = None,
+    ) -> None:
+        self._tempfile_params: dict[str, Any] = {
+            "mode": mode,
+            "buffering": buffering,
+            "encoding": encoding,
+            "newline": newline,
+            "suffix": suffix,
+            "prefix": prefix,
+            "dir": dir,
+            "errors": errors,
+        }
+        self._max_size = max_size
+        if "b" in mode:
+            super().__init__(BytesIO())  # type: ignore[arg-type]
+        else:
+            super().__init__(
+                TextIOWrapper(  # type: ignore[arg-type]
+                    BytesIO(),
+                    encoding=encoding,
+                    errors=errors,
+                    newline=newline,
+                    write_through=True,
+                )
+            )
+
+    async def aclose(self) -> None:
+        if not self._rolled:
+            self._fp.close()
+            return
+
+        await super().aclose()
+
+    async def _check(self) -> None:
+        if self._rolled or self._fp.tell() <= self._max_size:
+            return
+
+        await self.rollover()
+
+    async def rollover(self) -> None:
+        if self._rolled:
+            return
+
+        self._rolled = True
+        buffer = self._fp
+        buffer.seek(0)
+        self._fp = await to_thread.run_sync(
+            lambda: tempfile.TemporaryFile(**self._tempfile_params)
+        )
+        await self.write(buffer.read())
+        buffer.close()
+
+    @property
+    def closed(self) -> bool:
+        return self._fp.closed
+
+    async def read(self, size: int = -1) -> AnyStr:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.read(size)
+
+        return await super().read(size)  # type: ignore[return-value]
+
+    async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.read1(size)
+
+        return await super().read1(size)
+
+    async def readline(self) -> AnyStr:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.readline()
+
+        return await super().readline()  # type: ignore[return-value]
+
+    async def readlines(self) -> list[AnyStr]:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.readlines()
+
+        return await super().readlines()  # type: ignore[return-value]
+
+    async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            # Before rollover, read straight from the in-memory buffer
+            return self._fp.readinto(b)
+
+        return await super().readinto(b)
+
+    async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.readinto1(b)
+
+        return await super().readinto1(b)
+
+    async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.seek(offset, whence)
+
+        return await super().seek(offset, whence)
+
+    async def tell(self) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.tell()
+
+        return await super().tell()
+
+    async def truncate(self, size: int | None = None) -> int:
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            return self._fp.truncate(size)
+
+        return await super().truncate(size)
+
+    @overload
+    async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ...
+    @overload
+    async def write(self: SpooledTemporaryFile[str], b: str) -> int: ...
+
+    async def write(self, b: ReadableBuffer | str) -> int:
+        """
+        Asynchronously write data to the spooled temporary file.
+
+        If the file has not yet been rolled over, the data is written synchronously,
+        and a rollover is triggered if the size exceeds the maximum size.
+
+        :param b: The data to write.
+        :return: The number of bytes written.
+        :raises RuntimeError: If the underlying file is not initialized.
+
+        """
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            result = self._fp.write(b)
+            await self._check()
+            return result
+
+        return await super().write(b)  # type: ignore[misc]
+
+    @overload
+    async def writelines(
+        self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer]
+    ) -> None: ...
+    @overload
+    async def writelines(
+        self: SpooledTemporaryFile[str], lines: Iterable[str]
+    ) -> None: ...
+
+    async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None:
+        """
+        Asynchronously write a list of lines to the spooled temporary file.
+
+        If the file has not yet been rolled over, the lines are written synchronously,
+        and a rollover is triggered if the size exceeds the maximum size.
+
+        :param lines: An iterable of lines to write.
+        :raises RuntimeError: If the underlying file is not initialized.
+
+        """
+        if not self._rolled:
+            await checkpoint_if_cancelled()
+            result = self._fp.writelines(lines)
+            await self._check()
+            return result
+
+        return await super().writelines(lines)  # type: ignore[misc]
+
+
+class TemporaryDirectory(Generic[AnyStr]):
+    """
+    An asynchronous temporary directory that is created and cleaned up automatically.
+
+    This class provides an asynchronous context manager for creating a temporary
+    directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to
+    perform directory creation and cleanup operations in a background thread.
+
+    :param suffix: Suffix to be added to the temporary directory name.
+    :param prefix: Prefix to be added to the temporary directory name.
+    :param dir: The parent directory where the temporary directory is created.
+    :param ignore_cleanup_errors: Whether to ignore errors during cleanup.
+    :param delete: Whether to delete the directory upon closing (Python 3.12+).
+    """
+
+    def __init__(
+        self,
+        suffix: AnyStr | None = None,
+        prefix: AnyStr | None = None,
+        dir: AnyStr | None = None,
+        *,
+        ignore_cleanup_errors: bool = False,
+        delete: bool = True,
+    ) -> None:
+        self.suffix: AnyStr | None = suffix
+        self.prefix: AnyStr | None = prefix
+        self.dir: AnyStr | None = dir
+        self.ignore_cleanup_errors = ignore_cleanup_errors
+        self.delete = delete
+
+        self._tempdir: tempfile.TemporaryDirectory | None = None
+
+    async def __aenter__(self) -> str:
+        params: dict[str, Any] = {
+            "suffix": self.suffix,
+            "prefix": self.prefix,
+            "dir": self.dir,
+            "ignore_cleanup_errors": self.ignore_cleanup_errors,
+        }
+        if sys.version_info >= (3, 12):
+            params["delete"] = self.delete
+
+        self._tempdir = await to_thread.run_sync(
+            lambda: tempfile.TemporaryDirectory(**params)
+        )
+        return await to_thread.run_sync(self._tempdir.__enter__)
+
+    async def __aexit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_value: BaseException | None,
+        traceback: TracebackType | None,
+    ) -> None:
+        if self._tempdir is not None:
+            await to_thread.run_sync(
+                self._tempdir.__exit__, exc_type, exc_value, traceback
+            )
+
+    async def cleanup(self) -> None:
+        if self._tempdir is not None:
+            await to_thread.run_sync(self._tempdir.cleanup)
+
+
+@overload
+async def mkstemp(
+    suffix: str | None = None,
+    prefix: str | None = None,
+    dir: str | None = None,
+    text: bool = False,
+) -> tuple[int, str]: ...
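+
+
+# A minimal usage sketch (illustrative only; must run inside an event loop).
+# mkstemp returns a plain OS-level descriptor plus the file's path, so the
+# caller is responsible for closing and removing it:
+#
+#     fd, path = await mkstemp(suffix=".txt", text=True)
+#     try:
+#         os.write(fd, b"data")
+#     finally:
+#         os.close(fd)
+#         os.remove(path)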
+ + +@overload +async def mkstemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, + text: bool = False, +) -> tuple[int, bytes]: ... + + +async def mkstemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, + text: bool = False, +) -> tuple[int, str | bytes]: + """ + Asynchronously create a temporary file and return an OS-level handle and the file + name. + + This function wraps `tempfile.mkstemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the file name. + :param prefix: Prefix to be added to the file name. + :param dir: Directory in which the temporary file is created. + :param text: Whether the file is opened in text mode. + :return: A tuple containing the file descriptor and the file name. + + """ + return await to_thread.run_sync(tempfile.mkstemp, suffix, prefix, dir, text) + + +@overload +async def mkdtemp( + suffix: str | None = None, + prefix: str | None = None, + dir: str | None = None, +) -> str: ... + + +@overload +async def mkdtemp( + suffix: bytes | None = None, + prefix: bytes | None = None, + dir: bytes | None = None, +) -> bytes: ... + + +async def mkdtemp( + suffix: AnyStr | None = None, + prefix: AnyStr | None = None, + dir: AnyStr | None = None, +) -> str | bytes: + """ + Asynchronously create a temporary directory and return its path. + + This function wraps `tempfile.mkdtemp` and executes it in a background thread. + + :param suffix: Suffix to be added to the directory name. + :param prefix: Prefix to be added to the directory name. + :param dir: Parent directory where the temporary directory is created. + :return: The path of the created temporary directory. + + """ + return await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir) + + +async def gettempdir() -> str: + """ + Asynchronously return the name of the directory used for temporary files. + + This function wraps `tempfile.gettempdir` and executes it in a background thread. + + :return: The path of the temporary directory as a string. + + """ + return await to_thread.run_sync(tempfile.gettempdir) + + +async def gettempdirb() -> bytes: + """ + Asynchronously return the name of the directory used for temporary files in bytes. + + This function wraps `tempfile.gettempdirb` and executes it in a background thread. + + :return: The path of the temporary directory as bytes. + + """ + return await to_thread.run_sync(tempfile.gettempdirb) diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_testing.py b/venv/lib/python3.12/site-packages/anyio/_core/_testing.py new file mode 100644 index 0000000000000000000000000000000000000000..369e65c068a426e99b7e8571209e80ce35b71f47 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_testing.py @@ -0,0 +1,82 @@ +from __future__ import annotations + +from collections.abc import Awaitable, Generator +from typing import Any, cast + +from ._eventloop import get_async_backend + + +class TaskInfo: + """ + Represents an asynchronous task. 
+ + :ivar int id: the unique identifier of the task + :ivar parent_id: the identifier of the parent task, if any + :vartype parent_id: Optional[int] + :ivar str name: the description of the task (if any) + :ivar ~collections.abc.Coroutine coro: the coroutine object of the task + """ + + __slots__ = "_name", "id", "parent_id", "name", "coro" + + def __init__( + self, + id: int, + parent_id: int | None, + name: str | None, + coro: Generator[Any, Any, Any] | Awaitable[Any], + ): + func = get_current_task + self._name = f"{func.__module__}.{func.__qualname__}" + self.id: int = id + self.parent_id: int | None = parent_id + self.name: str | None = name + self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro + + def __eq__(self, other: object) -> bool: + if isinstance(other, TaskInfo): + return self.id == other.id + + return NotImplemented + + def __hash__(self) -> int: + return hash(self.id) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" + + def has_pending_cancellation(self) -> bool: + """ + Return ``True`` if the task has a cancellation pending, ``False`` otherwise. + + """ + return False + + +def get_current_task() -> TaskInfo: + """ + Return the current task. + + :return: a representation of the current task + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return get_async_backend().get_current_task() + + +def get_running_tasks() -> list[TaskInfo]: + """ + Return a list of running tasks in the current event loop. + + :return: a list of task info objects + :raises NoEventLoopError: if no supported asynchronous event loop is running in the + current thread + + """ + return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) + + +async def wait_all_tasks_blocked() -> None: + """Wait until all other tasks are waiting for something.""" + await get_async_backend().wait_all_tasks_blocked() diff --git a/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py b/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py new file mode 100644 index 0000000000000000000000000000000000000000..f358a448cb12739fd4eda4f4859d3a24ddd1de63 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/_core/_typedattr.py @@ -0,0 +1,81 @@ +from __future__ import annotations + +from collections.abc import Callable, Mapping +from typing import Any, TypeVar, final, overload + +from ._exceptions import TypedAttributeLookupError + +T_Attr = TypeVar("T_Attr") +T_Default = TypeVar("T_Default") +undefined = object() + + +def typed_attribute() -> Any: + """Return a unique object, used to mark typed attributes.""" + return object() + + +class TypedAttributeSet: + """ + Superclass for typed attribute collections. + + Checks that every public attribute of every subclass has a type annotation. + """ + + def __init_subclass__(cls) -> None: + annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) + for attrname in dir(cls): + if not attrname.startswith("_") and attrname not in annotations: + raise TypeError( + f"Attribute {attrname!r} is missing its type annotation" + ) + + super().__init_subclass__() + + +class TypedAttributeProvider: + """Base class for classes that wish to provide typed extra attributes.""" + + @property + def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: + """ + A mapping of the extra attributes to callables that return the corresponding + values. 
+ + If the provider wraps another provider, the attributes from that wrapper should + also be included in the returned mapping (but the wrapper may override the + callables from the wrapped instance). + + """ + return {} + + @overload + def extra(self, attribute: T_Attr) -> T_Attr: ... + + @overload + def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... + + @final + def extra(self, attribute: Any, default: object = undefined) -> object: + """ + extra(attribute, default=undefined) + + Return the value of the given typed extra attribute. + + :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to + look for + :param default: the value that should be returned if no value is found for the + attribute + :raises ~anyio.TypedAttributeLookupError: if the search failed and no default + value was given + + """ + try: + getter = self.extra_attributes[attribute] + except KeyError: + if default is undefined: + raise TypedAttributeLookupError("Attribute not found") from None + else: + return default + + return getter() diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__init__.py b/venv/lib/python3.12/site-packages/anyio/abc/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d560ce3f1fa45a7ee4a3bc8958aa59702caa9d0c --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/__init__.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +from ._eventloop import AsyncBackend as AsyncBackend +from ._resources import AsyncResource as AsyncResource +from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket +from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket +from ._sockets import IPAddressType as IPAddressType +from ._sockets import IPSockAddrType as IPSockAddrType +from ._sockets import SocketAttribute as SocketAttribute +from ._sockets import SocketListener as SocketListener +from ._sockets import SocketStream as SocketStream +from ._sockets import UDPPacketType as UDPPacketType +from ._sockets import UDPSocket as UDPSocket +from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType +from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket +from ._sockets import UNIXSocketStream as UNIXSocketStream +from ._streams import AnyByteReceiveStream as AnyByteReceiveStream +from ._streams import AnyByteSendStream as AnyByteSendStream +from ._streams import AnyByteStream as AnyByteStream +from ._streams import AnyByteStreamConnectable as AnyByteStreamConnectable +from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream +from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream +from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream +from ._streams import ByteReceiveStream as ByteReceiveStream +from ._streams import ByteSendStream as ByteSendStream +from ._streams import ByteStream as ByteStream +from ._streams import ByteStreamConnectable as ByteStreamConnectable +from ._streams import Listener as Listener +from ._streams import ObjectReceiveStream as ObjectReceiveStream +from ._streams import ObjectSendStream as ObjectSendStream +from ._streams import ObjectStream as ObjectStream +from ._streams import ObjectStreamConnectable as ObjectStreamConnectable +from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream +from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream +from ._streams import UnreliableObjectStream as UnreliableObjectStream +from 
._subprocesses import Process as Process +from ._tasks import TaskGroup as TaskGroup +from ._tasks import TaskStatus as TaskStatus +from ._testing import TestRunner as TestRunner + +# Re-exported here, for backwards compatibility +# isort: off +from .._core._synchronization import ( + CapacityLimiter as CapacityLimiter, + Condition as Condition, + Event as Event, + Lock as Lock, + Semaphore as Semaphore, +) +from .._core._tasks import CancelScope as CancelScope +from ..from_thread import BlockingPortal as BlockingPortal + +# Re-export imports so they look like they live directly in this package +for __value in list(locals().values()): + if getattr(__value, "__module__", "").startswith("anyio.abc."): + __value.__module__ = __name__ + +del __value diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..76dd1a280464f00777249204e64e315ac0185c88 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7e1d35ab06c409f82f339c2a6d13c3a8a11a932 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_eventloop.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..032bc9f58d673c13d5abcf63c02b3b5653c076c0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_resources.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..015a1896ce3e8c55b50724f18adeea40114837f9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_sockets.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..094041b97b478963d21a0126911813ea04b985fb Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_streams.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d06961e0813451fd65369ab0351022619489337d Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_subprocesses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e5677984bd3d2d36fe70e865bdd9b90dfa4e59e Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_tasks.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..6a4a3b64a9aeda0146170e2e2ef6eb925d0aac5c
Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/abc/__pycache__/_testing.cpython-312.pyc differ
diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py b/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae0628807c7e3f37527a9cd2c8e454601f8615f4
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/abc/_eventloop.py
@@ -0,0 +1,409 @@
+from __future__ import annotations
+
+import math
+import sys
+from abc import ABCMeta, abstractmethod
+from collections.abc import AsyncIterator, Awaitable, Callable, Sequence
+from contextlib import AbstractContextManager
+from os import PathLike
+from signal import Signals
+from socket import AddressFamily, SocketKind, socket
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    TypeAlias,
+    TypeVar,
+    overload,
+)
+
+if sys.version_info >= (3, 11):
+    from typing import TypeVarTuple, Unpack
+else:
+    from typing_extensions import TypeVarTuple, Unpack
+
+if TYPE_CHECKING:
+    from _typeshed import FileDescriptorLike
+
+    from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore
+    from .._core._tasks import CancelScope
+    from .._core._testing import TaskInfo
+    from ._sockets import (
+        ConnectedUDPSocket,
+        ConnectedUNIXDatagramSocket,
+        IPSockAddrType,
+        SocketListener,
+        SocketStream,
+        UDPSocket,
+        UNIXDatagramSocket,
+        UNIXSocketStream,
+    )
+    from ._subprocesses import Process
+    from ._tasks import TaskGroup
+    from ._testing import TestRunner
+
+T_Retval = TypeVar("T_Retval")
+PosArgsT = TypeVarTuple("PosArgsT")
+StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes]
+
+
+class AsyncBackend(metaclass=ABCMeta):
+    @classmethod
+    @abstractmethod
+    def run(
+        cls,
+        func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
+        args: tuple[Unpack[PosArgsT]],
+        kwargs: dict[str, Any],
+        options: dict[str, Any],
+    ) -> T_Retval:
+        """
+        Run the given coroutine function in an asynchronous event loop.
+
+        The current thread must not already be running an event loop.
+
+        :param func: a coroutine function
+        :param args: positional arguments to ``func``
+        :param kwargs: keyword arguments to ``func``
+        :param options: keyword arguments to call the backend ``run()`` implementation
+            with
+        :return: the return value of the coroutine function
+        """
+
+    @classmethod
+    @abstractmethod
+    def current_token(cls) -> object:
+        """
+        Return an object that allows other threads to run code inside the event loop.
+
+        :return: a token object, specific to the event loop running in the current
+            thread
+        """
+
+    @classmethod
+    @abstractmethod
+    def current_time(cls) -> float:
+        """
+        Return the current value of the event loop's internal clock.
+
+        :return: the clock value (seconds)
+        """
+
+    @classmethod
+    @abstractmethod
+    def cancelled_exception_class(cls) -> type[BaseException]:
+        """Return the exception class that is raised in a task if it's cancelled."""
+
+    @classmethod
+    @abstractmethod
+    async def checkpoint(cls) -> None:
+        """
+        Check if the task has been cancelled, and allow rescheduling of other tasks.
+ + This is effectively the same as running :meth:`checkpoint_if_cancelled` and then + :meth:`cancel_shielded_checkpoint`. + """ + + @classmethod + async def checkpoint_if_cancelled(cls) -> None: + """ + Check if the current task group has been cancelled. + + This will check if the task has been cancelled, but will not allow other tasks + to be scheduled if not. + + """ + if cls.current_effective_deadline() == -math.inf: + await cls.checkpoint() + + @classmethod + async def cancel_shielded_checkpoint(cls) -> None: + """ + Allow the rescheduling of other tasks. + + This will give other tasks the opportunity to run, but without checking if the + current task group has been cancelled, unlike with :meth:`checkpoint`. + + """ + with cls.create_cancel_scope(shield=True): + await cls.sleep(0) + + @classmethod + @abstractmethod + async def sleep(cls, delay: float) -> None: + """ + Pause the current task for the specified duration. + + :param delay: the duration, in seconds + """ + + @classmethod + @abstractmethod + def create_cancel_scope( + cls, *, deadline: float = math.inf, shield: bool = False + ) -> CancelScope: + pass + + @classmethod + @abstractmethod + def current_effective_deadline(cls) -> float: + """ + Return the nearest deadline among all the cancel scopes effective for the + current task. + + :return: + - a clock value from the event loop's internal clock + - ``inf`` if there is no deadline in effect + - ``-inf`` if the current scope has been cancelled + :rtype: float + """ + + @classmethod + @abstractmethod + def create_task_group(cls) -> TaskGroup: + pass + + @classmethod + @abstractmethod + def create_event(cls) -> Event: + pass + + @classmethod + @abstractmethod + def create_lock(cls, *, fast_acquire: bool) -> Lock: + pass + + @classmethod + @abstractmethod + def create_semaphore( + cls, + initial_value: int, + *, + max_value: int | None = None, + fast_acquire: bool = False, + ) -> Semaphore: + pass + + @classmethod + @abstractmethod + def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + async def run_sync_in_worker_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + abandon_on_cancel: bool = False, + limiter: CapacityLimiter | None = None, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def check_cancelled(cls) -> None: + pass + + @classmethod + @abstractmethod + def run_async_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + def run_sync_from_thread( + cls, + func: Callable[[Unpack[PosArgsT]], T_Retval], + args: tuple[Unpack[PosArgsT]], + token: object, + ) -> T_Retval: + pass + + @classmethod + @abstractmethod + async def open_process( + cls, + command: StrOrBytesPath | Sequence[StrOrBytesPath], + *, + stdin: int | IO[Any] | None, + stdout: int | IO[Any] | None, + stderr: int | IO[Any] | None, + **kwargs: Any, + ) -> Process: + pass + + @classmethod + @abstractmethod + def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: + pass + + @classmethod + @abstractmethod + async def connect_tcp( + cls, host: str, port: int, local_address: IPSockAddrType | None = None + ) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + def create_tcp_listener(cls, sock: socket) -> SocketListener: + 
pass + + @classmethod + @abstractmethod + def create_unix_listener(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def create_udp_socket( + cls, + family: AddressFamily, + local_address: IPSockAddrType | None, + remote_address: IPSockAddrType | None, + reuse_port: bool, + ) -> UDPSocket | ConnectedUDPSocket: + pass + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: None + ) -> UNIXDatagramSocket: ... + + @classmethod + @overload + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes + ) -> ConnectedUNIXDatagramSocket: ... + + @classmethod + @abstractmethod + async def create_unix_datagram_socket( + cls, raw_socket: socket, remote_path: str | bytes | None + ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def getaddrinfo( + cls, + host: bytes | str | None, + port: str | int | None, + *, + family: int | AddressFamily = 0, + type: int | SocketKind = 0, + proto: int = 0, + flags: int = 0, + ) -> Sequence[ + tuple[ + AddressFamily, + SocketKind, + int, + str, + tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes], + ] + ]: + pass + + @classmethod + @abstractmethod + async def getnameinfo( + cls, sockaddr: IPSockAddrType, flags: int = 0 + ) -> tuple[str, str]: + pass + + @classmethod + @abstractmethod + async def wait_readable(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + async def wait_writable(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + def notify_closing(cls, obj: FileDescriptorLike) -> None: + pass + + @classmethod + @abstractmethod + async def wrap_listener_socket(cls, sock: socket) -> SocketListener: + pass + + @classmethod + @abstractmethod + async def wrap_stream_socket(cls, sock: socket) -> SocketStream: + pass + + @classmethod + @abstractmethod + async def wrap_unix_stream_socket(cls, sock: socket) -> UNIXSocketStream: + pass + + @classmethod + @abstractmethod + async def wrap_udp_socket(cls, sock: socket) -> UDPSocket: + pass + + @classmethod + @abstractmethod + async def wrap_connected_udp_socket(cls, sock: socket) -> ConnectedUDPSocket: + pass + + @classmethod + @abstractmethod + async def wrap_unix_datagram_socket(cls, sock: socket) -> UNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + async def wrap_connected_unix_datagram_socket( + cls, sock: socket + ) -> ConnectedUNIXDatagramSocket: + pass + + @classmethod + @abstractmethod + def current_default_thread_limiter(cls) -> CapacityLimiter: + pass + + @classmethod + @abstractmethod + def open_signal_receiver( + cls, *signals: Signals + ) -> AbstractContextManager[AsyncIterator[Signals]]: + pass + + @classmethod + @abstractmethod + def get_current_task(cls) -> TaskInfo: + pass + + @classmethod + @abstractmethod + def get_running_tasks(cls) -> Sequence[TaskInfo]: + pass + + @classmethod + @abstractmethod + async def wait_all_tasks_blocked(cls) -> None: + pass + + @classmethod + @abstractmethod + def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: + pass diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_resources.py b/venv/lib/python3.12/site-packages/anyio/abc/_resources.py new file mode 100644 index 0000000000000000000000000000000000000000..10df115a7b9f975493476da763cc1e26dbd822e5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_resources.py @@ -0,0 +1,33 @@ +from __future__ import annotations + +from abc 
import ABCMeta, abstractmethod +from types import TracebackType +from typing import TypeVar + +T = TypeVar("T") + + +class AsyncResource(metaclass=ABCMeta): + """ + Abstract base class for all closeable asynchronous resources. + + Works as an asynchronous context manager which returns the instance itself on enter, + and calls :meth:`aclose` on exit. + """ + + __slots__ = () + + async def __aenter__(self: T) -> T: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + await self.aclose() + + @abstractmethod + async def aclose(self) -> None: + """Close the resource.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py b/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py new file mode 100644 index 0000000000000000000000000000000000000000..feb26bd44a240acb20fd0f2498dff5631b8e2fb3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_sockets.py @@ -0,0 +1,399 @@ +from __future__ import annotations + +import errno +import socket +from abc import abstractmethod +from collections.abc import Callable, Collection, Mapping +from contextlib import AsyncExitStack +from io import IOBase +from ipaddress import IPv4Address, IPv6Address +from socket import AddressFamily +from typing import Any, TypeAlias, TypeVar + +from .._core._eventloop import get_async_backend +from .._core._typedattr import ( + TypedAttributeProvider, + TypedAttributeSet, + typed_attribute, +) +from ._streams import ByteStream, Listener, UnreliableObjectStream +from ._tasks import TaskGroup + +IPAddressType: TypeAlias = str | IPv4Address | IPv6Address +IPSockAddrType: TypeAlias = tuple[str, int] +SockAddrType: TypeAlias = IPSockAddrType | str +UDPPacketType: TypeAlias = tuple[bytes, IPSockAddrType] +UNIXDatagramPacketType: TypeAlias = tuple[bytes, str] +T_Retval = TypeVar("T_Retval") + + +def _validate_socket( + sock_or_fd: socket.socket | int, + sock_type: socket.SocketKind, + addr_family: socket.AddressFamily = socket.AF_UNSPEC, + *, + require_connected: bool = False, + require_bound: bool = False, +) -> socket.socket: + if isinstance(sock_or_fd, int): + try: + sock = socket.socket(fileno=sock_or_fd) + except OSError as exc: + if exc.errno == errno.ENOTSOCK: + raise ValueError( + "the file descriptor does not refer to a socket" + ) from exc + elif require_connected: + raise ValueError("the socket must be connected") from exc + elif require_bound: + raise ValueError("the socket must be bound to a local address") from exc + else: + raise + elif isinstance(sock_or_fd, socket.socket): + sock = sock_or_fd + else: + raise TypeError( + f"expected an int or socket, got {type(sock_or_fd).__qualname__} instead" + ) + + try: + if require_connected: + try: + sock.getpeername() + except OSError as exc: + raise ValueError("the socket must be connected") from exc + + if require_bound: + try: + if sock.family in (socket.AF_INET, socket.AF_INET6): + bound_addr = sock.getsockname()[1] + else: + bound_addr = sock.getsockname() + except OSError: + bound_addr = None + + if not bound_addr: + raise ValueError("the socket must be bound to a local address") + + if addr_family != socket.AF_UNSPEC and sock.family != addr_family: + raise ValueError( + f"address family mismatch: expected {addr_family.name}, got " + f"{sock.family.name}" + ) + + if sock.type != sock_type: + raise ValueError( + f"socket type mismatch: expected {sock_type.name}, got {sock.type.name}" + ) + except BaseException: + # Avoid ResourceWarning 
from the locally constructed socket object + if isinstance(sock_or_fd, int): + sock.detach() + + raise + + sock.setblocking(False) + return sock + + +class SocketAttribute(TypedAttributeSet): + """ + .. attribute:: family + :type: socket.AddressFamily + + the address family of the underlying socket + + .. attribute:: local_address + :type: tuple[str, int] | str + + the local address the underlying socket is connected to + + .. attribute:: local_port + :type: int + + for IP based sockets, the local port the underlying socket is bound to + + .. attribute:: raw_socket + :type: socket.socket + + the underlying stdlib socket object + + .. attribute:: remote_address + :type: tuple[str, int] | str + + the remote address the underlying socket is connected to + + .. attribute:: remote_port + :type: int + + for IP based sockets, the remote port the underlying socket is connected to + """ + + family: AddressFamily = typed_attribute() + local_address: SockAddrType = typed_attribute() + local_port: int = typed_attribute() + raw_socket: socket.socket = typed_attribute() + remote_address: SockAddrType = typed_attribute() + remote_port: int = typed_attribute() + + +class _SocketProvider(TypedAttributeProvider): + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + from .._core._sockets import convert_ipv6_sockaddr as convert + + attributes: dict[Any, Callable[[], Any]] = { + SocketAttribute.family: lambda: self._raw_socket.family, + SocketAttribute.local_address: lambda: convert( + self._raw_socket.getsockname() + ), + SocketAttribute.raw_socket: lambda: self._raw_socket, + } + try: + peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) + except OSError: + peername = None + + # Provide the remote address for connected sockets + if peername is not None: + attributes[SocketAttribute.remote_address] = lambda: peername + + # Provide local and remote ports for IP based sockets + if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): + attributes[SocketAttribute.local_port] = lambda: ( + self._raw_socket.getsockname()[1] + ) + if peername is not None: + remote_port = peername[1] + attributes[SocketAttribute.remote_port] = lambda: remote_port + + return attributes + + @property + @abstractmethod + def _raw_socket(self) -> socket.socket: + pass + + +class SocketStream(ByteStream, _SocketProvider): + """ + Transports bytes over a socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> SocketStream: + """ + Wrap an existing socket object or file descriptor as a socket stream. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a socket stream + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_connected=True) + return await get_async_backend().wrap_stream_socket(sock) + + +class UNIXSocketStream(SocketStream): + @classmethod + async def from_socket(cls, sock_or_fd: socket.socket | int) -> UNIXSocketStream: + """ + Wrap an existing socket object or file descriptor as a UNIX socket stream. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. 
+
+        :param sock_or_fd: a socket object or file descriptor
+        :return: a UNIX socket stream
+
+        """
+        sock = _validate_socket(
+            sock_or_fd, socket.SOCK_STREAM, socket.AF_UNIX, require_connected=True
+        )
+        return await get_async_backend().wrap_unix_stream_socket(sock)
+
+    @abstractmethod
+    async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
+        """
+        Send file descriptors along with a message to the peer.
+
+        :param message: a non-empty bytestring
+        :param fds: a collection of files (either numeric file descriptors or open file
+            or socket objects)
+        """
+
+    @abstractmethod
+    async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
+        """
+        Receive file descriptors along with a message from the peer.
+
+        :param msglen: length of the message to expect from the peer
+        :param maxfds: maximum number of file descriptors to expect from the peer
+        :return: a tuple of (message, file descriptors)
+        """
+
+
+class SocketListener(Listener[SocketStream], _SocketProvider):
+    """
+    Listens to incoming socket connections.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    @classmethod
+    async def from_socket(
+        cls,
+        sock_or_fd: socket.socket | int,
+    ) -> SocketListener:
+        """
+        Wrap an existing socket object or file descriptor as a socket listener.
+
+        The newly created listener takes ownership of the socket being passed in.
+
+        :param sock_or_fd: a socket object or file descriptor
+        :return: a socket listener
+
+        """
+        sock = _validate_socket(sock_or_fd, socket.SOCK_STREAM, require_bound=True)
+        return await get_async_backend().wrap_listener_socket(sock)
+
+    @abstractmethod
+    async def accept(self) -> SocketStream:
+        """Accept an incoming connection."""
+
+    async def serve(
+        self,
+        handler: Callable[[SocketStream], Any],
+        task_group: TaskGroup | None = None,
+    ) -> None:
+        from .. import create_task_group
+
+        async with AsyncExitStack() as stack:
+            if task_group is None:
+                task_group = await stack.enter_async_context(create_task_group())
+
+            # Accept connections indefinitely, spawning a handler task for each
+            while True:
+                stream = await self.accept()
+                task_group.start_soon(handler, stream)
+
+
+class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider):
+    """
+    Represents an unconnected UDP socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    @classmethod
+    async def from_socket(cls, sock_or_fd: socket.socket | int) -> UDPSocket:
+        """
+        Wrap an existing socket object or file descriptor as a UDP socket.
+
+        The newly created socket wrapper takes ownership of the socket being passed in.
+        The existing socket must be bound to a local address.
+
+        :param sock_or_fd: a socket object or file descriptor
+        :return: a UDP socket
+
+        """
+        sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, require_bound=True)
+        return await get_async_backend().wrap_udp_socket(sock)
+
+    async def sendto(self, data: bytes, host: str, port: int) -> None:
+        """
+        Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))).
+
+        """
+        return await self.send((data, (host, port)))
+
+
+class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider):
+    """
+    Represents a connected UDP socket.
+
+    Supports all relevant extra attributes from :class:`~SocketAttribute`.
+    """
+
+    @classmethod
+    async def from_socket(cls, sock_or_fd: socket.socket | int) -> ConnectedUDPSocket:
+        """
+        Wrap an existing socket object or file descriptor as a connected UDP socket.
+ + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a connected UDP socket + + """ + sock = _validate_socket( + sock_or_fd, + socket.SOCK_DGRAM, + require_connected=True, + ) + return await get_async_backend().wrap_connected_udp_socket(sock) + + +class UNIXDatagramSocket( + UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider +): + """ + Represents an unconnected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket( + cls, + sock_or_fd: socket.socket | int, + ) -> UNIXDatagramSocket: + """ + Wrap an existing socket object or file descriptor as a UNIX datagram + socket. + + The newly created socket wrapper takes ownership of the socket being passed in. + + :param sock_or_fd: a socket object or file descriptor + :return: a UNIX datagram socket + + """ + sock = _validate_socket(sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX) + return await get_async_backend().wrap_unix_datagram_socket(sock) + + async def sendto(self, data: bytes, path: str) -> None: + """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" + return await self.send((data, path)) + + +class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): + """ + Represents a connected Unix datagram socket. + + Supports all relevant extra attributes from :class:`~SocketAttribute`. + """ + + @classmethod + async def from_socket( + cls, + sock_or_fd: socket.socket | int, + ) -> ConnectedUNIXDatagramSocket: + """ + Wrap an existing socket object or file descriptor as a connected UNIX datagram + socket. + + The newly created socket wrapper takes ownership of the socket being passed in. + The existing socket must already be connected. + + :param sock_or_fd: a socket object or file descriptor + :return: a connected UNIX datagram socket + + """ + sock = _validate_socket( + sock_or_fd, socket.SOCK_DGRAM, socket.AF_UNIX, require_connected=True + ) + return await get_async_backend().wrap_connected_unix_datagram_socket(sock) diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_streams.py b/venv/lib/python3.12/site-packages/anyio/abc/_streams.py new file mode 100644 index 0000000000000000000000000000000000000000..186e3f503a9a71ce96ac2087cf8bfdedc490c534 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_streams.py @@ -0,0 +1,233 @@ +from __future__ import annotations + +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from typing import Any, Generic, TypeAlias, TypeVar + +from .._core._exceptions import EndOfStream +from .._core._typedattr import TypedAttributeProvider +from ._resources import AsyncResource +from ._tasks import TaskGroup + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class UnreliableObjectReceiveStream( + Generic[T_co], AsyncResource, TypedAttributeProvider +): + """ + An interface for receiving objects. + + This interface makes no guarantees that the received messages arrive in the order in + which they were sent, or that no messages are missed. + + Asynchronously iterating over objects of this type will yield objects matching the + given type parameter. 
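+
+    For example, a consumer can simply iterate the stream until the peer closes
+    it (an illustrative sketch; ``stream`` is any object receive stream)::
+
+        async for item in stream:
+            print(item)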
+ """ + + def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: + return self + + async def __anext__(self) -> T_co: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration from None + + @abstractmethod + async def receive(self) -> T_co: + """ + Receive the next item. + + :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly + closed + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectSendStream( + Generic[T_contra], AsyncResource, TypedAttributeProvider +): + """ + An interface for sending objects. + + This interface makes no guarantees that the messages sent will reach the + recipient(s) in the same order in which they were sent, or at all. + """ + + @abstractmethod + async def send(self, item: T_contra) -> None: + """ + Send an item to the peer(s). + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if the send stream has been explicitly + closed + :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable + due to external causes + """ + + +class UnreliableObjectStream( + UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] +): + """ + A bidirectional message stream which does not guarantee the order or reliability of + message delivery. + """ + + +class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): + """ + A receive message stream which guarantees that messages are received in the same + order in which they were sent, and that no messages are missed. + """ + + +class ObjectSendStream(UnreliableObjectSendStream[T_contra]): + """ + A send message stream which guarantees that messages are delivered in the same order + in which they were sent, without missing any messages in the middle. + """ + + +class ObjectStream( + ObjectReceiveStream[T_Item], + ObjectSendStream[T_Item], + UnreliableObjectStream[T_Item], +): + """ + A bidirectional message stream which guarantees the order and reliability of message + delivery. + """ + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +class ByteReceiveStream(AsyncResource, TypedAttributeProvider): + """ + An interface for receiving bytes from a single peer. + + Iterating this byte stream will yield a byte string of arbitrary length, but no more + than 65536 bytes. + """ + + def __aiter__(self) -> ByteReceiveStream: + return self + + async def __anext__(self) -> bytes: + try: + return await self.receive() + except EndOfStream: + raise StopAsyncIteration from None + + @abstractmethod + async def receive(self, max_bytes: int = 65536) -> bytes: + """ + Receive at most ``max_bytes`` bytes from the peer. + + .. note:: Implementers of this interface should not return an empty + :class:`bytes` object, and users should ignore them. + + :param max_bytes: maximum number of bytes to receive + :return: the received bytes + :raises ~anyio.EndOfStream: if this stream has been closed from the other end + """ + + +class ByteSendStream(AsyncResource, TypedAttributeProvider): + """An interface for sending bytes to a single peer.""" + + @abstractmethod + async def send(self, item: bytes) -> None: + """ + Send the given bytes to the peer. 
+ + :param item: the bytes to send + """ + + +class ByteStream(ByteReceiveStream, ByteSendStream): + """A bidirectional byte stream.""" + + @abstractmethod + async def send_eof(self) -> None: + """ + Send an end-of-file indication to the peer. + + You should not try to send any further data to this stream after calling this + method. This method is idempotent (does nothing on successive calls). + """ + + +#: Type alias for all unreliable bytes-oriented receive streams. +AnyUnreliableByteReceiveStream: TypeAlias = ( + UnreliableObjectReceiveStream[bytes] | ByteReceiveStream +) +#: Type alias for all unreliable bytes-oriented send streams. +AnyUnreliableByteSendStream: TypeAlias = ( + UnreliableObjectSendStream[bytes] | ByteSendStream +) +#: Type alias for all unreliable bytes-oriented streams. +AnyUnreliableByteStream: TypeAlias = UnreliableObjectStream[bytes] | ByteStream +#: Type alias for all bytes-oriented receive streams. +AnyByteReceiveStream: TypeAlias = ObjectReceiveStream[bytes] | ByteReceiveStream +#: Type alias for all bytes-oriented send streams. +AnyByteSendStream: TypeAlias = ObjectSendStream[bytes] | ByteSendStream +#: Type alias for all bytes-oriented streams. +AnyByteStream: TypeAlias = ObjectStream[bytes] | ByteStream + + +class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): + """An interface for objects that let you accept incoming connections.""" + + @abstractmethod + async def serve( + self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None + ) -> None: + """ + Accept incoming connections as they come in and start tasks to handle them. + + :param handler: a callable that will be used to handle each accepted connection + :param task_group: the task group that will be used to start tasks for handling + each accepted connection (if omitted, an ad-hoc task group will be created) + """ + + +class ObjectStreamConnectable(Generic[T_co], metaclass=ABCMeta): + @abstractmethod + async def connect(self) -> ObjectStream[T_co]: + """ + Connect to the remote endpoint. + + :return: an object stream connected to the remote end + :raises ConnectionFailed: if the connection fails + """ + + +class ByteStreamConnectable(metaclass=ABCMeta): + @abstractmethod + async def connect(self) -> ByteStream: + """ + Connect to the remote endpoint. + + :return: a bytestream connected to the remote end + :raises ConnectionFailed: if the connection fails + """ + + +#: Type alias for all connectables returning bytestreams or bytes-oriented object streams +AnyByteStreamConnectable: TypeAlias = ( + ObjectStreamConnectable[bytes] | ByteStreamConnectable +) diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py b/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py new file mode 100644 index 0000000000000000000000000000000000000000..ce0564ceac8aac425675b5c8f7f7205d08061fd3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_subprocesses.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +from abc import abstractmethod +from signal import Signals + +from ._resources import AsyncResource +from ._streams import ByteReceiveStream, ByteSendStream + + +class Process(AsyncResource): + """An asynchronous version of :class:`subprocess.Popen`.""" + + @abstractmethod + async def wait(self) -> int: + """ + Wait until the process exits. + + :return: the exit code of the process + """ + + @abstractmethod + def terminate(self) -> None: + """ + Terminates the process, gracefully if possible. + + On Windows, this calls ``TerminateProcess()``. 
+ On POSIX systems, this sends ``SIGTERM`` to the process. + + .. seealso:: :meth:`subprocess.Popen.terminate` + """ + + @abstractmethod + def kill(self) -> None: + """ + Kills the process. + + On Windows, this calls ``TerminateProcess()``. + On POSIX systems, this sends ``SIGKILL`` to the process. + + .. seealso:: :meth:`subprocess.Popen.kill` + """ + + @abstractmethod + def send_signal(self, signal: Signals) -> None: + """ + Send a signal to the subprocess. + + .. seealso:: :meth:`subprocess.Popen.send_signal` + + :param signal: the signal number (e.g. :data:`signal.SIGHUP`) + """ + + @property + @abstractmethod + def pid(self) -> int: + """The process ID of the process.""" + + @property + @abstractmethod + def returncode(self) -> int | None: + """ + The return code of the process. If the process has not yet terminated, this will + be ``None``. + """ + + @property + @abstractmethod + def stdin(self) -> ByteSendStream | None: + """The stream for the standard input of the process.""" + + @property + @abstractmethod + def stdout(self) -> ByteReceiveStream | None: + """The stream for the standard output of the process.""" + + @property + @abstractmethod + def stderr(self) -> ByteReceiveStream | None: + """The stream for the standard error output of the process.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py b/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py new file mode 100644 index 0000000000000000000000000000000000000000..516b3ec3b38a4b140f5d607dd28da989f057b832 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_tasks.py @@ -0,0 +1,117 @@ +from __future__ import annotations + +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Awaitable, Callable +from types import TracebackType +from typing import TYPE_CHECKING, Any, Protocol, overload + +if sys.version_info >= (3, 13): + from typing import TypeVar +else: + from typing_extensions import TypeVar + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if TYPE_CHECKING: + from .._core._tasks import CancelScope + +T_Retval = TypeVar("T_Retval") +T_contra = TypeVar("T_contra", contravariant=True, default=None) +PosArgsT = TypeVarTuple("PosArgsT") + + +class TaskStatus(Protocol[T_contra]): + @overload + def started(self: TaskStatus[None]) -> None: ... + + @overload + def started(self, value: T_contra) -> None: ... + + def started(self, value: T_contra | None = None) -> None: + """ + Signal that the task has started. + + :param value: object passed back to the starter of the task + """ + + +class TaskGroup(metaclass=ABCMeta): + """ + Groups several asynchronous tasks together. + + :ivar cancel_scope: the cancel scope inherited by all child tasks + :vartype cancel_scope: CancelScope + + .. note:: On asyncio, support for eager task factories is considered to be + **experimental**. In particular, they don't follow the usual semantics of new + tasks being scheduled on the next iteration of the event loop, and may thus + cause unexpected behavior in code that wasn't written with such semantics in + mind. + """ + + cancel_scope: CancelScope + + @abstractmethod + def start_soon( + self, + func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], + *args: Unpack[PosArgsT], + name: object = None, + ) -> None: + """ + Start a new task in this task group. 
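+
+        For example, starting two concurrent sleepers (an illustrative sketch;
+        :func:`~anyio.sleep` stands in for any coroutine function)::
+
+            from anyio import create_task_group, sleep
+
+            async with create_task_group() as tg:
+                tg.start_soon(sleep, 1)
+                tg.start_soon(sleep, 1, name="second sleeper")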
+ + :param func: a coroutine function + :param args: positional arguments to call the function with + :param name: name of the task, for the purposes of introspection and debugging + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def start( + self, + func: Callable[..., Awaitable[Any]], + *args: object, + name: object = None, + ) -> Any: + """ + Start a new task and wait until it signals for readiness. + + The target callable must accept a keyword argument ``task_status`` (of type + :class:`TaskStatus`). Awaiting on this method will return whatever was passed to + ``task_status.started()`` (``None`` by default). + + .. note:: The :class:`TaskStatus` class is generic, and the type argument should + indicate the type of the value that will be passed to + ``task_status.started()``. + + :param func: a coroutine function that accepts the ``task_status`` keyword + argument + :param args: positional arguments to call the function with + :param name: an optional name for the task, for introspection and debugging + :return: the value passed to ``task_status.started()`` + :raises RuntimeError: if the task finishes without calling + ``task_status.started()`` + + .. seealso:: :ref:`start_initialize` + + .. versionadded:: 3.0 + """ + + @abstractmethod + async def __aenter__(self) -> TaskGroup: + """Enter the task group context and allow starting new tasks.""" + + @abstractmethod + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + """Exit the task group context waiting for all tasks to finish.""" diff --git a/venv/lib/python3.12/site-packages/anyio/abc/_testing.py b/venv/lib/python3.12/site-packages/anyio/abc/_testing.py new file mode 100644 index 0000000000000000000000000000000000000000..7c50ed76dc4d8df41262973a0122295523e2a935 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/abc/_testing.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import types +from abc import ABCMeta, abstractmethod +from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable +from typing import Any, TypeVar + +_T = TypeVar("_T") + + +class TestRunner(metaclass=ABCMeta): + """ + Encapsulates a running event loop. Every call made through this object will use the + same event loop. + """ + + def __enter__(self) -> TestRunner: + return self + + @abstractmethod + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: types.TracebackType | None, + ) -> bool | None: ... + + @abstractmethod + def run_asyncgen_fixture( + self, + fixture_func: Callable[..., AsyncGenerator[_T, Any]], + kwargs: dict[str, Any], + ) -> Iterable[_T]: + """ + Run an async generator fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: an iterator yielding the value yielded from the async generator + """ + + @abstractmethod + def run_fixture( + self, + fixture_func: Callable[..., Coroutine[Any, Any, _T]], + kwargs: dict[str, Any], + ) -> _T: + """ + Run an async fixture. + + :param fixture_func: the fixture function + :param kwargs: keyword arguments to call the fixture function with + :return: the return value of the fixture function + """ + + @abstractmethod + def run_test( + self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] + ) -> None: + """ + Run an async test function. 
+ + :param test_func: the test function + :param kwargs: keyword arguments to call the test function with + """ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__init__.py b/venv/lib/python3.12/site-packages/anyio/streams/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d19316c54d2a7f4c6490077f68c65cfda6ccbd4a Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6abb0dbdb75ec5102fffb4bd4e992204a6bfff41 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/buffered.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e21de261804b020521f7155c63cc76fa87cde90 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/file.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..36f7e7529fc1f30ee781f201af286a85c5ddc461 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/memory.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e966f5ebde0886995e7294a06a1527c56204cfbe Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/stapled.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6aa8172578eb9d423cfd42c403c046e398919dd8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/text.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5c0fd4fdd226851810b85511c0745a74b98b7846 Binary files /dev/null and b/venv/lib/python3.12/site-packages/anyio/streams/__pycache__/tls.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/anyio/streams/buffered.py b/venv/lib/python3.12/site-packages/anyio/streams/buffered.py new file mode 100644 index 0000000000000000000000000000000000000000..57c7cd749bfb94bbe7a992aa9a05af268a841d3d --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/buffered.py @@ -0,0 
+1,188 @@ +from __future__ import annotations + +__all__ = ( + "BufferedByteReceiveStream", + "BufferedByteStream", + "BufferedConnectable", +) + +import sys +from collections.abc import Callable, Iterable, Mapping +from dataclasses import dataclass, field +from typing import Any, SupportsIndex + +from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead +from ..abc import ( + AnyByteReceiveStream, + AnyByteStream, + AnyByteStreamConnectable, + ByteReceiveStream, + ByteStream, + ByteStreamConnectable, +) + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@dataclass(eq=False) +class BufferedByteReceiveStream(ByteReceiveStream): + """ + Wraps any bytes-based receive stream and uses a buffer to provide sophisticated + receiving capabilities in the form of a byte stream. + """ + + receive_stream: AnyByteReceiveStream + _buffer: bytearray = field(init=False, default_factory=bytearray) + _closed: bool = field(init=False, default=False) + + async def aclose(self) -> None: + await self.receive_stream.aclose() + self._closed = True + + @property + def buffer(self) -> bytes: + """The bytes currently in the buffer.""" + return bytes(self._buffer) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.receive_stream.extra_attributes + + def feed_data(self, data: Iterable[SupportsIndex], /) -> None: + """ + Append data directly into the buffer. + + Any data in the buffer will be consumed by receive operations before receiving + anything from the wrapped stream. + + :param data: the data to append to the buffer (can be bytes or anything else + that supports ``__index__()``) + + """ + self._buffer.extend(data) + + async def receive(self, max_bytes: int = 65536) -> bytes: + if self._closed: + raise ClosedResourceError + + if self._buffer: + chunk = bytes(self._buffer[:max_bytes]) + del self._buffer[:max_bytes] + return chunk + elif isinstance(self.receive_stream, ByteReceiveStream): + return await self.receive_stream.receive(max_bytes) + else: + # With a bytes-oriented object stream, we need to handle any surplus bytes + # we get from the receive() call + chunk = await self.receive_stream.receive() + if len(chunk) > max_bytes: + # Save the surplus bytes in the buffer + self._buffer.extend(chunk[max_bytes:]) + return chunk[:max_bytes] + else: + return chunk + + async def receive_exactly(self, nbytes: int) -> bytes: + """ + Read exactly the given amount of bytes from the stream. + + :param nbytes: the number of bytes to read + :return: the bytes read + :raises ~anyio.IncompleteRead: if the stream was closed before the requested + amount of bytes could be read from the stream + + """ + while True: + remaining = nbytes - len(self._buffer) + if remaining <= 0: + retval = self._buffer[:nbytes] + del self._buffer[:nbytes] + return bytes(retval) + + try: + if isinstance(self.receive_stream, ByteReceiveStream): + chunk = await self.receive_stream.receive(remaining) + else: + chunk = await self.receive_stream.receive() + except EndOfStream as exc: + raise IncompleteRead from exc + + self._buffer.extend(chunk) + + async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: + """ + Read from the stream until the delimiter is found or max_bytes have been read. 
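+
+        For example, reading one newline-terminated line (an illustrative sketch;
+        ``stream`` is any bytes-oriented receive stream)::
+
+            buffered = BufferedByteReceiveStream(stream)
+            line = await buffered.receive_until(b"\n", max_bytes=65536)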
+
+        :param delimiter: the marker to look for in the stream
+        :param max_bytes: maximum number of bytes that will be read before raising
+            :exc:`~anyio.DelimiterNotFound`
+        :return: the bytes read (not including the delimiter)
+        :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter
+            was found
+        :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the
+            bytes read up to the maximum allowed
+
+        """
+        delimiter_size = len(delimiter)
+        offset = 0
+        while True:
+            # Check if the delimiter can be found in the current buffer
+            index = self._buffer.find(delimiter, offset)
+            if index >= 0:
+                found = self._buffer[:index]
+                del self._buffer[: index + len(delimiter)]
+                return bytes(found)
+
+            # Check if the buffer is already at or over the limit
+            if len(self._buffer) >= max_bytes:
+                raise DelimiterNotFound(max_bytes)
+
+            # Read more data into the buffer from the wrapped stream
+            try:
+                data = await self.receive_stream.receive()
+            except EndOfStream as exc:
+                raise IncompleteRead from exc
+
+            # Move the offset forward and add the new data to the buffer
+            offset = max(len(self._buffer) - delimiter_size + 1, 0)
+            self._buffer.extend(data)
+
+
+class BufferedByteStream(BufferedByteReceiveStream, ByteStream):
+    """
+    A full-duplex variant of :class:`BufferedByteReceiveStream`. All writes are passed
+    through to the wrapped stream as-is.
+    """
+
+    def __init__(self, stream: AnyByteStream):
+        """
+        :param stream: the stream to be wrapped
+
+        """
+        super().__init__(stream)
+        self._stream = stream
+
+    @override
+    async def send_eof(self) -> None:
+        await self._stream.send_eof()
+
+    @override
+    async def send(self, item: bytes) -> None:
+        await self._stream.send(item)
+
+
+class BufferedConnectable(ByteStreamConnectable):
+    def __init__(self, connectable: AnyByteStreamConnectable):
+        """
+        :param connectable: the connectable to wrap
+
+        """
+        self.connectable = connectable
+
+    @override
+    async def connect(self) -> BufferedByteStream:
+        stream = await self.connectable.connect()
+        return BufferedByteStream(stream)
diff --git a/venv/lib/python3.12/site-packages/anyio/streams/file.py b/venv/lib/python3.12/site-packages/anyio/streams/file.py
new file mode 100644
index 0000000000000000000000000000000000000000..79c3d500beae578832b7e960a76ead95c80be01e
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/anyio/streams/file.py
@@ -0,0 +1,154 @@
+from __future__ import annotations
+
+__all__ = (
+    "FileReadStream",
+    "FileStreamAttribute",
+    "FileWriteStream",
+)
+
+from collections.abc import Callable, Mapping
+from io import SEEK_SET, UnsupportedOperation
+from os import PathLike
+from pathlib import Path
+from typing import IO, Any
+
+from ..
import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + TypedAttributeSet, + to_thread, + typed_attribute, +) +from ..abc import ByteReceiveStream, ByteSendStream + + +class FileStreamAttribute(TypedAttributeSet): + #: the open file descriptor + file: IO[bytes] = typed_attribute() + #: the path of the file on the file system, if available (file must be a real file) + path: Path = typed_attribute() + #: the file number, if available (file must be a real file or a TTY) + fileno: int = typed_attribute() + + +class _BaseFileStream: + def __init__(self, file: IO[bytes]): + self._file = file + + async def aclose(self) -> None: + await to_thread.run_sync(self._file.close) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict[Any, Callable[[], Any]] = { + FileStreamAttribute.file: lambda: self._file, + } + + if hasattr(self._file, "name"): + attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) + + try: + self._file.fileno() + except UnsupportedOperation: + pass + else: + attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() + + return attributes + + +class FileReadStream(_BaseFileStream, ByteReceiveStream): + """ + A byte stream that reads from a file in the file system. + + :param file: a file that has been opened for reading in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: + """ + Create a file read stream by opening the given file. + + :param path: path of the file to read from + + """ + file = await to_thread.run_sync(Path(path).open, "rb") + return cls(file) + + async def receive(self, max_bytes: int = 65536) -> bytes: + try: + data = await to_thread.run_sync(self._file.read, max_bytes) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc + + if data: + return data + else: + raise EndOfStream + + async def seek(self, position: int, whence: int = SEEK_SET) -> int: + """ + Seek the file to the given position. + + .. seealso:: :meth:`io.IOBase.seek` + + .. note:: Not all file descriptors are seekable. + + :param position: position to seek the file to + :param whence: controls how ``position`` is interpreted + :return: the new absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.seek, position, whence) + + async def tell(self) -> int: + """ + Return the current stream position. + + .. note:: Not all file descriptors are seekable. + + :return: the current absolute position + :raises OSError: if the file is not seekable + + """ + return await to_thread.run_sync(self._file.tell) + + +class FileWriteStream(_BaseFileStream, ByteSendStream): + """ + A byte stream that writes to a file in the file system. + + :param file: a file that has been opened for writing in binary mode + + .. versionadded:: 3.0 + """ + + @classmethod + async def from_path( + cls, path: str | PathLike[str], append: bool = False + ) -> FileWriteStream: + """ + Create a file write stream by opening the given file for writing. 
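+
+        A minimal usage sketch (illustrative only; ``out.bin`` is an arbitrary
+        example path, and the stream is closed via the async context manager
+        protocol provided by :class:`~anyio.abc.AsyncResource`)::
+
+            async with await FileWriteStream.from_path("out.bin") as ws:
+                await ws.send(b"hello")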
+ + :param path: path of the file to write to + :param append: if ``True``, open the file for appending; if ``False``, any + existing file at the given path will be truncated + + """ + mode = "ab" if append else "wb" + file = await to_thread.run_sync(Path(path).open, mode) + return cls(file) + + async def send(self, item: bytes) -> None: + try: + await to_thread.run_sync(self._file.write, item) + except ValueError: + raise ClosedResourceError from None + except OSError as exc: + raise BrokenResourceError from exc diff --git a/venv/lib/python3.12/site-packages/anyio/streams/memory.py b/venv/lib/python3.12/site-packages/anyio/streams/memory.py new file mode 100644 index 0000000000000000000000000000000000000000..a3fa0c3d9783f34fb5225938f6ce5d1d31b9b85c --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/memory.py @@ -0,0 +1,325 @@ +from __future__ import annotations + +__all__ = ( + "MemoryObjectReceiveStream", + "MemoryObjectSendStream", + "MemoryObjectStreamStatistics", +) + +import warnings +from collections import OrderedDict, deque +from dataclasses import dataclass, field +from types import TracebackType +from typing import Generic, NamedTuple, TypeVar + +from .. import ( + BrokenResourceError, + ClosedResourceError, + EndOfStream, + WouldBlock, +) +from .._core._testing import TaskInfo, get_current_task +from ..abc import Event, ObjectReceiveStream, ObjectSendStream +from ..lowlevel import checkpoint + +T_Item = TypeVar("T_Item") +T_co = TypeVar("T_co", covariant=True) +T_contra = TypeVar("T_contra", contravariant=True) + + +class MemoryObjectStreamStatistics(NamedTuple): + current_buffer_used: int #: number of items stored in the buffer + #: maximum number of items that can be stored on this stream (or :data:`math.inf`) + max_buffer_size: float + open_send_streams: int #: number of unclosed clones of the send stream + open_receive_streams: int #: number of unclosed clones of the receive stream + #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` + tasks_waiting_send: int + #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` + tasks_waiting_receive: int + + +@dataclass(eq=False) +class _MemoryObjectItemReceiver(Generic[T_Item]): + task_info: TaskInfo = field(init=False, default_factory=get_current_task) + item: T_Item = field(init=False) + + def __repr__(self) -> str: + # When item is not defined, we get following error with default __repr__: + # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' + item = getattr(self, "item", None) + return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" + + +@dataclass(eq=False) +class _MemoryObjectStreamState(Generic[T_Item]): + max_buffer_size: float = field() + buffer: deque[T_Item] = field(init=False, default_factory=deque) + open_send_channels: int = field(init=False, default=0) + open_receive_channels: int = field(init=False, default=0) + waiting_receivers: OrderedDict[Event, _MemoryObjectItemReceiver[T_Item]] = field( + init=False, default_factory=OrderedDict + ) + waiting_senders: OrderedDict[Event, T_Item] = field( + init=False, default_factory=OrderedDict + ) + + def statistics(self) -> MemoryObjectStreamStatistics: + return MemoryObjectStreamStatistics( + len(self.buffer), + self.max_buffer_size, + self.open_send_channels, + self.open_receive_channels, + len(self.waiting_senders), + len(self.waiting_receivers), + ) + + +@dataclass(eq=False) +class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): + _state: 
_MemoryObjectStreamState[T_co]
+    _closed: bool = field(init=False, default=False)
+
+    def __post_init__(self) -> None:
+        self._state.open_receive_channels += 1
+
+    def receive_nowait(self) -> T_co:
+        """
+        Receive the next item if it can be done without waiting.
+
+        :return: the received item
+        :raises ~anyio.ClosedResourceError: if this receive stream has been closed
+        :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been
+            closed from the sending end
+        :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks
+            waiting to send
+
+        """
+        if self._closed:
+            raise ClosedResourceError
+
+        if self._state.waiting_senders:
+            # Get the item from the next sender
+            send_event, item = self._state.waiting_senders.popitem(last=False)
+            self._state.buffer.append(item)
+            send_event.set()
+
+        if self._state.buffer:
+            return self._state.buffer.popleft()
+        elif not self._state.open_send_channels:
+            raise EndOfStream
+
+        raise WouldBlock
+
+    async def receive(self) -> T_co:
+        await checkpoint()
+        try:
+            return self.receive_nowait()
+        except WouldBlock:
+            # Add ourselves to the queue of waiting receivers
+            receive_event = Event()
+            receiver = _MemoryObjectItemReceiver[T_co]()
+            self._state.waiting_receivers[receive_event] = receiver
+
+            try:
+                await receive_event.wait()
+            finally:
+                self._state.waiting_receivers.pop(receive_event, None)
+
+            try:
+                return receiver.item
+            except AttributeError:
+                raise EndOfStream from None
+
+    def clone(self) -> MemoryObjectReceiveStream[T_co]:
+        """
+        Create a clone of this receive stream.
+
+        Each clone can be closed separately. Only when all clones have been closed will
+        the receiving end of the memory stream be considered closed by the sending ends.
+
+        :return: the cloned stream
+
+        """
+        if self._closed:
+            raise ClosedResourceError
+
+        return MemoryObjectReceiveStream(_state=self._state)
+
+    def close(self) -> None:
+        """
+        Close the stream.
+
+        This works the exact same way as :meth:`aclose`, but is provided as a special
+        case for the benefit of synchronous callbacks.
+
+        """
+        if not self._closed:
+            self._closed = True
+            self._state.open_receive_channels -= 1
+            if self._state.open_receive_channels == 0:
+                send_events = list(self._state.waiting_senders.keys())
+                for event in send_events:
+                    event.set()
+
+    async def aclose(self) -> None:
+        self.close()
+
+    def statistics(self) -> MemoryObjectStreamStatistics:
+        """
+        Return statistics about the current state of this stream.
+
+        .. versionadded:: 3.0
+        """
+        return self._state.statistics()
+
+    def __enter__(self) -> MemoryObjectReceiveStream[T_co]:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
+        self.close()
+
+    def __del__(self) -> None:
+        if not self._closed:
+            warnings.warn(
+                f"Unclosed <{self.__class__.__name__} at {id(self):x}>",
+                ResourceWarning,
+                stacklevel=1,
+                source=self,
+            )
+
+
+@dataclass(eq=False)
+class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]):
+    _state: _MemoryObjectStreamState[T_contra]
+    _closed: bool = field(init=False, default=False)
+
+    def __post_init__(self) -> None:
+        self._state.open_send_channels += 1
+
+    def send_nowait(self, item: T_contra) -> None:
+        """
+        Send an item immediately if it can be done without waiting.
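+
+        For example (an illustrative sketch; assumes a pair created with anyio's
+        ``create_memory_object_stream``, using its anyio 4.x subscript spelling)::
+
+            send, receive = create_memory_object_stream[int](max_buffer_size=1)
+            send.send_nowait(1)  # fits in the buffer, so this does not block
+            assert receive.receive_nowait() == 1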
+ + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting + to receive + + """ + if self._closed: + raise ClosedResourceError + if not self._state.open_receive_channels: + raise BrokenResourceError + + while self._state.waiting_receivers: + receive_event, receiver = self._state.waiting_receivers.popitem(last=False) + if not receiver.task_info.has_pending_cancellation(): + receiver.item = item + receive_event.set() + return + + if len(self._state.buffer) < self._state.max_buffer_size: + self._state.buffer.append(item) + else: + raise WouldBlock + + async def send(self, item: T_contra) -> None: + """ + Send an item to the stream. + + If the buffer is full, this method blocks until there is again room in the + buffer or the item can be sent directly to a receiver. + + :param item: the item to send + :raises ~anyio.ClosedResourceError: if this send stream has been closed + :raises ~anyio.BrokenResourceError: if the stream has been closed from the + receiving end + + """ + await checkpoint() + try: + self.send_nowait(item) + except WouldBlock: + # Wait until there's someone on the receiving end + send_event = Event() + self._state.waiting_senders[send_event] = item + try: + await send_event.wait() + except BaseException: + self._state.waiting_senders.pop(send_event, None) + raise + + if send_event in self._state.waiting_senders: + del self._state.waiting_senders[send_event] + raise BrokenResourceError from None + + def clone(self) -> MemoryObjectSendStream[T_contra]: + """ + Create a clone of this send stream. + + Each clone can be closed separately. Only when all clones have been closed will + the sending end of the memory stream be considered closed by the receiving ends. + + :return: the cloned stream + + """ + if self._closed: + raise ClosedResourceError + + return MemoryObjectSendStream(_state=self._state) + + def close(self) -> None: + """ + Close the stream. + + This works the exact same way as :meth:`aclose`, but is provided as a special + case for the benefit of synchronous callbacks. + + """ + if not self._closed: + self._closed = True + self._state.open_send_channels -= 1 + if self._state.open_send_channels == 0: + receive_events = list(self._state.waiting_receivers.keys()) + self._state.waiting_receivers.clear() + for event in receive_events: + event.set() + + async def aclose(self) -> None: + self.close() + + def statistics(self) -> MemoryObjectStreamStatistics: + """ + Return statistics about the current state of this stream. + + .. 
versionadded:: 3.0 + """ + return self._state.statistics() + + def __enter__(self) -> MemoryObjectSendStream[T_contra]: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + self.close() + + def __del__(self) -> None: + if not self._closed: + warnings.warn( + f"Unclosed <{self.__class__.__name__} at {id(self):x}>", + ResourceWarning, + stacklevel=1, + source=self, + ) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/stapled.py b/venv/lib/python3.12/site-packages/anyio/streams/stapled.py new file mode 100644 index 0000000000000000000000000000000000000000..9248b68abfbff90ddd64646fbe9cabb9f0ebe869 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/stapled.py @@ -0,0 +1,147 @@ +from __future__ import annotations + +__all__ = ( + "MultiListener", + "StapledByteStream", + "StapledObjectStream", +) + +from collections.abc import Callable, Mapping, Sequence +from dataclasses import dataclass +from typing import Any, Generic, TypeVar + +from ..abc import ( + ByteReceiveStream, + ByteSendStream, + ByteStream, + Listener, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + TaskGroup, +) + +T_Item = TypeVar("T_Item") +T_Stream = TypeVar("T_Stream") + + +@dataclass(eq=False) +class StapledByteStream(ByteStream): + """ + Combines two byte streams into a single, bidirectional byte stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param ByteSendStream send_stream: the sending byte stream + :param ByteReceiveStream receive_stream: the receiving byte stream + """ + + send_stream: ByteSendStream + receive_stream: ByteReceiveStream + + async def receive(self, max_bytes: int = 65536) -> bytes: + return await self.receive_stream.receive(max_bytes) + + async def send(self, item: bytes) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): + """ + Combines two object streams into a single, bidirectional object stream. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. 
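+
+    For example, an in-process loopback stream can be stapled together from a
+    memory object stream pair (an illustrative sketch; assumes anyio's
+    ``create_memory_object_stream``)::
+
+        send, receive = create_memory_object_stream[bytes](max_buffer_size=10)
+        loopback = StapledObjectStream(send, receive)
+        await loopback.send(b"ping")
+        assert await loopback.receive() == b"ping"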
+ + :param ObjectSendStream send_stream: the sending object stream + :param ObjectReceiveStream receive_stream: the receiving object stream + """ + + send_stream: ObjectSendStream[T_Item] + receive_stream: ObjectReceiveStream[T_Item] + + async def receive(self) -> T_Item: + return await self.receive_stream.receive() + + async def send(self, item: T_Item) -> None: + await self.send_stream.send(item) + + async def send_eof(self) -> None: + await self.send_stream.aclose() + + async def aclose(self) -> None: + await self.send_stream.aclose() + await self.receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.send_stream.extra_attributes, + **self.receive_stream.extra_attributes, + } + + +@dataclass(eq=False) +class MultiListener(Generic[T_Stream], Listener[T_Stream]): + """ + Combines multiple listeners into one, serving connections from all of them at once. + + Any MultiListeners in the given collection of listeners will have their listeners + moved into this one. + + Extra attributes are provided from each listener, with each successive listener + overriding any conflicting attributes from the previous one. + + :param listeners: listeners to serve + :type listeners: Sequence[Listener[T_Stream]] + """ + + listeners: Sequence[Listener[T_Stream]] + + def __post_init__(self) -> None: + listeners: list[Listener[T_Stream]] = [] + for listener in self.listeners: + if isinstance(listener, MultiListener): + listeners.extend(listener.listeners) + del listener.listeners[:] # type: ignore[attr-defined] + else: + listeners.append(listener) + + self.listeners = listeners + + async def serve( + self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None + ) -> None: + from .. import create_task_group + + async with create_task_group() as tg: + for listener in self.listeners: + tg.start_soon(listener.serve, handler, task_group) + + async def aclose(self) -> None: + for listener in self.listeners: + await listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + attributes: dict = {} + for listener in self.listeners: + attributes.update(listener.extra_attributes) + + return attributes diff --git a/venv/lib/python3.12/site-packages/anyio/streams/text.py b/venv/lib/python3.12/site-packages/anyio/streams/text.py new file mode 100644 index 0000000000000000000000000000000000000000..296cd250459f3848bb333301fff1ac32973f219a --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/text.py @@ -0,0 +1,176 @@ +from __future__ import annotations + +__all__ = ( + "TextConnectable", + "TextReceiveStream", + "TextSendStream", + "TextStream", +) + +import codecs +import sys +from collections.abc import Callable, Mapping +from dataclasses import InitVar, dataclass, field +from typing import Any + +from ..abc import ( + AnyByteReceiveStream, + AnyByteSendStream, + AnyByteStream, + AnyByteStreamConnectable, + ObjectReceiveStream, + ObjectSendStream, + ObjectStream, + ObjectStreamConnectable, +) + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@dataclass(eq=False) +class TextReceiveStream(ObjectReceiveStream[str]): + """ + Stream wrapper that decodes bytes to strings using the given encoding. + + Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any + completely received unicode characters as soon as they come in. 
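+
+    For example (an illustrative sketch; ``byte_stream`` is any bytes-oriented
+    receive stream carrying UTF-8 data)::
+
+        text_stream = TextReceiveStream(byte_stream)
+        async for text in text_stream:
+            print(text)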
+ + :param transport_stream: any bytes-based receive stream + :param encoding: character encoding to use for decoding bytes to strings (defaults + to ``utf-8``) + :param errors: handling scheme for decoding errors (defaults to ``strict``; see the + `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteReceiveStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _decoder: codecs.IncrementalDecoder = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + decoder_class = codecs.getincrementaldecoder(encoding) + self._decoder = decoder_class(errors=errors) + + async def receive(self) -> str: + while True: + chunk = await self.transport_stream.receive() + decoded = self._decoder.decode(chunk) + if decoded: + return decoded + + async def aclose(self) -> None: + await self.transport_stream.aclose() + self._decoder.reset() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextSendStream(ObjectSendStream[str]): + """ + Sends strings to the wrapped stream as bytes using the given encoding. + + :param AnyByteSendStream transport_stream: any bytes-based send stream + :param str encoding: character encoding to use for encoding strings to bytes + (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. _codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteSendStream + encoding: InitVar[str] = "utf-8" + errors: str = "strict" + _encoder: Callable[..., tuple[bytes, int]] = field(init=False) + + def __post_init__(self, encoding: str) -> None: + self._encoder = codecs.getencoder(encoding) + + async def send(self, item: str) -> None: + encoded = self._encoder(item, self.errors)[0] + await self.transport_stream.send(encoded) + + async def aclose(self) -> None: + await self.transport_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return self.transport_stream.extra_attributes + + +@dataclass(eq=False) +class TextStream(ObjectStream[str]): + """ + A bidirectional stream that decodes bytes to strings on receive and encodes strings + to bytes on send. + + Extra attributes will be provided from both streams, with the receive stream + providing the values in case of a conflict. + + :param AnyByteStream transport_stream: any bytes-based stream + :param str encoding: character encoding to use for encoding/decoding strings to/from + bytes (defaults to ``utf-8``) + :param str errors: handling scheme for encoding errors (defaults to ``strict``; see + the `codecs module documentation`_ for a comprehensive list of options) + + .. 
_codecs module documentation: + https://docs.python.org/3/library/codecs.html#codec-objects + """ + + transport_stream: AnyByteStream + encoding: InitVar[str] = "utf-8" + errors: InitVar[str] = "strict" + _receive_stream: TextReceiveStream = field(init=False) + _send_stream: TextSendStream = field(init=False) + + def __post_init__(self, encoding: str, errors: str) -> None: + self._receive_stream = TextReceiveStream( + self.transport_stream, encoding=encoding, errors=errors + ) + self._send_stream = TextSendStream( + self.transport_stream, encoding=encoding, errors=errors + ) + + async def receive(self) -> str: + return await self._receive_stream.receive() + + async def send(self, item: str) -> None: + await self._send_stream.send(item) + + async def send_eof(self) -> None: + await self.transport_stream.send_eof() + + async def aclose(self) -> None: + await self._send_stream.aclose() + await self._receive_stream.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self._send_stream.extra_attributes, + **self._receive_stream.extra_attributes, + } + + +class TextConnectable(ObjectStreamConnectable[str]): + def __init__(self, connectable: AnyByteStreamConnectable): + """ + :param connectable: the bytestream endpoint to wrap + + """ + self.connectable = connectable + + @override + async def connect(self) -> TextStream: + stream = await self.connectable.connect() + return TextStream(stream) diff --git a/venv/lib/python3.12/site-packages/anyio/streams/tls.py b/venv/lib/python3.12/site-packages/anyio/streams/tls.py new file mode 100644 index 0000000000000000000000000000000000000000..e2a7ca5b17b9b6c2d3f0f39e86b22342d0b4df65 --- /dev/null +++ b/venv/lib/python3.12/site-packages/anyio/streams/tls.py @@ -0,0 +1,421 @@ +from __future__ import annotations + +__all__ = ( + "TLSAttribute", + "TLSConnectable", + "TLSListener", + "TLSStream", +) + +import logging +import re +import ssl +import sys +from collections.abc import Callable, Mapping +from dataclasses import dataclass +from functools import wraps +from ssl import SSLContext +from typing import Any, TypeAlias, TypeVar + +from .. import ( + BrokenResourceError, + EndOfStream, + aclose_forcefully, + get_cancelled_exc_class, + to_thread, +) +from .._core._typedattr import TypedAttributeSet, typed_attribute +from ..abc import ( + AnyByteStream, + AnyByteStreamConnectable, + ByteStream, + ByteStreamConnectable, + Listener, + TaskGroup, +) + +if sys.version_info >= (3, 11): + from typing import TypeVarTuple, Unpack +else: + from typing_extensions import TypeVarTuple, Unpack + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +T_Retval = TypeVar("T_Retval") +PosArgsT = TypeVarTuple("PosArgsT") +_PCTRTT: TypeAlias = tuple[tuple[str, str], ...] +_PCTRTTT: TypeAlias = tuple[_PCTRTT, ...] 
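+
+# A typical client-side handshake over an established TCP connection looks like
+# this (an illustrative sketch, not part of the API; ``connect_tcp`` is anyio's
+# TCP connector):
+#
+#     tcp_stream = await anyio.connect_tcp("example.com", 443)
+#     tls_stream = await TLSStream.wrap(tcp_stream, hostname="example.com")
+#
+# Passing ``hostname`` enables host name checking and implies the client side of
+# the connection.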
+
+
+class TLSAttribute(TypedAttributeSet):
+    """Contains Transport Layer Security related attributes."""
+
+    #: the selected ALPN protocol
+    alpn_protocol: str | None = typed_attribute()
+    #: the channel binding for type ``tls-unique``
+    channel_binding_tls_unique: bytes = typed_attribute()
+    #: the selected cipher
+    cipher: tuple[str, str, int] = typed_attribute()
+    #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert`
+    #: for more information)
+    peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute()
+    #: the peer certificate in binary form
+    peer_certificate_binary: bytes | None = typed_attribute()
+    #: ``True`` if this is the server side of the connection
+    server_side: bool = typed_attribute()
+    #: ciphers shared by the client during the TLS handshake (``None`` if this is the
+    #: client side)
+    shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute()
+    #: the :class:`~ssl.SSLObject` used for encryption
+    ssl_object: ssl.SSLObject = typed_attribute()
+    #: ``True`` if this stream does (and expects) a closing TLS handshake when the
+    #: stream is being closed
+    standard_compatible: bool = typed_attribute()
+    #: the TLS protocol version (e.g. ``TLSv1.2``)
+    tls_version: str = typed_attribute()
+
+
+@dataclass(eq=False)
+class TLSStream(ByteStream):
+    """
+    A stream wrapper that encrypts all sent data and decrypts received data.
+
+    This class has no public initializer; use :meth:`wrap` instead.
+    All extra attributes from :class:`~TLSAttribute` are supported.
+
+    :var AnyByteStream transport_stream: the wrapped stream
+
+    """
+
+    transport_stream: AnyByteStream
+    standard_compatible: bool
+    _ssl_object: ssl.SSLObject
+    _read_bio: ssl.MemoryBIO
+    _write_bio: ssl.MemoryBIO
+
+    @classmethod
+    async def wrap(
+        cls,
+        transport_stream: AnyByteStream,
+        *,
+        server_side: bool | None = None,
+        hostname: str | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        standard_compatible: bool = True,
+    ) -> TLSStream:
+        """
+        Wrap an existing stream with Transport Layer Security.
+
+        This performs a TLS handshake with the peer.
+
+        :param transport_stream: a bytes-transporting stream to wrap
+        :param server_side: ``True`` if this is the server side of the connection,
+            ``False`` if this is the client side (if omitted, will be set to ``False``
+            if ``hostname`` has been provided, ``True`` otherwise). Used only to create
+            a default context when an explicit context has not been provided.
+ :param hostname: host name of the peer (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure + default will be created) + :param standard_compatible: if ``False``, skip the closing handshake when + closing the connection, and don't raise an exception if the peer does the + same + :raises ~ssl.SSLError: if the TLS handshake fails + + """ + if server_side is None: + server_side = not hostname + + if not ssl_context: + purpose = ( + ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH + ) + ssl_context = ssl.create_default_context(purpose) + + # Re-enable detection of unexpected EOFs if it was disabled by Python + if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): + ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF + + bio_in = ssl.MemoryBIO() + bio_out = ssl.MemoryBIO() + + # External SSLContext implementations may do blocking I/O in wrap_bio(), + # but the standard library implementation won't + if type(ssl_context) is ssl.SSLContext: + ssl_object = ssl_context.wrap_bio( + bio_in, bio_out, server_side=server_side, server_hostname=hostname + ) + else: + ssl_object = await to_thread.run_sync( + ssl_context.wrap_bio, + bio_in, + bio_out, + server_side, + hostname, + None, + ) + + wrapper = cls( + transport_stream=transport_stream, + standard_compatible=standard_compatible, + _ssl_object=ssl_object, + _read_bio=bio_in, + _write_bio=bio_out, + ) + await wrapper._call_sslobject_method(ssl_object.do_handshake) + return wrapper + + async def _call_sslobject_method( + self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] + ) -> T_Retval: + while True: + try: + result = func(*args) + except ssl.SSLWantReadError: + try: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + data = await self.transport_stream.receive() + except EndOfStream: + self._read_bio.write_eof() + except OSError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + else: + self._read_bio.write(data) + except ssl.SSLWantWriteError: + await self.transport_stream.send(self._write_bio.read()) + except ssl.SSLSyscallError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + raise BrokenResourceError from exc + except ssl.SSLError as exc: + self._read_bio.write_eof() + self._write_bio.write_eof() + if isinstance(exc, ssl.SSLEOFError) or ( + exc.strerror and "UNEXPECTED_EOF_WHILE_READING" in exc.strerror + ): + if self.standard_compatible: + raise BrokenResourceError from exc + else: + raise EndOfStream from None + + raise + else: + # Flush any pending writes first + if self._write_bio.pending: + await self.transport_stream.send(self._write_bio.read()) + + return result + + async def unwrap(self) -> tuple[AnyByteStream, bytes]: + """ + Does the TLS closing handshake. 
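+
+        For example (an illustrative sketch; ``tls_stream`` is an established
+        :class:`TLSStream`)::
+
+            transport_stream, leftover = await tls_stream.unwrap()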
+ + :return: a tuple of (wrapped byte stream, bytes left in the read buffer) + + """ + await self._call_sslobject_method(self._ssl_object.unwrap) + self._read_bio.write_eof() + self._write_bio.write_eof() + return self.transport_stream, self._read_bio.read() + + async def aclose(self) -> None: + if self.standard_compatible: + try: + await self.unwrap() + except BaseException: + await aclose_forcefully(self.transport_stream) + raise + + await self.transport_stream.aclose() + + async def receive(self, max_bytes: int = 65536) -> bytes: + data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) + if not data: + raise EndOfStream + + return data + + async def send(self, item: bytes) -> None: + await self._call_sslobject_method(self._ssl_object.write, item) + + async def send_eof(self) -> None: + tls_version = self.extra(TLSAttribute.tls_version) + match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) + if match: + major, minor = int(match.group(1)), int(match.group(2) or 0) + if (major, minor) < (1, 3): + raise NotImplementedError( + f"send_eof() requires at least TLSv1.3; current " + f"session uses {tls_version}" + ) + + raise NotImplementedError( + "send_eof() has not yet been implemented for TLS streams" + ) + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + **self.transport_stream.extra_attributes, + TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, + TLSAttribute.channel_binding_tls_unique: ( + self._ssl_object.get_channel_binding + ), + TLSAttribute.cipher: self._ssl_object.cipher, + TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), + TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( + True + ), + TLSAttribute.server_side: lambda: self._ssl_object.server_side, + TLSAttribute.shared_ciphers: lambda: ( + self._ssl_object.shared_ciphers() + if self._ssl_object.server_side + else None + ), + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + TLSAttribute.ssl_object: lambda: self._ssl_object, + TLSAttribute.tls_version: self._ssl_object.version, + } + + +@dataclass(eq=False) +class TLSListener(Listener[TLSStream]): + """ + A convenience listener that wraps another listener and auto-negotiates a TLS session + on every accepted connection. + + If the TLS handshake times out or raises an exception, + :meth:`handle_handshake_error` is called to do whatever post-mortem processing is + deemed necessary. + + Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. + + :param Listener listener: the listener to wrap + :param ssl_context: the SSL context object + :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` + :param handshake_timeout: time limit for the TLS handshake + (passed to :func:`~anyio.fail_after`) + """ + + listener: Listener[Any] + ssl_context: ssl.SSLContext + standard_compatible: bool = True + handshake_timeout: float = 30 + + @staticmethod + async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: + """ + Handle an exception raised during the TLS handshake. + + This method does 3 things: + + #. Forcefully closes the original stream + #. Logs the exception (unless it was a cancellation exception) using the + ``anyio.streams.tls`` logger + #. 
Reraises the exception if it was a base exception or a cancellation exception + + :param exc: the exception + :param stream: the original stream + + """ + await aclose_forcefully(stream) + + # Log all except cancellation exceptions + if not isinstance(exc, get_cancelled_exc_class()): + # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using + # any asyncio implementation, so we explicitly pass the exception to log + # (https://github.com/python/cpython/issues/108668). Trio does not have this + # issue because it works around the CPython bug. + logging.getLogger(__name__).exception( + "Error during TLS handshake", exc_info=exc + ) + + # Only reraise base exceptions and cancellation exceptions + if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): + raise + + async def serve( + self, + handler: Callable[[TLSStream], Any], + task_group: TaskGroup | None = None, + ) -> None: + @wraps(handler) + async def handler_wrapper(stream: AnyByteStream) -> None: + from .. import fail_after + + try: + with fail_after(self.handshake_timeout): + wrapped_stream = await TLSStream.wrap( + stream, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException as exc: + await self.handle_handshake_error(exc, stream) + else: + await handler(wrapped_stream) + + await self.listener.serve(handler_wrapper, task_group) + + async def aclose(self) -> None: + await self.listener.aclose() + + @property + def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: + return { + TLSAttribute.standard_compatible: lambda: self.standard_compatible, + } + + +class TLSConnectable(ByteStreamConnectable): + """ + Wraps another connectable and does TLS negotiation after a successful connection. + + :param connectable: the connectable to wrap + :param hostname: host name of the server (if host name checking is desired) + :param ssl_context: the SSLContext object to use (if not provided, a secure default + will be created) + :param standard_compatible: if ``False``, skip the closing handshake when closing + the connection, and don't raise an exception if the server does the same + """ + + def __init__( + self, + connectable: AnyByteStreamConnectable, + *, + hostname: str | None = None, + ssl_context: ssl.SSLContext | None = None, + standard_compatible: bool = True, + ) -> None: + self.connectable = connectable + self.ssl_context: SSLContext = ssl_context or ssl.create_default_context( + ssl.Purpose.SERVER_AUTH + ) + if not isinstance(self.ssl_context, ssl.SSLContext): + raise TypeError( + "ssl_context must be an instance of ssl.SSLContext, not " + f"{type(self.ssl_context).__name__}" + ) + self.hostname = hostname + self.standard_compatible = standard_compatible + + @override + async def connect(self) -> TLSStream: + stream = await self.connectable.connect() + try: + return await TLSStream.wrap( + stream, + hostname=self.hostname, + ssl_context=self.ssl_context, + standard_compatible=self.standard_compatible, + ) + except BaseException: + await aclose_forcefully(stream) + raise diff --git a/venv/lib/python3.12/site-packages/certifi-2026.4.22.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/certifi-2026.4.22.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..62b076cdee58ec8f34034141ba0befd9015b0c7e --- /dev/null +++ b/venv/lib/python3.12/site-packages/certifi-2026.4.22.dist-info/licenses/LICENSE @@ -0,0 +1,20 @@ +This package contains a modified version of ca-bundle.crt: + 
+ca-bundle.crt -- Bundle of CA Root Certificates + +This is a bundle of X.509 certificates of public Certificate Authorities +(CA). These were automatically extracted from Mozilla's root certificates +file (certdata.txt). This file can be found in the mozilla source tree: +https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt +It contains the certificates in PEM format and therefore +can be directly used with curl / libcurl / php_curl, or with +an Apache+mod_ssl webserver for SSL client authentication. +Just configure this file as the SSLCACertificateFile.# + +***** BEGIN LICENSE BLOCK ***** +This Source Code Form is subject to the terms of the Mozilla Public License, +v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain +one at http://mozilla.org/MPL/2.0/. + +***** END LICENSE BLOCK ***** +@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ece4e764bf299b11538962f3544324d75eded731 Binary files /dev/null and b/venv/lib/python3.12/site-packages/certifi/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ed5bb6a853a6d1c4b8a9de5e76fc9753ffa747c Binary files /dev/null and b/venv/lib/python3.12/site-packages/certifi/__pycache__/__main__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6d0067d3cbf5164e33efcc2fd84df7f92ec3758 Binary files /dev/null and b/venv/lib/python3.12/site-packages/certifi/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt new file mode 100644 index 0000000000000000000000000000000000000000..8f080eae848f759c9173bfc0c79506357ebe5090 --- /dev/null +++ b/venv/lib/python3.12/site-packages/h11-0.16.0.dist-info/licenses/LICENSE.txt @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 Nathaniel J. Smith and other contributors + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2509866d934221b43c2f08c6c5281345574be01 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c7bfd8e32916fb758e1fbd35bbd8f28d2caf442b Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_abnf.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..033069d09d854504bc9acb20ed43a56750a7b552 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af20c9cec59a166ac68586a5d2a680d2810bbca9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_events.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa8bc9666f7277517187c9f41ceb1a19f3ee3894 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_headers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8fd1ba032e8ab2f6efe02e85f9cf4b11f0447440 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_readers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7e91379cc3ecc070822a0a858b892f2d2807426 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_receivebuffer.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c86bb045e9c2b0f9c51d9dcac5b65a0a8ecec46f Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_state.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..232d2d7dd25d75e7890e693afcd901b7054c95d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_util.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e0d2ff1657616ef42da652f744ce4623680e8960 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc b/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3f5f5764611881f57907b55e94396b5094c91ea9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/h11/__pycache__/_writers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..311b2b56c53f678ab95fc0def708c675d521a807 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore-1.0.9.dist-info/licenses/LICENSE.md @@ -0,0 +1,27 @@ +Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9959411ebdca14ae36a303ee08266eef54e20860 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..94109d95b859cc7ccfaea8e6c64cafe1bdaa076b Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..529139711f443e2b52ca5d71ffe9a8099351b8bb Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5b723369bb0a79316c5558d8f9bafb6874658eec Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f3e15a421438dfc310d46fe1d19804713478dc5d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_ssl.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4e8f6ce96c259cb12bcb4e7d5b276c2b26984ed4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_synchronization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..383b1a613d37bbb597e7883e0bedf6d34b8b26eb Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_trace.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6177b0caee58b0ff0669a15fda0511aed21a2439 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..88dc7f01e132933728cbcf45c88ce82e85ddf65f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/__init__.py @@ -0,0 +1,39 @@ 
+from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .http_proxy import AsyncHTTPProxy +from .interfaces import AsyncConnectionInterface + +try: + from .http2 import AsyncHTTP2Connection +except ImportError: # pragma: nocover + + class AsyncHTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." + ) + + +try: + from .socks_proxy import AsyncSOCKSProxy +except ImportError: # pragma: nocover + + class AsyncSOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "AsyncHTTPConnection", + "AsyncConnectionPool", + "AsyncHTTPProxy", + "AsyncHTTP11Connection", + "AsyncHTTP2Connection", + "AsyncConnectionInterface", + "AsyncSOCKSProxy", +] diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4870ca92a47bd8d73a01f9fe312472a963a83aa3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2efbd1af95ba666d678ea14aa88800da12df3322 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e6ea9cd658f29c4353ffc6d8de4943ecd09b3e7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8735dc36074c8d268c2059a071e5d106e036820b Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http11.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..359058372171620d818d816c3db6b4eb26a48a01 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c79f8054a0ab18463aface61ba654c303ba767a Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1482548beecde0cb768c8b220c28595f6fbb2563 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/interfaces.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ffb66c935b5f6f9d907d896ba1be683c832aebf8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/connection.py b/venv/lib/python3.12/site-packages/httpcore/_async/connection.py new file mode 100644 index 0000000000000000000000000000000000000000..b42581dff8aabf4c2ef80ffda26296e1b368d693 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. 
+ + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class AsyncHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connection: AsyncConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = AsyncLock() + self._socket_options = socket_options + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + async with self._request_lock: + if self._connection is None: + stream = await self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return await self._connection.handle_async_request(request) + + async def _connect(self, request: Request) -> AsyncNetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + async with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = await self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or 
self._origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + async with Trace("retry", logger, request, kwargs) as trace: + await self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + async def aclose(self) -> None: + if self._connection is not None: + async with Trace("close", logger, None, {}): + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
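+
+    # A minimal usage sketch (illustrative only, not httpcore documentation;
+    # the origin and URL here are assumed values) of driving a connection
+    # directly through these context managers:
+    #
+    #     import httpcore
+    #
+    #     async def fetch_homepage() -> None:
+    #         origin = httpcore.Origin(b"https", b"example.com", 443)
+    #         async with httpcore.AsyncHTTPConnection(origin=origin) as connection:
+    #             response = await connection.request("GET", "https://example.com/")
+    #             print(response.status)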
+ + async def __aenter__(self) -> AsyncHTTPConnection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py b/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py new file mode 100644 index 0000000000000000000000000000000000000000..96e973d0ce223f6bed9be9e6a6a2f3c01622c611 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.auto import AutoBackend +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock +from .connection import AsyncHTTPConnection +from .interfaces import AsyncConnectionInterface, AsyncRequestInterface + + +class AsyncPoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: AsyncConnectionInterface | None = None + self._connection_acquired = AsyncEvent() + + def assign_to_connection(self, connection: AsyncConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = AsyncEvent() + + async def wait_for_connection( + self, timeout: float | None = None + ) -> AsyncConnectionInterface: + if self.connection is None: + await self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class AsyncConnectionPool(AsyncRequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. 
+ local_address: Local address to connect from. Can also be used to connect + using a particular address family. Using `local_address="0.0.0.0"` + will connect using an `AF_INET` address (IPv4), while using + `local_address="::"` will connect using an `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. + socket_options: Socket options that have to be included + in the TCP socket when the connection was established. + """ + self._ssl_context = ssl_context + self._proxy = proxy + self._max_connections = ( + sys.maxsize if max_connections is None else max_connections + ) + self._max_keepalive_connections = ( + sys.maxsize + if max_keepalive_connections is None + else max_keepalive_connections + ) + self._max_keepalive_connections = min( + self._max_connections, self._max_keepalive_connections + ) + + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._socket_options = socket_options + + # The mutable state on a connection pool is the queue of incoming requests, + # and the set of connections that are servicing those requests. + self._connections: list[AsyncConnectionInterface] = [] + self._requests: list[AsyncPoolRequest] = [] + + # We only mutate the state of the connection pool within an 'optional_thread_lock' + # context. This holds a threading lock unless we're running in async mode, + # in which case it is a no-op. + self._optional_thread_lock = AsyncThreadLock() + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if self._proxy is not None: + if self._proxy.url.scheme in (b"socks5", b"socks5h"): + from .socks_proxy import AsyncSocks5Connection + + return AsyncSocks5Connection( + proxy_origin=self._proxy.url.origin, + proxy_auth=self._proxy.auth, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + elif origin.scheme == b"http": + from .http_proxy import AsyncForwardHTTPConnection + + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + ) + from .http_proxy import AsyncTunnelHTTPConnection + + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy.url.origin, + proxy_headers=self._proxy.headers, + proxy_ssl_context=self._proxy.ssl_context, + remote_origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + return AsyncHTTPConnection( + origin=origin, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + retries=self._retries, + local_address=self._local_address, + uds=self._uds, + network_backend=self._network_backend, + socket_options=self._socket_options, + ) + + @property + def connections(self) -> list[AsyncConnectionInterface]: + """ + Return a list of the connections currently in the pool. 
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <AsyncHTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._connections)
+
+    async def handle_async_request(self, request: Request) -> Response:
+        """
+        Send an HTTP request, and return an HTTP response.
+
+        This is the core implementation that is called into by `.request()` or `.stream()`.
+        """
+        scheme = request.url.scheme.decode()
+        if scheme == "":
+            raise UnsupportedProtocol(
+                "Request URL is missing an 'http://' or 'https://' protocol."
+            )
+        if scheme not in ("http", "https", "ws", "wss"):
+            raise UnsupportedProtocol(
+                f"Request URL has an unsupported protocol '{scheme}://'."
+            )
+
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("pool", None)
+
+        with self._optional_thread_lock:
+            # Add the incoming request to our request queue.
+            pool_request = AsyncPoolRequest(request)
+            self._requests.append(pool_request)
+
+        try:
+            while True:
+                with self._optional_thread_lock:
+                    # Assign incoming requests to available connections,
+                    # closing or creating new connections as required.
+                    closing = self._assign_requests_to_connections()
+                await self._close_connections(closing)
+
+                # Wait until this request has an assigned connection.
+                connection = await pool_request.wait_for_connection(timeout=timeout)
+
+                try:
+                    # Send the request on the assigned connection.
+                    response = await connection.handle_async_request(
+                        pool_request.request
+                    )
+                except ConnectionNotAvailable:
+                    # In some cases a connection may initially be available to
+                    # handle a request, but then become unavailable.
+                    #
+                    # In this case we clear the connection and try again.
+                    pool_request.clear_connection()
+                else:
+                    break  # pragma: nocover
+
+        except BaseException as exc:
+            with self._optional_thread_lock:
+                # For any exception or cancellation we remove the request from
+                # the queue, and then re-assign requests to connections.
+                self._requests.remove(pool_request)
+                closing = self._assign_requests_to_connections()
+
+            await self._close_connections(closing)
+            raise exc from None
+
+        # Return the response. Note that in this case we still have to manage
+        # the point at which the response is closed.
+        assert isinstance(response.stream, typing.AsyncIterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=PoolByteStream(
+                stream=response.stream, pool_request=pool_request, pool=self
+            ),
+            extensions=response.extensions,
+        )
+
+    def _assign_requests_to_connections(self) -> list[AsyncConnectionInterface]:
+        """
+        Manage the state of the connection pool, assigning incoming
+        requests to connections as available.
+
+        Called whenever a new request is added or removed from the pool.
+
+        Any closing connections are returned, allowing the I/O for closing
+        those connections to be handled separately.
+        """
+        closing_connections = []
+
+        # First we handle cleaning up any connections that are closed,
+        # have expired their keep-alive, or surplus idle connections.
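+        # (Connections that are already fully closed can simply be dropped,
+        # while expired and surplus idle connections still need their aclose()
+        # awaited, so those are collected into `closing_connections` and
+        # returned for the caller to close outside of the pool lock.)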
+ for connection in list(self._connections): + if connection.is_closed(): + # log: "removing closed connection" + self._connections.remove(connection) + elif connection.has_expired(): + # log: "closing expired connection" + self._connections.remove(connection) + closing_connections.append(connection) + elif ( + connection.is_idle() + and len([connection.is_idle() for connection in self._connections]) + > self._max_keepalive_connections + ): + # log: "closing idle connection" + self._connections.remove(connection) + closing_connections.append(connection) + + # Assign queued requests to connections. + queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + async def _close_connections(self, closing: list[AsyncConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with AsyncShieldCancellation(): + for connection in closing: + await connection.aclose() + + async def aclose(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
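+        # (Requests still in the queue are not removed here; each request
+        # takes itself out of `self._requests` when it completes or fails.)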
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + await self._close_connections(closing_connections) + + async def __aenter__(self) -> AsyncConnectionPool: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.AsyncIterable[bytes], + pool_request: AsyncPoolRequest, + pool: AsyncConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + try: + async for part in self._stream: + yield part + except BaseException as exc: + await self.aclose() + raise exc from None + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + with AsyncShieldCancellation(): + if hasattr(self._stream, "aclose"): + await self._stream.aclose() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + await self._pool._close_connections(closing) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http11.py b/venv/lib/python3.12/site-packages/httpcore/_async/http11.py new file mode 100644 index 0000000000000000000000000000000000000000..e6d6d709852b137a862cfe2b3af42dc790fa705d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP11Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = 
keepalive_expiry
+        self._expire_at: float | None = None
+        self._state = HTTPConnectionState.NEW
+        self._state_lock = AsyncLock()
+        self._request_count = 0
+        self._h11_state = h11.Connection(
+            our_role=h11.CLIENT,
+            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+        )
+
+    async def handle_async_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection "
+                f"to {self._origin}"
+            )
+
+        async with self._state_lock:
+            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+                self._request_count += 1
+                self._state = HTTPConnectionState.ACTIVE
+                self._expire_at = None
+            else:
+                raise ConnectionNotAvailable()
+
+        try:
+            kwargs = {"request": request}
+            try:
+                async with Trace(
+                    "send_request_headers", logger, request, kwargs
+                ) as trace:
+                    await self._send_request_headers(**kwargs)
+                async with Trace("send_request_body", logger, request, kwargs) as trace:
+                    await self._send_request_body(**kwargs)
+            except WriteError:
+                # If we get a write error while we're writing the request,
+                # then we suppress this error and move on to attempting to
+                # read the response. Servers can sometimes close the request
+                # pre-emptively and then respond with a well-formed HTTP
+                # error response.
+                pass
+
+            async with Trace(
+                "receive_response_headers", logger, request, kwargs
+            ) as trace:
+                (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                    trailing_data,
+                ) = await self._receive_response_headers(**kwargs)
+                trace.return_value = (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                )
+
+            network_stream = self._network_stream
+
+            # CONNECT or Upgrade request
+            if (status == 101) or (
+                (request.method == b"CONNECT") and (200 <= status < 300)
+            ):
+                network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data)
+
+            return Response(
+                status=status,
+                headers=headers,
+                content=HTTP11ConnectionByteStream(self, request),
+                extensions={
+                    "http_version": http_version,
+                    "reason_phrase": reason_phrase,
+                    "network_stream": network_stream,
+                },
+            )
+        except BaseException as exc:
+            with AsyncShieldCancellation():
+                async with Trace("response_closed", logger, request) as trace:
+                    await self._response_closed()
+            raise exc
+
+    # Sending the request...
+
+    async def _send_request_headers(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
+            event = h11.Request(
+                method=request.method,
+                target=request.url.target,
+                headers=request.headers,
+            )
+        await self._send_event(event, timeout=timeout)
+
+    async def _send_request_body(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        assert isinstance(request.stream, typing.AsyncIterable)
+        async for chunk in request.stream:
+            event = h11.Data(data=chunk)
+            await self._send_event(event, timeout=timeout)
+
+        await self._send_event(h11.EndOfMessage(), timeout=timeout)
+
+    async def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
+        bytes_to_send = self._h11_state.send(event)
+        if bytes_to_send is not None:
+            await self._network_stream.write(bytes_to_send, timeout=timeout)
+
+    # Receiving the response...
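+
+    # (A rough sketch of the h11 event flow driven by the helpers below: the
+    # server's reply arrives as a single Response event carrying the status
+    # line and headers, then zero or more Data events for the body, and
+    # finally an EndOfMessage event.)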
+
+    async def _receive_response_headers(
+        self, request: Request
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = await self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Response):
+                break
+            if (
+                isinstance(event, h11.InformationalResponse)
+                and event.status_code == 101
+            ):
+                break
+
+        http_version = b"HTTP/" + event.http_version
+
+        # h11 version 0.11+ supports a `raw_items` interface to get the
+        # raw header casing, rather than the enforced lowercase headers.
+        headers = event.headers.raw_items()
+
+        trailing_data, _ = self._h11_state.trailing_data
+
+        return http_version, event.status_code, event.reason, headers, trailing_data
+
+    async def _receive_response_body(
+        self, request: Request
+    ) -> typing.AsyncIterator[bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = await self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Data):
+                yield bytes(event.data)
+            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
+                break
+
+    async def _receive_event(
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
+        while True:
+            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
+                event = self._h11_state.next_event()
+
+            if event is h11.NEED_DATA:
+                data = await self._network_stream.read(
+                    self.READ_NUM_BYTES, timeout=timeout
+                )
+
+                # If we feed this case through h11 we'll raise an exception like:
+                #
+                #     httpcore.RemoteProtocolError: can't handle event type
+                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
+                #
+                # Which is accurate, but not very informative from an end-user
+                # perspective. Instead we handle this case distinctly and treat
+                # it as a ConnectError.
+                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
+                    msg = "Server disconnected without sending a response."
+                    raise RemoteProtocolError(msg)
+
+                self._h11_state.receive_data(data)
+            else:
+                # mypy fails to narrow the type in the if statement above
+                return event  # type: ignore[return-value]
+
+    async def _response_closed(self) -> None:
+        async with self._state_lock:
+            if (
+                self._h11_state.our_state is h11.DONE
+                and self._h11_state.their_state is h11.DONE
+            ):
+                self._state = HTTPConnectionState.IDLE
+                self._h11_state.start_next_cycle()
+                if self._keepalive_expiry is not None:
+                    now = time.monotonic()
+                    self._expire_at = now + self._keepalive_expiry
+            else:
+                await self.aclose()
+
+    # Once the connection is no longer required...
+
+    async def aclose(self) -> None:
+        # Note that this method unilaterally closes the connection, and does
+        # not have any kind of locking in place around it.
+        self._state = HTTPConnectionState.CLOSED
+        await self._network_stream.aclose()
+
+    # The AsyncConnectionInterface methods provide information about the state of
+    # the connection, allowing for a connection pooling implementation to
+    # determine when to reuse and when to close the connection...
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
+
+    def is_available(self) -> bool:
+        # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+        # being "available". The control flow which created the connection will
+        # be able to send an outgoing request, but the connection will not be
+        # acquired from the connection pool for any other request.
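+        # Put differently, of the four connection states only IDLE counts as
+        # available: NEW and ACTIVE connections are dedicated to the request
+        # that created or is using them, and CLOSED connections cannot be
+        # reused at all.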
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + async def __aenter__(self) -> AsyncHTTP11Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
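+            # (The cancellation shield matters here: if the exception that
+            # interrupted the stream was itself a cancellation, the close call
+            # below must still be allowed to run to completion.)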
+ with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + async with Trace("response_closed", logger, self._request): + await self._connection._response_closed() + + +class AsyncHTTP11UpgradeStream(AsyncNetworkStream): + def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return await self._stream.read(max_bytes, timeout) + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + await self._stream.write(buffer, timeout) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + return await self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http2.py b/venv/lib/python3.12/site-packages/httpcore/_async/http2.py new file mode 100644 index 0000000000000000000000000000000000000000..dbd0beeb4da32d8c0175d412fa442eae8f837723 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import AsyncNetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation +from .._trace import Trace +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class AsyncHTTP2Connection(AsyncConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: AsyncNetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = AsyncLock() + self._state_lock = AsyncLock() + self._read_lock = AsyncLock() + self._write_lock = AsyncLock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. 
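+        # An illustrative shape for this mapping (the names are h2 event
+        # classes, not literal values):
+        #     {1: [ResponseReceived, DataReceived, StreamEnded], 3: [...]}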
+ self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. + self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + async def handle_async_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + async with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + async with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + async with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + await self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + await self._max_streams_semaphore.acquire() + + await self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + async with Trace("send_request_headers", logger, request, kwargs): + await self._send_request_headers(request=request, stream_id=stream_id) + async with Trace("send_request_body", logger, request, kwargs): + await self._send_request_body(request=request, stream_id=stream_id) + async with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = await self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with AsyncShieldCancellation(): + kwargs = {"stream_id": stream_id} + async with Trace("response_closed", logger, request, kwargs): + await self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. 
+ # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. + if self._connection_terminated: # pragma: nocover + raise RemoteProtocolError(self._connection_terminated) + # If h2 raises a protocol error in some other state then we + # must somehow have made a protocol violation. + raise LocalProtocolError(exc) # pragma: nocover + + raise exc + + async def _send_connection_init(self, request: Request) -> None: + """ + The HTTP/2 connection requires some initial setup before we can start + using individual request/response streams on it. + """ + # Need to set these manually here instead of manipulating via + # __setitem__() otherwise the H2Connection will emit SettingsUpdate + # frames in addition to sending the undesired defaults. + self._h2_state.local_settings = h2.settings.Settings( + client=True, + initial_values={ + # Disable PUSH_PROMISE frames from the server since we don't do anything + # with them for now. Maybe when we support caching? + h2.settings.SettingCodes.ENABLE_PUSH: 0, + # These two are taken from h2 for safe defaults + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, + h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, + }, + ) + + # Some websites (*cough* Yahoo *cough*) balk at this setting being + # present in the initial handshake since it's not defined in the original + # RFC despite the RFC mandating ignoring settings you don't know about. + del self._h2_state.local_settings[ + h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL + ] + + self._h2_state.initiate_connection() + self._h2_state.increment_flow_control_window(2**24) + await self._write_outgoing_data(request) + + # Sending the request... + + async def _send_request_headers(self, request: Request, stream_id: int) -> None: + """ + Send the request headers to a given stream ID. + """ + end_stream = not has_body_headers(request) + + # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. + # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require + # HTTP/1.1 style headers, and map them appropriately if we end up on + # an HTTP/2 connection. + authority = [v for k, v in request.headers if k.lower() == b"host"][0] + + headers = [ + (b":method", request.method), + (b":authority", authority), + (b":scheme", request.url.scheme), + (b":path", request.url.target), + ] + [ + (k.lower(), v) + for k, v in request.headers + if k.lower() + not in ( + b"host", + b"transfer-encoding", + ) + ] + + self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) + self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) + await self._write_outgoing_data(request) + + async def _send_request_body(self, request: Request, stream_id: int) -> None: + """ + Iterate over the request body sending it to a given stream ID. + """ + if not has_body_headers(request): + return + + assert isinstance(request.stream, typing.AsyncIterable) + async for data in request.stream: + await self._send_stream_data(request, stream_id, data) + await self._send_end_stream(request, stream_id) + + async def _send_stream_data( + self, request: Request, stream_id: int, data: bytes + ) -> None: + """ + Send a single chunk of data in one or more data frames. 
+        """
+        while data:
+            max_flow = await self._wait_for_outgoing_flow(request, stream_id)
+            chunk_size = min(len(data), max_flow)
+            chunk, data = data[:chunk_size], data[chunk_size:]
+            self._h2_state.send_data(stream_id, chunk)
+            await self._write_outgoing_data(request)
+
+    async def _send_end_stream(self, request: Request, stream_id: int) -> None:
+        """
+        Send an empty data frame on a given stream ID with the END_STREAM flag set.
+        """
+        self._h2_state.end_stream(stream_id)
+        await self._write_outgoing_data(request)
+
+    # Receiving the response...
+
+    async def _receive_response(
+        self, request: Request, stream_id: int
+    ) -> tuple[int, list[tuple[bytes, bytes]]]:
+        """
+        Return the response status code and headers for a given stream ID.
+        """
+        while True:
+            event = await self._receive_stream_event(request, stream_id)
+            if isinstance(event, h2.events.ResponseReceived):
+                break
+
+        status_code = 200
+        headers = []
+        assert event.headers is not None
+        for k, v in event.headers:
+            if k == b":status":
+                status_code = int(v.decode("ascii", errors="ignore"))
+            elif not k.startswith(b":"):
+                headers.append((k, v))
+
+        return (status_code, headers)
+
+    async def _receive_response_body(
+        self, request: Request, stream_id: int
+    ) -> typing.AsyncIterator[bytes]:
+        """
+        Iterator that returns the bytes of the response body for a given stream ID.
+        """
+        while True:
+            event = await self._receive_stream_event(request, stream_id)
+            if isinstance(event, h2.events.DataReceived):
+                assert event.flow_controlled_length is not None
+                assert event.data is not None
+                amount = event.flow_controlled_length
+                self._h2_state.acknowledge_received_data(amount, stream_id)
+                await self._write_outgoing_data(request)
+                yield event.data
+            elif isinstance(event, h2.events.StreamEnded):
+                break
+
+    async def _receive_stream_event(
+        self, request: Request, stream_id: int
+    ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded:
+        """
+        Return the next available event for a given stream ID.
+
+        Will read more data from the network if required.
+        """
+        while not self._events.get(stream_id):
+            await self._receive_events(request, stream_id)
+        event = self._events[stream_id].pop(0)
+        if isinstance(event, h2.events.StreamReset):
+            raise RemoteProtocolError(event)
+        return event
+
+    async def _receive_events(
+        self, request: Request, stream_id: int | None = None
+    ) -> None:
+        """
+        Read some data from the network until we see one or more events
+        for a given stream ID.
+        """
+        async with self._read_lock:
+            if self._connection_terminated is not None:
+                last_stream_id = self._connection_terminated.last_stream_id
+                if stream_id and last_stream_id and stream_id > last_stream_id:
+                    self._request_count -= 1
+                    raise ConnectionNotAvailable()
+                raise RemoteProtocolError(self._connection_terminated)
+
+            # This conditional is a bit icky. We don't want to block reading if we've
+            # actually got an event to return for a given stream. We need to do that
+            # check *within* the atomic read lock. Though it also needs to be optional,
+            # because when we call it from `_wait_for_outgoing_flow` we *do* want to
+            # block until we have available flow control, even when we have events
+            # pending for the stream ID we're attempting to send on.
+ if stream_id is None or not self._events.get(stream_id): + events = await self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + async with Trace( + "receive_remote_settings", logger, request + ) as trace: + await self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + await self._write_outgoing_data(request) + + async def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + await self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + await self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + async def _response_closed(self, stream_id: int) -> None: + await self._max_streams_semaphore.release() + del self._events[stream_id] + async with self._state_lock: + if self._connection_terminated and not self._events: + await self.aclose() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + await self.aclose() + + async def aclose(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + await self._network_stream.aclose() + + # Wrappers around network read/write operations... + + async def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
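
The two-step latching described in the comment above can be shown in isolation. A sketch of the idea, with a hypothetical `LatchingReader` that is not part of httpcore:

```python
class LatchingReader:
    """Wrap a stream so the first read failure is replayed on every
    subsequent call, instead of waiting out another timeout."""

    def __init__(self, stream):
        self._stream = stream
        self._error: Exception | None = None

    async def read(self, max_bytes: int) -> bytes:
        if self._error is not None:
            raise self._error  # fail fast: the connection is already dead
        try:
            return await self._stream.read(max_bytes)
        except Exception as exc:
            self._error = exc  # latch, as _read_incoming_data does above
            raise
```
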
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + async def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + async with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + await self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + await self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
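
For example, a connection instance can be driven directly, bypassing the pool. This sketch assumes `AsyncHTTP2Connection`, `AnyIOBackend`, and `Origin` are exported at the httpcore package top level (as in recent releases), and that the server at `example.com` negotiates HTTP/2 via ALPN:

```python
import asyncio
import ssl

import httpcore


async def main() -> None:
    origin = httpcore.Origin(b"https", b"example.com", 443)
    # Open and upgrade the network stream ourselves, then hand it over.
    stream = await httpcore.AnyIOBackend().connect_tcp("example.com", 443)
    ctx = ssl.create_default_context()
    ctx.set_alpn_protocols(["h2"])
    stream = await stream.start_tls(ctx, server_hostname="example.com")
    async with httpcore.AsyncHTTP2Connection(origin=origin, stream=stream) as conn:
        response = await conn.request(b"GET", "https://example.com/")
        print(response.status)


asyncio.run(main())
```
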
+ + async def __aenter__(self) -> AsyncHTTP2Connection: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + await self.aclose() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: AsyncHTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + async with Trace("receive_response_body", logger, self._request, kwargs): + async for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with AsyncShieldCancellation(): + await self.aclose() + raise exc + + async def aclose(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + async with Trace("response_closed", logger, self._request, kwargs): + await self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..cc9d92066e1680576846e46ccdf645a2b1dd5718 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection import AsyncHTTPConnection +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class AsyncHTTPProxy(AsyncConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        proxy_ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        local_address: str | None = None,
+        uds: str | None = None,
+        network_backend: AsyncNetworkBackend | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            proxy_auth: Any proxy authentication as a two-tuple of
+                (username, password). May be either bytes or ascii-only str.
+            proxy_headers: Any HTTP headers to use for the proxy requests.
+                For example `{"Proxy-Authorization": "Basic <username>:<password>"}`.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            local_address: Local address to connect from. Can also be used to
+                connect using a particular address family. Using
+                `local_address="0.0.0.0"` will connect using an `AF_INET` address
+                (IPv4), while using `local_address="::"` will connect using an
+                `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + if origin.scheme == b"http": + return AsyncForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return AsyncTunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncForwardHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + async def handle_async_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return await self._connection.handle_async_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return 
self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class AsyncTunnelHTTPConnection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: AsyncConnectionInterface = AsyncHTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = AsyncLock() + self._connected = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = await self._connection.handle_async_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + await self._connection.aclose() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + 
origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + await self._connection.aclose() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py b/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py new file mode 100644 index 0000000000000000000000000000000000000000..361583bede6b2b84088b38054d5d8116ef9f1597 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class AsyncRequestInterface: + async def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = await self.handle_async_request(request) + try: + await response.aread() + finally: + await response.aclose() + return response + + @contextlib.asynccontextmanager + async def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.AsyncIterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.AsyncIterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. 
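
`request()` above and `stream()` here are the two public entry points this interface defines; everything else funnels into `handle_async_request()`. A usage sketch against the concrete `AsyncConnectionPool` (the URL is a placeholder):

```python
import asyncio

import httpcore


async def main() -> None:
    async with httpcore.AsyncConnectionPool() as pool:
        # request() reads the whole body before returning.
        response = await pool.request(b"GET", "https://www.example.com/")
        print(response.status, len(response.content))

        # stream() yields the body incrementally and closes it on exit.
        async with pool.stream(b"GET", "https://www.example.com/") as response:
            async for chunk in response.aiter_stream():
                print("chunk:", len(chunk))


asyncio.run(main())
```
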
+        headers = include_request_headers(headers, url=url, content=content)
+
+        request = Request(
+            method=method,
+            url=url,
+            headers=headers,
+            content=content,
+            extensions=extensions,
+        )
+        response = await self.handle_async_request(request)
+        try:
+            yield response
+        finally:
+            await response.aclose()
+
+    async def handle_async_request(self, request: Request) -> Response:
+        raise NotImplementedError()  # pragma: nocover
+
+
+class AsyncConnectionInterface(AsyncRequestInterface):
+    async def aclose(self) -> None:
+        raise NotImplementedError()  # pragma: nocover
+
+    def info(self) -> str:
+        raise NotImplementedError()  # pragma: nocover
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_available(self) -> bool:
+        """
+        Return `True` if the connection is currently able to accept an
+        outgoing request.
+
+        An HTTP/1.1 connection will only be available if it is currently idle.
+
+        An HTTP/2 connection will be available so long as the stream ID space is
+        not yet exhausted, and the connection is not in an error state.
+
+        While the connection is being established we may not yet know if it is going
+        to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+        treated as being available, but might ultimately raise `NewConnectionRequired`
+        exceptions if multiple requests are attempted over a connection
+        that ends up being established as HTTP/1.1.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def has_expired(self) -> bool:
+        """
+        Return `True` if the connection is in a state where it should be closed.
+
+        This either means that the connection is idle and it has passed the
+        expiry time on its keep-alive, or that the server has sent an EOF.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_idle(self) -> bool:
+        """
+        Return `True` if the connection is currently idle.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_closed(self) -> bool:
+        """
+        Return `True` if the connection has been closed.
+
+        Used when a response is closed to determine if the connection may be
+        returned to the connection pool or not.
+ """ + raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..b363f55a0b071de6c5f377726be82dc2110e373c --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_async/socks_proxy.py @@ -0,0 +1,341 @@ +from __future__ import annotations + +import logging +import ssl + +import socksio + +from .._backends.auto import AutoBackend +from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream +from .._exceptions import ConnectionNotAvailable, ProxyError +from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url +from .._ssl import default_ssl_context +from .._synchronization import AsyncLock +from .._trace import Trace +from .connection_pool import AsyncConnectionPool +from .http11 import AsyncHTTP11Connection +from .interfaces import AsyncConnectionInterface + +logger = logging.getLogger("httpcore.socks") + + +AUTH_METHODS = { + b"\x00": "NO AUTHENTICATION REQUIRED", + b"\x01": "GSSAPI", + b"\x02": "USERNAME/PASSWORD", + b"\xff": "NO ACCEPTABLE METHODS", +} + +REPLY_CODES = { + b"\x00": "Succeeded", + b"\x01": "General SOCKS server failure", + b"\x02": "Connection not allowed by ruleset", + b"\x03": "Network unreachable", + b"\x04": "Host unreachable", + b"\x05": "Connection refused", + b"\x06": "TTL expired", + b"\x07": "Command not supported", + b"\x08": "Address type not supported", +} + + +async def _init_socks5_connection( + stream: AsyncNetworkStream, + *, + host: bytes, + port: int, + auth: tuple[bytes, bytes] | None = None, +) -> None: + conn = socksio.socks5.SOCKS5Connection() + + # Auth method request + auth_method = ( + socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED + if auth is None + else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD + ) + conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method])) + outgoing_bytes = conn.data_to_send() + await stream.write(outgoing_bytes) + + # Auth method response + incoming_bytes = await stream.read(max_bytes=4096) + response = conn.receive_data(incoming_bytes) + assert isinstance(response, socksio.socks5.SOCKS5AuthReply) + if response.method != auth_method: + requested = AUTH_METHODS.get(auth_method, "UNKNOWN") + responded = AUTH_METHODS.get(response.method, "UNKNOWN") + raise ProxyError( + f"Requested {requested} from proxy server, but got {responded}." 
+        )
+
+    if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+        # Username/password request
+        assert auth is not None
+        username, password = auth
+        conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
+        outgoing_bytes = conn.data_to_send()
+        await stream.write(outgoing_bytes)
+
+        # Username/password response
+        incoming_bytes = await stream.read(max_bytes=4096)
+        response = conn.receive_data(incoming_bytes)
+        assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
+        if not response.success:
+            raise ProxyError("Invalid username/password")
+
+    # Connect request
+    conn.send(
+        socksio.socks5.SOCKS5CommandRequest.from_address(
+            socksio.socks5.SOCKS5Command.CONNECT, (host, port)
+        )
+    )
+    outgoing_bytes = conn.data_to_send()
+    await stream.write(outgoing_bytes)
+
+    # Connect response
+    incoming_bytes = await stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socksio.socks5.SOCKS5Reply)
+    if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
+        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class AsyncSOCKSProxy(AsyncConnectionPool):  # pragma: nocover
+    """
+    A connection pool that sends requests via a SOCKS5 proxy.
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        network_backend: AsyncNetworkBackend | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"http://127.0.0.1:8080/"`.
+            proxy_auth: Any proxy authentication as a two-tuple of
+                (username, password). May be either bytes or ascii-only str.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            network_backend: A backend instance to use for handling network I/O.
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> AsyncConnectionInterface: + return AsyncSocks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class AsyncSocks5Connection(AsyncConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: AsyncNetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: AsyncNetworkBackend = ( + AutoBackend() if network_backend is None else network_backend + ) + self._connect_lock = AsyncLock() + self._connection: AsyncConnectionInterface | None = None + self._connect_failed = False + + async def handle_async_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + async with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + async with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = await self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + async with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + await _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + async with Trace("start_tls", logger, request, kwargs) as trace: + stream = await stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = 
stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import AsyncHTTP2Connection + + self._connection = AsyncHTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = AsyncHTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return await self._connection.handle_async_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + async def aclose(self) -> None: + if self._connection is not None: + await self._connection.aclose() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_backends/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f640c9a166397dfa58a3346423423a8bca465b38 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..955a0fc1bead3bbd57ddf9a8c6cb0327586cf570 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/anyio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..5e4bb76c3f7af12f2d903170a98d3f6ff0d9c6ad Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/auto.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7fe936af7de6ca6f2e4c238fd3c9c809c4801b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..173f0cbcef6c7992f893d44e62dff88d314b85a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/mock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c039e0e65ff02d931bc5cf670cc992dfda6b144 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/sync.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3e9bd9024386e2496635d57e2040a87bed1cdade Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_backends/__pycache__/trio.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py b/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py new file mode 100644 index 0000000000000000000000000000000000000000..a140095e1b8de022f321a41c0125e0e5febc0749 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/anyio.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import ssl +import typing + +import anyio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .._utils import is_socket_readable +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AnyIOStream(AsyncNetworkStream): + def __init__(self, stream: anyio.abc.ByteStream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map = { + TimeoutError: ReadTimeout, + anyio.BrokenResourceError: ReadError, + anyio.ClosedResourceError: ReadError, + anyio.EndOfStream: ReadError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + try: + return await self._stream.receive(max_bytes=max_bytes) + except anyio.EndOfStream: # pragma: nocover + return b"" + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map = { + TimeoutError: WriteTimeout, + anyio.BrokenResourceError: WriteError, + anyio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + await self._stream.send(item=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def 
start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + exc_map = { + TimeoutError: ConnectTimeout, + anyio.BrokenResourceError: ConnectError, + anyio.EndOfStream: ConnectError, + ssl.SSLError: ConnectError, + } + with map_exceptions(exc_map): + try: + with anyio.fail_after(timeout): + ssl_stream = await anyio.streams.tls.TLSStream.wrap( + self._stream, + ssl_context=ssl_context, + hostname=server_hostname, + standard_compatible=False, + server_side=False, + ) + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return AnyIOStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) + if info == "client_addr": + return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) + if info == "server_addr": + return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) + if info == "socket": + return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + if info == "is_readable": + sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) + return is_socket_readable(sock) + return None + + +class AnyIOBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_tcp( + remote_host=host, + remote_port=port, + local_host=local_address, + ) + # By default TCP sockets opened in `asyncio` include TCP_NODELAY. 
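
As the comment above notes, each entry in `socket_options` is a plain `setsockopt()` argument tuple, applied verbatim to the raw socket in the loop that follows. A usage sketch via the public pool API (enabling TCP keepalive, as an arbitrary example):

```python
import socket

import httpcore

# Each tuple is passed straight through to socket.setsockopt().
pool = httpcore.AsyncConnectionPool(
    socket_options=[(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)],
)
```
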
+ for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + exc_map = { + TimeoutError: ConnectTimeout, + OSError: ConnectError, + anyio.BrokenResourceError: ConnectError, + } + with map_exceptions(exc_map): + with anyio.fail_after(timeout): + stream: anyio.abc.ByteStream = await anyio.connect_unix(path) + for option in socket_options: + stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return AnyIOStream(stream) + + async def sleep(self, seconds: float) -> None: + await anyio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py b/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py new file mode 100644 index 0000000000000000000000000000000000000000..49f0e698c97ad5623f376d8182675352e21c2c3c --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/auto.py @@ -0,0 +1,52 @@ +from __future__ import annotations + +import typing + +from .._synchronization import current_async_library +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class AutoBackend(AsyncNetworkBackend): + async def _init_backend(self) -> None: + if not (hasattr(self, "_backend")): + backend = current_async_library() + if backend == "trio": + from .trio import TrioBackend + + self._backend: AsyncNetworkBackend = TrioBackend() + else: + from .anyio import AnyIOBackend + + self._backend = AnyIOBackend() + + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + await self._init_backend() + return await self._backend.connect_tcp( + host, + port, + timeout=timeout, + local_address=local_address, + socket_options=socket_options, + ) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + await self._init_backend() + return await self._backend.connect_unix_socket( + path, timeout=timeout, socket_options=socket_options + ) + + async def sleep(self, seconds: float) -> None: # pragma: nocover + await self._init_backend() + return await self._backend.sleep(seconds) diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/base.py b/venv/lib/python3.12/site-packages/httpcore/_backends/base.py new file mode 100644 index 0000000000000000000000000000000000000000..cf55c8b10eb543872550be863206fe2f760d0d8d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/base.py @@ -0,0 +1,101 @@ +from __future__ import annotations + +import ssl +import time +import typing + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + + +class NetworkStream: + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + def close(self) -> None: + raise NotImplementedError() 
# pragma: nocover + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class NetworkBackend: + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + raise NotImplementedError() # pragma: nocover + + def sleep(self, seconds: float) -> None: + time.sleep(seconds) # pragma: nocover + + +class AsyncNetworkStream: + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + raise NotImplementedError() # pragma: nocover + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + raise NotImplementedError() # pragma: nocover + + async def aclose(self) -> None: + raise NotImplementedError() # pragma: nocover + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + def get_extra_info(self, info: str) -> typing.Any: + return None # pragma: nocover + + +class AsyncNetworkBackend: + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + raise NotImplementedError() # pragma: nocover + + async def sleep(self, seconds: float) -> None: + raise NotImplementedError() # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py b/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py new file mode 100644 index 0000000000000000000000000000000000000000..9b6edca03d4d4b34f355fd53e49d4b4c699c972c --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/mock.py @@ -0,0 +1,143 @@ +from __future__ import annotations + +import ssl +import typing + +from .._exceptions import ReadError +from .base import ( + SOCKET_OPTION, + AsyncNetworkBackend, + AsyncNetworkStream, + NetworkBackend, + NetworkStream, +) + + +class MockSSLObject: + def __init__(self, http2: bool): + self._http2 = http2 + + def selected_alpn_protocol(self) -> str: + return "h2" if self._http2 else "http/1.1" + + +class MockStream(NetworkStream): + def __init__(self, buffer: list[bytes], http2: bool = False) -> None: + self._buffer = buffer + self._http2 = http2 + self._closed = False + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._closed: + raise ReadError("Connection closed") + if not self._buffer: + return b"" + return self._buffer.pop(0) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + pass + + def close(self) -> None: + self._closed = True + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = 
None,
+    ) -> NetworkStream:
+        return self
+
+    def get_extra_info(self, info: str) -> typing.Any:
+        return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
+
+    def __repr__(self) -> str:
+        return "<httpcore.MockStream>"
+
+
+class MockBackend(NetworkBackend):
+    def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+
+    def connect_tcp(
+        self,
+        host: str,
+        port: int,
+        timeout: float | None = None,
+        local_address: str | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> NetworkStream:
+        return MockStream(list(self._buffer), http2=self._http2)
+
+    def connect_unix_socket(
+        self,
+        path: str,
+        timeout: float | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> NetworkStream:
+        return MockStream(list(self._buffer), http2=self._http2)
+
+    def sleep(self, seconds: float) -> None:
+        pass
+
+
+class AsyncMockStream(AsyncNetworkStream):
+    def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+        self._closed = False
+
+    async def read(self, max_bytes: int, timeout: float | None = None) -> bytes:
+        if self._closed:
+            raise ReadError("Connection closed")
+        if not self._buffer:
+            return b""
+        return self._buffer.pop(0)
+
+    async def write(self, buffer: bytes, timeout: float | None = None) -> None:
+        pass
+
+    async def aclose(self) -> None:
+        self._closed = True
+
+    async def start_tls(
+        self,
+        ssl_context: ssl.SSLContext,
+        server_hostname: str | None = None,
+        timeout: float | None = None,
+    ) -> AsyncNetworkStream:
+        return self
+
+    def get_extra_info(self, info: str) -> typing.Any:
+        return MockSSLObject(http2=self._http2) if info == "ssl_object" else None
+
+    def __repr__(self) -> str:
+        return "<httpcore.AsyncMockStream>"
+
+
+class AsyncMockBackend(AsyncNetworkBackend):
+    def __init__(self, buffer: list[bytes], http2: bool = False) -> None:
+        self._buffer = buffer
+        self._http2 = http2
+
+    async def connect_tcp(
+        self,
+        host: str,
+        port: int,
+        timeout: float | None = None,
+        local_address: str | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> AsyncNetworkStream:
+        return AsyncMockStream(list(self._buffer), http2=self._http2)
+
+    async def connect_unix_socket(
+        self,
+        path: str,
+        timeout: float | None = None,
+        socket_options: typing.Iterable[SOCKET_OPTION] | None = None,
+    ) -> AsyncNetworkStream:
+        return AsyncMockStream(list(self._buffer), http2=self._http2)
+
+    async def sleep(self, seconds: float) -> None:
+        pass
diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py b/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py
new file mode 100644
index 0000000000000000000000000000000000000000..4018a09c6fb1e0ef1b03ab8d84b13ebef4031f7c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_backends/sync.py
@@ -0,0 +1,241 @@
+from __future__ import annotations
+
+import functools
+import socket
+import ssl
+import sys
+import typing
+
+from .._exceptions import (
+    ConnectError,
+    ConnectTimeout,
+    ExceptionMapping,
+    ReadError,
+    ReadTimeout,
+    WriteError,
+    WriteTimeout,
+    map_exceptions,
+)
+from .._utils import is_socket_readable
+from .base import SOCKET_OPTION, NetworkBackend, NetworkStream
+
+
+class TLSinTLSStream(NetworkStream):  # pragma: no cover
+    """
+    Because the standard `SSLContext.wrap_socket` method does
+    not work for `SSLSocket` objects, we need this class
+    to implement a TLS stream using an underlying `SSLObject`
+    instance in order to
support TLS on top of TLS. + """ + + # Defined in RFC 8449 + TLS_RECORD_SIZE = 16384 + + def __init__( + self, + sock: socket.socket, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ): + self._sock = sock + self._incoming = ssl.MemoryBIO() + self._outgoing = ssl.MemoryBIO() + + self.ssl_obj = ssl_context.wrap_bio( + incoming=self._incoming, + outgoing=self._outgoing, + server_hostname=server_hostname, + ) + + self._sock.settimeout(timeout) + self._perform_io(self.ssl_obj.do_handshake) + + def _perform_io( + self, + func: typing.Callable[..., typing.Any], + ) -> typing.Any: + ret = None + + while True: + errno = None + try: + ret = func() + except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: + errno = e.errno + + self._sock.sendall(self._outgoing.read()) + + if errno == ssl.SSL_ERROR_WANT_READ: + buf = self._sock.recv(self.TLS_RECORD_SIZE) + + if buf: + self._incoming.write(buf) + else: + self._incoming.write_eof() + if errno is None: + return ret + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return typing.cast( + bytes, self._perform_io(functools.partial(self.ssl_obj.read, max_bytes)) + ) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + while buffer: + nsent = self._perform_io(functools.partial(self.ssl_obj.write, buffer)) + buffer = buffer[nsent:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + raise NotImplementedError() + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object": + return self.ssl_obj + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncStream(NetworkStream): + def __init__(self, sock: socket.socket) -> None: + self._sock = sock + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} + with map_exceptions(exc_map): + self._sock.settimeout(timeout) + return self._sock.recv(max_bytes) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} + with map_exceptions(exc_map): + while buffer: + self._sock.settimeout(timeout) + n = self._sock.send(buffer) + buffer = buffer[n:] + + def close(self) -> None: + self._sock.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + try: + if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover + # If the underlying socket has already been upgraded + # to the TLS layer (i.e. is an instance of SSLSocket), + # we need some additional smarts to support TLS-in-TLS. 
+ return TLSinTLSStream( + self._sock, ssl_context, server_hostname, timeout + ) + else: + self._sock.settimeout(timeout) + sock = ssl_context.wrap_socket( + self._sock, server_hostname=server_hostname + ) + except Exception as exc: # pragma: nocover + self.close() + raise exc + return SyncStream(sock) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): + return self._sock._sslobj # type: ignore + if info == "client_addr": + return self._sock.getsockname() + if info == "server_addr": + return self._sock.getpeername() + if info == "socket": + return self._sock + if info == "is_readable": + return is_socket_readable(self._sock) + return None + + +class SyncBackend(NetworkBackend): + def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: + # Note that we automatically include `TCP_NODELAY` + # in addition to any other custom socket options. + if socket_options is None: + socket_options = [] # pragma: no cover + address = (host, port) + source_address = None if local_address is None else (local_address, 0) + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + + with map_exceptions(exc_map): + sock = socket.create_connection( + address, + timeout, + source_address=source_address, + ) + for option in socket_options: + sock.setsockopt(*option) # pragma: no cover + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + return SyncStream(sock) + + def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> NetworkStream: # pragma: nocover + if sys.platform == "win32": + raise RuntimeError( + "Attempted to connect to a UNIX socket on a Windows system." 
+ ) + if socket_options is None: + socket_options = [] + + exc_map: ExceptionMapping = { + socket.timeout: ConnectTimeout, + OSError: ConnectError, + } + with map_exceptions(exc_map): + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + for option in socket_options: + sock.setsockopt(*option) + sock.settimeout(timeout) + sock.connect(path) + return SyncStream(sock) diff --git a/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py b/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py new file mode 100644 index 0000000000000000000000000000000000000000..6f53f5f2a025e01e9949e2530bd9ca6928859251 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_backends/trio.py @@ -0,0 +1,159 @@ +from __future__ import annotations + +import ssl +import typing + +import trio + +from .._exceptions import ( + ConnectError, + ConnectTimeout, + ExceptionMapping, + ReadError, + ReadTimeout, + WriteError, + WriteTimeout, + map_exceptions, +) +from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream + + +class TrioStream(AsyncNetworkStream): + def __init__(self, stream: trio.abc.Stream) -> None: + self._stream = stream + + async def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ReadTimeout, + trio.BrokenResourceError: ReadError, + trio.ClosedResourceError: ReadError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + data: bytes = await self._stream.receive_some(max_bytes=max_bytes) + return data + + async def write(self, buffer: bytes, timeout: float | None = None) -> None: + if not buffer: + return + + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: WriteTimeout, + trio.BrokenResourceError: WriteError, + trio.ClosedResourceError: WriteError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + await self._stream.send_all(data=buffer) + + async def aclose(self) -> None: + await self._stream.aclose() + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> AsyncNetworkStream: + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + } + ssl_stream = trio.SSLStream( + self._stream, + ssl_context=ssl_context, + server_hostname=server_hostname, + https_compatible=True, + server_side=False, + ) + with map_exceptions(exc_map): + try: + with trio.fail_after(timeout_or_inf): + await ssl_stream.do_handshake() + except Exception as exc: # pragma: nocover + await self.aclose() + raise exc + return TrioStream(ssl_stream) + + def get_extra_info(self, info: str) -> typing.Any: + if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): + # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
+ # Tracked at https://github.com/python-trio/trio/issues/542 + return self._stream._ssl_object # type: ignore[attr-defined] + if info == "client_addr": + return self._get_socket_stream().socket.getsockname() + if info == "server_addr": + return self._get_socket_stream().socket.getpeername() + if info == "socket": + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream.socket + if info == "is_readable": + socket = self.get_extra_info("socket") + return socket.is_readable() + return None + + def _get_socket_stream(self) -> trio.SocketStream: + stream = self._stream + while isinstance(stream, trio.SSLStream): + stream = stream.transport_stream + assert isinstance(stream, trio.SocketStream) + return stream + + +class TrioBackend(AsyncNetworkBackend): + async def connect_tcp( + self, + host: str, + port: int, + timeout: float | None = None, + local_address: str | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: + # By default for TCP sockets, trio enables TCP_NODELAY. + # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream + if socket_options is None: + socket_options = [] # pragma: no cover + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_tcp_stream( + host=host, port=port, local_address=local_address + ) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def connect_unix_socket( + self, + path: str, + timeout: float | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> AsyncNetworkStream: # pragma: nocover + if socket_options is None: + socket_options = [] + timeout_or_inf = float("inf") if timeout is None else timeout + exc_map: ExceptionMapping = { + trio.TooSlowError: ConnectTimeout, + trio.BrokenResourceError: ConnectError, + OSError: ConnectError, + } + with map_exceptions(exc_map): + with trio.fail_after(timeout_or_inf): + stream: trio.abc.Stream = await trio.open_unix_socket(path) + for option in socket_options: + stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover + return TrioStream(stream) + + async def sleep(self, seconds: float) -> None: + await trio.sleep(seconds) # pragma: nocover diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py b/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b476d76d9a7ff45de8d18ec22d33d6af2982f92e --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/__init__.py @@ -0,0 +1,39 @@ +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .http_proxy import HTTPProxy +from .interfaces import ConnectionInterface + +try: + from .http2 import HTTP2Connection +except ImportError: # pragma: nocover + + class HTTP2Connection: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use http2 support, but the `h2` package is not " + "installed. Use 'pip install httpcore[http2]'." 
+ ) + + +try: + from .socks_proxy import SOCKSProxy +except ImportError: # pragma: nocover + + class SOCKSProxy: # type: ignore + def __init__(self, *args, **kwargs) -> None: # type: ignore + raise RuntimeError( + "Attempted to use SOCKS support, but the `socksio` package is not " + "installed. Use 'pip install httpcore[socks]'." + ) + + +__all__ = [ + "HTTPConnection", + "ConnectionPool", + "HTTPProxy", + "HTTP11Connection", + "HTTP2Connection", + "ConnectionInterface", + "SOCKSProxy", +] diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b0ddbe8ef8a473043fcf77c04a9eb4f63810289 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5596724f02aeb0bc458937ff7708b668a4fa087 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6526f79fdab0648e65d5db7ea72f77037c9f102f Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..61b5eaf2a34e4c745f5834ca3ade9a868f206d76 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http11.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3419fb285cf895e1306eed61a8d121703769b6ce Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da8a693b1d8c1336340d9285209be78bf8f59359 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f94637790ea3c57cd02a9afc0ffe1d77477dde1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/interfaces.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0814dd3d6247b25699605ca2a73448e3ee5eb14e Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py b/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py new file mode 100644 index 0000000000000000000000000000000000000000..363f8be819d2576ea65365e625dd1596ea40429a --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/connection.py @@ -0,0 +1,222 @@ +from __future__ import annotations + +import itertools +import logging +import ssl +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream +from .._exceptions import ConnectError, ConnectTimeout +from .._models import Origin, Request, Response +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. + + +logger = logging.getLogger("httpcore.connection") + + +def exponential_backoff(factor: float) -> typing.Iterator[float]: + """ + Generate a geometric sequence that has a ratio of 2 and starts with 0. + + For example: + - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` + - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` + """ + yield 0 + for n in itertools.count(): + yield factor * 2**n + + +class HTTPConnection(ConnectionInterface): + def __init__( + self, + origin: Origin, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._origin = origin + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._retries = retries + self._local_address = local_address + self._uds = uds + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connection: ConnectionInterface | None = None + self._connect_failed: bool = False + self._request_lock = Lock() + self._socket_options = socket_options + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection to {self._origin}" + ) + + try: + with self._request_lock: + if self._connection is None: + stream = self._connect(request) + + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except BaseException as exc: + self._connect_failed = True + raise exc + + return self._connection.handle_request(request) + + 
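The retry schedule used by `_connect` below comes from `exponential_backoff` with `RETRIES_BACKOFF_FACTOR = 0.5`: the first retry is immediate, then the delays double. A standalone sketch of the resulting sequence (the generator is duplicated here so the snippet runs without importing httpcore):

```python
# Sketch of the retry delays derived from
# exponential_backoff(factor=RETRIES_BACKOFF_FACTOR).
import itertools

def exponential_backoff(factor: float):
    # First retry is immediate, then delays double: 0, f, 2f, 4f, ...
    yield 0
    for n in itertools.count():
        yield factor * 2**n

delays = exponential_backoff(factor=0.5)
print([next(delays) for _ in range(5)])  # [0, 0.5, 1.0, 2.0, 4.0]
```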
def _connect(self, request: Request) -> NetworkStream: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + retries_left = self._retries + delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) + + while True: + try: + if self._uds is None: + kwargs = { + "host": self._origin.host.decode("ascii"), + "port": self._origin.port, + "local_address": self._local_address, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + else: + kwargs = { + "path": self._uds, + "timeout": timeout, + "socket_options": self._socket_options, + } + with Trace( + "connect_unix_socket", logger, request, kwargs + ) as trace: + stream = self._network_backend.connect_unix_socket( + **kwargs + ) + trace.return_value = stream + + if self._origin.scheme in (b"https", b"wss"): + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + return stream + except (ConnectError, ConnectTimeout): + if retries_left <= 0: + raise + retries_left -= 1 + delay = next(delays) + with Trace("retry", logger, request, kwargs) as trace: + self._network_backend.sleep(delay) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def close(self) -> None: + if self._connection is not None: + with Trace("close", logger, None, {}): + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
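The context-manager hooks that follow make it possible to drive a connection instance directly, as the comment above notes. A minimal sketch of that usage, assuming the public `httpcore.Origin` and `httpcore.HTTPConnection` names; the host is a placeholder and the snippet needs network access to actually run:

```python
# Driving a single HTTPConnection directly, e.g. in a test;
# "example.com" is a placeholder host.
import httpcore

origin = httpcore.Origin(scheme=b"https", host=b"example.com", port=443)
with httpcore.HTTPConnection(origin=origin) as connection:
    response = connection.request("GET", "https://example.com/")
    print(response.status, connection.info())
```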
+ + def __enter__(self) -> HTTPConnection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py b/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py new file mode 100644 index 0000000000000000000000000000000000000000..9ccfa53e597a29ee387f9d16f3af4f695ac0d33a --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/connection_pool.py @@ -0,0 +1,420 @@ +from __future__ import annotations + +import ssl +import sys +import types +import typing + +from .._backends.sync import SyncBackend +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol +from .._models import Origin, Proxy, Request, Response +from .._synchronization import Event, ShieldCancellation, ThreadLock +from .connection import HTTPConnection +from .interfaces import ConnectionInterface, RequestInterface + + +class PoolRequest: + def __init__(self, request: Request) -> None: + self.request = request + self.connection: ConnectionInterface | None = None + self._connection_acquired = Event() + + def assign_to_connection(self, connection: ConnectionInterface | None) -> None: + self.connection = connection + self._connection_acquired.set() + + def clear_connection(self) -> None: + self.connection = None + self._connection_acquired = Event() + + def wait_for_connection( + self, timeout: float | None = None + ) -> ConnectionInterface: + if self.connection is None: + self._connection_acquired.wait(timeout=timeout) + assert self.connection is not None + return self.connection + + def is_queued(self) -> bool: + return self.connection is None + + +class ConnectionPool(RequestInterface): + """ + A connection pool for making HTTP requests. + """ + + def __init__( + self, + ssl_context: ssl.SSLContext | None = None, + proxy: Proxy | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish a + connection. + local_address: Local address to connect from. Can also be used to connect + using a particular address family. 
Using `local_address="0.0.0.0"`
+                will connect using an `AF_INET` address (IPv4), while using
+                `local_address="::"` will connect using an `AF_INET6` address (IPv6).
+            uds: Path to a Unix Domain Socket to use instead of TCP sockets.
+            network_backend: A backend instance to use for handling network I/O.
+            socket_options: Socket options that have to be included
+                in the TCP socket when the connection is established.
+        """
+        self._ssl_context = ssl_context
+        self._proxy = proxy
+        self._max_connections = (
+            sys.maxsize if max_connections is None else max_connections
+        )
+        self._max_keepalive_connections = (
+            sys.maxsize
+            if max_keepalive_connections is None
+            else max_keepalive_connections
+        )
+        self._max_keepalive_connections = min(
+            self._max_connections, self._max_keepalive_connections
+        )
+
+        self._keepalive_expiry = keepalive_expiry
+        self._http1 = http1
+        self._http2 = http2
+        self._retries = retries
+        self._local_address = local_address
+        self._uds = uds
+
+        self._network_backend = (
+            SyncBackend() if network_backend is None else network_backend
+        )
+        self._socket_options = socket_options
+
+        # The mutable state on a connection pool is the queue of incoming requests,
+        # and the set of connections that are servicing those requests.
+        self._connections: list[ConnectionInterface] = []
+        self._requests: list[PoolRequest] = []
+
+        # We only mutate the state of the connection pool within an 'optional_thread_lock'
+        # context. This holds a threading lock unless we're running in async mode,
+        # in which case it is a no-op.
+        self._optional_thread_lock = ThreadLock()
+
+    def create_connection(self, origin: Origin) -> ConnectionInterface:
+        if self._proxy is not None:
+            if self._proxy.url.scheme in (b"socks5", b"socks5h"):
+                from .socks_proxy import Socks5Connection
+
+                return Socks5Connection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_auth=self._proxy.auth,
+                    remote_origin=origin,
+                    ssl_context=self._ssl_context,
+                    keepalive_expiry=self._keepalive_expiry,
+                    http1=self._http1,
+                    http2=self._http2,
+                    network_backend=self._network_backend,
+                )
+            elif origin.scheme == b"http":
+                from .http_proxy import ForwardHTTPConnection
+
+                return ForwardHTTPConnection(
+                    proxy_origin=self._proxy.url.origin,
+                    proxy_headers=self._proxy.headers,
+                    proxy_ssl_context=self._proxy.ssl_context,
+                    remote_origin=origin,
+                    keepalive_expiry=self._keepalive_expiry,
+                    network_backend=self._network_backend,
+                )
+            from .http_proxy import TunnelHTTPConnection
+
+            return TunnelHTTPConnection(
+                proxy_origin=self._proxy.url.origin,
+                proxy_headers=self._proxy.headers,
+                proxy_ssl_context=self._proxy.ssl_context,
+                remote_origin=origin,
+                ssl_context=self._ssl_context,
+                keepalive_expiry=self._keepalive_expiry,
+                http1=self._http1,
+                http2=self._http2,
+                network_backend=self._network_backend,
+            )
+
+        return HTTPConnection(
+            origin=origin,
+            ssl_context=self._ssl_context,
+            keepalive_expiry=self._keepalive_expiry,
+            http1=self._http1,
+            http2=self._http2,
+            retries=self._retries,
+            local_address=self._local_address,
+            uds=self._uds,
+            network_backend=self._network_backend,
+            socket_options=self._socket_options,
+        )
+
+    @property
+    def connections(self) -> list[ConnectionInterface]:
+        """
+        Return a list of the connections currently in the pool.
+
+        For example:
+
+        ```python
+        >>> pool.connections
+        [
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>,
+            <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>,
+            <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>,
+        ]
+        ```
+        """
+        return list(self._connections)
+
+    def handle_request(self, request: Request) -> Response:
+        """
+        Send an HTTP request, and return an HTTP response.
+
+        This is the core implementation that is called into by `.request()` or `.stream()`.
+        """
+        scheme = request.url.scheme.decode()
+        if scheme == "":
+            raise UnsupportedProtocol(
+                "Request URL is missing an 'http://' or 'https://' protocol."
+            )
+        if scheme not in ("http", "https", "ws", "wss"):
+            raise UnsupportedProtocol(
+                f"Request URL has an unsupported protocol '{scheme}://'."
+            )
+
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("pool", None)
+
+        with self._optional_thread_lock:
+            # Add the incoming request to our request queue.
+            pool_request = PoolRequest(request)
+            self._requests.append(pool_request)
+
+        try:
+            while True:
+                with self._optional_thread_lock:
+                    # Assign incoming requests to available connections,
+                    # closing or creating new connections as required.
+                    closing = self._assign_requests_to_connections()
+                self._close_connections(closing)
+
+                # Wait until this request has an assigned connection.
+                connection = pool_request.wait_for_connection(timeout=timeout)
+
+                try:
+                    # Send the request on the assigned connection.
+                    response = connection.handle_request(
+                        pool_request.request
+                    )
+                except ConnectionNotAvailable:
+                    # In some cases a connection may initially be available to
+                    # handle a request, but then become unavailable.
+                    #
+                    # In this case we clear the connection and try again.
+                    pool_request.clear_connection()
+                else:
+                    break  # pragma: nocover
+
+        except BaseException as exc:
+            with self._optional_thread_lock:
+                # For any exception or cancellation we remove the request from
+                # the queue, and then re-assign requests to connections.
+                self._requests.remove(pool_request)
+                closing = self._assign_requests_to_connections()
+
+            self._close_connections(closing)
+            raise exc from None
+
+        # Return the response. Note that in this case we still have to manage
+        # the point at which the response is closed.
+        assert isinstance(response.stream, typing.Iterable)
+        return Response(
+            status=response.status,
+            headers=response.headers,
+            content=PoolByteStream(
+                stream=response.stream, pool_request=pool_request, pool=self
+            ),
+            extensions=response.extensions,
+        )
+
+    def _assign_requests_to_connections(self) -> list[ConnectionInterface]:
+        """
+        Manage the state of the connection pool, assigning incoming
+        requests to connections as available.
+
+        Called whenever a new request is added or removed from the pool.
+
+        Any closing connections are returned, allowing the I/O for closing
+        those connections to be handled separately.
+        """
+        closing_connections = []
+
+        # First we handle cleaning up any connections that are closed,
+        # have expired their keep-alive, or are surplus idle connections.
+        for connection in list(self._connections):
+            if connection.is_closed():
+                # log: "removing closed connection"
+                self._connections.remove(connection)
+            elif connection.has_expired():
+                # log: "closing expired connection"
+                self._connections.remove(connection)
+                closing_connections.append(connection)
+            elif (
+                connection.is_idle()
+                and len([connection.is_idle() for connection in self._connections])
+                > self._max_keepalive_connections
+            ):
+                # log: "closing idle connection"
+                self._connections.remove(connection)
+                closing_connections.append(connection)
+
+        # Assign queued requests to connections.
+ queued_requests = [request for request in self._requests if request.is_queued()] + for pool_request in queued_requests: + origin = pool_request.request.url.origin + available_connections = [ + connection + for connection in self._connections + if connection.can_handle_request(origin) and connection.is_available() + ] + idle_connections = [ + connection for connection in self._connections if connection.is_idle() + ] + + # There are three cases for how we may be able to handle the request: + # + # 1. There is an existing connection that can handle the request. + # 2. We can create a new connection to handle the request. + # 3. We can close an idle connection and then create a new connection + # to handle the request. + if available_connections: + # log: "reusing existing connection" + connection = available_connections[0] + pool_request.assign_to_connection(connection) + elif len(self._connections) < self._max_connections: + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + elif idle_connections: + # log: "closing idle connection" + connection = idle_connections[0] + self._connections.remove(connection) + closing_connections.append(connection) + # log: "creating new connection" + connection = self.create_connection(origin) + self._connections.append(connection) + pool_request.assign_to_connection(connection) + + return closing_connections + + def _close_connections(self, closing: list[ConnectionInterface]) -> None: + # Close connections which have been removed from the pool. + with ShieldCancellation(): + for connection in closing: + connection.close() + + def close(self) -> None: + # Explicitly close the connection pool. + # Clears all existing requests and connections. 
+ with self._optional_thread_lock: + closing_connections = list(self._connections) + self._connections = [] + self._close_connections(closing_connections) + + def __enter__(self) -> ConnectionPool: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + with self._optional_thread_lock: + request_is_queued = [request.is_queued() for request in self._requests] + connection_is_idle = [ + connection.is_idle() for connection in self._connections + ] + + num_active_requests = request_is_queued.count(False) + num_queued_requests = request_is_queued.count(True) + num_active_connections = connection_is_idle.count(False) + num_idle_connections = connection_is_idle.count(True) + + requests_info = ( + f"Requests: {num_active_requests} active, {num_queued_requests} queued" + ) + connection_info = ( + f"Connections: {num_active_connections} active, {num_idle_connections} idle" + ) + + return f"<{class_name} [{requests_info} | {connection_info}]>" + + +class PoolByteStream: + def __init__( + self, + stream: typing.Iterable[bytes], + pool_request: PoolRequest, + pool: ConnectionPool, + ) -> None: + self._stream = stream + self._pool_request = pool_request + self._pool = pool + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + try: + for part in self._stream: + yield part + except BaseException as exc: + self.close() + raise exc from None + + def close(self) -> None: + if not self._closed: + self._closed = True + with ShieldCancellation(): + if hasattr(self._stream, "close"): + self._stream.close() + + with self._pool._optional_thread_lock: + self._pool._requests.remove(self._pool_request) + closing = self._pool._assign_requests_to_connections() + + self._pool._close_connections(closing) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py new file mode 100644 index 0000000000000000000000000000000000000000..ebd3a97480c720d418acb1285a7b75da19b62c8c --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/http11.py @@ -0,0 +1,379 @@ +from __future__ import annotations + +import enum +import logging +import ssl +import time +import types +import typing + +import h11 + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, + WriteError, + map_exceptions, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http11") + + +# A subset of `h11.Event` types supported by `_send_event` +H11SendEvent = typing.Union[ + h11.Request, + h11.Data, + h11.EndOfMessage, +] + + +class HTTPConnectionState(enum.IntEnum): + NEW = 0 + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP11Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ) -> None: + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._expire_at: float | None = None + self._state = HTTPConnectionState.NEW + self._state_lock = Lock() + self._request_count = 0 
+        self._h11_state = h11.Connection(
+            our_role=h11.CLIENT,
+            max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE,
+        )
+
+    def handle_request(self, request: Request) -> Response:
+        if not self.can_handle_request(request.url.origin):
+            raise RuntimeError(
+                f"Attempted to send request to {request.url.origin} on connection "
+                f"to {self._origin}"
+            )
+
+        with self._state_lock:
+            if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE):
+                self._request_count += 1
+                self._state = HTTPConnectionState.ACTIVE
+                self._expire_at = None
+            else:
+                raise ConnectionNotAvailable()
+
+        try:
+            kwargs = {"request": request}
+            try:
+                with Trace(
+                    "send_request_headers", logger, request, kwargs
+                ) as trace:
+                    self._send_request_headers(**kwargs)
+                with Trace("send_request_body", logger, request, kwargs) as trace:
+                    self._send_request_body(**kwargs)
+            except WriteError:
+                # If we get a write error while we're writing the request,
+                # then we suppress this error and move on to attempting to
+                # read the response. Servers can sometimes close the request
+                # pre-emptively and then respond with a well-formed HTTP
+                # error response.
+                pass
+
+            with Trace(
+                "receive_response_headers", logger, request, kwargs
+            ) as trace:
+                (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                    trailing_data,
+                ) = self._receive_response_headers(**kwargs)
+                trace.return_value = (
+                    http_version,
+                    status,
+                    reason_phrase,
+                    headers,
+                )
+
+            network_stream = self._network_stream
+
+            # CONNECT or Upgrade request
+            if (status == 101) or (
+                (request.method == b"CONNECT") and (200 <= status < 300)
+            ):
+                network_stream = HTTP11UpgradeStream(network_stream, trailing_data)
+
+            return Response(
+                status=status,
+                headers=headers,
+                content=HTTP11ConnectionByteStream(self, request),
+                extensions={
+                    "http_version": http_version,
+                    "reason_phrase": reason_phrase,
+                    "network_stream": network_stream,
+                },
+            )
+        except BaseException as exc:
+            with ShieldCancellation():
+                with Trace("response_closed", logger, request) as trace:
+                    self._response_closed()
+            raise exc
+
+    # Sending the request...
+
+    def _send_request_headers(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        with map_exceptions({h11.LocalProtocolError: LocalProtocolError}):
+            event = h11.Request(
+                method=request.method,
+                target=request.url.target,
+                headers=request.headers,
+            )
+            self._send_event(event, timeout=timeout)
+
+    def _send_request_body(self, request: Request) -> None:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("write", None)
+
+        assert isinstance(request.stream, typing.Iterable)
+        for chunk in request.stream:
+            event = h11.Data(data=chunk)
+            self._send_event(event, timeout=timeout)
+
+        self._send_event(h11.EndOfMessage(), timeout=timeout)
+
+    def _send_event(self, event: h11.Event, timeout: float | None = None) -> None:
+        bytes_to_send = self._h11_state.send(event)
+        if bytes_to_send is not None:
+            self._network_stream.write(bytes_to_send, timeout=timeout)
+
+    # Receiving the response...
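The `_receive_*` methods below wrap h11's feed-bytes/pull-events cycle. A standalone sketch of that cycle against canned response bytes, using only the `h11` package (no httpcore involved):

```python
# The h11 receive cycle: send a request, feed raw response bytes into
# the state machine, then pull typed events out until EndOfMessage.
import h11

conn = h11.Connection(our_role=h11.CLIENT)
conn.send(h11.Request(method="GET", target="/", headers=[("Host", "example.com")]))
conn.send(h11.EndOfMessage())
conn.receive_data(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nhi")
while True:
    event = conn.next_event()
    print(type(event).__name__)  # Response, Data, EndOfMessage
    if isinstance(event, h11.EndOfMessage):
        break
```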
+
+    def _receive_response_headers(
+        self, request: Request
+    ) -> tuple[bytes, int, bytes, list[tuple[bytes, bytes]], bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Response):
+                break
+            if (
+                isinstance(event, h11.InformationalResponse)
+                and event.status_code == 101
+            ):
+                break
+
+        http_version = b"HTTP/" + event.http_version
+
+        # h11 version 0.11+ supports a `raw_items` interface to get the
+        # raw header casing, rather than the enforced lowercase headers.
+        headers = event.headers.raw_items()
+
+        trailing_data, _ = self._h11_state.trailing_data
+
+        return http_version, event.status_code, event.reason, headers, trailing_data
+
+    def _receive_response_body(
+        self, request: Request
+    ) -> typing.Iterator[bytes]:
+        timeouts = request.extensions.get("timeout", {})
+        timeout = timeouts.get("read", None)
+
+        while True:
+            event = self._receive_event(timeout=timeout)
+            if isinstance(event, h11.Data):
+                yield bytes(event.data)
+            elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)):
+                break
+
+    def _receive_event(
+        self, timeout: float | None = None
+    ) -> h11.Event | type[h11.PAUSED]:
+        while True:
+            with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}):
+                event = self._h11_state.next_event()
+
+            if event is h11.NEED_DATA:
+                data = self._network_stream.read(
+                    self.READ_NUM_BYTES, timeout=timeout
+                )
+
+                # If we feed this case through h11 we'll raise an exception like:
+                #
+                #     httpcore.RemoteProtocolError: can't handle event type
+                #     ConnectionClosed when role=SERVER and state=SEND_RESPONSE
+                #
+                # Which is accurate, but not very informative from an end-user
+                # perspective. Instead we handle this case distinctly and treat
+                # it as a ConnectError.
+                if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE:
+                    msg = "Server disconnected without sending a response."
+                    raise RemoteProtocolError(msg)
+
+                self._h11_state.receive_data(data)
+            else:
+                # mypy fails to narrow the type in the if statement above
+                return event  # type: ignore[return-value]
+
+    def _response_closed(self) -> None:
+        with self._state_lock:
+            if (
+                self._h11_state.our_state is h11.DONE
+                and self._h11_state.their_state is h11.DONE
+            ):
+                self._state = HTTPConnectionState.IDLE
+                self._h11_state.start_next_cycle()
+                if self._keepalive_expiry is not None:
+                    now = time.monotonic()
+                    self._expire_at = now + self._keepalive_expiry
+            else:
+                self.close()
+
+    # Once the connection is no longer required...
+
+    def close(self) -> None:
+        # Note that this method unilaterally closes the connection, and does
+        # not have any kind of locking in place around it.
+        self._state = HTTPConnectionState.CLOSED
+        self._network_stream.close()
+
+    # The ConnectionInterface methods provide information about the state of
+    # the connection, allowing for a connection pooling implementation to
+    # determine when to reuse and when to close the connection...
+
+    def can_handle_request(self, origin: Origin) -> bool:
+        return origin == self._origin
+
+    def is_available(self) -> bool:
+        # Note that HTTP/1.1 connections in the "NEW" state are not treated as
+        # being "available". The control flow which created the connection will
+        # be able to send an outgoing request, but the connection will not be
+        # acquired from the connection pool for any other request.
+ return self._state == HTTPConnectionState.IDLE + + def has_expired(self) -> bool: + now = time.monotonic() + keepalive_expired = self._expire_at is not None and now > self._expire_at + + # If the HTTP connection is idle but the socket is readable, then the + # only valid state is that the socket is about to return b"", indicating + # a server-initiated disconnect. + server_disconnected = ( + self._state == HTTPConnectionState.IDLE + and self._network_stream.get_extra_info("is_readable") + ) + + return keepalive_expired or server_disconnected + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/1.1, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. + + def __enter__(self) -> HTTP11Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP11ConnectionByteStream: + def __init__(self, connection: HTTP11Connection, request: Request) -> None: + self._connection = connection + self._request = request + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body(**kwargs): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. 
+ with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + with Trace("response_closed", logger, self._request): + self._connection._response_closed() + + +class HTTP11UpgradeStream(NetworkStream): + def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: + self._stream = stream + self._leading_data = leading_data + + def read(self, max_bytes: int, timeout: float | None = None) -> bytes: + if self._leading_data: + buffer = self._leading_data[:max_bytes] + self._leading_data = self._leading_data[max_bytes:] + return buffer + else: + return self._stream.read(max_bytes, timeout) + + def write(self, buffer: bytes, timeout: float | None = None) -> None: + self._stream.write(buffer, timeout) + + def close(self) -> None: + self._stream.close() + + def start_tls( + self, + ssl_context: ssl.SSLContext, + server_hostname: str | None = None, + timeout: float | None = None, + ) -> NetworkStream: + return self._stream.start_tls(ssl_context, server_hostname, timeout) + + def get_extra_info(self, info: str) -> typing.Any: + return self._stream.get_extra_info(info) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py new file mode 100644 index 0000000000000000000000000000000000000000..ddcc189001c50c37c6a03810dc21d955df919f10 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/http2.py @@ -0,0 +1,592 @@ +from __future__ import annotations + +import enum +import logging +import time +import types +import typing + +import h2.config +import h2.connection +import h2.events +import h2.exceptions +import h2.settings + +from .._backends.base import NetworkStream +from .._exceptions import ( + ConnectionNotAvailable, + LocalProtocolError, + RemoteProtocolError, +) +from .._models import Origin, Request, Response +from .._synchronization import Lock, Semaphore, ShieldCancellation +from .._trace import Trace +from .interfaces import ConnectionInterface + +logger = logging.getLogger("httpcore.http2") + + +def has_body_headers(request: Request) -> bool: + return any( + k.lower() == b"content-length" or k.lower() == b"transfer-encoding" + for k, v in request.headers + ) + + +class HTTPConnectionState(enum.IntEnum): + ACTIVE = 1 + IDLE = 2 + CLOSED = 3 + + +class HTTP2Connection(ConnectionInterface): + READ_NUM_BYTES = 64 * 1024 + CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) + + def __init__( + self, + origin: Origin, + stream: NetworkStream, + keepalive_expiry: float | None = None, + ): + self._origin = origin + self._network_stream = stream + self._keepalive_expiry: float | None = keepalive_expiry + self._h2_state = h2.connection.H2Connection(config=self.CONFIG) + self._state = HTTPConnectionState.IDLE + self._expire_at: float | None = None + self._request_count = 0 + self._init_lock = Lock() + self._state_lock = Lock() + self._read_lock = Lock() + self._write_lock = Lock() + self._sent_connection_init = False + self._used_all_stream_ids = False + self._connection_error = False + + # Mapping from stream ID to response stream events. + self._events: dict[ + int, + list[ + h2.events.ResponseReceived + | h2.events.DataReceived + | h2.events.StreamEnded + | h2.events.StreamReset, + ], + ] = {} + + # Connection terminated events are stored as state since + # we need to handle them for all streams. 
+ self._connection_terminated: h2.events.ConnectionTerminated | None = None + + self._read_exception: Exception | None = None + self._write_exception: Exception | None = None + + def handle_request(self, request: Request) -> Response: + if not self.can_handle_request(request.url.origin): + # This cannot occur in normal operation, since the connection pool + # will only send requests on connections that handle them. + # It's in place simply for resilience as a guard against incorrect + # usage, for anyone working directly with httpcore connections. + raise RuntimeError( + f"Attempted to send request to {request.url.origin} on connection " + f"to {self._origin}" + ) + + with self._state_lock: + if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): + self._request_count += 1 + self._expire_at = None + self._state = HTTPConnectionState.ACTIVE + else: + raise ConnectionNotAvailable() + + with self._init_lock: + if not self._sent_connection_init: + try: + sci_kwargs = {"request": request} + with Trace( + "send_connection_init", logger, request, sci_kwargs + ): + self._send_connection_init(**sci_kwargs) + except BaseException as exc: + with ShieldCancellation(): + self.close() + raise exc + + self._sent_connection_init = True + + # Initially start with just 1 until the remote server provides + # its max_concurrent_streams value + self._max_streams = 1 + + local_settings_max_streams = ( + self._h2_state.local_settings.max_concurrent_streams + ) + self._max_streams_semaphore = Semaphore(local_settings_max_streams) + + for _ in range(local_settings_max_streams - self._max_streams): + self._max_streams_semaphore.acquire() + + self._max_streams_semaphore.acquire() + + try: + stream_id = self._h2_state.get_next_available_stream_id() + self._events[stream_id] = [] + except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover + self._used_all_stream_ids = True + self._request_count -= 1 + raise ConnectionNotAvailable() + + try: + kwargs = {"request": request, "stream_id": stream_id} + with Trace("send_request_headers", logger, request, kwargs): + self._send_request_headers(request=request, stream_id=stream_id) + with Trace("send_request_body", logger, request, kwargs): + self._send_request_body(request=request, stream_id=stream_id) + with Trace( + "receive_response_headers", logger, request, kwargs + ) as trace: + status, headers = self._receive_response( + request=request, stream_id=stream_id + ) + trace.return_value = (status, headers) + + return Response( + status=status, + headers=headers, + content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), + extensions={ + "http_version": b"HTTP/2", + "network_stream": self._network_stream, + "stream_id": stream_id, + }, + ) + except BaseException as exc: # noqa: PIE786 + with ShieldCancellation(): + kwargs = {"stream_id": stream_id} + with Trace("response_closed", logger, request, kwargs): + self._response_closed(stream_id=stream_id) + + if isinstance(exc, h2.exceptions.ProtocolError): + # One case where h2 can raise a protocol error is when a + # closed frame has been seen by the state machine. + # + # This happens when one stream is reading, and encounters + # a GOAWAY event. Other flows of control may then raise + # a protocol error at any point they interact with the 'h2_state'. + # + # In this case we'll have stored the event, and should raise + # it as a RemoteProtocolError. 
+                if self._connection_terminated:  # pragma: nocover
+                    raise RemoteProtocolError(self._connection_terminated)
+                # If h2 raises a protocol error in some other state then we
+                # must somehow have made a protocol violation.
+                raise LocalProtocolError(exc)  # pragma: nocover
+
+            raise exc
+
+    def _send_connection_init(self, request: Request) -> None:
+        """
+        The HTTP/2 connection requires some initial setup before we can start
+        using individual request/response streams on it.
+        """
+        # Need to set these manually here instead of manipulating via
+        # __setitem__() otherwise the H2Connection will emit SettingsUpdate
+        # frames in addition to sending the undesired defaults.
+        self._h2_state.local_settings = h2.settings.Settings(
+            client=True,
+            initial_values={
+                # Disable PUSH_PROMISE frames from the server since we don't do anything
+                # with them for now. Maybe when we support caching?
+                h2.settings.SettingCodes.ENABLE_PUSH: 0,
+                # These two are taken from h2 for safe defaults
+                h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
+                h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536,
+            },
+        )
+
+        # Some websites (*cough* Yahoo *cough*) balk at this setting being
+        # present in the initial handshake since it's not defined in the original
+        # RFC despite the RFC mandating ignoring settings you don't know about.
+        del self._h2_state.local_settings[
+            h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL
+        ]
+
+        self._h2_state.initiate_connection()
+        self._h2_state.increment_flow_control_window(2**24)
+        self._write_outgoing_data(request)
+
+    # Sending the request...
+
+    def _send_request_headers(self, request: Request, stream_id: int) -> None:
+        """
+        Send the request headers to a given stream ID.
+        """
+        end_stream = not has_body_headers(request)
+
+        # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'.
+        # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require
+        # HTTP/1.1 style headers, and map them appropriately if we end up on
+        # an HTTP/2 connection.
+        authority = [v for k, v in request.headers if k.lower() == b"host"][0]
+
+        headers = [
+            (b":method", request.method),
+            (b":authority", authority),
+            (b":scheme", request.url.scheme),
+            (b":path", request.url.target),
+        ] + [
+            (k.lower(), v)
+            for k, v in request.headers
+            if k.lower()
+            not in (
+                b"host",
+                b"transfer-encoding",
+            )
+        ]
+
+        self._h2_state.send_headers(stream_id, headers, end_stream=end_stream)
+        self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id)
+        self._write_outgoing_data(request)
+
+    def _send_request_body(self, request: Request, stream_id: int) -> None:
+        """
+        Iterate over the request body sending it to a given stream ID.
+        """
+        if not has_body_headers(request):
+            return
+
+        assert isinstance(request.stream, typing.Iterable)
+        for data in request.stream:
+            self._send_stream_data(request, stream_id, data)
+        self._send_end_stream(request, stream_id)
+
+    def _send_stream_data(
+        self, request: Request, stream_id: int, data: bytes
+    ) -> None:
+        """
+        Send a single chunk of data in one or more data frames.
+        """
+        while data:
+            max_flow = self._wait_for_outgoing_flow(request, stream_id)
+            chunk_size = min(len(data), max_flow)
+            chunk, data = data[:chunk_size], data[chunk_size:]
+            self._h2_state.send_data(stream_id, chunk)
+            self._write_outgoing_data(request)
+
+    def _send_end_stream(self, request: Request, stream_id: int) -> None:
+        """
+        Send an empty data frame on a given stream ID with the END_STREAM flag set.
+ """ + self._h2_state.end_stream(stream_id) + self._write_outgoing_data(request) + + # Receiving the response... + + def _receive_response( + self, request: Request, stream_id: int + ) -> tuple[int, list[tuple[bytes, bytes]]]: + """ + Return the response status code and headers for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.ResponseReceived): + break + + status_code = 200 + headers = [] + assert event.headers is not None + for k, v in event.headers: + if k == b":status": + status_code = int(v.decode("ascii", errors="ignore")) + elif not k.startswith(b":"): + headers.append((k, v)) + + return (status_code, headers) + + def _receive_response_body( + self, request: Request, stream_id: int + ) -> typing.Iterator[bytes]: + """ + Iterator that returns the bytes of the response body for a given stream ID. + """ + while True: + event = self._receive_stream_event(request, stream_id) + if isinstance(event, h2.events.DataReceived): + assert event.flow_controlled_length is not None + assert event.data is not None + amount = event.flow_controlled_length + self._h2_state.acknowledge_received_data(amount, stream_id) + self._write_outgoing_data(request) + yield event.data + elif isinstance(event, h2.events.StreamEnded): + break + + def _receive_stream_event( + self, request: Request, stream_id: int + ) -> h2.events.ResponseReceived | h2.events.DataReceived | h2.events.StreamEnded: + """ + Return the next available event for a given stream ID. + + Will read more data from the network if required. + """ + while not self._events.get(stream_id): + self._receive_events(request, stream_id) + event = self._events[stream_id].pop(0) + if isinstance(event, h2.events.StreamReset): + raise RemoteProtocolError(event) + return event + + def _receive_events( + self, request: Request, stream_id: int | None = None + ) -> None: + """ + Read some data from the network until we see one or more events + for a given stream ID. + """ + with self._read_lock: + if self._connection_terminated is not None: + last_stream_id = self._connection_terminated.last_stream_id + if stream_id and last_stream_id and stream_id > last_stream_id: + self._request_count -= 1 + raise ConnectionNotAvailable() + raise RemoteProtocolError(self._connection_terminated) + + # This conditional is a bit icky. We don't want to block reading if we've + # actually got an event to return for a given stream. We need to do that + # check *within* the atomic read lock. Though it also need to be optional, + # because when we call it from `_wait_for_outgoing_flow` we *do* want to + # block until we've available flow control, event when we have events + # pending for the stream ID we're attempting to send on. 
+ if stream_id is None or not self._events.get(stream_id): + events = self._read_incoming_data(request) + for event in events: + if isinstance(event, h2.events.RemoteSettingsChanged): + with Trace( + "receive_remote_settings", logger, request + ) as trace: + self._receive_remote_settings_change(event) + trace.return_value = event + + elif isinstance( + event, + ( + h2.events.ResponseReceived, + h2.events.DataReceived, + h2.events.StreamEnded, + h2.events.StreamReset, + ), + ): + if event.stream_id in self._events: + self._events[event.stream_id].append(event) + + elif isinstance(event, h2.events.ConnectionTerminated): + self._connection_terminated = event + + self._write_outgoing_data(request) + + def _receive_remote_settings_change( + self, event: h2.events.RemoteSettingsChanged + ) -> None: + max_concurrent_streams = event.changed_settings.get( + h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS + ) + if max_concurrent_streams: + new_max_streams = min( + max_concurrent_streams.new_value, + self._h2_state.local_settings.max_concurrent_streams, + ) + if new_max_streams and new_max_streams != self._max_streams: + while new_max_streams > self._max_streams: + self._max_streams_semaphore.release() + self._max_streams += 1 + while new_max_streams < self._max_streams: + self._max_streams_semaphore.acquire() + self._max_streams -= 1 + + def _response_closed(self, stream_id: int) -> None: + self._max_streams_semaphore.release() + del self._events[stream_id] + with self._state_lock: + if self._connection_terminated and not self._events: + self.close() + + elif self._state == HTTPConnectionState.ACTIVE and not self._events: + self._state = HTTPConnectionState.IDLE + if self._keepalive_expiry is not None: + now = time.monotonic() + self._expire_at = now + self._keepalive_expiry + if self._used_all_stream_ids: # pragma: nocover + self.close() + + def close(self) -> None: + # Note that this method unilaterally closes the connection, and does + # not have any kind of locking in place around it. + self._h2_state.close_connection() + self._state = HTTPConnectionState.CLOSED + self._network_stream.close() + + # Wrappers around network read/write operations... + + def _read_incoming_data(self, request: Request) -> list[h2.events.Event]: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("read", None) + + if self._read_exception is not None: + raise self._read_exception # pragma: nocover + + try: + data = self._network_stream.read(self.READ_NUM_BYTES, timeout) + if data == b"": + raise RemoteProtocolError("Server disconnected") + except Exception as exc: + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future reads. + # (For example, this means that a single read timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. 
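+            #
+            # (An illustrative scenario: if two streams are mid-response and
+            # the socket read times out, the first reader stores the
+            # exception here and the second reader re-raises it immediately,
+            # rather than waiting out a second timeout of its own.)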
+ self._read_exception = exc + self._connection_error = True + raise exc + + events: list[h2.events.Event] = self._h2_state.receive_data(data) + + return events + + def _write_outgoing_data(self, request: Request) -> None: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("write", None) + + with self._write_lock: + data_to_send = self._h2_state.data_to_send() + + if self._write_exception is not None: + raise self._write_exception # pragma: nocover + + try: + self._network_stream.write(data_to_send, timeout) + except Exception as exc: # pragma: nocover + # If we get a network error we should: + # + # 1. Save the exception and just raise it immediately on any future write. + # (For example, this means that a single write timeout or disconnect will + # immediately close all pending streams. Without requiring multiple + # sequential timeouts.) + # 2. Mark the connection as errored, so that we don't accept any other + # incoming requests. + self._write_exception = exc + self._connection_error = True + raise exc + + # Flow control... + + def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: + """ + Returns the maximum allowable outgoing flow for a given stream. + + If the allowable flow is zero, then waits on the network until + WindowUpdated frames have increased the flow rate. + https://tools.ietf.org/html/rfc7540#section-6.9 + """ + local_flow: int = self._h2_state.local_flow_control_window(stream_id) + max_frame_size: int = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + while flow == 0: + self._receive_events(request) + local_flow = self._h2_state.local_flow_control_window(stream_id) + max_frame_size = self._h2_state.max_outbound_frame_size + flow = min(local_flow, max_frame_size) + return flow + + # Interface for connection pooling... + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._origin + + def is_available(self) -> bool: + return ( + self._state != HTTPConnectionState.CLOSED + and not self._connection_error + and not self._used_all_stream_ids + and not ( + self._h2_state.state_machine.state + == h2.connection.ConnectionState.CLOSED + ) + ) + + def has_expired(self) -> bool: + now = time.monotonic() + return self._expire_at is not None and now > self._expire_at + + def is_idle(self) -> bool: + return self._state == HTTPConnectionState.IDLE + + def is_closed(self) -> bool: + return self._state == HTTPConnectionState.CLOSED + + def info(self) -> str: + origin = str(self._origin) + return ( + f"{origin!r}, HTTP/2, {self._state.name}, " + f"Request Count: {self._request_count}" + ) + + def __repr__(self) -> str: + class_name = self.__class__.__name__ + origin = str(self._origin) + return ( + f"<{class_name} [{origin!r}, {self._state.name}, " + f"Request Count: {self._request_count}]>" + ) + + # These context managers are not used in the standard flow, but are + # useful for testing or working with connection instances directly. 
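+    #
+    # A minimal usage sketch (the origin and the network stream are assumed
+    # to come from elsewhere; `request()` is inherited from the request
+    # interface):
+    #
+    #     origin = Origin(b"https", b"example.com", 443)
+    #     with HTTP2Connection(origin=origin, stream=network_stream) as conn:
+    #         response = conn.request("GET", "https://example.com/")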
+ + def __enter__(self) -> HTTP2Connection: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: types.TracebackType | None = None, + ) -> None: + self.close() + + +class HTTP2ConnectionByteStream: + def __init__( + self, connection: HTTP2Connection, request: Request, stream_id: int + ) -> None: + self._connection = connection + self._request = request + self._stream_id = stream_id + self._closed = False + + def __iter__(self) -> typing.Iterator[bytes]: + kwargs = {"request": self._request, "stream_id": self._stream_id} + try: + with Trace("receive_response_body", logger, self._request, kwargs): + for chunk in self._connection._receive_response_body( + request=self._request, stream_id=self._stream_id + ): + yield chunk + except BaseException as exc: + # If we get an exception while streaming the response, + # we want to close the response (and possibly the connection) + # before raising that exception. + with ShieldCancellation(): + self.close() + raise exc + + def close(self) -> None: + if not self._closed: + self._closed = True + kwargs = {"stream_id": self._stream_id} + with Trace("response_closed", logger, self._request, kwargs): + self._connection._response_closed(stream_id=self._stream_id) diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py new file mode 100644 index 0000000000000000000000000000000000000000..ecca88f7dc93b78f2aa26f16cf29d17a8a83ae27 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/http_proxy.py @@ -0,0 +1,367 @@ +from __future__ import annotations + +import base64 +import logging +import ssl +import typing + +from .._backends.base import SOCKET_OPTION, NetworkBackend +from .._exceptions import ProxyError +from .._models import ( + URL, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, +) +from .._ssl import default_ssl_context +from .._synchronization import Lock +from .._trace import Trace +from .connection import HTTPConnection +from .connection_pool import ConnectionPool +from .http11 import HTTP11Connection +from .interfaces import ConnectionInterface + +ByteOrStr = typing.Union[bytes, str] +HeadersAsSequence = typing.Sequence[typing.Tuple[ByteOrStr, ByteOrStr]] +HeadersAsMapping = typing.Mapping[ByteOrStr, ByteOrStr] + + +logger = logging.getLogger("httpcore.proxy") + + +def merge_headers( + default_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + override_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, +) -> list[tuple[bytes, bytes]]: + """ + Append default_headers and override_headers, de-duplicating if a key exists + in both cases. + """ + default_headers = [] if default_headers is None else list(default_headers) + override_headers = [] if override_headers is None else list(override_headers) + has_override = set(key.lower() for key, value in override_headers) + default_headers = [ + (key, value) + for key, value in default_headers + if key.lower() not in has_override + ] + return default_headers + override_headers + + +class HTTPProxy(ConnectionPool): # pragma: nocover + """ + A connection pool that sends requests via an HTTP proxy. 
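+
+    A usage sketch (the proxy and target URLs are illustrative; `request()`
+    is inherited from the standard pool interface):
+
+        proxy = HTTPProxy(proxy_url="http://127.0.0.1:8080/")
+        response = proxy.request("GET", "https://example.com/")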
+ """ + + def __init__( + self, + proxy_url: URL | bytes | str, + proxy_auth: tuple[bytes | str, bytes | str] | None = None, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + max_connections: int | None = 10, + max_keepalive_connections: int | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + retries: int = 0, + local_address: str | None = None, + uds: str | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + """ + A connection pool for making HTTP requests. + + Parameters: + proxy_url: The URL to use when connecting to the proxy server. + For example `"http://127.0.0.1:8080/"`. + proxy_auth: Any proxy authentication as a two-tuple of + (username, password). May be either bytes or ascii-only str. + proxy_headers: Any HTTP headers to use for the proxy requests. + For example `{"Proxy-Authorization": "Basic :"}`. + ssl_context: An SSL context to use for verifying connections. + If not specified, the default `httpcore.default_ssl_context()` + will be used. + proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. + max_connections: The maximum number of concurrent HTTP connections that + the pool should allow. Any attempt to send a request on a pool that + would exceed this amount will block until a connection is available. + max_keepalive_connections: The maximum number of idle HTTP connections + that will be maintained in the pool. + keepalive_expiry: The duration in seconds that an idle HTTP connection + may be maintained for before being expired from the pool. + http1: A boolean indicating if HTTP/1.1 requests should be supported + by the connection pool. Defaults to True. + http2: A boolean indicating if HTTP/2 requests should be supported by + the connection pool. Defaults to False. + retries: The maximum number of retries when trying to establish + a connection. + local_address: Local address to connect from. Can also be used to + connect using a particular address family. Using + `local_address="0.0.0.0"` will connect using an `AF_INET` address + (IPv4), while using `local_address="::"` will connect using an + `AF_INET6` address (IPv6). + uds: Path to a Unix Domain Socket to use instead of TCP sockets. + network_backend: A backend instance to use for handling network I/O. 
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + local_address=local_address, + uds=uds, + socket_options=socket_options, + ) + + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if ( + self._proxy_url.scheme == b"http" and proxy_ssl_context is not None + ): # pragma: no cover + raise RuntimeError( + "The `proxy_ssl_context` argument is not allowed for the http scheme" + ) + + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + if proxy_auth is not None: + username = enforce_bytes(proxy_auth[0], name="proxy_auth") + password = enforce_bytes(proxy_auth[1], name="proxy_auth") + userpass = username + b":" + password + authorization = b"Basic " + base64.b64encode(userpass) + self._proxy_headers = [ + (b"Proxy-Authorization", authorization) + ] + self._proxy_headers + + def create_connection(self, origin: Origin) -> ConnectionInterface: + if origin.scheme == b"http": + return ForwardHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + keepalive_expiry=self._keepalive_expiry, + network_backend=self._network_backend, + proxy_ssl_context=self._proxy_ssl_context, + ) + return TunnelHTTPConnection( + proxy_origin=self._proxy_url.origin, + proxy_headers=self._proxy_headers, + remote_origin=origin, + ssl_context=self._ssl_context, + proxy_ssl_context=self._proxy_ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class ForwardHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_headers: HeadersAsMapping | HeadersAsSequence | None = None, + keepalive_expiry: float | None = None, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + ) -> None: + self._connection = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._remote_origin = remote_origin + + def handle_request(self, request: Request) -> Response: + headers = merge_headers(self._proxy_headers, request.headers) + url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=bytes(request.url), + ) + proxy_request = Request( + method=request.method, + url=url, + headers=headers, + content=request.stream, + extensions=request.extensions, + ) + return self._connection.handle_request(proxy_request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() 
+ + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" + + +class TunnelHTTPConnection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + ssl_context: ssl.SSLContext | None = None, + proxy_ssl_context: ssl.SSLContext | None = None, + proxy_headers: typing.Sequence[tuple[bytes, bytes]] | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + self._connection: ConnectionInterface = HTTPConnection( + origin=proxy_origin, + keepalive_expiry=keepalive_expiry, + network_backend=network_backend, + socket_options=socket_options, + ssl_context=proxy_ssl_context, + ) + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._ssl_context = ssl_context + self._proxy_ssl_context = proxy_ssl_context + self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + self._connect_lock = Lock() + self._connected = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if not self._connected: + target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) + + connect_url = URL( + scheme=self._proxy_origin.scheme, + host=self._proxy_origin.host, + port=self._proxy_origin.port, + target=target, + ) + connect_headers = merge_headers( + [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers + ) + connect_request = Request( + method=b"CONNECT", + url=connect_url, + headers=connect_headers, + extensions=request.extensions, + ) + connect_response = self._connection.handle_request( + connect_request + ) + + if connect_response.status < 200 or connect_response.status > 299: + reason_bytes = connect_response.extensions.get("reason_phrase", b"") + reason_str = reason_bytes.decode("ascii", errors="ignore") + msg = "%d %s" % (connect_response.status, reason_str) + self._connection.close() + raise ProxyError(msg) + + stream = connect_response.extensions["network_stream"] + + # Upgrade the stream to SSL + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or (self._http2 and not self._http1): + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + + self._connected = True + return self._connection.handle_request(request) + + def can_handle_request(self, 
origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + self._connection.close() + + def info(self) -> str: + return self._connection.info() + + def is_available(self) -> bool: + return self._connection.is_available() + + def has_expired(self) -> bool: + return self._connection.has_expired() + + def is_idle(self) -> bool: + return self._connection.is_idle() + + def is_closed(self) -> bool: + return self._connection.is_closed() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py b/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py new file mode 100644 index 0000000000000000000000000000000000000000..e673d4cc1b1dd7e7ecdbde91fd6ada386c3de03f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpcore/_sync/interfaces.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +import contextlib +import typing + +from .._models import ( + URL, + Extensions, + HeaderTypes, + Origin, + Request, + Response, + enforce_bytes, + enforce_headers, + enforce_url, + include_request_headers, +) + + +class RequestInterface: + def request( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> Response: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + response.read() + finally: + response.close() + return response + + @contextlib.contextmanager + def stream( + self, + method: bytes | str, + url: URL | bytes | str, + *, + headers: HeaderTypes = None, + content: bytes | typing.Iterator[bytes] | None = None, + extensions: Extensions | None = None, + ) -> typing.Iterator[Response]: + # Strict type checking on our parameters. + method = enforce_bytes(method, name="method") + url = enforce_url(url, name="url") + headers = enforce_headers(headers, name="headers") + + # Include Host header, and optionally Content-Length or Transfer-Encoding. + headers = include_request_headers(headers, url=url, content=content) + + request = Request( + method=method, + url=url, + headers=headers, + content=content, + extensions=extensions, + ) + response = self.handle_request(request) + try: + yield response + finally: + response.close() + + def handle_request(self, request: Request) -> Response: + raise NotImplementedError() # pragma: nocover + + +class ConnectionInterface(RequestInterface): + def close(self) -> None: + raise NotImplementedError() # pragma: nocover + + def info(self) -> str: + raise NotImplementedError() # pragma: nocover + + def can_handle_request(self, origin: Origin) -> bool: + raise NotImplementedError() # pragma: nocover + + def is_available(self) -> bool: + """ + Return `True` if the connection is currently able to accept an + outgoing request. + + An HTTP/1.1 connection will only be available if it is currently idle. + + An HTTP/2 connection will be available so long as the stream ID space is + not yet exhausted, and the connection is not in an error state. 
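+
+        For example (an illustrative scenario): a pool may dispatch a second
+        request onto an HTTP/2 connection that is still streaming an earlier
+        response, precisely because `is_available()` remains true for it.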
+
+        While the connection is being established we may not yet know if it is going
+        to result in an HTTP/1.1 or HTTP/2 connection. The connection should be
+        treated as being available, but might ultimately raise `NewConnectionRequired`
+        exceptions if multiple requests are attempted over a connection
+        that ends up being established as HTTP/1.1.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def has_expired(self) -> bool:
+        """
+        Return `True` if the connection is in a state where it should be closed.
+
+        This either means that the connection is idle and it has passed the
+        expiry time on its keep-alive, or that the server has sent an EOF.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_idle(self) -> bool:
+        """
+        Return `True` if the connection is currently idle.
+        """
+        raise NotImplementedError()  # pragma: nocover
+
+    def is_closed(self) -> bool:
+        """
+        Return `True` if the connection has been closed.
+
+        Used when a response is closed to determine if the connection may be
+        returned to the connection pool or not.
+        """
+        raise NotImplementedError()  # pragma: nocover
diff --git a/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py b/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ca96ddfb580b19413797f41e79f7abcecdd9d79
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpcore/_sync/socks_proxy.py
@@ -0,0 +1,341 @@
+from __future__ import annotations
+
+import logging
+import ssl
+
+import socksio
+
+from .._backends.sync import SyncBackend
+from .._backends.base import NetworkBackend, NetworkStream
+from .._exceptions import ConnectionNotAvailable, ProxyError
+from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url
+from .._ssl import default_ssl_context
+from .._synchronization import Lock
+from .._trace import Trace
+from .connection_pool import ConnectionPool
+from .http11 import HTTP11Connection
+from .interfaces import ConnectionInterface
+
+logger = logging.getLogger("httpcore.socks")
+
+
+AUTH_METHODS = {
+    b"\x00": "NO AUTHENTICATION REQUIRED",
+    b"\x01": "GSSAPI",
+    b"\x02": "USERNAME/PASSWORD",
+    b"\xff": "NO ACCEPTABLE METHODS",
+}
+
+REPLY_CODES = {
+    b"\x00": "Succeeded",
+    b"\x01": "General SOCKS server failure",
+    b"\x02": "Connection not allowed by ruleset",
+    b"\x03": "Network unreachable",
+    b"\x04": "Host unreachable",
+    b"\x05": "Connection refused",
+    b"\x06": "TTL expired",
+    b"\x07": "Command not supported",
+    b"\x08": "Address type not supported",
+}
+
+
+def _init_socks5_connection(
+    stream: NetworkStream,
+    *,
+    host: bytes,
+    port: int,
+    auth: tuple[bytes, bytes] | None = None,
+) -> None:
+    conn = socksio.socks5.SOCKS5Connection()
+
+    # Auth method request
+    auth_method = (
+        socksio.socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED
+        if auth is None
+        else socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD
+    )
+    conn.send(socksio.socks5.SOCKS5AuthMethodsRequest([auth_method]))
+    outgoing_bytes = conn.data_to_send()
+    stream.write(outgoing_bytes)
+
+    # Auth method response
+    incoming_bytes = stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socksio.socks5.SOCKS5AuthReply)
+    if response.method != auth_method:
+        requested = AUTH_METHODS.get(auth_method, "UNKNOWN")
+        responded = AUTH_METHODS.get(response.method, "UNKNOWN")
+        raise ProxyError(
+            f"Requested {requested} from proxy server, but got {responded}."
+        )
+
+    if response.method == socksio.socks5.SOCKS5AuthMethod.USERNAME_PASSWORD:
+        # Username/password request
+        assert auth is not None
+        username, password = auth
+        conn.send(socksio.socks5.SOCKS5UsernamePasswordRequest(username, password))
+        outgoing_bytes = conn.data_to_send()
+        stream.write(outgoing_bytes)
+
+        # Username/password response
+        incoming_bytes = stream.read(max_bytes=4096)
+        response = conn.receive_data(incoming_bytes)
+        assert isinstance(response, socksio.socks5.SOCKS5UsernamePasswordReply)
+        if not response.success:
+            raise ProxyError("Invalid username/password")
+
+    # Connect request
+    conn.send(
+        socksio.socks5.SOCKS5CommandRequest.from_address(
+            socksio.socks5.SOCKS5Command.CONNECT, (host, port)
+        )
+    )
+    outgoing_bytes = conn.data_to_send()
+    stream.write(outgoing_bytes)
+
+    # Connect response
+    incoming_bytes = stream.read(max_bytes=4096)
+    response = conn.receive_data(incoming_bytes)
+    assert isinstance(response, socksio.socks5.SOCKS5Reply)
+    if response.reply_code != socksio.socks5.SOCKS5ReplyCode.SUCCEEDED:
+        reply_code = REPLY_CODES.get(response.reply_code, "UNKNOWN")
+        raise ProxyError(f"Proxy Server could not connect: {reply_code}.")
+
+
+class SOCKSProxy(ConnectionPool):  # pragma: nocover
+    """
+    A connection pool that sends requests via a SOCKS proxy.
+    """
+
+    def __init__(
+        self,
+        proxy_url: URL | bytes | str,
+        proxy_auth: tuple[bytes | str, bytes | str] | None = None,
+        ssl_context: ssl.SSLContext | None = None,
+        max_connections: int | None = 10,
+        max_keepalive_connections: int | None = None,
+        keepalive_expiry: float | None = None,
+        http1: bool = True,
+        http2: bool = False,
+        retries: int = 0,
+        network_backend: NetworkBackend | None = None,
+    ) -> None:
+        """
+        A connection pool for making HTTP requests.
+
+        Parameters:
+            proxy_url: The URL to use when connecting to the proxy server.
+                For example `"socks5://127.0.0.1:1080/"`.
+            proxy_auth: Any proxy authentication as a two-tuple of
+                (username, password). May be either bytes or ascii-only str.
+            ssl_context: An SSL context to use for verifying connections.
+                If not specified, the default `httpcore.default_ssl_context()`
+                will be used.
+            max_connections: The maximum number of concurrent HTTP connections that
+                the pool should allow. Any attempt to send a request on a pool that
+                would exceed this amount will block until a connection is available.
+            max_keepalive_connections: The maximum number of idle HTTP connections
+                that will be maintained in the pool.
+            keepalive_expiry: The duration in seconds that an idle HTTP connection
+                may be maintained for before being expired from the pool.
+            http1: A boolean indicating if HTTP/1.1 requests should be supported
+                by the connection pool. Defaults to True.
+            http2: A boolean indicating if HTTP/2 requests should be supported by
+                the connection pool. Defaults to False.
+            retries: The maximum number of retries when trying to establish
+                a connection.
+            network_backend: A backend instance to use for handling network I/O.
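+
+            A usage sketch (illustrative values; `request()` is inherited
+            from the standard pool interface):
+
+                proxy = SOCKSProxy(proxy_url="socks5://127.0.0.1:1080/")
+                response = proxy.request("GET", "https://example.com/")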
+ """ + super().__init__( + ssl_context=ssl_context, + max_connections=max_connections, + max_keepalive_connections=max_keepalive_connections, + keepalive_expiry=keepalive_expiry, + http1=http1, + http2=http2, + network_backend=network_backend, + retries=retries, + ) + self._ssl_context = ssl_context + self._proxy_url = enforce_url(proxy_url, name="proxy_url") + if proxy_auth is not None: + username, password = proxy_auth + username_bytes = enforce_bytes(username, name="proxy_auth") + password_bytes = enforce_bytes(password, name="proxy_auth") + self._proxy_auth: tuple[bytes, bytes] | None = ( + username_bytes, + password_bytes, + ) + else: + self._proxy_auth = None + + def create_connection(self, origin: Origin) -> ConnectionInterface: + return Socks5Connection( + proxy_origin=self._proxy_url.origin, + remote_origin=origin, + proxy_auth=self._proxy_auth, + ssl_context=self._ssl_context, + keepalive_expiry=self._keepalive_expiry, + http1=self._http1, + http2=self._http2, + network_backend=self._network_backend, + ) + + +class Socks5Connection(ConnectionInterface): + def __init__( + self, + proxy_origin: Origin, + remote_origin: Origin, + proxy_auth: tuple[bytes, bytes] | None = None, + ssl_context: ssl.SSLContext | None = None, + keepalive_expiry: float | None = None, + http1: bool = True, + http2: bool = False, + network_backend: NetworkBackend | None = None, + ) -> None: + self._proxy_origin = proxy_origin + self._remote_origin = remote_origin + self._proxy_auth = proxy_auth + self._ssl_context = ssl_context + self._keepalive_expiry = keepalive_expiry + self._http1 = http1 + self._http2 = http2 + + self._network_backend: NetworkBackend = ( + SyncBackend() if network_backend is None else network_backend + ) + self._connect_lock = Lock() + self._connection: ConnectionInterface | None = None + self._connect_failed = False + + def handle_request(self, request: Request) -> Response: + timeouts = request.extensions.get("timeout", {}) + sni_hostname = request.extensions.get("sni_hostname", None) + timeout = timeouts.get("connect", None) + + with self._connect_lock: + if self._connection is None: + try: + # Connect to the proxy + kwargs = { + "host": self._proxy_origin.host.decode("ascii"), + "port": self._proxy_origin.port, + "timeout": timeout, + } + with Trace("connect_tcp", logger, request, kwargs) as trace: + stream = self._network_backend.connect_tcp(**kwargs) + trace.return_value = stream + + # Connect to the remote host using socks5 + kwargs = { + "stream": stream, + "host": self._remote_origin.host.decode("ascii"), + "port": self._remote_origin.port, + "auth": self._proxy_auth, + } + with Trace( + "setup_socks5_connection", logger, request, kwargs + ) as trace: + _init_socks5_connection(**kwargs) + trace.return_value = stream + + # Upgrade the stream to SSL + if self._remote_origin.scheme == b"https": + ssl_context = ( + default_ssl_context() + if self._ssl_context is None + else self._ssl_context + ) + alpn_protocols = ( + ["http/1.1", "h2"] if self._http2 else ["http/1.1"] + ) + ssl_context.set_alpn_protocols(alpn_protocols) + + kwargs = { + "ssl_context": ssl_context, + "server_hostname": sni_hostname + or self._remote_origin.host.decode("ascii"), + "timeout": timeout, + } + with Trace("start_tls", logger, request, kwargs) as trace: + stream = stream.start_tls(**kwargs) + trace.return_value = stream + + # Determine if we should be using HTTP/1.1 or HTTP/2 + ssl_object = stream.get_extra_info("ssl_object") + http2_negotiated = ( + ssl_object is not None + and 
ssl_object.selected_alpn_protocol() == "h2" + ) + + # Create the HTTP/1.1 or HTTP/2 connection + if http2_negotiated or ( + self._http2 and not self._http1 + ): # pragma: nocover + from .http2 import HTTP2Connection + + self._connection = HTTP2Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + else: + self._connection = HTTP11Connection( + origin=self._remote_origin, + stream=stream, + keepalive_expiry=self._keepalive_expiry, + ) + except Exception as exc: + self._connect_failed = True + raise exc + elif not self._connection.is_available(): # pragma: nocover + raise ConnectionNotAvailable() + + return self._connection.handle_request(request) + + def can_handle_request(self, origin: Origin) -> bool: + return origin == self._remote_origin + + def close(self) -> None: + if self._connection is not None: + self._connection.close() + + def is_available(self) -> bool: + if self._connection is None: # pragma: nocover + # If HTTP/2 support is enabled, and the resulting connection could + # end up as HTTP/2 then we should indicate the connection as being + # available to service multiple requests. + return ( + self._http2 + and (self._remote_origin.scheme == b"https" or not self._http1) + and not self._connect_failed + ) + return self._connection.is_available() + + def has_expired(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.has_expired() + + def is_idle(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_idle() + + def is_closed(self) -> bool: + if self._connection is None: # pragma: nocover + return self._connect_failed + return self._connection.is_closed() + + def info(self) -> str: + if self._connection is None: # pragma: nocover + return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" + return self._connection.info() + + def __repr__(self) -> str: + return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..ab79d16a3f4c6c894c028d1f7431811e8711b42b --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx-0.28.1.dist-info/licenses/LICENSE.md @@ -0,0 +1,12 @@ +Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..82e3e42dff0c925bbed86ffd3cf92506aaa98f85 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..049a55a419dab07e4094d823c15cb13f1a1003e1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/__version__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d6160b911dc52e8fb6f7ea0eb598556780d1cfd6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..db68e4cc1248247beca9b194cb668039de5e8b66 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_auth.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2cd7e0ac61b40aa69c464a88864a3ce7126fef1d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0969517121489356fb008d3ea33b3b8f9f117e4f Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da3ab7e51f26565ee4076a72997adf109634c13e Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_content.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7408c4570c858869dccacb13e21bb0153cc7f522 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/httpx/__pycache__/_decoders.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e2599d80346ba425f2290830222e1150594052e Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_exceptions.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ba3d2537b3fe1729affcfa41c4d8687d4655c8f Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_main.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..28b26250c2cb0fb993139230db4f5052ce800007 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_models.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..905834014fb65d75c5acf460c462852dda17651e Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3005e78e0ed3434bdc1443da7032c41ae384e0d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_status_codes.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..da041c5a494ef00436b73fa5aee7d1a515e17721 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e6e6a058f51a4f64957b52c0483fa1e59443705f Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urlparse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc44c4f55447e7fb48166c803a9ff2d2b4fc0296 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/__pycache__/_urls.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c023407ce545b8a83ff09f6cc01bdb79e6338bb4 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/httpx/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py b/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7a321053b29bcd48698cf2bd74a1d19c8556aefb --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/__init__.py @@ -0,0 +1,15 @@ +from .asgi import * +from .base import * +from .default import * +from .mock import * +from .wsgi import * + +__all__ = [ + "ASGITransport", + "AsyncBaseTransport", + "BaseTransport", + "AsyncHTTPTransport", + "HTTPTransport", + "MockTransport", + "WSGITransport", +] diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7bc9acca4875024e85a074d86bc914a00af31507 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6fa3248490d7242b3a137c4566a09b004f710446 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/asgi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0280f25947373a31a6e38af3c96f9414b215d56c Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/base.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6aa9b51d74e51515058b61dca719bb623c932a7d Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/default.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e8b1cd705d3ea25d3a8f53413df51e1473089a8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/mock.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..89b12734ce02dd1694daccd5d3c08ceb9ba12577 Binary files /dev/null and b/venv/lib/python3.12/site-packages/httpx/_transports/__pycache__/wsgi.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py b/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py new file mode 100644 index 0000000000000000000000000000000000000000..2bc4efae0e1b14620f75f712eb15ecf500d14eef --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/asgi.py @@ 
-0,0 +1,187 @@
+from __future__ import annotations
+
+import typing
+
+from .._models import Request, Response
+from .._types import AsyncByteStream
+from .base import AsyncBaseTransport
+
+if typing.TYPE_CHECKING:  # pragma: no cover
+    import asyncio
+
+    import trio
+
+    Event = typing.Union[asyncio.Event, trio.Event]
+
+
+_Message = typing.MutableMapping[str, typing.Any]
+_Receive = typing.Callable[[], typing.Awaitable[_Message]]
+_Send = typing.Callable[
+    [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None]
+]
+_ASGIApp = typing.Callable[
+    [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None]
+]
+
+__all__ = ["ASGITransport"]
+
+
+def is_running_trio() -> bool:
+    try:
+        # sniffio is a dependency of trio.
+
+        # See https://github.com/python-trio/trio/issues/2802
+        import sniffio
+
+        if sniffio.current_async_library() == "trio":
+            return True
+    except ImportError:  # pragma: nocover
+        pass
+
+    return False
+
+
+def create_event() -> Event:
+    if is_running_trio():
+        import trio
+
+        return trio.Event()
+
+    import asyncio
+
+    return asyncio.Event()
+
+
+class ASGIResponseStream(AsyncByteStream):
+    def __init__(self, body: list[bytes]) -> None:
+        self._body = body
+
+    async def __aiter__(self) -> typing.AsyncIterator[bytes]:
+        yield b"".join(self._body)
+
+
+class ASGITransport(AsyncBaseTransport):
+    """
+    A custom AsyncTransport that handles sending requests directly to an ASGI app.
+
+    ```python
+    transport = httpx.ASGITransport(
+        app=app,
+        root_path="/submount",
+        client=("1.2.3.4", 123)
+    )
+    client = httpx.AsyncClient(transport=transport)
+    ```
+
+    Arguments:
+
+    * `app` - The ASGI application.
+    * `raise_app_exceptions` - Boolean indicating if exceptions in the application
+    should be raised. Defaults to `True`. Can be set to `False` for use cases
+    such as testing the content of a client 500 response.
+    * `root_path` - The root path on which the ASGI application should be mounted.
+    * `client` - A two-tuple indicating the client IP and port of incoming requests.
+    """
+
+    def __init__(
+        self,
+        app: _ASGIApp,
+        raise_app_exceptions: bool = True,
+        root_path: str = "",
+        client: tuple[str, int] = ("127.0.0.1", 123),
+    ) -> None:
+        self.app = app
+        self.raise_app_exceptions = raise_app_exceptions
+        self.root_path = root_path
+        self.client = client
+
+    async def handle_async_request(
+        self,
+        request: Request,
+    ) -> Response:
+        assert isinstance(request.stream, AsyncByteStream)
+
+        # ASGI scope.
+        scope = {
+            "type": "http",
+            "asgi": {"version": "3.0"},
+            "http_version": "1.1",
+            "method": request.method,
+            "headers": [(k.lower(), v) for (k, v) in request.headers.raw],
+            "scheme": request.url.scheme,
+            "path": request.url.path,
+            "raw_path": request.url.raw_path.split(b"?")[0],
+            "query_string": request.url.query,
+            "server": (request.url.host, request.url.port),
+            "client": self.client,
+            "root_path": self.root_path,
+        }
+
+        # Request.
+        request_body_chunks = request.stream.__aiter__()
+        request_complete = False
+
+        # Response.
+        status_code = None
+        response_headers = None
+        body_parts = []
+        response_started = False
+        response_complete = create_event()
+
+        # ASGI callables.
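+        #
+        # `receive` feeds the request body into the app one chunk at a time,
+        # then reports `http.disconnect` once the response has completed.
+        # `send` accumulates the app's `http.response.start` and
+        # `http.response.body` messages into the state above.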
+
+        async def receive() -> dict[str, typing.Any]:
+            nonlocal request_complete
+
+            if request_complete:
+                await response_complete.wait()
+                return {"type": "http.disconnect"}
+
+            try:
+                body = await request_body_chunks.__anext__()
+            except StopAsyncIteration:
+                request_complete = True
+                return {"type": "http.request", "body": b"", "more_body": False}
+            return {"type": "http.request", "body": body, "more_body": True}
+
+        async def send(message: typing.MutableMapping[str, typing.Any]) -> None:
+            nonlocal status_code, response_headers, response_started
+
+            if message["type"] == "http.response.start":
+                assert not response_started
+
+                status_code = message["status"]
+                response_headers = message.get("headers", [])
+                response_started = True
+
+            elif message["type"] == "http.response.body":
+                assert not response_complete.is_set()
+                body = message.get("body", b"")
+                more_body = message.get("more_body", False)
+
+                if body and request.method != "HEAD":
+                    body_parts.append(body)
+
+                if not more_body:
+                    response_complete.set()
+
+        try:
+            await self.app(scope, receive, send)
+        except Exception:  # noqa: PIE-786
+            if self.raise_app_exceptions:
+                raise
+
+            response_complete.set()
+            if status_code is None:
+                status_code = 500
+            if response_headers is None:
+                response_headers = {}
+
+        assert response_complete.is_set()
+        assert status_code is not None
+        assert response_headers is not None
+
+        stream = ASGIResponseStream(body_parts)
+
+        return Response(status_code, headers=response_headers, stream=stream)
diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/base.py b/venv/lib/python3.12/site-packages/httpx/_transports/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..66fd99d702480b555c06694fe14715ea6df3dfc3
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/httpx/_transports/base.py
@@ -0,0 +1,86 @@
+from __future__ import annotations
+
+import typing
+from types import TracebackType
+
+from .._models import Request, Response
+
+T = typing.TypeVar("T", bound="BaseTransport")
+A = typing.TypeVar("A", bound="AsyncBaseTransport")
+
+__all__ = ["AsyncBaseTransport", "BaseTransport"]
+
+
+class BaseTransport:
+    def __enter__(self: T) -> T:
+        return self
+
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None = None,
+        exc_value: BaseException | None = None,
+        traceback: TracebackType | None = None,
+    ) -> None:
+        self.close()
+
+    def handle_request(self, request: Request) -> Response:
+        """
+        Send a single HTTP request and return a response.
+
+        Developers shouldn't typically ever need to call into this API directly,
+        since the Client class provides all the higher level user-facing API
+        niceties.
+
+        In order to properly release any network resources, the response
+        stream should *either* be consumed immediately, with a call to
+        `response.stream.read()`, or else the `handle_request` call should
+        be followed with a try/finally block to ensure the stream is
+        always closed.
+
+        Example usage:
+
+            with httpx.HTTPTransport() as transport:
+                req = httpx.Request(
+                    method=b"GET",
+                    url=(b"https", b"www.example.com", 443, b"/"),
+                    headers=[(b"Host", b"www.example.com")],
+                )
+                resp = transport.handle_request(req)
+                body = resp.stream.read()
+                print(resp.status_code, resp.headers, body)
+
+
+        Takes a `Request` instance as the only argument.
+
+        Returns a `Response` instance.
+        """
+        raise NotImplementedError(
+            "The 'handle_request' method must be implemented."
+ ) # pragma: no cover + + def close(self) -> None: + pass + + +class AsyncBaseTransport: + async def __aenter__(self: A) -> A: + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + await self.aclose() + + async def handle_async_request( + self, + request: Request, + ) -> Response: + raise NotImplementedError( + "The 'handle_async_request' method must be implemented." + ) # pragma: no cover + + async def aclose(self) -> None: + pass diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/default.py b/venv/lib/python3.12/site-packages/httpx/_transports/default.py new file mode 100644 index 0000000000000000000000000000000000000000..d5aa05ff234fd3fbf4fee88c4a7d3e3c151a538f --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/default.py @@ -0,0 +1,406 @@ +""" +Custom transports, with nicely configured defaults. + +The following additional keyword arguments are currently supported by httpcore... + +* uds: str +* local_address: str +* retries: int + +Example usages... + +# Disable HTTP/2 on a single specific domain. +mounts = { + "all://": httpx.HTTPTransport(http2=True), + "all://*example.org": httpx.HTTPTransport() +} + +# Using advanced httpcore configuration, with connection retries. +transport = httpx.HTTPTransport(retries=1) +client = httpx.Client(transport=transport) + +# Using advanced httpcore configuration, with unix domain sockets. +transport = httpx.HTTPTransport(uds="socket.uds") +client = httpx.Client(transport=transport) +""" + +from __future__ import annotations + +import contextlib +import typing +from types import TracebackType + +if typing.TYPE_CHECKING: + import ssl # pragma: no cover + + import httpx # pragma: no cover + +from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context +from .._exceptions import ( + ConnectError, + ConnectTimeout, + LocalProtocolError, + NetworkError, + PoolTimeout, + ProtocolError, + ProxyError, + ReadError, + ReadTimeout, + RemoteProtocolError, + TimeoutException, + UnsupportedProtocol, + WriteError, + WriteTimeout, +) +from .._models import Request, Response +from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream +from .._urls import URL +from .base import AsyncBaseTransport, BaseTransport + +T = typing.TypeVar("T", bound="HTTPTransport") +A = typing.TypeVar("A", bound="AsyncHTTPTransport") + +SOCKET_OPTION = typing.Union[ + typing.Tuple[int, int, int], + typing.Tuple[int, int, typing.Union[bytes, bytearray]], + typing.Tuple[int, int, None, int], +] + +__all__ = ["AsyncHTTPTransport", "HTTPTransport"] + +HTTPCORE_EXC_MAP: dict[type[Exception], type[httpx.HTTPError]] = {} + + +def _load_httpcore_exceptions() -> dict[type[Exception], type[httpx.HTTPError]]: + import httpcore + + return { + httpcore.TimeoutException: TimeoutException, + httpcore.ConnectTimeout: ConnectTimeout, + httpcore.ReadTimeout: ReadTimeout, + httpcore.WriteTimeout: WriteTimeout, + httpcore.PoolTimeout: PoolTimeout, + httpcore.NetworkError: NetworkError, + httpcore.ConnectError: ConnectError, + httpcore.ReadError: ReadError, + httpcore.WriteError: WriteError, + httpcore.ProxyError: ProxyError, + httpcore.UnsupportedProtocol: UnsupportedProtocol, + httpcore.ProtocolError: ProtocolError, + httpcore.LocalProtocolError: LocalProtocolError, + httpcore.RemoteProtocolError: RemoteProtocolError, + } + + +@contextlib.contextmanager +def map_httpcore_exceptions() -> typing.Iterator[None]: 
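+ # Build the exception map lazily on first use, then re-raise any httpcore + # exception raised inside the `yield` as the most specific matching httpx + # exception, chaining the original with `raise ... from exc`.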
+ global HTTPCORE_EXC_MAP + if len(HTTPCORE_EXC_MAP) == 0: + HTTPCORE_EXC_MAP = _load_httpcore_exceptions() + try: + yield + except Exception as exc: + mapped_exc = None + + for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): + if not isinstance(exc, from_exc): + continue + # We want to map to the most specific exception we can find. + # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to + # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. + if mapped_exc is None or issubclass(to_exc, mapped_exc): + mapped_exc = to_exc + + if mapped_exc is None: # pragma: no cover + raise + + message = str(exc) + raise mapped_exc(message) from exc + + +class ResponseStream(SyncByteStream): + def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + def __iter__(self) -> typing.Iterator[bytes]: + with map_httpcore_exceptions(): + for part in self._httpcore_stream: + yield part + + def close(self) -> None: + if hasattr(self._httpcore_stream, "close"): + self._httpcore_stream.close() + + +class HTTPTransport(BaseTransport): + def __init__( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + import httpcore + + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.ConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.HTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + ssl_context=ssl_context, + proxy_ssl_context=proxy.ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("socks5", "socks5h"): + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." + ) from None + + self._pool = httpcore.SOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," + f" but got {proxy.url.scheme!r}." + ) + + def __enter__(self: T) -> T: # Use generics for subclass support. 
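+ # Context-manager support delegates to the underlying httpcore pool; note + # that `__exit__` additionally maps httpcore errors onto httpx exceptions.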
+ self._pool.__enter__() + return self + + def __exit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + self._pool.__exit__(exc_type, exc_value, traceback) + + def handle_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, SyncByteStream) + import httpcore + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = self._pool.handle_request(req) + + assert isinstance(resp.stream, typing.Iterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=ResponseStream(resp.stream), + extensions=resp.extensions, + ) + + def close(self) -> None: + self._pool.close() + + +class AsyncResponseStream(AsyncByteStream): + def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: + self._httpcore_stream = httpcore_stream + + async def __aiter__(self) -> typing.AsyncIterator[bytes]: + with map_httpcore_exceptions(): + async for part in self._httpcore_stream: + yield part + + async def aclose(self) -> None: + if hasattr(self._httpcore_stream, "aclose"): + await self._httpcore_stream.aclose() + + +class AsyncHTTPTransport(AsyncBaseTransport): + def __init__( + self, + verify: ssl.SSLContext | str | bool = True, + cert: CertTypes | None = None, + trust_env: bool = True, + http1: bool = True, + http2: bool = False, + limits: Limits = DEFAULT_LIMITS, + proxy: ProxyTypes | None = None, + uds: str | None = None, + local_address: str | None = None, + retries: int = 0, + socket_options: typing.Iterable[SOCKET_OPTION] | None = None, + ) -> None: + import httpcore + + proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy + ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) + + if proxy is None: + self._pool = httpcore.AsyncConnectionPool( + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + uds=uds, + local_address=local_address, + retries=retries, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("http", "https"): + self._pool = httpcore.AsyncHTTPProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + proxy_headers=proxy.headers.raw, + proxy_ssl_context=proxy.ssl_context, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + socket_options=socket_options, + ) + elif proxy.url.scheme in ("socks5", "socks5h"): + try: + import socksio # noqa + except ImportError: # pragma: no cover + raise ImportError( + "Using SOCKS proxy, but the 'socksio' package is not installed. " + "Make sure to install httpx using `pip install httpx[socks]`." 
+ ) from None + + self._pool = httpcore.AsyncSOCKSProxy( + proxy_url=httpcore.URL( + scheme=proxy.url.raw_scheme, + host=proxy.url.raw_host, + port=proxy.url.port, + target=proxy.url.raw_path, + ), + proxy_auth=proxy.raw_auth, + ssl_context=ssl_context, + max_connections=limits.max_connections, + max_keepalive_connections=limits.max_keepalive_connections, + keepalive_expiry=limits.keepalive_expiry, + http1=http1, + http2=http2, + ) + else: # pragma: no cover + raise ValueError( + "Proxy protocol must be either 'http', 'https', 'socks5', or 'socks5h'," + f" but got {proxy.url.scheme!r}." + ) + + async def __aenter__(self: A) -> A: # Use generics for subclass support. + await self._pool.__aenter__() + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + traceback: TracebackType | None = None, + ) -> None: + with map_httpcore_exceptions(): + await self._pool.__aexit__(exc_type, exc_value, traceback) + + async def handle_async_request( + self, + request: Request, + ) -> Response: + assert isinstance(request.stream, AsyncByteStream) + import httpcore + + req = httpcore.Request( + method=request.method, + url=httpcore.URL( + scheme=request.url.raw_scheme, + host=request.url.raw_host, + port=request.url.port, + target=request.url.raw_path, + ), + headers=request.headers.raw, + content=request.stream, + extensions=request.extensions, + ) + with map_httpcore_exceptions(): + resp = await self._pool.handle_async_request(req) + + assert isinstance(resp.stream, typing.AsyncIterable) + + return Response( + status_code=resp.status, + headers=resp.headers, + stream=AsyncResponseStream(resp.stream), + extensions=resp.extensions, + ) + + async def aclose(self) -> None: + await self._pool.aclose() diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/mock.py b/venv/lib/python3.12/site-packages/httpx/_transports/mock.py new file mode 100644 index 0000000000000000000000000000000000000000..8c418f59e06cae43abdbb626ec21cafc7e8c6277 --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/mock.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +import typing + +from .._models import Request, Response +from .base import AsyncBaseTransport, BaseTransport + +SyncHandler = typing.Callable[[Request], Response] +AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] + + +__all__ = ["MockTransport"] + + +class MockTransport(AsyncBaseTransport, BaseTransport): + def __init__(self, handler: SyncHandler | AsyncHandler) -> None: + self.handler = handler + + def handle_request( + self, + request: Request, + ) -> Response: + request.read() + response = self.handler(request) + if not isinstance(response, Response): # pragma: no cover + raise TypeError("Cannot use an async handler in a sync Client") + return response + + async def handle_async_request( + self, + request: Request, + ) -> Response: + await request.aread() + response = self.handler(request) + + # Allow handler to *optionally* be an `async` function. + # If it is, then the `response` variable needs to be awaited to actually + # return the result.
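+ # Checking the returned value (rather than inspecting the handler itself) + # means any callable works here, including e.g. `functools.partial` wrappers + # around async functions.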
+ + if not isinstance(response, Response): + response = await response + + return response diff --git a/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py b/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py new file mode 100644 index 0000000000000000000000000000000000000000..8592ffe017a87367cc7578184540096a9682908d --- /dev/null +++ b/venv/lib/python3.12/site-packages/httpx/_transports/wsgi.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +import io +import itertools +import sys +import typing + +from .._models import Request, Response +from .._types import SyncByteStream +from .base import BaseTransport + +if typing.TYPE_CHECKING: + from _typeshed import OptExcInfo # pragma: no cover + from _typeshed.wsgi import WSGIApplication # pragma: no cover + +_T = typing.TypeVar("_T") + + +__all__ = ["WSGITransport"] + + +def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: + body = iter(body) + for chunk in body: + if chunk: + return itertools.chain([chunk], body) + return [] + + +class WSGIByteStream(SyncByteStream): + def __init__(self, result: typing.Iterable[bytes]) -> None: + self._close = getattr(result, "close", None) + self._result = _skip_leading_empty_chunks(result) + + def __iter__(self) -> typing.Iterator[bytes]: + for part in self._result: + yield part + + def close(self) -> None: + if self._close is not None: + self._close() + + +class WSGITransport(BaseTransport): + """ + A custom transport that handles sending requests directly to a WSGI app. + The simplest way to use this functionality is to use the `app` argument. + + ``` + client = httpx.Client(app=app) + ``` + + Alternatively, you can set up the transport instance explicitly. + This allows you to include any additional configuration arguments specific + to the WSGITransport class: + + ``` + transport = httpx.WSGITransport( + app=app, + script_name="/submount", + remote_addr="1.2.3.4" + ) + client = httpx.Client(transport=transport) + ``` + + Arguments: + + * `app` - The WSGI application. + * `raise_app_exceptions` - Boolean indicating if exceptions in the application + should be raised. Defaults to `True`. Can be set to `False` for use cases + such as testing the content of a client 500 response. + * `script_name` - The root path on which the WSGI application should be mounted. + * `remote_addr` - A string indicating the client IP of incoming requests.
+ """ + + def __init__( + self, + app: WSGIApplication, + raise_app_exceptions: bool = True, + script_name: str = "", + remote_addr: str = "127.0.0.1", + wsgi_errors: typing.TextIO | None = None, + ) -> None: + self.app = app + self.raise_app_exceptions = raise_app_exceptions + self.script_name = script_name + self.remote_addr = remote_addr + self.wsgi_errors = wsgi_errors + + def handle_request(self, request: Request) -> Response: + request.read() + wsgi_input = io.BytesIO(request.content) + + port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] + environ = { + "wsgi.version": (1, 0), + "wsgi.url_scheme": request.url.scheme, + "wsgi.input": wsgi_input, + "wsgi.errors": self.wsgi_errors or sys.stderr, + "wsgi.multithread": True, + "wsgi.multiprocess": False, + "wsgi.run_once": False, + "REQUEST_METHOD": request.method, + "SCRIPT_NAME": self.script_name, + "PATH_INFO": request.url.path, + "QUERY_STRING": request.url.query.decode("ascii"), + "SERVER_NAME": request.url.host, + "SERVER_PORT": str(port), + "SERVER_PROTOCOL": "HTTP/1.1", + "REMOTE_ADDR": self.remote_addr, + } + for header_key, header_value in request.headers.raw: + key = header_key.decode("ascii").upper().replace("-", "_") + if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): + key = "HTTP_" + key + environ[key] = header_value.decode("ascii") + + seen_status = None + seen_response_headers = None + seen_exc_info = None + + def start_response( + status: str, + response_headers: list[tuple[str, str]], + exc_info: OptExcInfo | None = None, + ) -> typing.Callable[[bytes], typing.Any]: + nonlocal seen_status, seen_response_headers, seen_exc_info + seen_status = status + seen_response_headers = response_headers + seen_exc_info = exc_info + return lambda _: None + + result = self.app(environ, start_response) + + stream = WSGIByteStream(result) + + assert seen_status is not None + assert seen_response_headers is not None + if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: + raise seen_exc_info[1] + + status_code = int(seen_status.split()[0]) + headers = [ + (key.encode("ascii"), value.encode("ascii")) + for key, value in seen_response_headers + ] + + return Response(status_code, headers=headers, stream=stream) diff --git a/venv/lib/python3.12/site-packages/idna-3.13.dist-info/licenses/LICENSE.md b/venv/lib/python3.12/site-packages/idna-3.13.dist-info/licenses/LICENSE.md new file mode 100644 index 0000000000000000000000000000000000000000..f706835ab3a5dd709a6e1f57aee0a94ae1415df6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/idna-3.13.dist-info/licenses/LICENSE.md @@ -0,0 +1,31 @@ +BSD 3-Clause License + +Copyright (c) 2013-2026, Kim Davies and contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission.
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED +TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dae717a56ec3b2745d189ac07bc9e988b95b56f6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dbac536f2c214daa9aed98741374d9d85f5c79b4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/codec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af3bfa7d85a7ab7b38843ed9898bbcc2b6aaa473 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8c452070a0d675c5a78536b8e4a9ef47625e99c4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/core.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..461e3e9746d60aa9032a70a16ae7e4315f79463e Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/intranges.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d27723ad74dd5b66e96f5997e22f5d1db5321578 Binary files /dev/null and b/venv/lib/python3.12/site-packages/idna/__pycache__/package_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..488c6260c10f2e88fa1fae58a63fccec8d600cd1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic-2.13.4.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2017 to present Pydantic 
Services Inc. and individual contributors. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..97ebbb420f35024baea8e4c8487f711f976ec74e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/_migration.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/_migration.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d06a23f78d91c2fdba7d7169f86e9d9ad3c5535d Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/_migration.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/alias_generators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/alias_generators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc6338be320a86a43a16e702acac907ca2f986aa Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/alias_generators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/aliases.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/aliases.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e9f31c7eb618e21bdc6d89b97d1e3356d23b0bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/aliases.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/annotated_handlers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/annotated_handlers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a9cbe9d73a7058a9f4c902705cc62e1cc83db16 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/annotated_handlers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/class_validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/class_validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b4d9d382a51bf9e320a87fd3efed934078f2e80 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pydantic/__pycache__/class_validators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/color.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/color.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e8627dd1e0571b9416f40983e869b41bee71f5a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/color.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..830e3dfa0f7fad2c3a53f34735ca8fb336fba1a5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/dataclasses.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/dataclasses.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..de411b8be6b832d2811232902de0521a4ac0576e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/dataclasses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/datetime_parse.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/datetime_parse.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..58cad96fa6bc1c5a21495c3fcc1b7d1e02fd9794 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/datetime_parse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/decorator.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/decorator.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0fbf4c25d1499781feb071ab61df71b54fc17542 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/decorator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/env_settings.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/env_settings.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6ec71f0ef1e9647de05f486e65428e4f2b5fe8dd Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/env_settings.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/error_wrappers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/error_wrappers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d328a74a31ec9104f23401fe2737591e22cc0f8a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/error_wrappers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f10bb98c605d7e47bb385578901321b8b2118f6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/errors.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/fields.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/fields.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..397556320d5d8de83614167c5d577d787127645a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/fields.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_serializers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_serializers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fba9f98cb460f0911dca84f1b18922be261e22cd Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_serializers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..41c30293e671902a93d7173a1c41af4fbe9996f8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/functional_validators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/generics.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/generics.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2dbbefb5dec6eef7d1353bb3ee40b634aac5478b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/generics.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/json.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/json.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed37d54bba4bfd98ebb5c9cb4e6fdf4e6ececd28 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/json.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/main.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38437ea435c4d8dcfe71d4bf3bbd614996de6a8e Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/main.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/mypy.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/mypy.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb75a61870a0944f08d1755109ce183f54c45106 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/mypy.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/networks.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/networks.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8498daed38933aa8eaca7544c53ea4c42292fe5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/networks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/parse.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/parse.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ec7916c655382c6d887c9f769852a05f2599deb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/parse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/root_model.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/pydantic/__pycache__/root_model.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44582a3fe0e7b14268df77b0adb8d0a7754f73d2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/root_model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/schema.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/schema.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ff4e4a1c425409551c54b8e90c0eeee762f967b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/schema.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/tools.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/tools.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..386b691cdb9490db78ecf67415916cb88c833e4f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/tools.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/type_adapter.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/type_adapter.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be3de8a6a5bd3294848c90c16d05320bd3504b2f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/type_adapter.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4222900ace65143a97a109b0ab656debf224d5e5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/typing.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/typing.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2273e2af6d78ad4a63a97afbbe991b132c3da69f Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/typing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6bec0e7379b417c184a4d6d2ccf6cf65fd4a0eff Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/validate_call_decorator.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/validate_call_decorator.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa7c002f5c64911a69d5cfc7b12220d3df9481f9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/validate_call_decorator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..14cbbb7ef9d01da33bd9b1b87993d1c4ee5e22aa Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/validators.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pydantic/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..207e7fffb686055119b960589879d5b8aef9e6eb Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/__pycache__/warnings.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/__pycache__/warnings.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7c51aeafa1f8f1a0fbff05c72e11aff2b3b8f981 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/__pycache__/warnings.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__init__.py b/venv/lib/python3.12/site-packages/pydantic/_internal/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8fdd5f0dc98e3fdaef96d3b0a2afc47426c3f5c9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_config.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_config.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a32fe5452f317eaa767cb44d05e3ca9a054a554 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..211222836944e3a8b6ca4d87fbf7e5a96f627eed Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_core_metadata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9dcdd54aa5e9114e1191cbc9fec4595996c3fa0b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_core_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_dataclasses.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_dataclasses.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..35a7050b32b79bf3be8771248a5cbe659cee6af5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_dataclasses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aa446207600cf6d073cfde6b6a773751013a1d5d Binary 
files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_decorators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_decorators_v1.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_decorators_v1.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e832c02a8fd5a9f1f20cfbc98ba32b83349d52f0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_decorators_v1.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fa1ac89939605bd9536793a90bfc7d00485ff017 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_discriminated_union.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9dae22aaa5f989d13f21f00a0385e7aff95d3579 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_docs_extraction.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_fields.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_fields.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ff27f49ede7532a14a87ce228ab07fab7ba2221 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_fields.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a709963be351b6314f53036f827830b3ec63fef7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_forward_ref.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4b8466d5767959a42562b31174bb5d6594b6b919 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generate_schema.cpython-312.pyc @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e24914605f4fb2ce2a5ce98c904f8f8b0dff15ff269871578a9da3b2c2803646 +size 132755 diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generics.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generics.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..430ac1b3ce506cba78b091d074a78e2a77578cee Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_generics.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_git.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_git.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be7a588bf5c7608b4e56e663756b0dab8f99e2f5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_git.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_import_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_import_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..aaada2c32f1d429bb193e78be17b12c5df426998 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_import_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..abc5764b09f6399a339d5e46b7b37aa5bbe86809 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_internal_dataclass.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17a3c85f20757f5eec912cae4c931333dc6c1c50 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_known_annotated_metadata.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee20dde7b4f1695fbd6d4b879b78ad3cef78daa7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_mock_val_ser.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..912e4816671e38f2a8e9d71edbf5bc5608f9a4a3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_model_construction.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_namespace_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_namespace_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e0eca83f3506c1dc38364f19255f89f90979d2d2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_namespace_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_repr.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_repr.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5aad50fc940ad30d5892d9f199ff1ce973d547b7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_repr.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_schema_gather.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_schema_gather.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5f9e5b60136e278bc3674ced93cec0c57234c563 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_schema_gather.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eb146d81d735e08d782a87c2dc14d0a9e8a8d701 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_schema_generation_shared.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_serializers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_serializers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..72f422af97956a9926bf922ca43090f80451c528 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_serializers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_signature.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_signature.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..157687348a682d9c3027da57c82999112c6987c5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_signature.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..728d1c434408b379be9964de1893b1ced9c9884c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_typing_extra.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e99e7281bf578ed8bc0151a36f1a001a0c5a2822 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..988a95ca14e4578aa6a0babb5a2673f15e205768 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_validate_call.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e179663b7ec843420989f6d93de49bb2d8a1bc92 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/_internal/__pycache__/_validators.cpython-312.pyc 
differ diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_config.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..73b5870891629a6f1351ef825cc1e2a3d1728268 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_config.py @@ -0,0 +1,386 @@ +from __future__ import annotations as _annotations + +import warnings +from contextlib import contextmanager +from re import Pattern +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Literal, + cast, +) + +from pydantic_core import core_schema +from typing_extensions import Self + +from ..aliases import AliasGenerator +from ..config import ConfigDict, ExtraValues, JsonDict, JsonEncoder, JsonSchemaExtraCallable +from ..errors import PydanticUserError +from ..warnings import PydanticDeprecatedSince20, PydanticDeprecatedSince210 + +if TYPE_CHECKING: + from .._internal._schema_generation_shared import GenerateSchema + from ..fields import ComputedFieldInfo, FieldInfo + +DEPRECATION_MESSAGE = 'Support for class-based `config` is deprecated, use ConfigDict instead.' + + +class ConfigWrapper: + """Internal wrapper for Config which exposes ConfigDict items as attributes.""" + + __slots__ = ('config_dict',) + + config_dict: ConfigDict + + # all annotations are copied directly from ConfigDict, and should be kept up to date, a test will fail if they + # stop matching + title: str | None + str_to_lower: bool + str_to_upper: bool + str_strip_whitespace: bool + str_min_length: int + str_max_length: int | None + extra: ExtraValues | None + frozen: bool + populate_by_name: bool + use_enum_values: bool + validate_assignment: bool + arbitrary_types_allowed: bool + from_attributes: bool + # whether to use the actual key provided in the data (e.g. alias or first alias for "field required" errors) instead of field_names + # to construct error `loc`s, default `True` + loc_by_alias: bool + alias_generator: Callable[[str], str] | AliasGenerator | None + model_title_generator: Callable[[type], str] | None + field_title_generator: Callable[[str, FieldInfo | ComputedFieldInfo], str] | None + ignored_types: tuple[type, ...] + allow_inf_nan: bool + json_schema_extra: JsonDict | JsonSchemaExtraCallable | None + json_encoders: dict[type[object], JsonEncoder] | None + + # new in V2 + strict: bool + # whether instances of models and dataclasses (including subclass instances) should re-validate, default 'never' + revalidate_instances: Literal['always', 'never', 'subclass-instances'] + ser_json_timedelta: Literal['iso8601', 'float'] + ser_json_temporal: Literal['iso8601', 'seconds', 'milliseconds'] + val_temporal_unit: Literal['seconds', 'milliseconds', 'infer'] + ser_json_bytes: Literal['utf8', 'base64', 'hex'] + val_json_bytes: Literal['utf8', 'base64', 'hex'] + ser_json_inf_nan: Literal['null', 'constants', 'strings'] + # whether to validate default values during validation, default False + validate_default: bool + validate_return: bool + protected_namespaces: tuple[str | Pattern[str], ...] 
+ hide_input_in_errors: bool + defer_build: bool + plugin_settings: dict[str, object] | None + schema_generator: type[GenerateSchema] | None + json_schema_serialization_defaults_required: bool + json_schema_mode_override: Literal['validation', 'serialization', None] + coerce_numbers_to_str: bool + regex_engine: Literal['rust-regex', 'python-re'] + validation_error_cause: bool + use_attribute_docstrings: bool + cache_strings: bool | Literal['all', 'keys', 'none'] + validate_by_alias: bool + validate_by_name: bool + serialize_by_alias: bool + url_preserve_empty_path: bool + polymorphic_serialization: bool + + def __init__(self, config: ConfigDict | dict[str, Any] | type[Any] | None, *, check: bool = True): + if check: + self.config_dict = prepare_config(config) + else: + self.config_dict = cast(ConfigDict, config) + + @classmethod + def for_model( + cls, + bases: tuple[type[Any], ...], + namespace: dict[str, Any], + raw_annotations: dict[str, Any], + kwargs: dict[str, Any], + ) -> Self: + """Build a new `ConfigWrapper` instance for a `BaseModel`. + + The config wrapper built based on (in descending order of priority): + - options from `kwargs` + - options from the `namespace` + - options from the base classes (`bases`) + + Args: + bases: A tuple of base classes. + namespace: The namespace of the class being created. + raw_annotations: The (non-evaluated) annotations of the model. + kwargs: The kwargs passed to the class being created. + + Returns: + A `ConfigWrapper` instance for `BaseModel`. + """ + config_new = ConfigDict() + for base in bases: + config = getattr(base, 'model_config', None) + if config: + config_new.update(config.copy()) + + config_class_from_namespace = namespace.get('Config') + config_dict_from_namespace = namespace.get('model_config') + + if raw_annotations.get('model_config') and config_dict_from_namespace is None: + raise PydanticUserError( + '`model_config` cannot be used as a model field name. Use `model_config` for model configuration.', + code='model-config-invalid-field-name', + ) + + if config_class_from_namespace and config_dict_from_namespace: + raise PydanticUserError('"Config" and "model_config" cannot be used together', code='config-both') + + config_from_namespace = config_dict_from_namespace or prepare_config(config_class_from_namespace) + + config_new.update(config_from_namespace) + + for k in list(kwargs.keys()): + if k in config_keys: + config_new[k] = kwargs.pop(k) + + return cls(config_new) + + # we don't show `__getattr__` to type checkers so missing attributes cause errors + if not TYPE_CHECKING: # pragma: no branch + + def __getattr__(self, name: str) -> Any: + try: + return self.config_dict[name] + except KeyError: + try: + return config_defaults[name] + except KeyError: + raise AttributeError(f'Config has no attribute {name!r}') from None + + def core_config(self, title: str | None) -> core_schema.CoreConfig: + """Create a pydantic-core config. + + We don't use getattr here since we don't want to populate with defaults. + + Args: + title: The title to use if not set in config. + + Returns: + A `CoreConfig` object created from config. + """ + config = self.config_dict + + if config.get('schema_generator') is not None: + warnings.warn( + 'The `schema_generator` setting has been deprecated since v2.10. 
This setting no longer has any effect.', + PydanticDeprecatedSince210, + stacklevel=2, + ) + + if (populate_by_name := config.get('populate_by_name')) is not None: + # We include this patch for backwards compatibility purposes, but this config setting will be deprecated in v3.0, and likely removed in v4.0. + # Thus, the above warning and this patch can be removed then as well. + if config.get('validate_by_name') is None: + config['validate_by_alias'] = True + config['validate_by_name'] = populate_by_name + + # We dynamically patch validate_by_name to be True if validate_by_alias is set to False + # and validate_by_name is not explicitly set. + if config.get('validate_by_alias') is False and config.get('validate_by_name') is None: + config['validate_by_name'] = True + + if (not config.get('validate_by_alias', True)) and (not config.get('validate_by_name', False)): + raise PydanticUserError( + 'At least one of `validate_by_alias` or `validate_by_name` must be set to True.', + code='validate-by-alias-and-name-false', + ) + + return core_schema.CoreConfig( + **{ # pyright: ignore[reportArgumentType] + k: v + for k, v in ( + ('title', config.get('title') or title or None), + ('extra_fields_behavior', config.get('extra')), + ('allow_inf_nan', config.get('allow_inf_nan')), + ('str_strip_whitespace', config.get('str_strip_whitespace')), + ('str_to_lower', config.get('str_to_lower')), + ('str_to_upper', config.get('str_to_upper')), + ('strict', config.get('strict')), + ('ser_json_timedelta', config.get('ser_json_timedelta')), + ('ser_json_temporal', config.get('ser_json_temporal')), + ('val_temporal_unit', config.get('val_temporal_unit')), + ('ser_json_bytes', config.get('ser_json_bytes')), + ('val_json_bytes', config.get('val_json_bytes')), + ('ser_json_inf_nan', config.get('ser_json_inf_nan')), + ('from_attributes', config.get('from_attributes')), + ('loc_by_alias', config.get('loc_by_alias')), + ('revalidate_instances', config.get('revalidate_instances')), + ('validate_default', config.get('validate_default')), + ('str_max_length', config.get('str_max_length')), + ('str_min_length', config.get('str_min_length')), + ('hide_input_in_errors', config.get('hide_input_in_errors')), + ('coerce_numbers_to_str', config.get('coerce_numbers_to_str')), + ('regex_engine', config.get('regex_engine')), + ('validation_error_cause', config.get('validation_error_cause')), + ('cache_strings', config.get('cache_strings')), + ('validate_by_alias', config.get('validate_by_alias')), + ('validate_by_name', config.get('validate_by_name')), + ('serialize_by_alias', config.get('serialize_by_alias')), + ('url_preserve_empty_path', config.get('url_preserve_empty_path')), + ('polymorphic_serialization', config.get('polymorphic_serialization')), + ) + if v is not None + } + ) + + def __repr__(self): + c = ', '.join(f'{k}={v!r}' for k, v in self.config_dict.items()) + return f'ConfigWrapper({c})' + + +class ConfigWrapperStack: + """A stack of `ConfigWrapper` instances.""" + + def __init__(self, config_wrapper: ConfigWrapper): + self._config_wrapper_stack: list[ConfigWrapper] = [config_wrapper] + + @property + def tail(self) -> ConfigWrapper: + return self._config_wrapper_stack[-1] + + @contextmanager + def push(self, config_wrapper: ConfigWrapper | ConfigDict | None): + if config_wrapper is None: + yield + return + + if not isinstance(config_wrapper, ConfigWrapper): + config_wrapper = ConfigWrapper(config_wrapper, check=False) + + self._config_wrapper_stack.append(config_wrapper) + try: + yield + finally: + 
self._config_wrapper_stack.pop() + + +config_defaults = ConfigDict( + title=None, + str_to_lower=False, + str_to_upper=False, + str_strip_whitespace=False, + str_min_length=0, + str_max_length=None, + # let the model / dataclass decide how to handle it + extra=None, + frozen=False, + populate_by_name=False, + use_enum_values=False, + validate_assignment=False, + arbitrary_types_allowed=False, + from_attributes=False, + loc_by_alias=True, + alias_generator=None, + model_title_generator=None, + field_title_generator=None, + ignored_types=(), + allow_inf_nan=True, + json_schema_extra=None, + strict=False, + revalidate_instances='never', + ser_json_timedelta='iso8601', + ser_json_temporal='iso8601', + val_temporal_unit='infer', + ser_json_bytes='utf8', + val_json_bytes='utf8', + ser_json_inf_nan='null', + validate_default=False, + validate_return=False, + protected_namespaces=('model_validate', 'model_dump'), + hide_input_in_errors=False, + json_encoders=None, + defer_build=False, + schema_generator=None, + plugin_settings=None, + json_schema_serialization_defaults_required=False, + json_schema_mode_override=None, + coerce_numbers_to_str=False, + regex_engine='rust-regex', + validation_error_cause=False, + use_attribute_docstrings=False, + cache_strings=True, + validate_by_alias=True, + validate_by_name=False, + serialize_by_alias=False, + url_preserve_empty_path=False, + polymorphic_serialization=False, +) + + +def prepare_config(config: ConfigDict | dict[str, Any] | type[Any] | None) -> ConfigDict: + """Create a `ConfigDict` instance from an existing dict, a class (e.g. old class-based config) or None. + + Args: + config: The input config. + + Returns: + A ConfigDict object created from config. + """ + if config is None: + return ConfigDict() + + if not isinstance(config, dict): + warnings.warn(DEPRECATION_MESSAGE, PydanticDeprecatedSince20, stacklevel=4) + config = {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} + + config_dict = cast(ConfigDict, config) + check_deprecated(config_dict) + return config_dict + + +config_keys = set(ConfigDict.__annotations__.keys()) + + +V2_REMOVED_KEYS = { + 'allow_mutation', + 'error_msg_templates', + 'fields', + 'getter_dict', + 'smart_union', + 'underscore_attrs_are_private', + 'json_loads', + 'json_dumps', + 'copy_on_model_validation', + 'post_init_call', +} +V2_RENAMED_KEYS = { + 'allow_population_by_field_name': 'validate_by_name', + 'anystr_lower': 'str_to_lower', + 'anystr_strip_whitespace': 'str_strip_whitespace', + 'anystr_upper': 'str_to_upper', + 'keep_untouched': 'ignored_types', + 'max_anystr_length': 'str_max_length', + 'min_anystr_length': 'str_min_length', + 'orm_mode': 'from_attributes', + 'schema_extra': 'json_schema_extra', + 'validate_all': 'validate_default', +} + + +def check_deprecated(config_dict: ConfigDict) -> None: + """Check for deprecated config keys and warn the user. + + Args: + config_dict: The input config. 
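As a rough illustration of the warning assembled below (a sketch only; the exact
bullets come from `V2_RENAMED_KEYS` and `V2_REMOVED_KEYS`):

```python {test="skip" lint="skip"}
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    # 'orm_mode' was renamed in V2, 'smart_union' was removed:
    check_deprecated({'orm_mode': True, 'smart_union': True})

print(str(caught[0].message))
#> Valid config keys have changed in V2:
#> * 'orm_mode' has been renamed to 'from_attributes'
#> * 'smart_union' has been removed
```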
+ """ + deprecated_removed_keys = V2_REMOVED_KEYS & config_dict.keys() + deprecated_renamed_keys = V2_RENAMED_KEYS.keys() & config_dict.keys() + if deprecated_removed_keys or deprecated_renamed_keys: + renamings = {k: V2_RENAMED_KEYS[k] for k in sorted(deprecated_renamed_keys)} + renamed_bullets = [f'* {k!r} has been renamed to {v!r}' for k, v in renamings.items()] + removed_bullets = [f'* {k!r} has been removed' for k in sorted(deprecated_removed_keys)] + message = '\n'.join(['Valid config keys have changed in V2:'] + renamed_bullets + removed_bullets) + warnings.warn(message, UserWarning) diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..9f2510c03429b6de24063bec5937e4334077722d --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_core_metadata.py @@ -0,0 +1,97 @@ +from __future__ import annotations as _annotations + +from typing import TYPE_CHECKING, Any, TypedDict, cast +from warnings import warn + +if TYPE_CHECKING: + from ..config import JsonDict, JsonSchemaExtraCallable + from ._schema_generation_shared import ( + GetJsonSchemaFunction, + ) + + +class CoreMetadata(TypedDict, total=False): + """A `TypedDict` for holding the metadata dict of the schema. + + Attributes: + pydantic_js_functions: List of JSON schema functions that resolve refs during application. + pydantic_js_annotation_functions: List of JSON schema functions that don't resolve refs during application. + pydantic_js_prefer_positional_arguments: Whether JSON schema generator will + prefer positional over keyword arguments for an 'arguments' schema. + custom validation function. Only applies to before, plain, and wrap validators. + pydantic_js_updates: key / value pair updates to apply to the JSON schema for a type. + pydantic_js_extra: WIP, either key/value pair updates to apply to the JSON schema, or a custom callable. + pydantic_internal_union_tag_key: Used internally by the `Tag` metadata to specify the tag used for a discriminated union. + pydantic_internal_union_discriminator: Used internally to specify the discriminator value for a discriminated union + when the discriminator was applied to a `'definition-ref'` schema, and that reference was missing at the time + of the annotation application. + + TODO: Perhaps we should move this structure to pydantic-core. At the moment, though, + it's easier to iterate on if we leave it in pydantic until we feel there is a semi-stable API. + + TODO: It's unfortunate how functionally oriented JSON schema generation is, especially that which occurs during + the core schema generation process. It's inevitable that we need to store some json schema related information + on core schemas, given that we generate JSON schemas directly from core schemas. That being said, debugging related + issues is quite difficult when JSON schema information is disguised via dynamically defined functions. 
+ """ + + pydantic_js_functions: list[GetJsonSchemaFunction] + pydantic_js_annotation_functions: list[GetJsonSchemaFunction] + pydantic_js_prefer_positional_arguments: bool + pydantic_js_updates: JsonDict + pydantic_js_extra: JsonDict | JsonSchemaExtraCallable + pydantic_internal_union_tag_key: str + pydantic_internal_union_discriminator: str + + +def update_core_metadata( + core_metadata: Any, + /, + *, + pydantic_js_functions: list[GetJsonSchemaFunction] | None = None, + pydantic_js_annotation_functions: list[GetJsonSchemaFunction] | None = None, + pydantic_js_updates: JsonDict | None = None, + pydantic_js_extra: JsonDict | JsonSchemaExtraCallable | None = None, +) -> None: + from ..json_schema import PydanticJsonSchemaWarning + + """Update CoreMetadata instance in place. When we make modifications in this function, they + take effect on the `core_metadata` reference passed in as the first (and only) positional argument. + + First, cast to `CoreMetadata`, then finish with a cast to `dict[str, Any]` for core schema compatibility. + We do this here, instead of before / after each call to this function so that this typing hack + can be easily removed if/when we move `CoreMetadata` to `pydantic-core`. + + For parameter descriptions, see `CoreMetadata` above. + """ + core_metadata = cast(CoreMetadata, core_metadata) + + if pydantic_js_functions: + core_metadata.setdefault('pydantic_js_functions', []).extend(pydantic_js_functions) + + if pydantic_js_annotation_functions: + core_metadata.setdefault('pydantic_js_annotation_functions', []).extend(pydantic_js_annotation_functions) + + if pydantic_js_updates: + if (existing_updates := core_metadata.get('pydantic_js_updates')) is not None: + core_metadata['pydantic_js_updates'] = {**existing_updates, **pydantic_js_updates} + else: + core_metadata['pydantic_js_updates'] = pydantic_js_updates + + if pydantic_js_extra is not None: + existing_pydantic_js_extra = core_metadata.get('pydantic_js_extra') + if existing_pydantic_js_extra is None: + core_metadata['pydantic_js_extra'] = pydantic_js_extra + if isinstance(existing_pydantic_js_extra, dict): + if isinstance(pydantic_js_extra, dict): + core_metadata['pydantic_js_extra'] = {**existing_pydantic_js_extra, **pydantic_js_extra} + if callable(pydantic_js_extra): + warn( + 'Composing `dict` and `callable` type `json_schema_extra` is not supported.' + 'The `callable` type is being ignored.' + "If you'd like support for this behavior, please open an issue on pydantic.", + PydanticJsonSchemaWarning, + ) + if callable(existing_pydantic_js_extra): + # if ever there's a case of a callable, we'll just keep the last json schema extra spec + core_metadata['pydantic_js_extra'] = pydantic_js_extra diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..caa51e8c4630458020486407c032f0a8b78bba75 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_core_utils.py @@ -0,0 +1,174 @@ +from __future__ import annotations + +import inspect +from collections.abc import Mapping, Sequence +from typing import TYPE_CHECKING, Any, Union + +from pydantic_core import CoreSchema, core_schema +from typing_extensions import TypeGuard, get_args, get_origin +from typing_inspection import typing_objects + +from . 
import _repr + from ._typing_extra import is_generic_alias + + if TYPE_CHECKING: + from rich.console import Console + + AnyFunctionSchema = Union[ + core_schema.AfterValidatorFunctionSchema, + core_schema.BeforeValidatorFunctionSchema, + core_schema.WrapValidatorFunctionSchema, + core_schema.PlainValidatorFunctionSchema, +] + + +FunctionSchemaWithInnerSchema = Union[ + core_schema.AfterValidatorFunctionSchema, + core_schema.BeforeValidatorFunctionSchema, + core_schema.WrapValidatorFunctionSchema, +] + +CoreSchemaField = Union[ + core_schema.ModelField, core_schema.DataclassField, core_schema.TypedDictField, core_schema.ComputedField +] +CoreSchemaOrField = Union[core_schema.CoreSchema, CoreSchemaField] + +_CORE_SCHEMA_FIELD_TYPES = {'typed-dict-field', 'dataclass-field', 'model-field', 'computed-field'} +_FUNCTION_WITH_INNER_SCHEMA_TYPES = {'function-before', 'function-after', 'function-wrap'} +_LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES = {'list', 'set', 'frozenset'} + + +def is_core_schema( + schema: CoreSchemaOrField, +) -> TypeGuard[CoreSchema]: + return schema['type'] not in _CORE_SCHEMA_FIELD_TYPES + + +def is_core_schema_field( + schema: CoreSchemaOrField, +) -> TypeGuard[CoreSchemaField]: + return schema['type'] in _CORE_SCHEMA_FIELD_TYPES + + +def is_function_with_inner_schema( + schema: CoreSchemaOrField, +) -> TypeGuard[FunctionSchemaWithInnerSchema]: + return schema['type'] in _FUNCTION_WITH_INNER_SCHEMA_TYPES + + +def is_list_like_schema_with_items_schema( + schema: CoreSchema, +) -> TypeGuard[core_schema.ListSchema | core_schema.SetSchema | core_schema.FrozenSetSchema]: + return schema['type'] in _LIST_LIKE_SCHEMA_WITH_ITEMS_TYPES + + +def get_type_ref(type_: Any, args_override: tuple[type[Any], ...] | None = None) -> str: + """Produces the ref to be used for this type by pydantic_core's core schemas. + + The `args_override` argument was added for the purpose of creating valid recursive references + when creating generic models without needing to create a concrete class. + """ + origin = get_origin(type_) or type_ + + args = get_args(type_) if is_generic_alias(type_) else (args_override or ()) + generic_metadata = getattr(type_, '__pydantic_generic_metadata__', None) + if generic_metadata: + origin = generic_metadata['origin'] or origin + args = generic_metadata['args'] or args + + module_name = getattr(origin, '__module__', '<No __module__>') + if typing_objects.is_typealiastype(origin): + type_ref = f'{module_name}.{origin.__name__}:{id(origin)}' + else: + try: + qualname = getattr(origin, '__qualname__', f'<No __qualname__: {origin}>') + except Exception: + qualname = getattr(origin, '__name__', '<No __name__>') + type_ref = f'{module_name}.{qualname}:{id(origin)}' + + arg_refs: list[str] = [] + for arg in args: + if isinstance(arg, str): + # Handle string literals as a special case; we may be able to remove this special handling if we + # wrap them in a ForwardRef at some point. + arg_ref = f'{arg}:str-{id(arg)}' + else: + arg_ref = f'{_repr.display_as_type(arg)}:{id(arg)}' + arg_refs.append(arg_ref) + if arg_refs: + type_ref = f'{type_ref}[{",".join(arg_refs)}]' + return type_ref + + +def get_ref(s: core_schema.CoreSchema) -> None | str: + """Get the ref from the schema if it has one. + This exists just for type checking to work correctly. 
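A quick sketch of the behavior:

```python {test="skip" lint="skip"}
from pydantic_core import core_schema

assert get_ref(core_schema.int_schema()) is None
assert get_ref(core_schema.int_schema(ref='my-ref')) == 'my-ref'
```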
+ """ + return s.get('ref', None) + + +def _clean_schema_for_pretty_print(obj: Any, strip_metadata: bool = True) -> Any: # pragma: no cover + """A utility function to remove irrelevant information from a core schema.""" + if isinstance(obj, Mapping): + new_dct = {} + for k, v in obj.items(): + if k == 'metadata' and strip_metadata: + new_metadata = {} + + for meta_k, meta_v in v.items(): + if meta_k in ('pydantic_js_functions', 'pydantic_js_annotation_functions'): + new_metadata['js_metadata'] = '' + else: + new_metadata[meta_k] = _clean_schema_for_pretty_print(meta_v, strip_metadata=strip_metadata) + + if list(new_metadata.keys()) == ['js_metadata']: + new_metadata = {''} + + new_dct[k] = new_metadata + # Remove some defaults: + elif k in ('custom_init', 'root_model') and not v: + continue + else: + new_dct[k] = _clean_schema_for_pretty_print(v, strip_metadata=strip_metadata) + + return new_dct + elif isinstance(obj, Sequence) and not isinstance(obj, str): + return [_clean_schema_for_pretty_print(v, strip_metadata=strip_metadata) for v in obj] + else: + return obj + + +def pretty_print_core_schema( + val: Any, + *, + console: Console | None = None, + max_depth: int | None = None, + strip_metadata: bool = True, +) -> None: # pragma: no cover + """Pretty-print a core schema using the `rich` library. + + Args: + val: The core schema to print, or a Pydantic model/dataclass/type adapter + (in which case the cached core schema is fetched and printed). + console: A rich console to use when printing. Defaults to the global rich console instance. + max_depth: The number of nesting levels which may be printed. + strip_metadata: Whether to strip metadata in the output. If `True` any known core metadata + attributes will be stripped (but custom attributes are kept). Defaults to `True`. + """ + # lazy import: + from rich.pretty import pprint + + # circ. imports: + from pydantic import BaseModel, TypeAdapter + from pydantic.dataclasses import is_pydantic_dataclass + + if (inspect.isclass(val) and issubclass(val, BaseModel)) or is_pydantic_dataclass(val): + val = val.__pydantic_core_schema__ + if isinstance(val, TypeAdapter): + val = val.core_schema + cleaned_schema = _clean_schema_for_pretty_print(val, strip_metadata=strip_metadata) + + pprint(cleaned_schema, console=console, max_depth=max_depth) + + +pps = pretty_print_core_schema diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py new file mode 100644 index 0000000000000000000000000000000000000000..f00c8b77723ba2c269d858c0c7c6bb9f04fba7f2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_dataclasses.py @@ -0,0 +1,315 @@ +"""Private logic for creating pydantic dataclasses.""" + +from __future__ import annotations as _annotations + +import copy +import dataclasses +import sys +import warnings +from collections.abc import Generator +from contextlib import contextmanager +from functools import partial +from typing import TYPE_CHECKING, Any, ClassVar, Protocol, cast + +from pydantic_core import ( + ArgsKwargs, + SchemaSerializer, + SchemaValidator, + core_schema, +) +from typing_extensions import TypeAlias, TypeIs + +from ..errors import PydanticUndefinedAnnotation +from ..fields import FieldInfo +from ..plugin._schema_validator import PluggableSchemaValidator, create_schema_validator +from ..warnings import PydanticDeprecatedSince20 +from . 
import _config, _decorators + from ._fields import collect_dataclass_fields + from ._generate_schema import GenerateSchema, InvalidSchemaError + from ._generics import get_standard_typevars_map + from ._mock_val_ser import set_dataclass_mocks + from ._namespace_utils import NsResolver + from ._signature import generate_pydantic_signature + from ._utils import LazyClassAttribute + + if TYPE_CHECKING: + from _typeshed import DataclassInstance as StandardDataclass + + from ..config import ConfigDict + + class PydanticDataclass(StandardDataclass, Protocol): + """A protocol containing attributes only available once a class has been decorated as a Pydantic dataclass. + + Attributes: + __pydantic_config__: Pydantic-specific configuration settings for the dataclass. + __pydantic_complete__: Whether dataclass building is completed, or if there are still undefined fields. + __pydantic_core_schema__: The pydantic-core schema used to build the SchemaValidator and SchemaSerializer. + __pydantic_decorators__: Metadata containing the decorators defined on the dataclass. + __pydantic_fields__: Metadata about the fields defined on the dataclass. + __pydantic_serializer__: The pydantic-core SchemaSerializer used to dump instances of the dataclass. + __pydantic_validator__: The pydantic-core SchemaValidator used to validate instances of the dataclass. + """ + + __pydantic_config__: ClassVar[ConfigDict] + __pydantic_complete__: ClassVar[bool] + __pydantic_core_schema__: ClassVar[core_schema.CoreSchema] + __pydantic_decorators__: ClassVar[_decorators.DecoratorInfos] + __pydantic_fields__: ClassVar[dict[str, FieldInfo]] + __pydantic_serializer__: ClassVar[SchemaSerializer] + __pydantic_validator__: ClassVar[SchemaValidator | PluggableSchemaValidator] + + @classmethod + def __pydantic_fields_complete__(cls) -> bool: ... + + +def set_dataclass_fields( + cls: type[StandardDataclass], + config_wrapper: _config.ConfigWrapper, + ns_resolver: NsResolver | None = None, +) -> None: + """Collect and set `cls.__pydantic_fields__`. + + Args: + cls: The class. + config_wrapper: The config wrapper instance. + ns_resolver: Namespace resolver to use when getting dataclass annotations. + """ + typevars_map = get_standard_typevars_map(cls) + fields = collect_dataclass_fields( + cls, ns_resolver=ns_resolver, typevars_map=typevars_map, config_wrapper=config_wrapper + ) + + cls.__pydantic_fields__ = fields # type: ignore + + +def complete_dataclass( + cls: type[Any], + config_wrapper: _config.ConfigWrapper, + *, + raise_errors: bool = True, + ns_resolver: NsResolver | None = None, + _force_build: bool = False, +) -> bool: + """Finish building a pydantic dataclass. + + This logic is called on a class which has already been wrapped in `dataclasses.dataclass()`. + + This is somewhat analogous to `pydantic._internal._model_construction.complete_model_class`. + + Args: + cls: The class. + config_wrapper: The config wrapper instance. + raise_errors: Whether to raise errors, defaults to `True`. + ns_resolver: The namespace resolver instance to use when collecting dataclass fields + and during schema building. + _force_build: Whether to force building the dataclass, no matter if + [`defer_build`][pydantic.config.ConfigDict.defer_build] is set. + + Returns: + `True` if building a pydantic dataclass is successfully completed, `False` otherwise. + + Raises: + PydanticUndefinedAnnotation: If `raise_errors` is `True` and there are undefined annotations. 
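For intuition, a sketch of the deferred-build path described above, via the
public decorator (which ends up calling this function):

```python {test="skip" lint="skip"}
import pydantic.dataclasses

@pydantic.dataclasses.dataclass(config={'defer_build': True})
class Foo:
    x: int

# Schema building was skipped at class creation time; the mock validator
# installed by `set_dataclass_mocks()` triggers the real build on first use:
foo = Foo(x='1')
assert foo.x == 1
```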
+ """ + original_init = cls.__init__ + + # dataclass.__init__ must be defined here so its `__qualname__` can be changed since functions can't be copied, + # and so that the mock validator is used if building was deferred: + def __init__(__dataclass_self__: PydanticDataclass, *args: Any, **kwargs: Any) -> None: + __tracebackhide__ = True + s = __dataclass_self__ + s.__pydantic_validator__.validate_python(ArgsKwargs(args, kwargs), self_instance=s) + + __init__.__qualname__ = f'{cls.__qualname__}.__init__' + + cls.__init__ = __init__ # type: ignore + cls.__pydantic_config__ = config_wrapper.config_dict # type: ignore + + set_dataclass_fields(cls, config_wrapper=config_wrapper, ns_resolver=ns_resolver) + + if not _force_build and config_wrapper.defer_build: + set_dataclass_mocks(cls) + return False + + if hasattr(cls, '__post_init_post_parse__'): + warnings.warn( + 'Support for `__post_init_post_parse__` has been dropped, the method will not be called', + PydanticDeprecatedSince20, + ) + + typevars_map = get_standard_typevars_map(cls) + gen_schema = GenerateSchema( + config_wrapper, + ns_resolver=ns_resolver, + typevars_map=typevars_map, + ) + + # set __signature__ attr only for the class, but not for its instances + # (because instances can define `__call__`, and `inspect.signature` shouldn't + # use the `__signature__` attribute and instead generate from `__call__`). + cls.__signature__ = LazyClassAttribute( + '__signature__', + partial( + generate_pydantic_signature, + # It's important that we reference the `original_init` here, + # as it is the one synthesized by the stdlib `dataclass` module: + init=original_init, + fields=cls.__pydantic_fields__, # type: ignore + validate_by_name=config_wrapper.validate_by_name, + extra=config_wrapper.extra, + is_dataclass=True, + ), + ) + + try: + schema = gen_schema.generate_schema(cls) + except PydanticUndefinedAnnotation as e: + if raise_errors: + raise + set_dataclass_mocks(cls, f'`{e.name}`') + return False + + core_config = config_wrapper.core_config(title=cls.__name__) + + try: + schema = gen_schema.clean_schema(schema) + except InvalidSchemaError: + set_dataclass_mocks(cls) + return False + + # We are about to set all the remaining required properties expected for this cast; + # __pydantic_decorators__ and __pydantic_fields__ should already be set + cls = cast('type[PydanticDataclass]', cls) + + cls.__pydantic_core_schema__ = schema + cls.__pydantic_validator__ = create_schema_validator( + schema, cls, cls.__module__, cls.__qualname__, 'dataclass', core_config, config_wrapper.plugin_settings + ) + cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config) + cls.__pydantic_complete__ = True + return True + + +def is_stdlib_dataclass(cls: type[Any], /) -> TypeIs[type[StandardDataclass]]: + """Returns `True` if the class is a stdlib dataclass and *not* a Pydantic dataclass. + + Unlike the stdlib `dataclasses.is_dataclass()` function, this does *not* include subclasses + of a dataclass that are themselves not dataclasses. + + Args: + cls: The class. + + Returns: + `True` if the class is a stdlib dataclass, `False` otherwise. 
+ """ + return '__dataclass_fields__' in cls.__dict__ and not hasattr(cls, '__pydantic_validator__') + + +def as_dataclass_field(pydantic_field: FieldInfo) -> dataclasses.Field[Any]: + field_args: dict[str, Any] = {'default': pydantic_field} + + # Needed because if `doc` is set, the dataclass slots will be a dict (field name -> doc) instead of a tuple: + if sys.version_info >= (3, 14) and pydantic_field.description is not None: + field_args['doc'] = pydantic_field.description + + # Needed as the stdlib dataclass module processes kw_only in a specific way during class construction: + if sys.version_info >= (3, 10) and pydantic_field.kw_only is not None: + field_args['kw_only'] = pydantic_field.kw_only + + # Needed as the stdlib dataclass modules generates `__repr__()` during class construction: + if pydantic_field.repr is not True: + field_args['repr'] = pydantic_field.repr + + return dataclasses.field(**field_args) + + +DcFields: TypeAlias = dict[str, dataclasses.Field[Any]] + + +@contextmanager +def patch_base_fields(cls: type[Any]) -> Generator[None]: + """Temporarily patch the stdlib dataclasses bases of `cls` if the Pydantic `Field()` function is used. + + When creating a Pydantic dataclass, it is possible to inherit from stdlib dataclasses, where + the Pydantic `Field()` function is used. To create this Pydantic dataclass, we first apply + the stdlib `@dataclass` decorator on it. During the construction of the stdlib dataclass, + the `kw_only` and `repr` field arguments need to be understood by the stdlib *during* the + dataclass construction. To do so, we temporarily patch the fields dictionary of the affected + bases. + + For instance, with the following example: + + ```python {test="skip" lint="skip"} + import dataclasses as stdlib_dc + + import pydantic + import pydantic.dataclasses as pydantic_dc + + @stdlib_dc.dataclass + class A: + a: int = pydantic.Field(repr=False) + + # Notice that the `repr` attribute of the dataclass field is `True`: + A.__dataclass_fields__['a'] + #> dataclass.Field(default=FieldInfo(repr=False), repr=True, ...) + + @pydantic_dc.dataclass + class B(A): + b: int = pydantic.Field(repr=False) + ``` + + When passing `B` to the stdlib `@dataclass` decorator, it will look for fields in the parent classes + and reuse them directly. When this context manager is active, `A` will be temporarily patched to be + equivalent to: + + ```python {test="skip" lint="skip"} + @stdlib_dc.dataclass + class A: + a: int = stdlib_dc.field(default=Field(repr=False), repr=False) + ``` + + !!! note + This is only applied to the bases of `cls`, and not `cls` itself. The reason is that the Pydantic + dataclass decorator "owns" `cls` (in the previous example, `B`). As such, we instead modify the fields + directly (in the previous example, we simply do `setattr(B, 'b', as_dataclass_field(pydantic_field))`). + + !!! note + This approach is far from ideal, and can probably be the source of unwanted side effects/race conditions. + The previous implemented approach was mutating the `__annotations__` dict of `cls`, which is no longer a + safe operation in Python 3.14+, and resulted in unexpected behavior with field ordering anyway. 
+ """ + # A list of two-tuples, the first element being a reference to the + # dataclass fields dictionary, the second element being a mapping between + # the field names that were modified, and their original `Field`: + original_fields_list: list[tuple[DcFields, DcFields]] = [] + + for base in cls.__mro__[1:]: + dc_fields: dict[str, dataclasses.Field[Any]] = base.__dict__.get('__dataclass_fields__', {}) + dc_fields_with_pydantic_field_defaults = { + field_name: field + for field_name, field in dc_fields.items() + if isinstance(field.default, FieldInfo) + # Only do the patching if one of the affected attributes is set: + and (field.default.description is not None or field.default.kw_only or field.default.repr is not True) + } + if dc_fields_with_pydantic_field_defaults: + original_fields_list.append((dc_fields, dc_fields_with_pydantic_field_defaults)) + for field_name, field in dc_fields_with_pydantic_field_defaults.items(): + default = cast(FieldInfo, field.default) + # `dataclasses.Field` isn't documented as working with `copy.copy()`. + # It is a class with `__slots__`, so should work (and we hope for the best): + new_dc_field = copy.copy(field) + # For base fields, no need to set `doc` from `FieldInfo.description`, this is only relevant + # for the class under construction and handled in `as_dataclass_field()`. + if sys.version_info >= (3, 10) and default.kw_only: + new_dc_field.kw_only = True + if default.repr is not True: + new_dc_field.repr = default.repr + dc_fields[field_name] = new_dc_field + + try: + yield + finally: + for fields, original_fields in original_fields_list: + for field_name, original_field in original_fields.items(): + fields[field_name] = original_field diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..e24ad2cf924b0940b1d1d41f2ccbe4b20f380947 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_decorators.py @@ -0,0 +1,873 @@ +"""Logic related to validators applied to models etc. via the `@field_validator` and `@model_validator` decorators.""" + +from __future__ import annotations as _annotations + +import types +from collections import deque +from collections.abc import Iterable +from copy import copy +from dataclasses import dataclass, field +from functools import cached_property, partial, partialmethod +from inspect import Parameter, Signature, isdatadescriptor, ismethoddescriptor +from itertools import islice +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Generic, Literal, TypeVar, Union + +from pydantic_core import PydanticUndefined, PydanticUndefinedType, core_schema +from typing_extensions import Self, TypeAlias, is_typeddict + +from ..errors import PydanticUserError +from ._core_utils import get_type_ref +from ._internal_dataclass import slots_true +from ._namespace_utils import GlobalsNamespace, MappingNamespace +from ._typing_extra import get_function_type_hints, signature_no_eval +from ._utils import can_be_positional + +if TYPE_CHECKING: + from ..fields import ComputedFieldInfo + from ..functional_validators import FieldValidatorModes + from ._config import ConfigWrapper + + +@dataclass(**slots_true) +class ValidatorDecoratorInfo: + """A container for data from `@validator` so that we can access it + while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@validator'. 
+ fields: A tuple of field names the validator should be called on. + mode: The proposed validator mode. + each_item: For complex objects (sets, lists etc.) whether to validate individual + elements rather than the whole object. + always: Whether this method and other validators should be called even if the value is missing. + check_fields: Whether to check that the fields actually exist on the model. + """ + + decorator_repr: ClassVar[str] = '@validator' + + fields: tuple[str, ...] + mode: Literal['before', 'after'] + each_item: bool + always: bool + check_fields: bool | None + + +@dataclass(**slots_true) +class FieldValidatorDecoratorInfo: + """A container for data from `@field_validator` so that we can access it + while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@field_validator'. + fields: A tuple of field names the validator should be called on. + mode: The proposed validator mode. + check_fields: Whether to check that the fields actually exist on the model. + json_schema_input_type: The input type of the function. This is only used to generate + the appropriate JSON Schema (in validation mode) and can only be specified + when `mode` is either `'before'`, `'plain'` or `'wrap'`. + """ + + decorator_repr: ClassVar[str] = '@field_validator' + + fields: tuple[str, ...] + mode: FieldValidatorModes + check_fields: bool | None + json_schema_input_type: Any + + +@dataclass(**slots_true) +class RootValidatorDecoratorInfo: + """A container for data from `@root_validator` so that we can access it + while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@root_validator'. + mode: The proposed validator mode. + """ + + decorator_repr: ClassVar[str] = '@root_validator' + mode: Literal['before', 'after'] + + +@dataclass(**slots_true) +class FieldSerializerDecoratorInfo: + """A container for data from `@field_serializer` so that we can access it + while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@field_serializer'. + fields: A tuple of field names the serializer should be called on. + mode: The proposed serializer mode. + return_type: The type of the serializer's return value. + when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`, + and `'json-unless-none'`. + check_fields: Whether to check that the fields actually exist on the model. + """ + + decorator_repr: ClassVar[str] = '@field_serializer' + fields: tuple[str, ...] + mode: Literal['plain', 'wrap'] + return_type: Any + when_used: core_schema.WhenUsed + check_fields: bool | None + + +@dataclass(**slots_true) +class ModelSerializerDecoratorInfo: + """A container for data from `@model_serializer` so that we can access it + while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@model_serializer'. + mode: The proposed serializer mode. + return_type: The type of the serializer's return value. + when_used: The serialization condition. Accepts a string with values `'always'`, `'unless-none'`, `'json'`, + and `'json-unless-none'`. 
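For context, a minimal sketch of the decorator that produces this info:

```python {test="skip" lint="skip"}
from pydantic import BaseModel, model_serializer

class Point(BaseModel):
    x: int
    y: int

    # A 'plain' model serializer, only applied in JSON mode:
    @model_serializer(mode='plain', when_used='json')
    def ser(self) -> str:
        return f'{self.x},{self.y}'

assert Point(x=1, y=2).model_dump_json() == '"1,2"'
```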
+ """ + + decorator_repr: ClassVar[str] = '@model_serializer' + mode: Literal['plain', 'wrap'] + return_type: Any + when_used: core_schema.WhenUsed + + +@dataclass(**slots_true) +class ModelValidatorDecoratorInfo: + """A container for data from `@model_validator` so that we can access it + while building the pydantic-core schema. + + Attributes: + decorator_repr: A class variable representing the decorator string, '@model_validator'. + mode: The proposed serializer mode. + """ + + decorator_repr: ClassVar[str] = '@model_validator' + mode: Literal['wrap', 'before', 'after'] + + +DecoratorInfo: TypeAlias = """Union[ + ValidatorDecoratorInfo, + FieldValidatorDecoratorInfo, + RootValidatorDecoratorInfo, + FieldSerializerDecoratorInfo, + ModelSerializerDecoratorInfo, + ModelValidatorDecoratorInfo, + ComputedFieldInfo, +]""" + +ReturnType = TypeVar('ReturnType') +DecoratedType: TypeAlias = ( + 'Union[classmethod[Any, Any, ReturnType], staticmethod[Any, ReturnType], Callable[..., ReturnType], property]' +) + + +@dataclass # can't use slots here since we set attributes on `__post_init__` +class PydanticDescriptorProxy(Generic[ReturnType]): + """Wrap a classmethod, staticmethod, property or unbound function + and act as a descriptor that allows us to detect decorated items + from the class' attributes. + + This class' __get__ returns the wrapped item's __get__ result, + which makes it transparent for classmethods and staticmethods. + + Attributes: + wrapped: The decorator that has to be wrapped. + decorator_info: The decorator info. + shim: A wrapper function to wrap V1 style function. + """ + + wrapped: DecoratedType[ReturnType] + decorator_info: DecoratorInfo + shim: Callable[[Callable[..., Any]], Callable[..., Any]] | None = None + + def __post_init__(self): + for attr in 'setter', 'deleter': + if hasattr(self.wrapped, attr): + f = partial(self._call_wrapped_attr, name=attr) + setattr(self, attr, f) + + def _call_wrapped_attr(self, func: Callable[[Any], None], *, name: str) -> PydanticDescriptorProxy[ReturnType]: + self.wrapped = getattr(self.wrapped, name)(func) + if isinstance(self.wrapped, property): + # update ComputedFieldInfo.wrapped_property + from ..fields import ComputedFieldInfo + + if isinstance(self.decorator_info, ComputedFieldInfo): + self.decorator_info.wrapped_property = self.wrapped + return self + + def __get__(self, obj: object | None, obj_type: type[object] | None = None) -> PydanticDescriptorProxy[ReturnType]: + try: + return self.wrapped.__get__(obj, obj_type) # pyright: ignore[reportReturnType] + except AttributeError: + # not a descriptor, e.g. a partial object + return self.wrapped # type: ignore[return-value] + + def __set_name__(self, instance: Any, name: str) -> None: + if hasattr(self.wrapped, '__set_name__'): + self.wrapped.__set_name__(instance, name) # pyright: ignore[reportFunctionMemberAccess] + + def __getattr__(self, name: str, /) -> Any: + """Forward checks for __isabstractmethod__ and such.""" + return getattr(self.wrapped, name) + + +DecoratorInfoType = TypeVar('DecoratorInfoType', bound=DecoratorInfo) + + +@dataclass(**slots_true) +class Decorator(Generic[DecoratorInfoType]): + """A generic container class to join together the decorator metadata + (metadata from decorator itself, which we have when the + decorator is called but not when we are building the core-schema) + and the bound function (which we have after the class itself is created). + + Attributes: + cls_ref: The class ref. + cls_var_name: The decorated function name. 
+ func: The decorated function. + shim: A wrapper function to wrap V1 style function. + info: The decorator info. + """ + + cls_ref: str + cls_var_name: str + func: Callable[..., Any] + shim: Callable[[Any], Any] | None + info: DecoratorInfoType + + @staticmethod + def build( + cls_: Any, + *, + cls_var_name: str, + shim: Callable[[Any], Any] | None, + info: DecoratorInfoType, + ) -> Decorator[DecoratorInfoType]: + """Build a new decorator. + + Args: + cls_: The class. + cls_var_name: The decorated function name. + shim: A wrapper function to wrap V1 style function. + info: The decorator info. + + Returns: + The new decorator instance. + """ + func = get_attribute_from_bases(cls_, cls_var_name) + if shim is not None: + func = shim(func) + func = unwrap_wrapped_function(func, unwrap_partial=False) + if not callable(func): + # TODO most likely this branch can be removed when we drop support for Python 3.12: + # This branch will get hit for classmethod properties + attribute = get_attribute_from_base_dicts(cls_, cls_var_name) # prevents the binding call to `__get__` + if isinstance(attribute, PydanticDescriptorProxy): + func = unwrap_wrapped_function(attribute.wrapped) + return Decorator( + cls_ref=get_type_ref(cls_), + cls_var_name=cls_var_name, + func=func, + shim=shim, + info=info, + ) + + def bind_to_cls(self, cls: Any) -> Decorator[DecoratorInfoType]: + """Bind the decorator to a class. + + Args: + cls: the class. + + Returns: + The new decorator instance. + """ + return self.build( + cls, + cls_var_name=self.cls_var_name, + shim=self.shim, + info=copy(self.info), + ) + + +def get_bases(tp: type[Any]) -> tuple[type[Any], ...]: + """Get the base classes of a class or typeddict. + + Args: + tp: The type or class to get the bases. + + Returns: + The base classes. + """ + if is_typeddict(tp): + return tp.__orig_bases__ # type: ignore + try: + return tp.__bases__ + except AttributeError: + return () + + +def mro(tp: type[Any]) -> tuple[type[Any], ...]: + """Calculate the Method Resolution Order of bases using the C3 algorithm. + + See https://www.python.org/download/releases/2.3/mro/ + """ + # try to use the existing mro, for performance mainly + # but also because it helps verify the implementation below + if not is_typeddict(tp): + try: + return tp.__mro__ + except AttributeError: + # GenericAlias and some other cases + pass + + bases = get_bases(tp) + return (tp,) + mro_for_bases(bases) + + +def mro_for_bases(bases: tuple[type[Any], ...]) -> tuple[type[Any], ...]: + def merge_seqs(seqs: list[deque[type[Any]]]) -> Iterable[type[Any]]: + while True: + non_empty = [seq for seq in seqs if seq] + if not non_empty: + # Nothing left to process, we're done. + return + candidate: type[Any] | None = None + for seq in non_empty: # Find merge candidates among seq heads. + candidate = seq[0] + not_head = [s for s in non_empty if candidate in islice(s, 1, None)] + if not_head: + # Reject the candidate. + candidate = None + else: + break + if not candidate: + raise TypeError('Inconsistent hierarchy, no C3 MRO is possible') + yield candidate + for seq in non_empty: + # Remove candidate. + if seq[0] == candidate: + seq.popleft() + + seqs = [deque(mro(base)) for base in bases] + [deque(bases)] + return tuple(merge_seqs(seqs)) + + +_sentinel = object() + + +def get_attribute_from_bases(tp: type[Any] | tuple[type[Any], ...], name: str) -> Any: + """Get the attribute from the next class in the MRO that has it, + aiming to simulate calling the method on the actual class. 
+ + The reason for iterating over the mro instead of just getting + the attribute (which would do that for us) is to support TypedDict, + which lacks a real __mro__, but can have a virtual one constructed + from its bases (as done here). + + Args: + tp: The type or class to search for the attribute. If a tuple, this is treated as a set of base classes. + name: The name of the attribute to retrieve. + + Returns: + Any: The attribute value, if found. + + Raises: + AttributeError: If the attribute is not found in any class in the MRO. + """ + if isinstance(tp, tuple): + for base in mro_for_bases(tp): + attribute = base.__dict__.get(name, _sentinel) + if attribute is not _sentinel: + attribute_get = getattr(attribute, '__get__', None) + if attribute_get is not None: + return attribute_get(None, tp) + return attribute + raise AttributeError(f'{name} not found in {tp}') + else: + try: + return getattr(tp, name) + except AttributeError: + return get_attribute_from_bases(mro(tp), name) + + +def get_attribute_from_base_dicts(tp: type[Any], name: str) -> Any: + """Get an attribute out of the `__dict__` following the MRO. + This prevents the call to `__get__` on the descriptor, and allows + us to get the original function for classmethod properties. + + Args: + tp: The type or class to search for the attribute. + name: The name of the attribute to retrieve. + + Returns: + Any: The attribute value, if found. + + Raises: + KeyError: If the attribute is not found in any class's `__dict__` in the MRO. + """ + for base in reversed(mro(tp)): + if name in base.__dict__: + return base.__dict__[name] + return tp.__dict__[name] # raise the error + + +@dataclass(**slots_true) +class DecoratorInfos: + """Mapping of name in the class namespace to decorator info. + + note that the name in the class namespace is the function or attribute name + not the field name! + """ + + validators: dict[str, Decorator[ValidatorDecoratorInfo]] = field(default_factory=dict) + field_validators: dict[str, Decorator[FieldValidatorDecoratorInfo]] = field(default_factory=dict) + root_validators: dict[str, Decorator[RootValidatorDecoratorInfo]] = field(default_factory=dict) + field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]] = field(default_factory=dict) + model_serializers: dict[str, Decorator[ModelSerializerDecoratorInfo]] = field(default_factory=dict) + model_validators: dict[str, Decorator[ModelValidatorDecoratorInfo]] = field(default_factory=dict) + computed_fields: dict[str, Decorator[ComputedFieldInfo]] = field(default_factory=dict) + + @classmethod + def build( + cls, + typ: type[Any], + # Default to `True` for backwards compatibility: + replace_wrapped_methods: bool = True, + ) -> Self: + """Build a `DecoratorInfos` instance for the given model, dataclass or `TypedDict` type. + + Decorators from parent classes are included, including "bare" classes (e.g. if `typ` + is a Pydantic model, non Pydantic parent model classes are also taken into account). + The collection of the decorators happens by respecting the MRO. + + If one of the bases has an `__pydantic_decorators__` attribute set, it is assumed to be + a `DecoratorInfos` instance and is used as-is. The `__pydantic_decorators__` attribute + is *not* being set on the provided `typ`. + + Args: + typ: The model, dataclass or `TypedDict` type to use when building the `DecoratorInfos` instance. + replace_wrapped_methods: Whether to replace the decorator's wrapped methods on `typ`. + This is useful e.g. for field validators which are initially class methods. 
This should + only be set to `True` if `typ` is a Pydantic model or dataclass (otherwise this results + in mutations of classes Pydantic doesn't "own"). + """ + # reminder: dicts are ordered and replacement does not alter the order + res = cls() + # Iterate over the bases, without the actual `typ`. + # `1:-1` because we don't need to include `object`/`TypedDict`: + for base in reversed(mro(typ)[1:-1]): + existing: DecoratorInfos | None = base.__dict__.get('__pydantic_decorators__') + if existing is None: + existing, _ = _decorator_infos_for_class(base, collect_to_replace=False) + res.validators.update({k: v.bind_to_cls(typ) for k, v in existing.validators.items()}) + res.field_validators.update({k: v.bind_to_cls(typ) for k, v in existing.field_validators.items()}) + res.root_validators.update({k: v.bind_to_cls(typ) for k, v in existing.root_validators.items()}) + res.field_serializers.update({k: v.bind_to_cls(typ) for k, v in existing.field_serializers.items()}) + res.model_serializers.update({k: v.bind_to_cls(typ) for k, v in existing.model_serializers.items()}) + res.model_validators.update({k: v.bind_to_cls(typ) for k, v in existing.model_validators.items()}) + res.computed_fields.update({k: v.bind_to_cls(typ) for k, v in existing.computed_fields.items()}) + + decorator_infos, to_replace = _decorator_infos_for_class(typ, collect_to_replace=True) + + res.validators.update(decorator_infos.validators) + res.field_validators.update(decorator_infos.field_validators) + res.root_validators.update(decorator_infos.root_validators) + res.field_serializers.update(decorator_infos.field_serializers) + res.model_serializers.update(decorator_infos.model_serializers) + res.model_validators.update(decorator_infos.model_validators) + res.computed_fields.update(decorator_infos.computed_fields) + + if replace_wrapped_methods and to_replace: + for name, value in to_replace: + setattr(typ, name, value) + + res._validate() + return res + + def _validate(self) -> None: + seen: set[str] = set() + for field_ser in self.field_serializers.values(): + for f_name in field_ser.info.fields: + if f_name in seen: + raise PydanticUserError( + f'Multiple field serializer functions were defined for field {f_name!r}, this is not allowed.', + code='multiple-field-serializers', + ) + seen.add(f_name) + + def update_from_config(self, config_wrapper: ConfigWrapper) -> None: + """Update the decorator infos from the configuration of the class they are attached to.""" + for name, computed_field_dec in self.computed_fields.items(): + computed_field_dec.info._update_from_config(config_wrapper, name) + + +def _decorator_infos_for_class( + typ: type[Any], + *, + collect_to_replace: bool, +) -> tuple[DecoratorInfos, list[tuple[str, Any]]]: + """Collect a `DecoratorInfos` for class, without looking into bases.""" + res = DecoratorInfos() + to_replace: list[tuple[str, Any]] = [] + + for var_name, var_value in vars(typ).items(): + if isinstance(var_value, PydanticDescriptorProxy): + info = var_value.decorator_info + if isinstance(info, ValidatorDecoratorInfo): + res.validators[var_name] = Decorator.build(typ, cls_var_name=var_name, shim=var_value.shim, info=info) + elif isinstance(info, FieldValidatorDecoratorInfo): + res.field_validators[var_name] = Decorator.build( + typ, cls_var_name=var_name, shim=var_value.shim, info=info + ) + elif isinstance(info, RootValidatorDecoratorInfo): + res.root_validators[var_name] = Decorator.build( + typ, cls_var_name=var_name, shim=var_value.shim, info=info + ) + elif isinstance(info, 
FieldSerializerDecoratorInfo): + res.field_serializers[var_name] = Decorator.build( + typ, cls_var_name=var_name, shim=var_value.shim, info=info + ) + elif isinstance(info, ModelValidatorDecoratorInfo): + res.model_validators[var_name] = Decorator.build( + typ, cls_var_name=var_name, shim=var_value.shim, info=info + ) + elif isinstance(info, ModelSerializerDecoratorInfo): + res.model_serializers[var_name] = Decorator.build( + typ, cls_var_name=var_name, shim=var_value.shim, info=info + ) + else: + from ..fields import ComputedFieldInfo + + assert isinstance(info, ComputedFieldInfo) + res.computed_fields[var_name] = Decorator.build(typ, cls_var_name=var_name, shim=None, info=info) + if collect_to_replace: + to_replace.append((var_name, var_value.wrapped)) + + return res, to_replace + + +def inspect_validator( + validator: Callable[..., Any], *, mode: FieldValidatorModes, type: Literal['field', 'model'] +) -> bool: + """Look at a field or model validator function and determine whether it takes an info argument. + + An error is raised if the function has an invalid signature. + + Args: + validator: The validator function to inspect. + mode: The proposed validator mode. + type: The type of validator, either 'field' or 'model'. + + Returns: + Whether the validator takes an info argument. + """ + try: + sig = signature_no_eval(validator) + except (ValueError, TypeError): + # `inspect.signature` might not be able to infer a signature, e.g. with C objects. + # In this case, we assume no info argument is present: + return False + n_positional = count_positional_required_params(sig) + if mode == 'wrap': + if n_positional == 3: + return True + elif n_positional == 2: + return False + else: + assert mode in {'before', 'after', 'plain'}, f"invalid mode: {mode!r}, expected 'before', 'after' or 'plain'" + if n_positional == 2: + return True + elif n_positional == 1: + return False + + raise PydanticUserError( + f'Unrecognized {type} validator function signature for {validator} with `mode={mode}`: {sig}', + code='validator-signature', + ) + + +def inspect_field_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> tuple[bool, bool]: + """Look at a field serializer function and determine if it is a field serializer, + and whether it takes an info argument. + + An error is raised if the function has an invalid signature. + + Args: + serializer: The serializer function to inspect. + mode: The serializer mode, either 'plain' or 'wrap'. + + Returns: + Tuple of (is_field_serializer, info_arg). + """ + try: + sig = signature_no_eval(serializer) + except (ValueError, TypeError): + # `inspect.signature` might not be able to infer a signature, e.g. with C objects. 
+ # In this case, we assume no info argument is present and this is not a method: + return (False, False) + + first = next(iter(sig.parameters.values()), None) + is_field_serializer = first is not None and first.name == 'self' + + n_positional = count_positional_required_params(sig) + if is_field_serializer: + # -1 to correct for self parameter + info_arg = _serializer_info_arg(mode, n_positional - 1) + else: + info_arg = _serializer_info_arg(mode, n_positional) + + if info_arg is None: + raise PydanticUserError( + f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`: {sig}', + code='field-serializer-signature', + ) + + return is_field_serializer, info_arg + + +def inspect_annotated_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool: + """Look at a serializer function used via `Annotated` and determine whether it takes an info argument. + + An error is raised if the function has an invalid signature. + + Args: + serializer: The serializer function to check. + mode: The serializer mode, either 'plain' or 'wrap'. + + Returns: + info_arg + """ + try: + sig = signature_no_eval(serializer) + except (ValueError, TypeError): + # `inspect.signature` might not be able to infer a signature, e.g. with C objects. + # In this case, we assume no info argument is present: + return False + info_arg = _serializer_info_arg(mode, count_positional_required_params(sig)) + if info_arg is None: + raise PydanticUserError( + f'Unrecognized field_serializer function signature for {serializer} with `mode={mode}`: {sig}', + code='field-serializer-signature', + ) + else: + return info_arg + + +def inspect_model_serializer(serializer: Callable[..., Any], mode: Literal['plain', 'wrap']) -> bool: + """Look at a model serializer function and determine whether it takes an info argument. + + An error is raised if the function has an invalid signature. + + Args: + serializer: The serializer function to check. + mode: The serializer mode, either 'plain' or 'wrap'. + + Returns: + `info_arg` - whether the function expects an info argument. 
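Illustrating the two accepted instance-method shapes (a sketch; the result is
inferred from the number of positional parameters):

```python {test="skip" lint="skip"}
class Model:
    def no_info(self) -> str: ...

    def with_info(self, info) -> str: ...

# Only `self` -> the serializer does not expect an info argument:
assert inspect_model_serializer(Model.no_info, mode='plain') is False
# `self` plus one extra positional parameter -> it does:
assert inspect_model_serializer(Model.with_info, mode='plain') is True
```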
+ """ + if isinstance(serializer, (staticmethod, classmethod)) or not is_instance_method_from_sig(serializer): + raise PydanticUserError( + '`@model_serializer` must be applied to instance methods', code='model-serializer-instance-method' + ) + + sig = signature_no_eval(serializer) + info_arg = _serializer_info_arg(mode, count_positional_required_params(sig)) + if info_arg is None: + raise PydanticUserError( + f'Unrecognized model_serializer function signature for {serializer} with `mode={mode}`:{sig}', + code='model-serializer-signature', + ) + else: + return info_arg + + +def _serializer_info_arg(mode: Literal['plain', 'wrap'], n_positional: int) -> bool | None: + if mode == 'plain': + if n_positional == 1: + # (input_value: Any, /) -> Any + return False + elif n_positional == 2: + # (model: Any, input_value: Any, /) -> Any + return True + else: + assert mode == 'wrap', f"invalid mode: {mode!r}, expected 'plain' or 'wrap'" + if n_positional == 2: + # (input_value: Any, serializer: SerializerFunctionWrapHandler, /) -> Any + return False + elif n_positional == 3: + # (input_value: Any, serializer: SerializerFunctionWrapHandler, info: SerializationInfo, /) -> Any + return True + + return None + + +AnyDecoratorCallable: TypeAlias = ( + 'Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any], Callable[..., Any]]' +) + + +def is_instance_method_from_sig(function: AnyDecoratorCallable) -> bool: + """Whether the function is an instance method. + + It will consider a function as instance method if the first parameter of + function is `self`. + + Args: + function: The function to check. + + Returns: + `True` if the function is an instance method, `False` otherwise. + """ + sig = signature_no_eval(unwrap_wrapped_function(function)) + first = next(iter(sig.parameters.values()), None) + if first and first.name == 'self': + return True + return False + + +def ensure_classmethod_based_on_signature(function: AnyDecoratorCallable) -> Any: + """Apply the `@classmethod` decorator on the function. + + Args: + function: The function to apply the decorator on. + + Return: + The `@classmethod` decorator applied function. + """ + if not isinstance( + unwrap_wrapped_function(function, unwrap_class_static_method=False), classmethod + ) and _is_classmethod_from_sig(function): + return classmethod(function) # type: ignore[arg-type] + return function + + +def _is_classmethod_from_sig(function: AnyDecoratorCallable) -> bool: + sig = signature_no_eval(unwrap_wrapped_function(function)) + first = next(iter(sig.parameters.values()), None) + if first and first.name == 'cls': + return True + return False + + +def unwrap_wrapped_function( + func: Any, + *, + unwrap_partial: bool = True, + unwrap_class_static_method: bool = True, +) -> Any: + """Recursively unwraps a wrapped function until the underlying function is reached. + This handles property, functools.partial, functools.partialmethod, staticmethod, and classmethod. + + Args: + func: The function to unwrap. + unwrap_partial: If True (default), unwrap partial and partialmethod decorators. + unwrap_class_static_method: If True (default), also unwrap classmethod and staticmethod + decorators. If False, only unwrap partial and partialmethod decorators. + + Returns: + The underlying function of the wrapped function. + """ + # Define the types we want to check against as a single tuple. 
+ unwrap_types = ( + (property, cached_property) + + ((partial, partialmethod) if unwrap_partial else ()) + + ((staticmethod, classmethod) if unwrap_class_static_method else ()) + ) + + while isinstance(func, unwrap_types): + if unwrap_class_static_method and isinstance(func, (classmethod, staticmethod)): + func = func.__func__ + elif isinstance(func, (partial, partialmethod)): + func = func.func + elif isinstance(func, property): + func = func.fget # arbitrary choice, convenient for computed fields + else: + # Make coverage happy as it can only get here in the last possible case + assert isinstance(func, cached_property) + func = func.func # type: ignore + + return func + + +_function_like = ( + partial, + partialmethod, + types.FunctionType, + types.BuiltinFunctionType, + types.MethodType, + types.WrapperDescriptorType, + types.MethodWrapperType, + types.MemberDescriptorType, +) + + +def get_callable_return_type( + callable_obj: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, +) -> Any | PydanticUndefinedType: + """Get the callable return type. + + Args: + callable_obj: The callable to analyze. + globalns: The globals namespace to use during type annotation evaluation. + localns: The locals namespace to use during type annotation evaluation. + + Returns: + The function return type. + """ + if isinstance(callable_obj, type): + # types are callables, and we assume the return type + # is the type itself (e.g. `int()` results in an instance of `int`). + return callable_obj + + if not isinstance(callable_obj, _function_like): + call_func = getattr(type(callable_obj), '__call__', None) # noqa: B004 + if call_func is not None: + callable_obj = call_func + + hints = get_function_type_hints( + unwrap_wrapped_function(callable_obj), + include_keys={'return'}, + globalns=globalns, + localns=localns, + ) + return hints.get('return', PydanticUndefined) + + +def count_positional_required_params(sig: Signature) -> int: + """Get the number of positional (required) arguments of a signature. + + This function should only be used to inspect signatures of validation and serialization functions. + The first argument (the value being serialized or validated) is counted as a required argument + even if a default value exists. + + Returns: + The number of positional arguments of a signature. + """ + parameters = list(sig.parameters.values()) + return sum( + 1 + for param in parameters + if can_be_positional(param) + # First argument is the value being validated/serialized, and can have a default value + # (e.g. `float`, which has signature `(x=0, /)`). We assume other parameters (the info arg + # for instance) should be required, and thus without any default value. + and (param.default is Parameter.empty or param is parameters[0]) + ) + + +def ensure_property(f: Any) -> Any: + """Ensure that a function is a `property` or `cached_property`, or is a valid descriptor. + + Args: + f: The function to check. + + Returns: + The function, or a `property` or `cached_property` instance wrapping the function. 
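+
+    For example (a rough sketch; `f` is any plain function):
+
+        ensure_property(property(f))  # a data descriptor, returned unchanged
+        ensure_property(f)            # wrapped and returned as `property(f)`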
+ """ + if ismethoddescriptor(f) or isdatadescriptor(f): + return f + else: + return property(f) diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..34273779d77666f5cd775f6a2684641092db67ee --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_decorators_v1.py @@ -0,0 +1,174 @@ +"""Logic for V1 validators, e.g. `@validator` and `@root_validator`.""" + +from __future__ import annotations as _annotations + +from inspect import Parameter, signature +from typing import Any, Union, cast + +from pydantic_core import core_schema +from typing_extensions import Protocol + +from ..errors import PydanticUserError +from ._utils import can_be_positional + + +class V1OnlyValueValidator(Protocol): + """A simple validator, supported for V1 validators and V2 validators.""" + + def __call__(self, __value: Any) -> Any: ... + + +class V1ValidatorWithValues(Protocol): + """A validator with `values` argument, supported for V1 validators and V2 validators.""" + + def __call__(self, __value: Any, values: dict[str, Any]) -> Any: ... + + +class V1ValidatorWithValuesKwOnly(Protocol): + """A validator with keyword only `values` argument, supported for V1 validators and V2 validators.""" + + def __call__(self, __value: Any, *, values: dict[str, Any]) -> Any: ... + + +class V1ValidatorWithKwargs(Protocol): + """A validator with `kwargs` argument, supported for V1 validators and V2 validators.""" + + def __call__(self, __value: Any, **kwargs: Any) -> Any: ... + + +class V1ValidatorWithValuesAndKwargs(Protocol): + """A validator with `values` and `kwargs` arguments, supported for V1 validators and V2 validators.""" + + def __call__(self, __value: Any, values: dict[str, Any], **kwargs: Any) -> Any: ... + + +V1Validator = Union[ + V1ValidatorWithValues, V1ValidatorWithValuesKwOnly, V1ValidatorWithKwargs, V1ValidatorWithValuesAndKwargs +] + + +def can_be_keyword(param: Parameter) -> bool: + return param.kind in (Parameter.POSITIONAL_OR_KEYWORD, Parameter.KEYWORD_ONLY) + + +def make_generic_v1_field_validator(validator: V1Validator) -> core_schema.WithInfoValidatorFunction: + """Wrap a V1 style field validator for V2 compatibility. + + Args: + validator: The V1 style field validator. + + Returns: + A wrapped V2 style field validator. + + Raises: + PydanticUserError: If the signature is not supported or the parameters are + not available in Pydantic V2. + """ + sig = signature(validator) + + needs_values_kw = False + + for param_num, (param_name, parameter) in enumerate(sig.parameters.items()): + if can_be_keyword(parameter) and param_name in ('field', 'config'): + raise PydanticUserError( + 'The `field` and `config` parameters are not available in Pydantic V2, ' + 'please use the `info` parameter instead.', + code='validator-field-config-info', + ) + if parameter.kind is Parameter.VAR_KEYWORD: + needs_values_kw = True + elif can_be_keyword(parameter) and param_name == 'values': + needs_values_kw = True + elif can_be_positional(parameter) and param_num == 0: + # value + continue + elif parameter.default is Parameter.empty: # ignore params with defaults e.g. 
bound by functools.partial + raise PydanticUserError( + f'Unsupported signature for V1 style validator {validator}: {sig} is not supported.', + code='validator-v1-signature', + ) + + if needs_values_kw: + # (v, **kwargs), (v, values, **kwargs), (v, *, values, **kwargs) or (v, *, values) + val1 = cast(V1ValidatorWithValues, validator) + + def wrapper1(value: Any, info: core_schema.ValidationInfo) -> Any: + return val1(value, values=info.data) + + return wrapper1 + else: + val2 = cast(V1OnlyValueValidator, validator) + + def wrapper2(value: Any, _: core_schema.ValidationInfo) -> Any: + return val2(value) + + return wrapper2 + + +RootValidatorValues = dict[str, Any] +# technically tuple[model_dict, model_extra, fields_set] | tuple[dataclass_dict, init_vars] +RootValidatorFieldsTuple = tuple[Any, ...] + + +class V1RootValidatorFunction(Protocol): + """A simple root validator, supported for V1 validators and V2 validators.""" + + def __call__(self, __values: RootValidatorValues) -> RootValidatorValues: ... + + +class V2CoreBeforeRootValidator(Protocol): + """V2 validator with mode='before'.""" + + def __call__(self, __values: RootValidatorValues, __info: core_schema.ValidationInfo) -> RootValidatorValues: ... + + +class V2CoreAfterRootValidator(Protocol): + """V2 validator with mode='after'.""" + + def __call__( + self, __fields_tuple: RootValidatorFieldsTuple, __info: core_schema.ValidationInfo + ) -> RootValidatorFieldsTuple: ... + + +def make_v1_generic_root_validator( + validator: V1RootValidatorFunction, pre: bool +) -> V2CoreBeforeRootValidator | V2CoreAfterRootValidator: + """Wrap a V1 style root validator for V2 compatibility. + + Args: + validator: The V1 style field validator. + pre: Whether the validator is a pre validator. + + Returns: + A wrapped V2 style validator. + """ + if pre is True: + # mode='before' for pydantic-core + def _wrapper1(values: RootValidatorValues, _: core_schema.ValidationInfo) -> RootValidatorValues: + return validator(values) + + return _wrapper1 + + # mode='after' for pydantic-core + def _wrapper2(fields_tuple: RootValidatorFieldsTuple, _: core_schema.ValidationInfo) -> RootValidatorFieldsTuple: + if len(fields_tuple) == 2: + # dataclass, this is easy + values, init_vars = fields_tuple + values = validator(values) + return values, init_vars + else: + # ugly hack: to match v1 behaviour, we merge values and model_extra, then split them up based on fields + # afterwards + model_dict, model_extra, fields_set = fields_tuple + if model_extra: + fields = set(model_dict.keys()) + model_dict.update(model_extra) + model_dict_new = validator(model_dict) + for k in list(model_dict_new.keys()): + if k not in fields: + model_extra[k] = model_dict_new.pop(k) + else: + model_dict_new = validator(model_dict) + return model_dict_new, model_extra, fields_set + + return _wrapper2 diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py new file mode 100644 index 0000000000000000000000000000000000000000..fc52f30889816b7161f41ae030df05e46ec5f557 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_discriminated_union.py @@ -0,0 +1,494 @@ +from __future__ import annotations as _annotations + +from collections.abc import Hashable, Sequence +from typing import TYPE_CHECKING, Any, cast + +from pydantic_core import CoreSchema, core_schema + +from ..errors import PydanticUserError +from . 
import _core_utils +from ._core_utils import ( + CoreSchemaField, +) + +if TYPE_CHECKING: + from ..types import Discriminator + from ._core_metadata import CoreMetadata + + +class MissingDefinitionForUnionRef(Exception): + """Raised when applying a discriminated union discriminator to a schema + requires a definition that is not yet defined + """ + + def __init__(self, ref: str) -> None: + self.ref = ref + super().__init__(f'Missing definition for ref {self.ref!r}') + + +def set_discriminator_in_metadata(schema: CoreSchema, discriminator: Any) -> None: + metadata = cast('CoreMetadata', schema.setdefault('metadata', {})) + metadata['pydantic_internal_union_discriminator'] = discriminator + + +def apply_discriminator( + schema: core_schema.CoreSchema, + discriminator: str | Discriminator, + definitions: dict[str, core_schema.CoreSchema] | None = None, +) -> core_schema.CoreSchema: + """Applies the discriminator and returns a new core schema. + + Args: + schema: The input schema. + discriminator: The name of the field which will serve as the discriminator. + definitions: A mapping of schema ref to schema. + + Returns: + The new core schema. + + Raises: + TypeError: + - If `discriminator` is used with invalid union variant. + - If `discriminator` is used with `Union` type with one variant. + - If `discriminator` value mapped to multiple choices. + MissingDefinitionForUnionRef: + If the definition for ref is missing. + PydanticUserError: + - If a model in union doesn't have a discriminator field. + - If discriminator field has a non-string alias. + - If discriminator fields have different aliases. + - If discriminator field not of type `Literal`. + """ + from ..types import Discriminator + + if isinstance(discriminator, Discriminator): + if isinstance(discriminator.discriminator, str): + discriminator = discriminator.discriminator + else: + return discriminator._convert_schema(schema) + + return _ApplyInferredDiscriminator(discriminator, definitions or {}).apply(schema) + + +class _ApplyInferredDiscriminator: + """This class is used to convert an input schema containing a union schema into one where that union is + replaced with a tagged-union, with all the associated debugging and performance benefits. + + This is done by: + * Validating that the input schema is compatible with the provided discriminator + * Introspecting the schema to determine which discriminator values should map to which union choices + * Handling various edge cases such as 'definitions', 'default', 'nullable' schemas, and more + + I have chosen to implement the conversion algorithm in this class, rather than a function, + to make it easier to maintain state while recursively walking the provided CoreSchema. + """ + + def __init__(self, discriminator: str, definitions: dict[str, core_schema.CoreSchema]): + # `discriminator` should be the name of the field which will serve as the discriminator. + # It must be the python name of the field, and *not* the field's alias. Note that as of now, + # all members of a discriminated union _must_ use a field with the same name as the discriminator. + # This may change if/when we expose a way to manually specify the TaggedUnionSchema's choices. 
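+        #
+        # As a rough usage sketch (hypothetical models) of what this class supports:
+        #
+        #     class Cat(BaseModel):
+        #         pet_type: Literal['cat']
+        #
+        #     class Dog(BaseModel):
+        #         pet_type: Literal['dog']
+        #
+        #     pet: Union[Cat, Dog] = Field(discriminator='pet_type')
+        #
+        # Here `discriminator` would be 'pet_type', and the union of the two model
+        # schemas is converted into a tagged-union keyed by the literal values
+        # 'cat' and 'dog'.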
+ self.discriminator = discriminator + + # `definitions` should contain a mapping of schema ref to schema for all schemas which might + # be referenced by some choice + self.definitions = definitions + + # `_discriminator_alias` will hold the value, if present, of the alias for the discriminator + # + # Note: following the v1 implementation, we currently disallow the use of different aliases + # for different choices. This is not a limitation of pydantic_core, but if we try to handle + # this, the inference logic gets complicated very quickly, and could result in confusing + # debugging challenges for users making subtle mistakes. + # + # Rather than trying to do the most powerful inference possible, I think we should eventually + # expose a way to more-manually control the way the TaggedUnionSchema is constructed through + # the use of a new type which would be placed as an Annotation on the Union type. This would + # provide the full flexibility/power of pydantic_core's TaggedUnionSchema where necessary for + # more complex cases, without over-complicating the inference logic for the common cases. + self._discriminator_alias: str | None = None + + # `_should_be_nullable` indicates whether the converted union has `None` as an allowed value. + # If `None` is an acceptable value of the (possibly-wrapped) union, we ignore it while + # constructing the TaggedUnionSchema, but set the `_should_be_nullable` attribute to True. + # Once we have constructed the TaggedUnionSchema, if `_should_be_nullable` is True, we ensure + # that the final schema gets wrapped as a NullableSchema. This has the same semantics on the + # python side, but resolves the issue that `None` cannot correspond to any discriminator values. + self._should_be_nullable = False + + # `_is_nullable` is used to track if the final produced schema will definitely be nullable; + # we set it to True if the input schema is wrapped in a nullable schema that we know will be preserved + # as an indication that, even if None is discovered as one of the union choices, we will not need to wrap + # the final value in another nullable schema. + # + # This is more complicated than just checking for the final outermost schema having type 'nullable' thanks + # to the possible presence of other wrapper schemas such as DefinitionsSchema, WithDefaultSchema, etc. + self._is_nullable = False + + # `_choices_to_handle` serves as a stack of choices to add to the tagged union. Initially, choices + # from the union in the wrapped schema will be appended to this list, and the recursive choice-handling + # algorithm may add more choices to this stack as (nested) unions are encountered. + self._choices_to_handle: list[core_schema.CoreSchema] = [] + + # `_tagged_union_choices` is built during the call to `apply`, and will hold the choices to be included + # in the output TaggedUnionSchema that will replace the union from the input schema + self._tagged_union_choices: dict[Hashable, core_schema.CoreSchema] = {} + + # `_used` is changed to True after applying the discriminator to prevent accidental reuse + self._used = False + + def apply(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema: + """Return a new CoreSchema based on `schema` that uses a tagged-union with the discriminator provided + to this class. + + Args: + schema: The input schema. + + Returns: + The new core schema. + + Raises: + TypeError: + - If `discriminator` is used with invalid union variant. + - If `discriminator` is used with `Union` type with one variant. 
+                - If `discriminator` value mapped to multiple choices.
+            MissingDefinitionForUnionRef:
+                If the definition for ref is missing.
+            PydanticUserError:
+                - If a model in union doesn't have a discriminator field.
+                - If discriminator field has a non-string alias.
+                - If discriminator fields have different aliases.
+                - If discriminator field not of type `Literal`.
+        """
+        assert not self._used
+        schema = self._apply_to_root(schema)
+        if self._should_be_nullable and not self._is_nullable:
+            schema = core_schema.nullable_schema(schema)
+        self._used = True
+        return schema
+
+    def _apply_to_root(self, schema: core_schema.CoreSchema) -> core_schema.CoreSchema:
+        """This method handles the outer-most stage of recursion over the input schema:
+        unwrapping nullable or definitions schemas, and calling the `_handle_choice`
+        method iteratively on the choices extracted (recursively) from the possibly-wrapped union.
+        """
+        if schema['type'] == 'nullable':
+            self._is_nullable = True
+            wrapped = self._apply_to_root(schema['schema'])
+            nullable_wrapper = schema.copy()
+            nullable_wrapper['schema'] = wrapped
+            return nullable_wrapper
+
+        if schema['type'] == 'definitions':
+            wrapped = self._apply_to_root(schema['schema'])
+            definitions_wrapper = schema.copy()
+            definitions_wrapper['schema'] = wrapped
+            return definitions_wrapper
+
+        if schema['type'] == 'definition-ref':
+            schema_ref = schema['schema_ref']
+            if schema_ref not in self.definitions:  # pragma: no cover
+                raise MissingDefinitionForUnionRef(schema_ref)
+
+            def_schema = self.definitions[schema_ref]
+            # If using a referenceable union as discriminated (e.g. `type Pet = Cat | Dog; field: Pet = Field(discriminator=...)`):
+            if def_schema['type'] == 'union':
+                schema = def_schema.copy()
+                schema.pop('ref')
+
+        if schema['type'] != 'union':
+            # If the schema is not a union, it probably means it just had a single member and
+            # was flattened by pydantic_core.
+            # However, it still may make sense to apply the discriminator to this schema,
+            # as a way to get discriminated-union-style error messages, so we allow this here.
+            schema = core_schema.union_schema([schema])
+
+        # Reverse the choices list before extending the stack so that they get handled in the order they occur
+        choices_schemas = [v[0] if isinstance(v, tuple) else v for v in schema['choices'][::-1]]
+        self._choices_to_handle.extend(choices_schemas)
+        while self._choices_to_handle:
+            choice = self._choices_to_handle.pop()
+            self._handle_choice(choice)
+
+        if self._discriminator_alias is not None and self._discriminator_alias != self.discriminator:
+            # * We need to annotate `discriminator` as a union here to handle both branches of this conditional
+            # * We need to annotate `discriminator` as list[list[str | int]] and not list[list[str]] due to the
+            #   invariance of list, and because list[list[str | int]] is the type of the discriminator argument
+            #   to tagged_union_schema below
+            # * See the docstring of pydantic_core.core_schema.tagged_union_schema for more details about how to
+            #   interpret the value of the discriminator argument to tagged_union_schema. (The list[list[str]] here
+            #   is the appropriate way to provide a list of fallback attributes to check for a discriminator value.)
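+            # For example (sketch): with discriminator 'pet_type' and a shared alias
+            # 'petType', the value built below is [['pet_type'], ['petType']], meaning
+            # "look for 'pet_type' first, then fall back to 'petType'".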
+            discriminator: str | list[list[str | int]] = [[self.discriminator], [self._discriminator_alias]]
+        else:
+            discriminator = self.discriminator
+        return core_schema.tagged_union_schema(
+            choices=self._tagged_union_choices,
+            discriminator=discriminator,
+            custom_error_type=schema.get('custom_error_type'),
+            custom_error_message=schema.get('custom_error_message'),
+            custom_error_context=schema.get('custom_error_context'),
+            strict=False,
+            from_attributes=True,
+            ref=schema.get('ref'),
+            metadata=schema.get('metadata'),
+            serialization=schema.get('serialization'),
+        )
+
+    def _handle_choice(self, choice: core_schema.CoreSchema) -> None:
+        """This method handles the "middle" stage of recursion over the input schema.
+        Specifically, it is responsible for handling each choice of the outermost union
+        (and any "coalesced" choices obtained from inner unions).
+
+        Here, "handling" entails:
+        * Coalescing nested unions and compatible tagged-unions
+        * Tracking the presence of 'none' and 'nullable' schemas occurring as choices
+        * Validating that each allowed discriminator value maps to a unique choice
+        * Updating the _tagged_union_choices mapping that will ultimately be used to build the TaggedUnionSchema.
+        """
+        if choice['type'] == 'definition-ref':
+            if choice['schema_ref'] not in self.definitions:
+                raise MissingDefinitionForUnionRef(choice['schema_ref'])
+
+        if choice['type'] == 'none':
+            self._should_be_nullable = True
+        elif choice['type'] == 'definitions':
+            self._handle_choice(choice['schema'])
+        elif choice['type'] == 'nullable':
+            self._should_be_nullable = True
+            self._handle_choice(choice['schema'])  # unwrap the nullable schema
+        elif choice['type'] == 'union':
+            # Reverse the choices list before extending the stack so that they get handled in the order they occur
+            choices_schemas = [v[0] if isinstance(v, tuple) else v for v in choice['choices'][::-1]]
+            self._choices_to_handle.extend(choices_schemas)
+        elif choice['type'] not in {
+            'model',
+            'typed-dict',
+            'tagged-union',
+            'lax-or-strict',
+            'dataclass',
+            'dataclass-args',
+            'definition-ref',
+        } and not _core_utils.is_function_with_inner_schema(choice):
+            # We should eventually handle 'definition-ref' as well
+            err_str = f'The core schema type {choice["type"]!r} is not a valid discriminated union variant.'
+            if choice['type'] == 'list':
+                err_str += (
+                    ' If you are making use of a list of union types, make sure the discriminator is applied to the '
+                    'union type and not the list (e.g. `list[Annotated[<T> | <S>, Field(discriminator=...)]]`).'
+                )
+            raise TypeError(err_str)
+        else:
+            if choice['type'] == 'tagged-union' and self._is_discriminator_shared(choice):
+                # In this case, this inner tagged-union is compatible with the outer tagged-union,
+                # and its choices can be coalesced into the outer TaggedUnionSchema.
+                subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
+                # Reverse the choices list before extending the stack so that they get handled in the order they occur
+                self._choices_to_handle.extend(subchoices[::-1])
+                return
+
+            inferred_discriminator_values = self._infer_discriminator_values_for_choice(choice, source_name=None)
+            self._set_unique_choice_for_values(choice, inferred_discriminator_values)
+
+    def _is_discriminator_shared(self, choice: core_schema.TaggedUnionSchema) -> bool:
+        """This method returns a boolean indicating whether the discriminator for the `choice`
+        is the same as that being used for the outermost tagged union. This is used to
+        determine whether this TaggedUnionSchema choice should be "coalesced" into the top level,
+        or whether it should be treated as a separate (nested) choice.
+        """
+        inner_discriminator = choice['discriminator']
+        return inner_discriminator == self.discriminator or (
+            isinstance(inner_discriminator, list)
+            and (self.discriminator in inner_discriminator or [self.discriminator] in inner_discriminator)
+        )
+
+    def _infer_discriminator_values_for_choice(  # noqa: C901
+        self, choice: core_schema.CoreSchema, source_name: str | None
+    ) -> list[str | int]:
+        """This function recurses over `choice`, extracting all discriminator values that should map to this choice.
+
+        `source_name` is accepted for the purpose of producing useful error messages.
+        """
+        if choice['type'] == 'definitions':
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
+
+        elif _core_utils.is_function_with_inner_schema(choice):
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=source_name)
+
+        elif choice['type'] == 'lax-or-strict':
+            return sorted(
+                set(
+                    self._infer_discriminator_values_for_choice(choice['lax_schema'], source_name=None)
+                    + self._infer_discriminator_values_for_choice(choice['strict_schema'], source_name=None)
+                )
+            )
+
+        elif choice['type'] == 'tagged-union':
+            values: list[str | int] = []
+            # Ignore str/int "choices" since these are just references to other choices
+            subchoices = [x for x in choice['choices'].values() if not isinstance(x, (str, int))]
+            for subchoice in subchoices:
+                subchoice_values = self._infer_discriminator_values_for_choice(subchoice, source_name=None)
+                values.extend(subchoice_values)
+            return values
+
+        elif choice['type'] == 'union':
+            values = []
+            for subchoice in choice['choices']:
+                subchoice_schema = subchoice[0] if isinstance(subchoice, tuple) else subchoice
+                subchoice_values = self._infer_discriminator_values_for_choice(subchoice_schema, source_name=None)
+                values.extend(subchoice_values)
+            return values
+
+        elif choice['type'] == 'nullable':
+            self._should_be_nullable = True
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=None)
+
+        elif choice['type'] == 'model':
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
+
+        elif choice['type'] == 'dataclass':
+            return self._infer_discriminator_values_for_choice(choice['schema'], source_name=choice['cls'].__name__)
+
+        elif choice['type'] == 'model-fields':
+            return self._infer_discriminator_values_for_model_choice(choice, source_name=source_name)
+
+        elif choice['type'] == 'dataclass-args':
+            return self._infer_discriminator_values_for_dataclass_choice(choice, source_name=source_name)
+
+        elif choice['type'] == 'typed-dict':
+            return self._infer_discriminator_values_for_typed_dict_choice(choice, source_name=source_name)
+
+        elif choice['type'] == 'definition-ref':
+            schema_ref = choice['schema_ref']
+            if schema_ref not in self.definitions:
+                raise MissingDefinitionForUnionRef(schema_ref)
+            return self._infer_discriminator_values_for_choice(self.definitions[schema_ref], source_name=source_name)
+        else:
+            err_str = f'The core schema type {choice["type"]!r} is not a valid discriminated union variant.'
+            if choice['type'] == 'list':
+                err_str += (
+                    ' If you are making use of a list of union types, make sure the discriminator is applied to the '
+                    'union type and not the list (e.g. `list[Annotated[<T> | <S>, Field(discriminator=...)]]`).'
+ ) + raise TypeError(err_str) + + def _infer_discriminator_values_for_typed_dict_choice( + self, choice: core_schema.TypedDictSchema, source_name: str | None = None + ) -> list[str | int]: + """This method just extracts the _infer_discriminator_values_for_choice logic specific to TypedDictSchema + for the sake of readability. + """ + source = 'TypedDict' if source_name is None else f'TypedDict {source_name!r}' + field = choice['fields'].get(self.discriminator) + if field is None: + raise PydanticUserError( + f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field' + ) + return self._infer_discriminator_values_for_field(field, source) + + def _infer_discriminator_values_for_model_choice( + self, choice: core_schema.ModelFieldsSchema, source_name: str | None = None + ) -> list[str | int]: + source = 'ModelFields' if source_name is None else f'Model {source_name!r}' + field = choice['fields'].get(self.discriminator) + if field is None: + raise PydanticUserError( + f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field' + ) + return self._infer_discriminator_values_for_field(field, source) + + def _infer_discriminator_values_for_dataclass_choice( + self, choice: core_schema.DataclassArgsSchema, source_name: str | None = None + ) -> list[str | int]: + source = 'DataclassArgs' if source_name is None else f'Dataclass {source_name!r}' + for field in choice['fields']: + if field['name'] == self.discriminator: + break + else: + raise PydanticUserError( + f'{source} needs a discriminator field for key {self.discriminator!r}', code='discriminator-no-field' + ) + return self._infer_discriminator_values_for_field(field, source) + + def _infer_discriminator_values_for_field(self, field: CoreSchemaField, source: str) -> list[str | int]: + if field['type'] == 'computed-field': + # This should never occur as a discriminator, as it is only relevant to serialization + return [] + alias = field.get('validation_alias', self.discriminator) + if not isinstance(alias, str): + raise PydanticUserError( + f'Alias {alias!r} is not supported in a discriminated union', code='discriminator-alias-type' + ) + if self._discriminator_alias is None: + self._discriminator_alias = alias + elif self._discriminator_alias != alias: + raise PydanticUserError( + f'Aliases for discriminator {self.discriminator!r} must be the same ' + f'(got {alias}, {self._discriminator_alias})', + code='discriminator-alias', + ) + return self._infer_discriminator_values_for_inner_schema(field['schema'], source) + + def _infer_discriminator_values_for_inner_schema( + self, schema: core_schema.CoreSchema, source: str + ) -> list[str | int]: + """When inferring discriminator values for a field, we typically extract the expected values from a literal + schema. This function does that, but also handles nested unions and defaults. + """ + if schema['type'] == 'literal': + return schema['expected'] + + elif schema['type'] == 'union': + # Generally when multiple values are allowed they should be placed in a single `Literal`, but + # we add this case to handle the situation where a field is annotated as a `Union` of `Literal`s. 
+ # For example, this lets us handle `Union[Literal['key'], Union[Literal['Key'], Literal['KEY']]]` + values: list[Any] = [] + for choice in schema['choices']: + choice_schema = choice[0] if isinstance(choice, tuple) else choice + choice_values = self._infer_discriminator_values_for_inner_schema(choice_schema, source) + values.extend(choice_values) + return values + + elif schema['type'] == 'default': + # This will happen if the field has a default value; we ignore it while extracting the discriminator values + return self._infer_discriminator_values_for_inner_schema(schema['schema'], source) + + elif schema['type'] == 'function-after': + # After validators don't affect the discriminator values + return self._infer_discriminator_values_for_inner_schema(schema['schema'], source) + + elif schema['type'] == 'model' and schema.get('root_model'): + # Support RootModel[Literal[...]] as discriminator field type + return self._infer_discriminator_values_for_inner_schema(schema['schema'], source) + + elif schema['type'] in {'function-before', 'function-wrap', 'function-plain'}: + validator_type = repr(schema['type'].split('-')[1]) + raise PydanticUserError( + f'Cannot use a mode={validator_type} validator in the' + f' discriminator field {self.discriminator!r} of {source}', + code='discriminator-validator', + ) + + else: + raise PydanticUserError( + f'{source} needs field {self.discriminator!r} to be of type `Literal`', + code='discriminator-needs-literal', + ) + + def _set_unique_choice_for_values(self, choice: core_schema.CoreSchema, values: Sequence[str | int]) -> None: + """This method updates `self.tagged_union_choices` so that all provided (discriminator) `values` map to the + provided `choice`, validating that none of these values already map to another (different) choice. + """ + for discriminator_value in values: + if discriminator_value in self._tagged_union_choices: + # It is okay if `value` is already in tagged_union_choices as long as it maps to the same value. + # Because tagged_union_choices may map values to other values, we need to walk the choices dict + # until we get to a "real" choice, and confirm that is equal to the one assigned. 
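+                # For example (sketch): `Literal['cat', 'CAT']` on one model maps both
+                # values to that same model, which is fine; the value 'cat' appearing on
+                # two different models raises the TypeError below.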
+ existing_choice = self._tagged_union_choices[discriminator_value] + if existing_choice != choice: + raise TypeError( + f'Value {discriminator_value!r} for discriminator ' + f'{self.discriminator!r} mapped to multiple choices' + ) + else: + self._tagged_union_choices[discriminator_value] = choice diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py new file mode 100644 index 0000000000000000000000000000000000000000..6df77bf6c7786a480a9b5aa4c9a816944dc72ed2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_docs_extraction.py @@ -0,0 +1,113 @@ +"""Utilities related to attribute docstring extraction.""" + +from __future__ import annotations + +import ast +import inspect +import sys +import textwrap +from typing import Any + + +class DocstringVisitor(ast.NodeVisitor): + def __init__(self) -> None: + super().__init__() + + self.target: str | None = None + self.attrs: dict[str, str] = {} + self.previous_node_type: type[ast.AST] | None = None + + def visit(self, node: ast.AST) -> Any: + node_result = super().visit(node) + self.previous_node_type = type(node) + return node_result + + def visit_AnnAssign(self, node: ast.AnnAssign) -> Any: + if isinstance(node.target, ast.Name): + self.target = node.target.id + + def visit_Expr(self, node: ast.Expr) -> Any: + if ( + isinstance(node.value, ast.Constant) + and isinstance(node.value.value, str) + and self.previous_node_type is ast.AnnAssign + ): + docstring = inspect.cleandoc(node.value.value) + if self.target: + self.attrs[self.target] = docstring + self.target = None + + +def _dedent_source_lines(source: list[str]) -> str: + # Required for nested class definitions, e.g. in a function block + dedent_source = textwrap.dedent(''.join(source)) + if dedent_source.startswith((' ', '\t')): + # We are in the case where there's a dedented (usually multiline) string + # at a lower indentation level than the class itself. We wrap our class + # in a function as a workaround. + dedent_source = f'def dedent_workaround():\n{dedent_source}' + return dedent_source + + +def _extract_source_from_frame(cls: type[Any]) -> list[str] | None: + frame = inspect.currentframe() + + while frame: + if inspect.getmodule(frame) is inspect.getmodule(cls): + lnum = frame.f_lineno + try: + lines, _ = inspect.findsource(frame) + except OSError: # pragma: no cover + # Source can't be retrieved (maybe because running in an interactive terminal), + # we don't want to error here. + pass + else: + block_lines = inspect.getblock(lines[lnum - 1 :]) + dedent_source = _dedent_source_lines(block_lines) + try: + block_tree = ast.parse(dedent_source) + except SyntaxError: + pass + else: + stmt = block_tree.body[0] + if isinstance(stmt, ast.FunctionDef) and stmt.name == 'dedent_workaround': + # `_dedent_source_lines` wrapped the class around the workaround function + stmt = stmt.body[0] + if isinstance(stmt, ast.ClassDef) and stmt.name == cls.__name__: + return block_lines + + frame = frame.f_back + + +def extract_docstrings_from_cls(cls: type[Any], use_inspect: bool = False) -> dict[str, str]: + """Map model attributes and their corresponding docstring. + + Args: + cls: The class of the Pydantic model to inspect. + use_inspect: Whether to skip usage of frames to find the object and use + the `inspect` module instead. + + Returns: + A mapping containing attribute names and their corresponding docstring. 
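+
+    For example (an illustrative sketch):
+
+        class Model(BaseModel):
+            a: int
+            '''The docstring of `a`.'''
+
+        extract_docstrings_from_cls(Model)  # {'a': 'The docstring of `a`.'}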
+    """
+    if use_inspect or sys.version_info >= (3, 13):
+        # On Python < 3.13, `inspect.getsourcelines()` might not work as expected
+        # if two classes have the same name in the same source file.
+        # On Python 3.13+, it will use the new `__firstlineno__` class attribute,
+        # making it way more robust.
+        try:
+            source, _ = inspect.getsourcelines(cls)
+        except OSError:  # pragma: no cover
+            return {}
+    else:
+        # TODO remove this implementation when we drop support for Python 3.12:
+        source = _extract_source_from_frame(cls)
+
+    if not source:
+        return {}
+
+    dedent_source = _dedent_source_lines(source)
+
+    visitor = DocstringVisitor()
+    visitor.visit(ast.parse(dedent_source))
+    return visitor.attrs
diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a2a96b9269c18a40550cb8bd4965e31e8e8c605
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_fields.py
@@ -0,0 +1,729 @@
+"""Private logic related to fields (the `Field()` function and `FieldInfo` class), and arguments to `Annotated`."""
+
+from __future__ import annotations as _annotations
+
+import dataclasses
+import warnings
+from collections.abc import Mapping
+from functools import cache
+from inspect import Parameter, ismethoddescriptor
+from re import Pattern
+from typing import TYPE_CHECKING, Any, Callable, TypeVar, cast
+
+from pydantic_core import PydanticUndefined
+from typing_extensions import TypeIs
+from typing_inspection.introspection import AnnotationSource
+
+from pydantic import PydanticDeprecatedSince211
+from pydantic.errors import PydanticUserError
+
+from ..aliases import AliasGenerator
+from . import _generics, _typing_extra
+from ._config import ConfigWrapper
+from ._docs_extraction import extract_docstrings_from_cls
+from ._import_utils import import_cached_base_model, import_cached_field_info
+from ._internal_dataclass import slots_true
+from ._namespace_utils import NsResolver
+from ._repr import Representation
+from ._utils import can_be_positional, get_first_not_none
+
+if TYPE_CHECKING:
+    from annotated_types import BaseMetadata
+
+    from ..fields import FieldInfo
+    from ..main import BaseModel
+    from ._dataclasses import PydanticDataclass, StandardDataclass
+    from ._decorators import DecoratorInfos
+
+
+class PydanticMetadata(Representation):
+    """Base class for annotation markers like `Strict`."""
+
+    __slots__ = ()
+
+
+@dataclasses.dataclass(**slots_true)  # TODO: make kw_only when we drop support for 3.9.
+class PydanticExtraInfo:
+    # TODO: make use of PEP 747:
+    annotation: Any
+    complete: bool
+
+
+def pydantic_general_metadata(**metadata: Any) -> BaseMetadata:
+    """Create a new `_PydanticGeneralMetadata` instance with the given metadata.
+
+    Args:
+        **metadata: The metadata to add.
+
+    Returns:
+        The new `_PydanticGeneralMetadata` instance.
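+
+    For example (sketch):
+
+        pydantic_general_metadata(max_digits=4, decimal_places=2)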
+ """ + return _general_metadata_cls()(metadata) # type: ignore + + +@cache +def _general_metadata_cls() -> type[BaseMetadata]: + """Do it this way to avoid importing `annotated_types` at import time.""" + from annotated_types import BaseMetadata + + class _PydanticGeneralMetadata(PydanticMetadata, BaseMetadata): + """Pydantic general metadata like `max_digits`.""" + + def __init__(self, metadata: Any): + self.__dict__ = metadata + + return _PydanticGeneralMetadata # type: ignore + + +def _check_protected_namespaces( + protected_namespaces: tuple[str | Pattern[str], ...], + ann_name: str, + bases: tuple[type[Any], ...], + cls_name: str, +) -> None: + BaseModel = import_cached_base_model() + + for protected_namespace in protected_namespaces: + ns_violation = False + if isinstance(protected_namespace, Pattern): + ns_violation = protected_namespace.match(ann_name) is not None + elif isinstance(protected_namespace, str): + ns_violation = ann_name.startswith(protected_namespace) + + if ns_violation: + for b in bases: + if hasattr(b, ann_name): + if not (issubclass(b, BaseModel) and ann_name in getattr(b, '__pydantic_fields__', {})): + raise ValueError( + f'Field {ann_name!r} conflicts with member {getattr(b, ann_name)}' + f' of protected namespace {protected_namespace!r}.' + ) + else: + valid_namespaces: list[str] = [] + for pn in protected_namespaces: + if isinstance(pn, Pattern): + if not pn.match(ann_name): + valid_namespaces.append(f're.compile({pn.pattern!r})') + else: + if not ann_name.startswith(pn): + valid_namespaces.append(f"'{pn}'") + + valid_namespaces_str = f'({", ".join(valid_namespaces)}{",)" if len(valid_namespaces) == 1 else ")"}' + + warnings.warn( + f'Field {ann_name!r} in {cls_name!r} conflicts with protected namespace {protected_namespace!r}.\n\n' + f"You may be able to solve this by setting the 'protected_namespaces' configuration to {valid_namespaces_str}.", + UserWarning, + stacklevel=5, + ) + + +def _update_fields_from_docstrings(cls: type[Any], fields: dict[str, FieldInfo], use_inspect: bool = False) -> None: + fields_docs = extract_docstrings_from_cls(cls, use_inspect=use_inspect) + for ann_name, field_info in fields.items(): + if field_info.description is None and ann_name in fields_docs: + field_info.description = fields_docs[ann_name] + + +def _apply_field_title_generator_to_field_info( + title_generator: Callable[[str, FieldInfo], str], + field_name: str, + field_info: FieldInfo, +): + if field_info.title is None: + title = title_generator(field_name, field_info) + if not isinstance(title, str): + raise TypeError(f'field_title_generator {title_generator} must return str, not {title.__class__}') + + field_info.title = title + + +def _apply_alias_generator_to_field_info( + alias_generator: Callable[[str], str] | AliasGenerator, field_name: str, field_info: FieldInfo +): + """Apply an alias generator to aliases on a `FieldInfo` instance if appropriate. + + Args: + alias_generator: A callable that takes a string and returns a string, or an `AliasGenerator` instance. + field_name: The name of the field from which to generate the alias. + field_info: The `FieldInfo` instance to which the alias generator is (maybe) applied. + """ + # Apply an alias_generator if + # 1. An alias is not specified + # 2. 
An alias is specified, but the priority is <= 1 + if ( + field_info.alias_priority is None + or field_info.alias_priority <= 1 + or field_info.alias is None + or field_info.validation_alias is None + or field_info.serialization_alias is None + ): + alias, validation_alias, serialization_alias = None, None, None + + if isinstance(alias_generator, AliasGenerator): + alias, validation_alias, serialization_alias = alias_generator.generate_aliases(field_name) + elif callable(alias_generator): + alias = alias_generator(field_name) + if not isinstance(alias, str): + raise TypeError(f'alias_generator {alias_generator} must return str, not {alias.__class__}') + + # if priority is not set, we set to 1 + # which supports the case where the alias_generator from a child class is used + # to generate an alias for a field in a parent class + if field_info.alias_priority is None or field_info.alias_priority <= 1: + field_info.alias_priority = 1 + + # if the priority is 1, then we set the aliases to the generated alias + if field_info.alias_priority == 1: + field_info.serialization_alias = get_first_not_none(serialization_alias, alias) + field_info.validation_alias = get_first_not_none(validation_alias, alias) + field_info.alias = alias + + # if any of the aliases are not set, then we set them to the corresponding generated alias + if field_info.alias is None: + field_info.alias = alias + if field_info.serialization_alias is None: + field_info.serialization_alias = get_first_not_none(serialization_alias, alias) + if field_info.validation_alias is None: + field_info.validation_alias = get_first_not_none(validation_alias, alias) + + +def update_field_from_config(config_wrapper: ConfigWrapper, field_name: str, field_info: FieldInfo) -> None: + """Update the `FieldInfo` instance from the configuration set on the model it belongs to. + + This will apply the title and alias generators from the configuration. + + Args: + config_wrapper: The configuration from the model. + field_name: The field name the `FieldInfo` instance is attached to. + field_info: The `FieldInfo` instance to update. + """ + field_title_generator = field_info.field_title_generator or config_wrapper.field_title_generator + if field_title_generator is not None: + _apply_field_title_generator_to_field_info(field_title_generator, field_name, field_info) + if config_wrapper.alias_generator is not None: + _apply_alias_generator_to_field_info(config_wrapper.alias_generator, field_name, field_info) + + +_deprecated_method_names = {'dict', 'json', 'copy', '_iter', '_copy_and_set_values', '_calculate_keys'} + +_deprecated_classmethod_names = { + 'parse_obj', + 'parse_raw', + 'parse_file', + 'from_orm', + 'construct', + 'schema', + 'schema_json', + 'validate', + 'update_forward_refs', + '_get_value', +} + + +def collect_model_fields( # noqa: C901 + cls: type[BaseModel], + config_wrapper: ConfigWrapper, + ns_resolver: NsResolver, + *, + typevars_map: Mapping[TypeVar, Any] | None = None, +) -> tuple[dict[str, FieldInfo], PydanticExtraInfo | None, set[str]]: + """Collect the fields and class variables names of a nascent Pydantic model. + + The fields collection process is *lenient*, meaning it won't error if string annotations + fail to evaluate. If this happens, the original annotation (and assigned value, if any) + is stored on the created `FieldInfo` instance. + + The `rebuild_model_fields()` should be called at a later point (e.g. when rebuilding the model), + and will make use of these stored attributes. + + Args: + cls: BaseModel or dataclass. 
+ config_wrapper: The config wrapper instance. + ns_resolver: Namespace resolver to use when getting model annotations. + typevars_map: A dictionary mapping type variables to their concrete types. + + Returns: + A three-tuple containing the model fields, the `PydanticExtraInfo` instance if the `__pydantic_extra__` annotation is set, + and class variables names. + + Raises: + NameError: + - If there is a conflict between a field name and protected namespaces. + - If there is a field other than `root` in `RootModel`. + - If a field shadows an attribute in the parent model. + """ + FieldInfo_ = import_cached_field_info() + BaseModel_ = import_cached_base_model() + + bases = cls.__bases__ + parent_fields_lookup: dict[str, FieldInfo] = {} + for base in reversed(bases): + if model_fields := getattr(base, '__pydantic_fields__', None): + parent_fields_lookup.update(model_fields) + + type_hints = _typing_extra.get_model_type_hints(cls, ns_resolver=ns_resolver) + + # `cls_annotations` is only used to determine if an annotation comes from a parent class + cls_annotations = _typing_extra.safe_get_annotations(cls) + + fields: dict[str, FieldInfo] = {} + + class_vars: set[str] = set() + for ann_name, (ann_type, evaluated) in type_hints.items(): + if ann_name == 'model_config': + # We never want to treat `model_config` as a field + # Note: we may need to change this logic if/when we introduce a `BareModel` class with no + # protected namespaces (where `model_config` might be allowed as a field name) + continue + + _check_protected_namespaces( + protected_namespaces=config_wrapper.protected_namespaces, + ann_name=ann_name, + bases=bases, + cls_name=cls.__name__, + ) + + if _typing_extra.is_classvar_annotation(ann_type): + class_vars.add(ann_name) + continue + + assigned_value = getattr(cls, ann_name, PydanticUndefined) + if assigned_value is not PydanticUndefined and ( + # One of the deprecated instance methods was used as a field name (e.g. `dict()`): + any(getattr(BaseModel_, depr_name, None) is assigned_value for depr_name in _deprecated_method_names) + # One of the deprecated class methods was used as a field name (e.g. `schema()`): + or ( + hasattr(assigned_value, '__func__') + and any( + getattr(getattr(BaseModel_, depr_name, None), '__func__', None) is assigned_value.__func__ # pyright: ignore[reportAttributeAccessIssue] + for depr_name in _deprecated_classmethod_names + ) + ) + ): + # Then `assigned_value` would be the method, even though no default was specified: + assigned_value = PydanticUndefined + + if not is_valid_field_name(ann_name): + continue + if cls.__pydantic_root_model__ and ann_name != 'root': + raise NameError( + f"Unexpected field with name {ann_name!r}; only 'root' is allowed as a field of a `RootModel`" + ) + + for base in bases: + if hasattr(base, ann_name): + if ann_name not in cls_annotations: + # Don't warn when a field exists in a parent class but has not been defined in the current class + continue + + # when building a generic model with `MyModel[int]`, the generic_origin check makes sure we don't get + # "... 
shadows an attribute" warnings
+                generic_origin = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin')
+                if base is generic_origin:
+                    # Don't warn when "shadowing" of attributes occurs in parametrized generics
+                    continue
+
+                dataclass_fields = {
+                    field.name for field in (dataclasses.fields(base) if dataclasses.is_dataclass(base) else ())
+                }
+                if ann_name in dataclass_fields:
+                    # Don't warn when inheriting stdlib dataclasses whose fields are "shadowed" by defaults being set
+                    # on the class instance.
+                    continue
+
+                warnings.warn(
+                    f'Field name "{ann_name}" in "{cls.__qualname__}" shadows an attribute in parent '
+                    f'"{base.__qualname__}"',
+                    UserWarning,
+                    stacklevel=4,
+                )
+
+        if assigned_value is PydanticUndefined:  # no assignment, just a plain annotation
+            if ann_name in cls_annotations or ann_name not in parent_fields_lookup:
+                # field is either:
+                # - present in the current model's annotations (and *not* from parent classes)
+                # - not found on any base classes; this seems to be caused by fields not getting
+                #   generated due to models not being fully defined while initializing recursive models.
+                #   Nothing stops us from just creating a `FieldInfo` for this type hint, so we do this.
+                field_info = FieldInfo_.from_annotation(ann_type, _source=AnnotationSource.CLASS)
+                field_info._original_annotation = ann_type
+                if not evaluated:
+                    field_info._complete = False
+                    # Store the original annotation that should be used to rebuild
+                    # the field info later:
+            else:
+                # The field was present on one of the (possibly multiple) base classes, we make a copy directly from it.
+                parent_field_info = parent_fields_lookup[ann_name]._copy()
+
+                # The only case where substituting the type variables is relevant (i.e. when `typevars_map` is not empty)
+                # is when a generic class is parameterized (e.g. `MyGenericModel[int, str]`), which creates a new class object
+                # (unlike the stdlib generics that create a generic alias). In this case, we are guaranteed to only have to copy
+                # from the origin/parent model (e.g. `MyGenericModel`).
+                if typevars_map:
+                    field_info = _recreate_field_info(
+                        parent_field_info, ns_resolver=ns_resolver, typevars_map=typevars_map, lenient=True
+                    )
+                else:
+                    field_info = parent_field_info
+
+        else:  # An assigned value is present (either the default value, or a `Field()` function)
+            if isinstance(assigned_value, FieldInfo_) and ismethoddescriptor(assigned_value.default):
+                # `assigned_value` was fetched using `getattr`, which triggers a call to `__get__`
+                # for descriptors, so we do the same if the `= field(default=...)` form is used.
+                # Note that we only do this for method descriptors for now, we might want to
+                # extend this to any descriptor in the future (by simply checking for
+                # `hasattr(assigned_value.default, '__get__')`).
+                default = assigned_value.default.__get__(None, cls)
+                assigned_value.default = default
+                assigned_value._attributes_set['default'] = default
+
+            field_info = FieldInfo_.from_annotated_attribute(ann_type, assigned_value, _source=AnnotationSource.CLASS)
+
+            # Store the original annotation and assignment value that could be used to rebuild the field info later.
+            field_info._original_assignment = assigned_value
+            field_info._original_annotation = ann_type
+            if not evaluated:
+                field_info._complete = False
+            elif 'final' in field_info._qualifiers and not field_info.is_required():
+                warnings.warn(
+                    f'Annotation {ann_name!r} is marked as final and has a default value. Pydantic treats {ann_name!r} as a '
+                    'class variable, but it will be considered as a normal field in V3 to be aligned with dataclasses. If you '
+                    f'still want {ann_name!r} to be considered as a class variable, annotate it as: `ClassVar[<type>] = <default>.`',
+                    category=PydanticDeprecatedSince211,
+                    # Incorrect when `create_model` is used, but the chance that final with a default is used is low in that case:
+                    stacklevel=4,
+                )
+                class_vars.add(ann_name)
+                continue
+
+        # attributes which are fields are removed from the class namespace:
+        # 1. To match the behaviour of annotation-only fields
+        # 2. To avoid false positives in the NameError check above
+        try:
+            delattr(cls, ann_name)
+        except AttributeError:
+            pass  # indicates the attribute was on a parent class
+
+        # Use cls.__dict__['__pydantic_decorators__'] instead of cls.__pydantic_decorators__
+        # to make sure the decorators have already been built for this exact class
+        decorators: DecoratorInfos = cls.__dict__['__pydantic_decorators__']
+        if ann_name in decorators.computed_fields:
+            raise TypeError(
+                f'Field {ann_name!r} of class {cls.__name__!r} overrides symbol of same name in a parent class. '
+                'This override with a computed_field is incompatible.'
+            )
+        fields[ann_name] = field_info
+
+        if field_info._complete:
+            # If not complete, this will be called in `rebuild_model_fields()`:
+            update_field_from_config(config_wrapper, ann_name, field_info)
+
+    if config_wrapper.use_attribute_docstrings:
+        _update_fields_from_docstrings(cls, fields)
+
+    pydantic_extra_info: PydanticExtraInfo | None = None
+    if '__pydantic_extra__' in type_hints:
+        ann, complete = type_hints['__pydantic_extra__']
+        pydantic_extra_info = PydanticExtraInfo(
+            annotation=ann,
+            complete=complete,
+        )
+
+    return fields, pydantic_extra_info, class_vars
+
+
+def rebuild_model_fields(
+    cls: type[BaseModel],
+    *,
+    config_wrapper: ConfigWrapper,
+    ns_resolver: NsResolver,
+    typevars_map: Mapping[TypeVar, Any],
+) -> tuple[dict[str, FieldInfo], PydanticExtraInfo | None]:
+    """Rebuild the (already present) model fields by trying to reevaluate annotations.
+
+    This function should be called whenever a model with incomplete fields is encountered.
+
+    Returns:
+        A two-tuple, the first element being the rebuilt fields, the second element being
+        the rebuilt `PydanticExtraInfo` instance, if available.
+
+    Raises:
+        NameError: If one of the annotations failed to evaluate.
+
+    Note:
+        This function *doesn't* mutate the model fields in place, as it can be called during
+        schema generation, where you don't want to mutate other model's fields.
+    """
+    rebuilt_fields: dict[str, FieldInfo] = {}
+    with ns_resolver.push(cls):
+        for f_name, field_info in cls.__pydantic_fields__.items():
+            if field_info._complete:
+                rebuilt_fields[f_name] = field_info
+            else:
+                new_field = _recreate_field_info(
+                    field_info, ns_resolver=ns_resolver, typevars_map=typevars_map, lenient=False
+                )
+                update_field_from_config(config_wrapper, f_name, new_field)
+                rebuilt_fields[f_name] = new_field
+
+    if cls.__pydantic_extra_info__ is not None and not cls.__pydantic_extra_info__.complete:
+        rebuilt_extra_info = PydanticExtraInfo(
+            annotation=_typing_extra.eval_type(
+                cls.__pydantic_extra_info__.annotation, *ns_resolver.types_namespace
+            ),
+            complete=True,
+        )
+    else:
+        rebuilt_extra_info = cls.__pydantic_extra_info__
+
+    return rebuilt_fields, rebuilt_extra_info
+
+
+def _recreate_field_info(
+    field_info: FieldInfo,
+    ns_resolver: NsResolver,
+    typevars_map: Mapping[TypeVar, Any],
+    *,
+    lenient: bool,
+) -> FieldInfo:
+    FieldInfo_ = import_cached_field_info()
+
+    existing_desc = field_info.description
+    if lenient:
+        ann = _generics.replace_types(field_info._original_annotation, typevars_map)
+        ann, evaluated = _typing_extra.try_eval_type(
+            ann,
+            *ns_resolver.types_namespace,
+        )
+    else:
+        # Not the best pattern, maybe we could ship our own `eval_type()`,
+        # that would replace the type variables on the fly during evaluation.
+        ann = _typing_extra.eval_type(
+            field_info._original_annotation,
+            *ns_resolver.types_namespace,
+        )
+        ann = _generics.replace_types(ann, typevars_map)
+        ann = _typing_extra.eval_type(
+            ann,
+            *ns_resolver.types_namespace,
+        )
+        evaluated = True
+
+    if (assign := field_info._original_assignment) is PydanticUndefined:
+        new_field = FieldInfo_.from_annotation(ann, _source=AnnotationSource.CLASS)
+    else:
+        new_field = FieldInfo_.from_annotated_attribute(ann, assign, _source=AnnotationSource.CLASS)
+        new_field._original_assignment = assign
+    new_field._original_annotation = ann
+    # The description might come from the docstring if `use_attribute_docstrings` was `True`:
+    new_field.description = new_field.description if new_field.description is not None else existing_desc
+    if not evaluated:
+        new_field._complete = False
+
+    return new_field
+
+
+def collect_dataclass_fields(
+    cls: type[StandardDataclass],
+    *,
+    config_wrapper: ConfigWrapper,
+    ns_resolver: NsResolver | None = None,
+    typevars_map: dict[Any, Any] | None = None,
+) -> dict[str, FieldInfo]:
+    """Collect the fields of a dataclass.
+
+    Args:
+        cls: dataclass.
+        config_wrapper: The config wrapper instance.
+        ns_resolver: Namespace resolver to use when getting dataclass annotations.
+            Defaults to an empty instance.
+        typevars_map: A dictionary mapping type variables to their concrete types.
+
+    Returns:
+        The dataclass fields.
+    """
+    FieldInfo_ = import_cached_field_info()
+
+    fields: dict[str, FieldInfo] = {}
+    ns_resolver = ns_resolver or NsResolver()
+    dataclass_fields = cls.__dataclass_fields__
+
+    # The logic here is similar to `_typing_extra.get_cls_type_hints`,
+    # although we do it manually as stdlib dataclasses already have annotations
+    # collected in each class:
+    for base in reversed(cls.__mro__):
+        if not dataclasses.is_dataclass(base):
+            continue
+
+        with ns_resolver.push(base):
+            for ann_name, dataclass_field in dataclass_fields.items():
+                base_anns = _typing_extra.safe_get_annotations(base)
+
+                if ann_name not in base_anns:
+                    # `__dataclass_fields__` contains every field, even the ones from base classes.
+                    # Only collect the ones defined on `base`.
+ continue + + globalns, localns = ns_resolver.types_namespace + ann_type, evaluated = _typing_extra.try_eval_type(dataclass_field.type, globalns, localns) + + if _typing_extra.is_classvar_annotation(ann_type): + continue + + if ( + not dataclass_field.init + and dataclass_field.default is dataclasses.MISSING + and dataclass_field.default_factory is dataclasses.MISSING + ): + # TODO: We should probably do something with this so that validate_assignment behaves properly + # Issue: https://github.com/pydantic/pydantic/issues/5470 + continue + + if isinstance(dataclass_field.default, FieldInfo_): + if dataclass_field.default.init_var: + if dataclass_field.default.init is False: + raise PydanticUserError( + f'Dataclass field {ann_name} has init=False and init_var=True, but these are mutually exclusive.', + code='clashing-init-and-init-var', + ) + + # TODO: same note as above re validate_assignment + continue + field_info = FieldInfo_.from_annotated_attribute( + ann_type, dataclass_field.default, _source=AnnotationSource.DATACLASS + ) + field_info._original_assignment = dataclass_field.default + else: + field_info = FieldInfo_.from_annotated_attribute( + ann_type, dataclass_field, _source=AnnotationSource.DATACLASS + ) + field_info._original_assignment = dataclass_field + + if not evaluated: + field_info._complete = False + field_info._original_annotation = ann_type + + fields[ann_name] = field_info + update_field_from_config(config_wrapper, ann_name, field_info) + + if field_info.default is not PydanticUndefined and isinstance( + getattr(cls, ann_name, field_info), FieldInfo_ + ): + # We need this to fix the default when the "default" from __dataclass_fields__ is a pydantic.FieldInfo + setattr(cls, ann_name, field_info.default) + + if typevars_map: + for field in fields.values(): + # We don't pass any ns, as `field.annotation` + # was already evaluated. TODO: is this method relevant? + # Can't we just use `_generics.replace_types`? + field.apply_typevars_map(typevars_map) + + if config_wrapper.use_attribute_docstrings: + _update_fields_from_docstrings( + cls, + fields, + # We can't rely on the (more reliable) frame inspection method + # for stdlib dataclasses: + use_inspect=not hasattr(cls, '__is_pydantic_dataclass__'), + ) + + return fields + + +def rebuild_dataclass_fields( + cls: type[PydanticDataclass], + *, + config_wrapper: ConfigWrapper, + ns_resolver: NsResolver, + typevars_map: Mapping[TypeVar, Any], +) -> dict[str, FieldInfo]: + """Rebuild the (already present) dataclass fields by trying to reevaluate annotations. + + This function should be called whenever a dataclass with incomplete fields is encountered. + + Raises: + NameError: If one of the annotations failed to evaluate. + + Note: + This function *doesn't* mutate the dataclass fields in place, as it can be called during + schema generation, where you don't want to mutate other dataclasses' fields.
+ """ + FieldInfo_ = import_cached_field_info() + + rebuilt_fields: dict[str, FieldInfo] = {} + with ns_resolver.push(cls): + for f_name, field_info in cls.__pydantic_fields__.items(): + if field_info._complete: + rebuilt_fields[f_name] = field_info + else: + existing_desc = field_info.description + ann = _typing_extra.eval_type( + field_info._original_annotation, + *ns_resolver.types_namespace, + ) + ann = _generics.replace_types(ann, typevars_map) + new_field = FieldInfo_.from_annotated_attribute( + ann, + field_info._original_assignment, + _source=AnnotationSource.DATACLASS, + ) + + # The description might come from the docstring if `use_attribute_docstrings` was `True`: + new_field.description = new_field.description if new_field.description is not None else existing_desc + update_field_from_config(config_wrapper, f_name, new_field) + rebuilt_fields[f_name] = new_field + + return rebuilt_fields + + +def is_valid_field_name(name: str) -> bool: + return not name.startswith('_') + + +def is_valid_privateattr_name(name: str) -> bool: + return name.startswith('_') and not name.startswith('__') + + +def takes_validated_data_argument( + default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any], +) -> TypeIs[Callable[[dict[str, Any]], Any]]: + """Whether the provided default factory callable has a validated data parameter.""" + try: + sig = _typing_extra.signature_no_eval(default_factory) + except (ValueError, TypeError): + # `inspect.signature` might not be able to infer a signature, e.g. with C objects. + # In this case, we assume no data argument is present: + return False + + parameters = list(sig.parameters.values()) + + return len(parameters) == 1 and can_be_positional(parameters[0]) and parameters[0].default is Parameter.empty + + +def resolve_default_value( + default: Any, + default_factory: Callable[[], Any] | Callable[[dict[str, Any]], Any] | None, + *, + validated_data: dict[str, Any] | None = None, + call_default_factory: bool = False, +) -> Any: + """Resolve the default value using either a static default or a default_factory.""" + from ._utils import smart_deepcopy + + if default_factory is None: + return smart_deepcopy(default) + if call_default_factory: + if takes_validated_data_argument(default_factory=default_factory): + fac = cast('Callable[[dict[str, Any]], Any]', default_factory) + if validated_data is None: + raise ValueError( + "The default factory requires the 'validated_data' argument, which was not provided when calling 'get_default()'." + ) + return fac(validated_data) + else: + fac = cast('Callable[[], Any]', default_factory) + return fac() + + return PydanticUndefined diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py new file mode 100644 index 0000000000000000000000000000000000000000..231f81d11b87ef05d4765cd61d88d7348eb9e401 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_forward_ref.py @@ -0,0 +1,23 @@ +from __future__ import annotations as _annotations + +from dataclasses import dataclass +from typing import Union + + +@dataclass +class PydanticRecursiveRef: + type_ref: str + + __name__ = 'PydanticRecursiveRef' + __hash__ = object.__hash__ + + def __call__(self) -> None: + """Defining __call__ is necessary for the `typing` module to let you use an instance of + this class as the result of resolving a standard ForwardRef. 
+ """ + + def __or__(self, other): + return Union[self, other] # type: ignore + + def __ror__(self, other): + return Union[other, self] # type: ignore diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py new file mode 100644 index 0000000000000000000000000000000000000000..98fae40d6e311c432cc108ae6accf774d3c5c4e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_generate_schema.py @@ -0,0 +1,2934 @@ +"""Convert python types to pydantic-core schema.""" + +from __future__ import annotations as _annotations + +import collections.abc +import dataclasses +import datetime +import inspect +import os +import pathlib +import re +import sys +import typing +import warnings +from collections.abc import Generator, Iterable, Iterator, Mapping +from contextlib import contextmanager +from copy import copy +from decimal import Decimal +from enum import Enum +from fractions import Fraction +from functools import partial +from inspect import Parameter, _ParameterKind +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from itertools import chain +from operator import attrgetter +from types import FunctionType, GenericAlias, LambdaType, MethodType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Final, + ForwardRef, + Literal, + TypeVar, + Union, + cast, + overload, +) +from uuid import UUID +from zoneinfo import ZoneInfo + +import typing_extensions +from pydantic_core import ( + MISSING, + CoreSchema, + MultiHostUrl, + PydanticCustomError, + PydanticSerializationUnexpectedValue, + PydanticUndefined, + Url, + core_schema, + to_jsonable_python, +) +from typing_extensions import TypeAlias, TypeAliasType, get_args, get_origin, is_typeddict +from typing_inspection import typing_objects +from typing_inspection.introspection import AnnotationSource, get_literal_values, is_union_origin + +from ..aliases import AliasChoices, AliasPath +from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler +from ..config import ConfigDict, JsonDict, JsonEncoder, JsonSchemaExtraCallable +from ..errors import ( + PydanticForbiddenQualifier, + PydanticInvalidForJsonSchema, + PydanticSchemaGenerationError, + PydanticUndefinedAnnotation, + PydanticUserError, +) +from ..functional_validators import AfterValidator, BeforeValidator, FieldValidatorModes, PlainValidator, WrapValidator +from ..json_schema import JsonSchemaValue +from ..version import version_short +from ..warnings import ( + ArbitraryTypeWarning, + PydanticDeprecatedSince20, + TypedDictExtraConfigWarning, + UnsupportedFieldAttributeWarning, +) +from . 
import _decorators, _discriminated_union, _known_annotated_metadata, _repr, _typing_extra +from ._config import ConfigWrapper, ConfigWrapperStack +from ._core_metadata import CoreMetadata, update_core_metadata +from ._core_utils import ( + get_ref, + get_type_ref, + is_list_like_schema_with_items_schema, +) +from ._decorators import ( + Decorator, + DecoratorInfos, + FieldSerializerDecoratorInfo, + FieldValidatorDecoratorInfo, + ModelSerializerDecoratorInfo, + ModelValidatorDecoratorInfo, + RootValidatorDecoratorInfo, + ValidatorDecoratorInfo, + get_attribute_from_bases, + inspect_field_serializer, + inspect_model_serializer, + inspect_validator, +) +from ._docs_extraction import extract_docstrings_from_cls +from ._fields import ( + collect_dataclass_fields, + rebuild_dataclass_fields, + rebuild_model_fields, + takes_validated_data_argument, + update_field_from_config, +) +from ._forward_ref import PydanticRecursiveRef +from ._generics import get_standard_typevars_map, replace_types +from ._import_utils import import_cached_base_model, import_cached_field_info +from ._mock_val_ser import MockCoreSchema +from ._namespace_utils import NamespacesTuple, NsResolver +from ._schema_gather import MissingDefinitionError, gather_schemas_for_cleaning +from ._schema_generation_shared import CallbackGetCoreSchemaHandler +from ._utils import lenient_issubclass, smart_deepcopy + +if TYPE_CHECKING: + from ..fields import ComputedFieldInfo, FieldInfo + from ..main import BaseModel + from ..types import Discriminator + from ._dataclasses import StandardDataclass + from ._schema_generation_shared import GetJsonSchemaFunction + +_SUPPORTS_TYPEDDICT = sys.version_info >= (3, 12) + +FieldDecoratorInfo = Union[ValidatorDecoratorInfo, FieldValidatorDecoratorInfo, FieldSerializerDecoratorInfo] +FieldDecoratorInfoType = TypeVar('FieldDecoratorInfoType', bound=FieldDecoratorInfo) +AnyFieldDecorator = Union[ + Decorator[ValidatorDecoratorInfo], + Decorator[FieldValidatorDecoratorInfo], + Decorator[FieldSerializerDecoratorInfo], +] + +ModifyCoreSchemaWrapHandler: TypeAlias = GetCoreSchemaHandler +GetCoreSchemaFunction: TypeAlias = Callable[[Any, ModifyCoreSchemaWrapHandler], core_schema.CoreSchema] +ParametersCallback: TypeAlias = "Callable[[int, str, Any], Literal['skip'] | None]" + +TUPLE_TYPES: list[type] = [typing.Tuple, tuple] # noqa: UP006 +LIST_TYPES: list[type] = [typing.List, list, collections.abc.MutableSequence] # noqa: UP006 +SET_TYPES: list[type] = [typing.Set, set, collections.abc.MutableSet] # noqa: UP006 +FROZEN_SET_TYPES: list[type] = [typing.FrozenSet, frozenset, collections.abc.Set] # noqa: UP006 +DICT_TYPES: list[type] = [typing.Dict, dict] # noqa: UP006 +IP_TYPES: list[type] = [IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network] +SEQUENCE_TYPES: list[type] = [typing.Sequence, collections.abc.Sequence] +ITERABLE_TYPES: list[type] = [typing.Iterable, collections.abc.Iterable, typing.Generator, collections.abc.Generator] +TYPE_TYPES: list[type] = [typing.Type, type] # noqa: UP006 +PATTERN_TYPES: list[type] = [typing.Pattern, re.Pattern] +PATH_TYPES: list[type] = [ + os.PathLike, + pathlib.Path, + pathlib.PurePath, + pathlib.PosixPath, + pathlib.PurePosixPath, + pathlib.PureWindowsPath, +] +MAPPING_TYPES = [ + typing.Mapping, + typing.MutableMapping, + collections.abc.Mapping, + collections.abc.MutableMapping, + collections.OrderedDict, + typing_extensions.OrderedDict, + typing.DefaultDict, # noqa: UP006 + collections.defaultdict, +] +COUNTER_TYPES = [collections.Counter, 
typing.Counter] +DEQUE_TYPES: list[type] = [collections.deque, typing.Deque] # noqa: UP006 + +# Note: This does not play very well with type checkers. For example, +# `a: LambdaType = lambda x: x` will be flagged as a type error by Pyright. +ValidateCallSupportedTypes = Union[ + LambdaType, + FunctionType, + MethodType, + partial, +] + +VALIDATE_CALL_SUPPORTED_TYPES = get_args(ValidateCallSupportedTypes) +UNSUPPORTED_STANDALONE_FIELDINFO_ATTRIBUTES: list[tuple[str, Any]] = [ + ('alias', None), + ('validation_alias', None), + ('serialization_alias', None), + # will be set if any alias is set, so disable it to avoid double warnings: + # 'alias_priority', + ('default', PydanticUndefined), + ('default_factory', None), + ('exclude', None), + ('deprecated', None), + ('repr', True), + ('validate_default', None), + ('frozen', None), + ('init', None), + ('init_var', None), + ('kw_only', None), +] +"""`FieldInfo` attributes (and their default values) that can't be used outside of a model (e.g. in a type adapter or a PEP 695 type alias).""" + +_mode_to_validator: dict[ + FieldValidatorModes, type[BeforeValidator | AfterValidator | PlainValidator | WrapValidator] +] = {'before': BeforeValidator, 'after': AfterValidator, 'plain': PlainValidator, 'wrap': WrapValidator} + + +def check_validator_fields_against_field_name( + info: FieldDecoratorInfo, + field: str, +) -> bool: + """Check whether the field name is in the validator's fields. + + Args: + info: The decorator info. + field: The field name to check. + + Returns: + `True` if the field name is in the validator's fields, `False` otherwise. + """ + fields = info.fields + return '*' in fields or field in fields + + +def check_decorator_fields_exist(decorators: Iterable[AnyFieldDecorator], fields: Iterable[str]) -> None: + """Check that the fields referenced by decorators exist in the `fields` param. + + It ignores the check for a decorator if the decorator has `*` as a field or `check_fields=False`. + + Args: + decorators: An iterable of decorators. + fields: An iterable of field names. + + Raises: + PydanticUserError: If one of the field names does not exist in the `fields` param.
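A sketch of what `UNSUPPORTED_STANDALONE_FIELDINFO_ATTRIBUTES` guards against, assuming a Pydantic version that ships `UnsupportedFieldAttributeWarning` (imported above): attributes such as `alias` have no effect when `Field()` is used outside a model or dataclass, e.g. on a bare `TypeAdapter`:

```python
import warnings
from typing import Annotated

from pydantic import Field, TypeAdapter

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    TypeAdapter(Annotated[int, Field(alias='n')])  # `alias` is meaningless here

print([str(w.message) for w in caught])  # expect a warning about `alias`
```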
+ """ + fields = set(fields) + for dec in decorators: + if '*' in dec.info.fields: + continue + if dec.info.check_fields is False: + continue + for field in dec.info.fields: + if field not in fields: + raise PydanticUserError( + f'Decorators defined with incorrect fields: {dec.cls_ref}.{dec.cls_var_name}' + " (use check_fields=False if you're inheriting from the model and intended this)", + code='decorator-missing-field', + ) + + +def filter_field_decorator_info_by_field( + validator_functions: Iterable[Decorator[FieldDecoratorInfoType]], field: str +) -> list[Decorator[FieldDecoratorInfoType]]: + return [dec for dec in validator_functions if check_validator_fields_against_field_name(dec.info, field)] + + +def apply_each_item_validators( + schema: core_schema.CoreSchema, + each_item_validators: list[Decorator[ValidatorDecoratorInfo]], +) -> core_schema.CoreSchema: + # This V1 compatibility shim should eventually be removed + + # fail early if each_item_validators is empty + if not each_item_validators: + return schema + + # push down any `each_item=True` validators + # note that this won't work for any Annotated types that get wrapped by a function validator + # but that's okay because that didn't exist in V1 + if schema['type'] == 'nullable': + schema['schema'] = apply_each_item_validators(schema['schema'], each_item_validators) + return schema + elif schema['type'] == 'tuple': + if (variadic_item_index := schema.get('variadic_item_index')) is not None: + schema['items_schema'][variadic_item_index] = apply_validators( + schema['items_schema'][variadic_item_index], + each_item_validators, + ) + elif is_list_like_schema_with_items_schema(schema): + inner_schema = schema.get('items_schema', core_schema.any_schema()) + schema['items_schema'] = apply_validators(inner_schema, each_item_validators) + elif schema['type'] == 'dict': + inner_schema = schema.get('values_schema', core_schema.any_schema()) + schema['values_schema'] = apply_validators(inner_schema, each_item_validators) + else: + raise TypeError( + f'`@validator(..., each_item=True)` cannot be applied to fields with a schema of {schema["type"]}' + ) + return schema + + +def _extract_json_schema_info_from_field_info( + info: FieldInfo | ComputedFieldInfo, +) -> tuple[JsonDict | None, JsonDict | JsonSchemaExtraCallable | None]: + json_schema_updates = { + 'title': info.title, + 'description': info.description, + 'deprecated': bool(info.deprecated) or info.deprecated == '' or None, + 'examples': to_jsonable_python(info.examples), + } + json_schema_updates = {k: v for k, v in json_schema_updates.items() if v is not None} + return (json_schema_updates or None, info.json_schema_extra) + + +JsonEncoders = dict[type[Any], JsonEncoder] + + +def _add_custom_serialization_from_json_encoders( + json_encoders: JsonEncoders | None, tp: Any, schema: CoreSchema +) -> CoreSchema: + """Iterate over the json_encoders and add the first matching encoder to the schema. + + Args: + json_encoders: A dictionary of types and their encoder functions. + tp: The type to check for a matching encoder. + schema: The schema to add the encoder to. + """ + if not json_encoders: + return schema + if 'serialization' in schema: + return schema + # Check the class type and its superclasses for a matching encoder + # Decimal.__class__.__mro__ (and probably other cases) doesn't include Decimal itself + # if the type is a GenericAlias (e.g. 
from list[int]) we need to use __class__ instead of .__mro__ + for base in (tp, *getattr(tp, '__mro__', tp.__class__.__mro__)[:-1]): + encoder = json_encoders.get(base) + if encoder is None: + continue + + warnings.warn( + f'`json_encoders` is deprecated. See https://docs.pydantic.dev/{version_short()}/concepts/serialization/#custom-serializers for alternatives', + PydanticDeprecatedSince20, + ) + + # TODO: in theory we should check that the schema accepts a serialization key + schema['serialization'] = core_schema.plain_serializer_function_ser_schema(encoder, when_used='json') + return schema + + return schema + + +GENERATE_SCHEMA_ERRORS = ( + PydanticForbiddenQualifier, + PydanticInvalidForJsonSchema, + PydanticSchemaGenerationError, + PydanticUndefinedAnnotation, +) +"""Errors raised during core schema generation. This does *not* include `InvalidSchemaError`, which is raised during schema cleaning.""" + + +class InvalidSchemaError(Exception): + """The core schema is invalid.""" + + +class GenerateSchema: + """Generate core schema for a Pydantic model, dataclass and types like `str`, `datetime`, ... .""" + + __slots__ = ( + '_config_wrapper_stack', + '_ns_resolver', + '_typevars_map', + 'field_name_stack', + 'model_type_stack', + 'defs', + ) + + def __init__( + self, + config_wrapper: ConfigWrapper, + ns_resolver: NsResolver | None = None, + typevars_map: Mapping[TypeVar, Any] | None = None, + ) -> None: + # we need a stack for recursing into nested models + self._config_wrapper_stack = ConfigWrapperStack(config_wrapper) + self._ns_resolver = ns_resolver or NsResolver() + self._typevars_map = typevars_map + self.field_name_stack = _FieldNameStack() + self.model_type_stack = _ModelTypeStack() + self.defs = _Definitions() + + def __init_subclass__(cls) -> None: + super().__init_subclass__() + warnings.warn( + 'Subclassing `GenerateSchema` is not supported. The API is highly subject to change in minor versions.', + UserWarning, + stacklevel=2, + ) + + @property + def _config_wrapper(self) -> ConfigWrapper: + return self._config_wrapper_stack.tail + + @property + def _types_namespace(self) -> NamespacesTuple: + return self._ns_resolver.types_namespace + + @property + def _arbitrary_types(self) -> bool: + return self._config_wrapper.arbitrary_types_allowed + + # the following methods can be overridden but should be considered + # unstable / private APIs + def _list_schema(self, items_type: Any) -> CoreSchema: + return core_schema.list_schema(self.generate_schema(items_type)) + + def _dict_schema(self, keys_type: Any, values_type: Any) -> CoreSchema: + return core_schema.dict_schema(self.generate_schema(keys_type), self.generate_schema(values_type)) + + def _set_schema(self, items_type: Any) -> CoreSchema: + return core_schema.set_schema(self.generate_schema(items_type)) + + def _frozenset_schema(self, items_type: Any) -> CoreSchema: + return core_schema.frozenset_schema(self.generate_schema(items_type)) + + def _enum_schema(self, enum_type: type[Enum]) -> CoreSchema: + cases: list[Any] = list(enum_type.__members__.values()) + + enum_ref = get_type_ref(enum_type) + description = None if not enum_type.__doc__ else inspect.cleandoc(enum_type.__doc__) + if ( + description == 'An enumeration.' 
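For reference, a sketch of the deprecated `json_encoders` hook handled above, next to the documented `@field_serializer` replacement (the `Event` models are invented for the example):

```python
from datetime import datetime

from pydantic import BaseModel, ConfigDict, field_serializer

class OldEvent(BaseModel):  # deprecated: building this class triggers the warning above
    model_config = ConfigDict(json_encoders={datetime: lambda dt: dt.strftime('%Y-%m-%d')})
    when: datetime

class Event(BaseModel):  # preferred: an explicit, per-field serializer
    when: datetime

    @field_serializer('when', when_used='json')
    def ser_when(self, dt: datetime) -> str:
        return dt.strftime('%Y-%m-%d')

print(Event(when=datetime(2024, 1, 2)).model_dump_json())  # {"when":"2024-01-02"}
```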
+ ): # This is the default value provided by enum.EnumMeta.__new__; don't use it + description = None + js_updates = {'title': enum_type.__name__, 'description': description} + js_updates = {k: v for k, v in js_updates.items() if v is not None} + + sub_type: Literal['str', 'int', 'float'] | None = None + if issubclass(enum_type, int): + sub_type = 'int' + value_ser_type: core_schema.SerSchema = core_schema.simple_ser_schema('int') + elif issubclass(enum_type, str): + # this handles `StrEnum` (3.11 only), and also `Foobar(str, Enum)` + sub_type = 'str' + value_ser_type = core_schema.simple_ser_schema('str') + elif issubclass(enum_type, float): + sub_type = 'float' + value_ser_type = core_schema.simple_ser_schema('float') + else: + # TODO this is an ugly hack, how do we trigger an Any schema for serialization? + value_ser_type = core_schema.plain_serializer_function_ser_schema(lambda x: x) + + if cases: + + def get_json_schema(schema: CoreSchema, handler: GetJsonSchemaHandler) -> JsonSchemaValue: + json_schema = handler(schema) + original_schema = handler.resolve_ref_schema(json_schema) + original_schema.update(js_updates) + return json_schema + + # we don't want to add the missing to the schema if it's the default one + default_missing = getattr(enum_type._missing_, '__func__', None) is Enum._missing_.__func__ # pyright: ignore[reportFunctionMemberAccess] + enum_schema = core_schema.enum_schema( + enum_type, + cases, + sub_type=sub_type, + missing=None if default_missing else enum_type._missing_, + ref=enum_ref, + metadata={'pydantic_js_functions': [get_json_schema]}, + ) + + if self._config_wrapper.use_enum_values: + enum_schema = core_schema.no_info_after_validator_function( + attrgetter('value'), enum_schema, serialization=value_ser_type + ) + + return enum_schema + + else: + + def get_json_schema_no_cases(_, handler: GetJsonSchemaHandler) -> JsonSchemaValue: + json_schema = handler(core_schema.enum_schema(enum_type, cases, sub_type=sub_type, ref=enum_ref)) + original_schema = handler.resolve_ref_schema(json_schema) + original_schema.update(js_updates) + return json_schema + + # Use an isinstance check for enums with no cases. + # The most important use case for this is creating TypeVar bounds for generics that should + # be restricted to enums. This is more consistent than it might seem at first, since you can only + # subclass enum.Enum (or subclasses of enum.Enum) if all parent classes have no cases. + # We use the get_json_schema function when an Enum subclass has been declared with no cases + # so that we can still generate a valid json schema. + return core_schema.is_instance_schema( + enum_type, + metadata={'pydantic_js_functions': [get_json_schema_no_cases]}, + ) + + def _ip_schema(self, tp: Any) -> CoreSchema: + from ._validators import IP_VALIDATOR_LOOKUP, IpType + + ip_type_json_schema_format: dict[type[IpType], str] = { + IPv4Address: 'ipv4', + IPv4Network: 'ipv4network', + IPv4Interface: 'ipv4interface', + IPv6Address: 'ipv6', + IPv6Network: 'ipv6network', + IPv6Interface: 'ipv6interface', + } + + def ser_ip(ip: Any, info: core_schema.SerializationInfo) -> str | IpType: + if not isinstance(ip, (tp, str)): + raise PydanticSerializationUnexpectedValue( + f"Expected `{tp}` but got `{type(ip)}` with value `'{ip}'` - serialized value may not be as expected." 
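The `attrgetter('value')` wrapper added above is observable through the documented `use_enum_values` setting; a minimal sketch (model invented for the example):

```python
from enum import Enum

from pydantic import BaseModel, ConfigDict

class Color(str, Enum):
    RED = 'red'

class Palette(BaseModel):
    model_config = ConfigDict(use_enum_values=True)
    primary: Color

# The member is validated first, then replaced by its `.value`:
print(type(Palette(primary='red').primary))  # <class 'str'>
```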
+ ) + if info.mode == 'python': + return ip + return str(ip) + + return core_schema.lax_or_strict_schema( + lax_schema=core_schema.no_info_plain_validator_function(IP_VALIDATOR_LOOKUP[tp]), + strict_schema=core_schema.json_or_python_schema( + json_schema=core_schema.no_info_after_validator_function(tp, core_schema.str_schema()), + python_schema=core_schema.is_instance_schema(tp), + ), + serialization=core_schema.plain_serializer_function_ser_schema(ser_ip, info_arg=True, when_used='always'), + metadata={ + 'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': ip_type_json_schema_format[tp]}] + }, + ) + + def _path_schema(self, tp: Any, path_type: Any) -> CoreSchema: + if tp is os.PathLike and (path_type not in {str, bytes} and not typing_objects.is_any(path_type)): + raise PydanticUserError( + '`os.PathLike` can only be used with `str`, `bytes` or `Any`', code='schema-for-unknown-type' + ) + + path_constructor = pathlib.PurePath if tp is os.PathLike else tp + strict_inner_schema = ( + core_schema.bytes_schema(strict=True) if (path_type is bytes) else core_schema.str_schema(strict=True) + ) + lax_inner_schema = core_schema.bytes_schema() if (path_type is bytes) else core_schema.str_schema() + + def path_validator(input_value: str | bytes) -> os.PathLike[Any]: # type: ignore + try: + if path_type is bytes: + if isinstance(input_value, bytes): + try: + input_value = input_value.decode() + except UnicodeDecodeError as e: + raise PydanticCustomError('bytes_type', 'Input must be valid bytes') from e + else: + raise PydanticCustomError('bytes_type', 'Input must be bytes') + elif not isinstance(input_value, str): + raise PydanticCustomError('path_type', 'Input is not a valid path') + + return path_constructor(input_value) # type: ignore + except TypeError as e: + raise PydanticCustomError('path_type', 'Input is not a valid path') from e + + def ser_path(path: Any, info: core_schema.SerializationInfo) -> str | os.PathLike[Any]: + if not isinstance(path, (tp, str)): + raise PydanticSerializationUnexpectedValue( + f"Expected `{tp}` but got `{type(path)}` with value `'{path}'` - serialized value may not be as expected." 
+ ) + if info.mode == 'python': + return path + return str(path) + + instance_schema = core_schema.json_or_python_schema( + json_schema=core_schema.no_info_after_validator_function(path_validator, lax_inner_schema), + python_schema=core_schema.is_instance_schema(tp), + ) + + schema = core_schema.lax_or_strict_schema( + lax_schema=core_schema.union_schema( + [ + instance_schema, + core_schema.no_info_after_validator_function(path_validator, strict_inner_schema), + ], + custom_error_type='path_type', + custom_error_message=f'Input is not a valid path for {tp}', + ), + strict_schema=instance_schema, + serialization=core_schema.plain_serializer_function_ser_schema(ser_path, info_arg=True, when_used='always'), + metadata={'pydantic_js_functions': [lambda source, handler: {**handler(source), 'format': 'path'}]}, + ) + return schema + + def _deque_schema(self, items_type: Any) -> CoreSchema: + from ._serializers import serialize_sequence_via_list + from ._validators import deque_validator + + item_type_schema = self.generate_schema(items_type) + + # we have to use a lax list schema here, because we need to validate the deque's + # items via a list schema, but it's ok if the deque itself is not a list + list_schema = core_schema.list_schema(item_type_schema, strict=False) + + check_instance = core_schema.json_or_python_schema( + json_schema=list_schema, + python_schema=core_schema.is_instance_schema(collections.deque, cls_repr='Deque'), + ) + + lax_schema = core_schema.no_info_wrap_validator_function(deque_validator, list_schema) + + return core_schema.lax_or_strict_schema( + lax_schema=lax_schema, + strict_schema=core_schema.chain_schema([check_instance, lax_schema]), + serialization=core_schema.wrap_serializer_function_ser_schema( + serialize_sequence_via_list, schema=item_type_schema, info_arg=True + ), + ) + + def _mapping_schema(self, tp: Any, keys_type: Any, values_type: Any) -> CoreSchema: + from ._validators import MAPPING_ORIGIN_MAP, defaultdict_validator, get_defaultdict_default_default_factory + + mapped_origin = MAPPING_ORIGIN_MAP[tp] + keys_schema = self.generate_schema(keys_type) + with warnings.catch_warnings(): + # We kind of abused `Field()` default factories to be able to specify + # the `defaultdict`'s `default_factory`. As a consequence, we get warnings + # as normally `FieldInfo.default_factory` is unsupported in the context where + # `Field()` is used and our only solution is to ignore them (note that this might + # wrongfully ignore valid warnings, e.g. if the `value_type` is a PEP 695 type alias + # with unsupported metadata). 
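`_deque_schema()` seen from the public side, via the documented `TypeAdapter` API (values chosen arbitrarily):

```python
from collections import deque

from pydantic import TypeAdapter

ta = TypeAdapter(deque[int])

# Lax mode: items are validated through the inner list schema, then the wrap
# validator rebuilds a deque around the result.
d = ta.validate_python(['1', 2])
print(d)                # deque([1, 2])
print(ta.dump_json(d))  # b'[1,2]'
```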
+ warnings.simplefilter('ignore', category=UnsupportedFieldAttributeWarning) + values_schema = self.generate_schema(values_type) + dict_schema = core_schema.dict_schema(keys_schema, values_schema, strict=False) + + if mapped_origin is dict: + schema = dict_schema + else: + check_instance = core_schema.json_or_python_schema( + json_schema=dict_schema, + python_schema=core_schema.is_instance_schema(mapped_origin), + ) + + if tp is collections.defaultdict: + default_default_factory = get_defaultdict_default_default_factory(values_type) + coerce_instance_wrap = partial( + core_schema.no_info_wrap_validator_function, + partial(defaultdict_validator, default_default_factory=default_default_factory), + ) + else: + coerce_instance_wrap = partial(core_schema.no_info_after_validator_function, mapped_origin) + + lax_schema = coerce_instance_wrap(dict_schema) + strict_schema = core_schema.chain_schema([check_instance, lax_schema]) + + schema = core_schema.lax_or_strict_schema( + lax_schema=lax_schema, + strict_schema=strict_schema, + serialization=core_schema.wrap_serializer_function_ser_schema( + lambda v, h: h(v), schema=dict_schema, info_arg=False + ), + ) + + return schema + + def _fraction_schema(self) -> CoreSchema: + """Support for [`fractions.Fraction`][fractions.Fraction].""" + from ._validators import fraction_validator + + # TODO: note, this is a fairly common pattern, re lax / strict for attempted type coercion, + # can we use a helper function to reduce boilerplate? + return core_schema.lax_or_strict_schema( + lax_schema=core_schema.no_info_plain_validator_function(fraction_validator), + strict_schema=core_schema.json_or_python_schema( + json_schema=core_schema.no_info_plain_validator_function(fraction_validator), + python_schema=core_schema.is_instance_schema(Fraction), + ), + # use str serialization to guarantee round trip behavior + serialization=core_schema.to_string_ser_schema(when_used='always'), + metadata={'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': 'fraction'}]}, + ) + + def _arbitrary_type_schema(self, tp: Any) -> CoreSchema: + if not isinstance(tp, type): + warnings.warn( + f'{tp!r} is not a Python type (it may be an instance of an object);' + ' Pydantic will allow any object with no validation since we cannot even' + ' enforce that the input is an instance of the given type.' + ' To get rid of this error, wrap the type with `pydantic.SkipValidation`.', + ArbitraryTypeWarning, + ) + return core_schema.any_schema() + return core_schema.is_instance_schema(tp) + + def _unknown_type_schema(self, obj: Any) -> CoreSchema: + raise PydanticSchemaGenerationError( + f'Unable to generate pydantic-core schema for {obj!r}. ' + 'Set `arbitrary_types_allowed=True` in the model_config to ignore this error' + ' or implement `__get_pydantic_core_schema__` on your type to fully support it.' + '\n\nIf you got this error by calling handler(<some type>) within' + ' `__get_pydantic_core_schema__` then you likely need to call' + ' `handler.generate_schema(<some type>)` since we do not call' + ' `__get_pydantic_core_schema__` on `<some type>` otherwise to avoid infinite recursion.'
+ ) + + def _apply_discriminator_to_union( + self, schema: CoreSchema, discriminator: str | Discriminator | None + ) -> CoreSchema: + if discriminator is None: + return schema + try: + return _discriminated_union.apply_discriminator( + schema, + discriminator, + self.defs._definitions, + ) + except _discriminated_union.MissingDefinitionForUnionRef: + # defer until defs are resolved + _discriminated_union.set_discriminator_in_metadata( + schema, + discriminator, + ) + return schema + + def clean_schema(self, schema: CoreSchema) -> CoreSchema: + return self.defs.finalize_schema(schema) + + def _add_js_function(self, metadata_schema: CoreSchema, js_function: Callable[..., Any]) -> None: + metadata = metadata_schema.get('metadata', {}) + pydantic_js_functions = metadata.setdefault('pydantic_js_functions', []) + # because of how we generate core schemas for nested generic models + # we can end up adding `BaseModel.__get_pydantic_json_schema__` multiple times + # this check may fail to catch duplicates if the function is a `functools.partial` + # or something like that, but if it does it'll fail by inserting the duplicate + if js_function not in pydantic_js_functions: + pydantic_js_functions.append(js_function) + metadata_schema['metadata'] = metadata + + def generate_schema( + self, + obj: Any, + ) -> core_schema.CoreSchema: + """Generate core schema. + + Args: + obj: The object to generate core schema for. + + Returns: + The generated core schema. + + Raises: + PydanticUndefinedAnnotation: + If it is not possible to evaluate a forward reference. + PydanticSchemaGenerationError: + If it is not possible to generate a pydantic-core schema. + TypeError: + - If `alias_generator` returns a disallowed type (must be str, AliasPath or AliasChoices). + - If a V1-style validator with `each_item=True` is applied to an unsupported field. + PydanticUserError: + - If `typing.TypedDict` is used instead of `typing_extensions.TypedDict` on Python < 3.12. + - If the `__modify_schema__` method is used instead of `__get_pydantic_json_schema__`.
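`_apply_discriminator_to_union()` is typically reached via `Field(discriminator=...)` on a union, the documented tagged-union API (models invented for the example):

```python
from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter

class Cat(BaseModel):
    pet_type: Literal['cat']
    meows: int

class Dog(BaseModel):
    pet_type: Literal['dog']
    barks: float

ta = TypeAdapter(Annotated[Union[Cat, Dog], Field(discriminator='pet_type')])
print(ta.validate_python({'pet_type': 'dog', 'barks': 2}))  # a Dog with barks=2.0
```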
+ """ + schema = self._generate_schema_from_get_schema_method(obj, obj) + + if schema is None: + schema = self._generate_schema_inner(obj) + + metadata_js_function = _extract_get_pydantic_json_schema(obj) + if metadata_js_function is not None: + metadata_schema = resolve_original_schema(schema, self.defs) + if metadata_schema: + self._add_js_function(metadata_schema, metadata_js_function) + + schema = _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, obj, schema) + + return schema + + def _model_schema(self, cls: type[BaseModel]) -> core_schema.CoreSchema: + """Generate schema for a Pydantic model.""" + BaseModel_ = import_cached_base_model() + + with self.defs.get_schema_or_ref(cls) as (model_ref, maybe_schema): + if maybe_schema is not None: + return maybe_schema + + schema = cls.__dict__.get('__pydantic_core_schema__') + if schema is not None and not isinstance(schema, MockCoreSchema): + if schema['type'] == 'definitions': + schema = self.defs.unpack_definitions(schema) + ref = get_ref(schema) + if ref: + return self.defs.create_definition_reference_schema(schema) + else: + return schema + + config_wrapper = ConfigWrapper(cls.model_config, check=False) + + with self._config_wrapper_stack.push(config_wrapper), self._ns_resolver.push(cls): + core_config = self._config_wrapper.core_config(title=cls.__name__) + + if cls.__pydantic_fields_complete__ or cls is BaseModel_: + fields = getattr(cls, '__pydantic_fields__', {}) + extra_info = getattr(cls, '__pydantic_extra_info__', None) + else: + if '__pydantic_fields__' not in cls.__dict__: + # This happens when we have a loop in the schema generation: + # class Base[T](BaseModel): + # t: T + # + # class Other(BaseModel): + # b: 'Base[Other]' + # When we build fields for `Other`, we evaluate the forward annotation. + # At this point, `Other` doesn't have the model fields set. We create + # `Base[Other]`; model fields are successfully built, and we try to generate + # a schema for `t: Other`. As `Other.__pydantic_fields__` aren't set, we abort. 
+ raise PydanticUndefinedAnnotation( + name=cls.__name__, + message=f'Class {cls.__name__!r} is not defined', + ) + try: + fields, extra_info = rebuild_model_fields( + cls, + config_wrapper=self._config_wrapper, + ns_resolver=self._ns_resolver, + typevars_map=self._typevars_map or {}, + ) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + + decorators = cls.__pydantic_decorators__ + computed_fields = decorators.computed_fields + check_decorator_fields_exist( + chain( + decorators.field_validators.values(), + decorators.field_serializers.values(), + decorators.validators.values(), + ), + {*fields.keys(), *computed_fields.keys()}, + ) + + model_validators = decorators.model_validators.values() + + extras_schema = None + extras_keys_schema = None + if core_config.get('extra_fields_behavior') == 'allow' and extra_info is not None: + tp = get_origin(extra_info.annotation) + if tp not in DICT_TYPES: + raise PydanticSchemaGenerationError( + 'The type annotation for `__pydantic_extra__` must be `dict[str, ...]`' + ) + # See the comments in `_get_args_resolving_forward_refs()` for why we need + # to re-evaluate the annotation: + extra_keys_type, extra_items_type = self._get_args_resolving_forward_refs( + extra_info.annotation, + required=True, + ) + if extra_keys_type is not str: + extras_keys_schema = self.generate_schema(extra_keys_type) + if not typing_objects.is_any(extra_items_type): + extras_schema = self.generate_schema(extra_items_type) + + generic_origin: type[BaseModel] | None = getattr(cls, '__pydantic_generic_metadata__', {}).get('origin') + + if cls.__pydantic_root_model__: + inner_schema, metadata = self._common_field_schema('root', fields['root'], decorators) + if cls.__doc__ and metadata.get('pydantic_js_updates', {}).get('description'): + # This is a bit of a leaky abstraction, but as the model docstring takes priority + # over the root field's description, we need to override it here. 
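The `__pydantic_extra__` branch above corresponds to the documented pattern of typing extra fields under `extra='allow'` (model invented for the example):

```python
from pydantic import BaseModel, ConfigDict

class Model(BaseModel):
    model_config = ConfigDict(extra='allow')
    # Must be `dict[str, ...]`; each extra value is validated against `int`:
    __pydantic_extra__: dict[str, int]
    known: str

m = Model(known='a', other='7')
print(m.__pydantic_extra__)  # {'other': 7}
```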
This can't be done + # in the JSON Schema generation logic because the metadata's `pydantic_js_updates` are + # applied last, and overrides any value previously set (so the description set from the + # docstring in `GenerateJsonSchema._update_class_schema()` is overridden): + update_core_metadata( + metadata, pydantic_js_updates={'description': inspect.cleandoc(cls.__doc__)} + ) + + inner_schema = apply_model_validators(inner_schema, model_validators, 'inner') + model_schema = core_schema.model_schema( + cls, + inner_schema, + generic_origin=generic_origin, + custom_init=getattr(cls, '__pydantic_custom_init__', None), + root_model=True, + post_init=getattr(cls, '__pydantic_post_init__', None), + config=core_config, + ref=model_ref, + metadata=metadata, + ) + else: + fields_schema: core_schema.CoreSchema = core_schema.model_fields_schema( + {k: self._generate_md_field_schema(k, v, decorators) for k, v in fields.items()}, + computed_fields=[ + self._computed_field_schema(d, decorators.field_serializers) + for d in computed_fields.values() + ], + extras_schema=extras_schema, + extras_keys_schema=extras_keys_schema, + model_name=cls.__name__, + ) + inner_schema = apply_validators(fields_schema, decorators.root_validators.values()) + inner_schema = apply_model_validators(inner_schema, model_validators, 'inner') + + model_schema = core_schema.model_schema( + cls, + inner_schema, + generic_origin=generic_origin, + custom_init=getattr(cls, '__pydantic_custom_init__', None), + root_model=False, + post_init=getattr(cls, '__pydantic_post_init__', None), + config=core_config, + ref=model_ref, + ) + + schema = self._apply_model_serializers(model_schema, decorators.model_serializers.values()) + schema = apply_model_validators(schema, model_validators, 'outer') + return self.defs.create_definition_reference_schema(schema) + + def _resolve_self_type(self, obj: Any) -> Any: + obj = self.model_type_stack.get() + if obj is None: + raise PydanticUserError('`typing.Self` is invalid in this context', code='invalid-self-type') + return obj + + def _generate_schema_from_get_schema_method(self, obj: Any, source: Any) -> core_schema.CoreSchema | None: + BaseModel_ = import_cached_base_model() + + get_schema = getattr(obj, '__get_pydantic_core_schema__', None) + is_base_model_get_schema = ( + getattr(get_schema, '__func__', None) is BaseModel_.__get_pydantic_core_schema__.__func__ # pyright: ignore[reportFunctionMemberAccess] + ) + + if ( + get_schema is not None + # BaseModel.__get_pydantic_core_schema__ is defined for backwards compatibility, + # to allow existing code to call `super().__get_pydantic_core_schema__` in Pydantic + # model that overrides `__get_pydantic_core_schema__`. However, it raises a deprecation + # warning stating that the method will be removed, and during the core schema gen we actually + # don't call the method: + and not is_base_model_get_schema + ): + # Some referenceable types might have a `__get_pydantic_core_schema__` method + # defined on it by users (e.g. on a dataclass). 
This generally doesn't play well + # as these types are already recognized by the `GenerateSchema` class and isn't ideal + # as we might end up calling `get_schema_or_ref` (expensive) on types that are actually + # not referenceable: + with self.defs.get_schema_or_ref(obj) as (_, maybe_schema): + if maybe_schema is not None: + return maybe_schema + + if obj is source: + ref_mode = 'unpack' + else: + ref_mode = 'to-def' + schema = get_schema( + source, CallbackGetCoreSchemaHandler(self._generate_schema_inner, self, ref_mode=ref_mode) + ) + if schema['type'] == 'definitions': + schema = self.defs.unpack_definitions(schema) + + ref = get_ref(schema) + if ref: + return self.defs.create_definition_reference_schema(schema) + + # Note: if schema is of type `'definition-ref'`, we might want to copy it as a + # safety measure (because these are inlined in place -- i.e. mutated directly) + return schema + + if get_schema is None and (validators := getattr(obj, '__get_validators__', None)) is not None: + from pydantic.v1 import BaseModel as BaseModelV1 + + if issubclass(obj, BaseModelV1): + warnings.warn( + f'Mixing V1 models and V2 models (or constructs, like `TypeAdapter`) is not supported. Please upgrade `{obj.__name__}` to V2.', + UserWarning, + ) + else: + warnings.warn( + '`__get_validators__` is deprecated and will be removed, use `__get_pydantic_core_schema__` instead.', + PydanticDeprecatedSince20, + ) + return core_schema.chain_schema([core_schema.with_info_plain_validator_function(v) for v in validators()]) + + def _resolve_forward_ref(self, obj: Any) -> Any: + # we assume that types_namespace has the target of forward references in its scope, + # but this could fail, for example, if calling Validator on an imported type which contains + # forward references to other types only defined in the module from which it was imported + # `Validator(SomeImportedTypeAliasWithAForwardReference)` + # or the equivalent for BaseModel + # class Model(BaseModel): + # x: SomeImportedTypeAliasWithAForwardReference + try: + obj = _typing_extra.eval_type_backport(obj, *self._types_namespace) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + + # if obj is still a ForwardRef, it means we can't evaluate it, raise PydanticUndefinedAnnotation + if isinstance(obj, ForwardRef): + raise PydanticUndefinedAnnotation(obj.__forward_arg__, f'Unable to evaluate forward reference {obj}') + + if self._typevars_map: + obj = replace_types(obj, self._typevars_map) + + return obj + + @overload + def _get_args_resolving_forward_refs(self, obj: Any, required: Literal[True]) -> tuple[Any, ...]: ... + + @overload + def _get_args_resolving_forward_refs(self, obj: Any) -> tuple[Any, ...] | None: ... + + def _get_args_resolving_forward_refs(self, obj: Any, required: bool = False) -> tuple[Any, ...] | None: + args = get_args(obj) + if args: + if isinstance(obj, GenericAlias): + # PEP 585 generic aliases don't convert args to ForwardRefs, unlike `typing.List/Dict` etc. + # This was fixed in https://github.com/python/cpython/pull/30900 (Python 3.11). + # TODO: this shouldn't be necessary (probably even this `_get_args_resolving_forward_refs()` function) + # once we drop support for Python 3.10 *or* if we implement our own `typing._eval_type()` implementation. 
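A minimal custom `__get_pydantic_core_schema__` of the kind dispatched by `_generate_schema_from_get_schema_method()` above, using the documented extension point (the `Username` type is invented for the example):

```python
from typing import Any

from pydantic import GetCoreSchemaHandler, TypeAdapter
from pydantic_core import core_schema

class Username(str):
    @classmethod
    def __get_pydantic_core_schema__(
        cls, source: Any, handler: GetCoreSchemaHandler
    ) -> core_schema.CoreSchema:
        # Delegate to the handler for the base `str` schema, then wrap with our type:
        return core_schema.no_info_after_validator_function(cls, handler(str))

print(repr(TypeAdapter(Username).validate_python('alice')))  # 'alice' (a Username)
```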
+ args = (_typing_extra._make_forward_ref(a) if isinstance(a, str) else a for a in args) + args = tuple(self._resolve_forward_ref(a) if isinstance(a, ForwardRef) else a for a in args) + elif required: # pragma: no cover + raise TypeError(f'Expected {obj} to have generic parameters but it had none') + return args + + def _get_first_arg_or_any(self, obj: Any) -> Any: + args = self._get_args_resolving_forward_refs(obj) + if not args: + return Any + return args[0] + + def _get_first_two_args_or_any(self, obj: Any) -> tuple[Any, Any]: + args = self._get_args_resolving_forward_refs(obj) + if not args: + return (Any, Any) + if len(args) < 2: + origin = get_origin(obj) + raise TypeError(f'Expected two type arguments for {origin}, got 1') + return args[0], args[1] + + def _generate_schema_inner(self, obj: Any) -> core_schema.CoreSchema: + if typing_objects.is_self(obj): + obj = self._resolve_self_type(obj) + + if typing_objects.is_annotated(get_origin(obj)): + return self._annotated_schema(obj) + + if isinstance(obj, dict): + # we assume this is already a valid schema + return obj # type: ignore[return-value] + + if isinstance(obj, str): + obj = ForwardRef(obj) + + if isinstance(obj, ForwardRef): + return self.generate_schema(self._resolve_forward_ref(obj)) + + BaseModel = import_cached_base_model() + + if lenient_issubclass(obj, BaseModel): + with self.model_type_stack.push(obj): + return self._model_schema(obj) + + if isinstance(obj, PydanticRecursiveRef): + return core_schema.definition_reference_schema(schema_ref=obj.type_ref) + + return self.match_type(obj) + + def match_type(self, obj: Any) -> core_schema.CoreSchema: # noqa: C901 + """Main mapping of types to schemas. + + The general structure is a series of if statements starting with the simple cases + (non-generic primitive types) and then handling generics and other more complex cases. + + Each case either generates a schema directly, calls into a public user-overridable method + (like `GenerateSchema.tuple_variable_schema`) or calls into a private method that handles some + boilerplate before calling into the user-facing method (e.g. `GenerateSchema._tuple_schema`). + + The idea is that we'll evolve this into adding more and more user facing methods over time + as they get requested and we figure out what the right API for them is. 
+ """ + if obj is str: + return core_schema.str_schema() + elif obj is bytes: + return core_schema.bytes_schema() + elif obj is int: + return core_schema.int_schema() + elif obj is float: + return core_schema.float_schema() + elif obj is bool: + return core_schema.bool_schema() + elif obj is complex: + return core_schema.complex_schema() + elif typing_objects.is_any(obj) or obj is object: + return core_schema.any_schema() + elif obj is datetime.date: + return core_schema.date_schema() + elif obj is datetime.datetime: + return core_schema.datetime_schema() + elif obj is datetime.time: + return core_schema.time_schema() + elif obj is datetime.timedelta: + return core_schema.timedelta_schema() + elif obj is Decimal: + return core_schema.decimal_schema() + elif obj is UUID: + return core_schema.uuid_schema() + elif obj is Url: + return core_schema.url_schema() + elif obj is Fraction: + return self._fraction_schema() + elif obj is MultiHostUrl: + return core_schema.multi_host_url_schema() + elif obj is None or obj is _typing_extra.NoneType: + return core_schema.none_schema() + if obj is MISSING: + return core_schema.missing_sentinel_schema() + elif obj in IP_TYPES: + return self._ip_schema(obj) + elif obj in TUPLE_TYPES: + return self._tuple_schema(obj) + elif obj in LIST_TYPES: + return self._list_schema(Any) + elif obj in SET_TYPES: + return self._set_schema(Any) + elif obj in FROZEN_SET_TYPES: + return self._frozenset_schema(Any) + elif obj in SEQUENCE_TYPES: + return self._sequence_schema(Any) + elif obj in ITERABLE_TYPES: + return self._iterable_schema(obj) + elif obj in DICT_TYPES: + return self._dict_schema(Any, Any) + elif obj in PATH_TYPES: + return self._path_schema(obj, Any) + elif obj in DEQUE_TYPES: + return self._deque_schema(Any) + elif obj in MAPPING_TYPES: + return self._mapping_schema(obj, Any, Any) + elif obj in COUNTER_TYPES: + return self._mapping_schema(obj, Any, int) + elif typing_objects.is_typealiastype(obj): + return self._type_alias_type_schema(obj) + elif obj is type: + return self._type_schema() + elif _typing_extra.is_callable(obj): + return core_schema.callable_schema() + elif typing_objects.is_literal(get_origin(obj)): + return self._literal_schema(obj) + elif is_typeddict(obj): + return self._typed_dict_schema(obj, None) + elif inspect.isclass(obj) and issubclass(obj, Enum): + # NOTE: this must come before the `is_namedtuple()` check as enums values + # can be namedtuples: + return self._enum_schema(obj) + elif _typing_extra.is_namedtuple(obj): + return self._namedtuple_schema(obj, None) + elif typing_objects.is_newtype(obj): + # NewType, can't use isinstance because it fails <3.10 + return self.generate_schema(obj.__supertype__) + elif obj in PATTERN_TYPES: + return self._pattern_schema(obj) + elif _typing_extra.is_hashable(obj): + return self._hashable_schema() + elif isinstance(obj, typing.TypeVar): + return self._unsubstituted_typevar_schema(obj) + elif _typing_extra.is_finalvar(obj): + if obj is Final: + return core_schema.any_schema() + return self.generate_schema( + self._get_first_arg_or_any(obj), + ) + elif isinstance(obj, VALIDATE_CALL_SUPPORTED_TYPES): + return self._call_schema(obj) # pyright: ignore[reportArgumentType] + elif obj is ZoneInfo: + return self._zoneinfo_schema() + + # dataclasses.is_dataclass coerces dc instances to types, but we only handle + # the case of a dc type here + if dataclasses.is_dataclass(obj): + return self._dataclass_schema(obj, None) # pyright: ignore[reportArgumentType] + + origin = get_origin(obj) + if origin is not 
None: + return self._match_generic_type(obj, origin) + + if self._arbitrary_types: + return self._arbitrary_type_schema(obj) + return self._unknown_type_schema(obj) + + def _match_generic_type(self, obj: Any, origin: Any) -> CoreSchema: # noqa: C901 + # Need to handle generic dataclasses before looking for the schema properties because attribute accesses + # on _GenericAlias delegate to the origin type, so lose the information about the concrete parametrization + # As a result, currently, there is no way to cache the schema for generic dataclasses. This may be possible + # to resolve by modifying the value returned by `Generic.__class_getitem__`, but that is a dangerous game. + if dataclasses.is_dataclass(origin): + return self._dataclass_schema(obj, origin) # pyright: ignore[reportArgumentType] + if _typing_extra.is_namedtuple(origin): + return self._namedtuple_schema(obj, origin) + + schema = self._generate_schema_from_get_schema_method(origin, obj) + if schema is not None: + return schema + + if typing_objects.is_typealiastype(origin): + return self._type_alias_type_schema(obj) + elif is_union_origin(origin): + return self._union_schema(obj) + elif origin in TUPLE_TYPES: + return self._tuple_schema(obj) + elif origin in LIST_TYPES: + return self._list_schema(self._get_first_arg_or_any(obj)) + elif origin in SET_TYPES: + return self._set_schema(self._get_first_arg_or_any(obj)) + elif origin in FROZEN_SET_TYPES: + return self._frozenset_schema(self._get_first_arg_or_any(obj)) + elif origin in DICT_TYPES: + return self._dict_schema(*self._get_first_two_args_or_any(obj)) + elif origin in PATH_TYPES: + return self._path_schema(origin, self._get_first_arg_or_any(obj)) + elif origin in DEQUE_TYPES: + return self._deque_schema(self._get_first_arg_or_any(obj)) + elif origin in MAPPING_TYPES: + return self._mapping_schema(origin, *self._get_first_two_args_or_any(obj)) + elif origin in COUNTER_TYPES: + return self._mapping_schema(origin, self._get_first_arg_or_any(obj), int) + elif is_typeddict(origin): + return self._typed_dict_schema(obj, origin) + elif origin in TYPE_TYPES: + return self._subclass_schema(obj) + elif origin in SEQUENCE_TYPES: + return self._sequence_schema(self._get_first_arg_or_any(obj)) + elif origin in ITERABLE_TYPES: + return self._iterable_schema(obj) + elif origin in PATTERN_TYPES: + return self._pattern_schema(obj) + + if self._arbitrary_types: + return self._arbitrary_type_schema(origin) + return self._unknown_type_schema(obj) + + def _generate_td_field_schema( + self, + name: str, + field_info: FieldInfo, + decorators: DecoratorInfos, + *, + required: bool = True, + ) -> core_schema.TypedDictField: + """Prepare a TypedDictField to represent a model or typeddict field.""" + schema, metadata = self._common_field_schema(name, field_info, decorators) + return core_schema.typed_dict_field( + schema, + required=False if not field_info.is_required() else required, + serialization_exclude=field_info.exclude, + validation_alias=_convert_to_aliases(field_info.validation_alias), + serialization_alias=field_info.serialization_alias, + serialization_exclude_if=field_info.exclude_if, + metadata=metadata, + ) + + def _generate_md_field_schema( + self, + name: str, + field_info: FieldInfo, + decorators: DecoratorInfos, + ) -> core_schema.ModelField: + """Prepare a ModelField to represent a model field.""" + schema, metadata = self._common_field_schema(name, field_info, decorators) + return core_schema.model_field( + schema, + serialization_exclude=field_info.exclude, + 
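Parametrized origins are routed through `_match_generic_type()` above, which recurses into the type arguments; this is observable with any nested builtin generic (values arbitrary):

```python
from pydantic import TypeAdapter

# dict -> _dict_schema, list -> _list_schema, int -> the plain int schema:
ta = TypeAdapter(dict[str, list[int]])
print(ta.validate_python({'a': ['1', 2]}))  # {'a': [1, 2]}
```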
validation_alias=_convert_to_aliases(field_info.validation_alias), + serialization_alias=field_info.serialization_alias, + serialization_exclude_if=field_info.exclude_if, + frozen=field_info.frozen, + metadata=metadata, + ) + + def _generate_dc_field_schema( + self, + name: str, + field_info: FieldInfo, + decorators: DecoratorInfos, + ) -> core_schema.DataclassField: + """Prepare a DataclassField to represent the parameter/field, of a dataclass.""" + schema, metadata = self._common_field_schema(name, field_info, decorators) + return core_schema.dataclass_field( + name, + schema, + init=field_info.init, + init_only=field_info.init_var or None, + kw_only=None if field_info.kw_only else False, + serialization_exclude=field_info.exclude, + validation_alias=_convert_to_aliases(field_info.validation_alias), + serialization_alias=field_info.serialization_alias, + serialization_exclude_if=field_info.exclude_if, + frozen=field_info.frozen, + metadata=metadata, + ) + + def _common_field_schema( # C901 + self, name: str, field_info: FieldInfo, decorators: DecoratorInfos + ) -> tuple[CoreSchema, dict[str, Any]]: + source_type, annotations = field_info.annotation, field_info.metadata + + def set_discriminator(schema: CoreSchema) -> CoreSchema: + schema = self._apply_discriminator_to_union(schema, field_info.discriminator) + return schema + + # Convert `@field_validator` decorators to `Before/After/Plain/WrapValidator` instances: + validators_from_decorators = [ + _mode_to_validator[decorator.info.mode]._from_decorator(decorator) + for decorator in filter_field_decorator_info_by_field(decorators.field_validators.values(), name) + ] + + with self.field_name_stack.push(name): + if field_info.discriminator is not None: + schema = self._apply_annotations( + source_type, annotations + validators_from_decorators, transform_inner_schema=set_discriminator + ) + else: + schema = self._apply_annotations( + source_type, + annotations + validators_from_decorators, + ) + + # This V1 compatibility shim should eventually be removed + # push down any `each_item=True` validators + # note that this won't work for any Annotated types that get wrapped by a function validator + # but that's okay because that didn't exist in V1 + this_field_validators = filter_field_decorator_info_by_field(decorators.validators.values(), name) + if _validators_require_validate_default(this_field_validators): + field_info.validate_default = True + each_item_validators = [v for v in this_field_validators if v.info.each_item is True] + this_field_validators = [v for v in this_field_validators if v not in each_item_validators] + schema = apply_each_item_validators(schema, each_item_validators) + + schema = apply_validators(schema, this_field_validators) + + # the default validator needs to go outside of any other validators + # so that it is the topmost validator for the field validator + # which uses it to check if the field has a default value or not + if not field_info.is_required(): + schema = wrap_default(field_info, schema) + + schema = self._apply_field_serializers( + schema, filter_field_decorator_info_by_field(decorators.field_serializers.values(), name) + ) + + pydantic_js_updates, pydantic_js_extra = _extract_json_schema_info_from_field_info(field_info) + core_metadata: dict[str, Any] = {} + update_core_metadata( + core_metadata, pydantic_js_updates=pydantic_js_updates, pydantic_js_extra=pydantic_js_extra + ) + + return schema, core_metadata + + def _union_schema(self, union_type: Any) -> core_schema.CoreSchema: + """Generate schema 
for a Union.""" + args = self._get_args_resolving_forward_refs(union_type, required=True) + choices: list[CoreSchema] = [] + nullable = False + for arg in args: + if arg is None or arg is _typing_extra.NoneType: + nullable = True + else: + choices.append(self.generate_schema(arg)) + + if len(choices) == 1: + s = choices[0] + else: + choices_with_tags: list[CoreSchema | tuple[CoreSchema, str]] = [] + for choice in choices: + tag = cast(CoreMetadata, choice.get('metadata', {})).get('pydantic_internal_union_tag_key') + if tag is not None: + choices_with_tags.append((choice, tag)) + else: + choices_with_tags.append(choice) + s = core_schema.union_schema(choices_with_tags) + + if nullable: + s = core_schema.nullable_schema(s) + return s + + def _type_alias_type_schema(self, obj: TypeAliasType) -> CoreSchema: + with self.defs.get_schema_or_ref(obj) as (ref, maybe_schema): + if maybe_schema is not None: + return maybe_schema + + origin: TypeAliasType = get_origin(obj) or obj + typevars_map = get_standard_typevars_map(obj) + + with self._ns_resolver.push(origin): + try: + annotation = _typing_extra.eval_type(origin.__value__, *self._types_namespace) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + annotation = replace_types(annotation, typevars_map) + schema = self.generate_schema(annotation) + assert schema['type'] != 'definitions' + schema['ref'] = ref # type: ignore + return self.defs.create_definition_reference_schema(schema) + + def _literal_schema(self, literal_type: Any) -> CoreSchema: + """Generate schema for a Literal.""" + expected = list(get_literal_values(literal_type, type_check=False, unpack_type_aliases='eager')) + assert expected, f'literal "expected" cannot be empty, obj={literal_type}' + schema = core_schema.literal_schema(expected) + + if self._config_wrapper.use_enum_values and any(isinstance(v, Enum) for v in expected): + schema = core_schema.no_info_after_validator_function( + lambda v: v.value if isinstance(v, Enum) else v, schema + ) + + return schema + + def _typed_dict_schema(self, typed_dict_cls: Any, origin: Any) -> core_schema.CoreSchema: + """Generate a core schema for a `TypedDict` class. + + To be able to build a `DecoratorInfos` instance for the `TypedDict` class (which will include + validators, serializers, etc.), we need to have access to the original bases of the class + (see https://docs.python.org/3/library/types.html#types.get_original_bases). + However, the `__orig_bases__` attribute was only added in 3.12 (https://github.com/python/cpython/pull/103698). + + For this reason, we require Python 3.12 (or using the `typing_extensions` backport). 
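+
+        For example (illustrative), on Python 3.11 a `TypedDict` must be imported from
+        `typing_extensions` rather than `typing`; `typing.TypedDict` is rejected below with a
+        `typed-dict-version` error.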
+ """ + FieldInfo = import_cached_field_info() + + with ( + self.model_type_stack.push(typed_dict_cls), + self.defs.get_schema_or_ref(typed_dict_cls) as ( + typed_dict_ref, + maybe_schema, + ), + ): + if maybe_schema is not None: + return maybe_schema + + typevars_map = get_standard_typevars_map(typed_dict_cls) + if origin is not None: + typed_dict_cls = origin + + if not _SUPPORTS_TYPEDDICT and type(typed_dict_cls).__module__ == 'typing': + raise PydanticUserError( + 'Please use `typing_extensions.TypedDict` instead of `typing.TypedDict` on Python < 3.12.', + code='typed-dict-version', + ) + + try: + # if a typed dictionary class doesn't have config, we use the parent's config, hence a default of `None` + # see https://github.com/pydantic/pydantic/issues/10917 + config: ConfigDict | None = get_attribute_from_bases(typed_dict_cls, '__pydantic_config__') + except AttributeError: + config = None + + with self._config_wrapper_stack.push(config): + core_config = self._config_wrapper.core_config(title=typed_dict_cls.__name__) + + required_keys: frozenset[str] = typed_dict_cls.__required_keys__ + + fields: dict[str, core_schema.TypedDictField] = {} + + decorators = DecoratorInfos.build(typed_dict_cls, replace_wrapped_methods=False) + decorators.update_from_config(self._config_wrapper) + + if self._config_wrapper.use_attribute_docstrings: + field_docstrings = extract_docstrings_from_cls(typed_dict_cls, use_inspect=True) + else: + field_docstrings = None + + try: + annotations = _typing_extra.get_cls_type_hints(typed_dict_cls, ns_resolver=self._ns_resolver) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + + readonly_fields: list[str] = [] + + for field_name, annotation in annotations.items(): + field_info = FieldInfo.from_annotation(annotation, _source=AnnotationSource.TYPED_DICT) + field_info.annotation = replace_types(field_info.annotation, typevars_map) + + required = ( + field_name in required_keys or 'required' in field_info._qualifiers + ) and 'not_required' not in field_info._qualifiers + if 'read_only' in field_info._qualifiers: + readonly_fields.append(field_name) + + if ( + field_docstrings is not None + and field_info.description is None + and field_name in field_docstrings + ): + field_info.description = field_docstrings[field_name] + update_field_from_config(self._config_wrapper, field_name, field_info) + + fields[field_name] = self._generate_td_field_schema( + field_name, field_info, decorators, required=required + ) + + if readonly_fields: + fields_repr = ', '.join(repr(f) for f in readonly_fields) + plural = len(readonly_fields) >= 2 + warnings.warn( + f'Item{"s" if plural else ""} {fields_repr} on TypedDict class {typed_dict_cls.__name__!r} ' + f'{"are" if plural else "is"} using the `ReadOnly` qualifier. Pydantic will not protect items ' + 'from any mutation on dictionary instances.', + UserWarning, + ) + + extra_behavior: core_schema.ExtraBehavior = 'ignore' + extras_schema: CoreSchema | None = None # For 'allow', equivalent to `Any` - no validation performed. 
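+
+                # (Context, illustrative: PEP 728 lets a TypedDict be declared with e.g.
+                # `class TD(TypedDict, closed=True): ...` or `class TD(TypedDict, extra_items=int): ...`;
+                # `typing_extensions` surfaces these as the `__closed__` and `__extra_items__`
+                # attributes inspected below.)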
+
+                # `__closed__` is `None` when not specified (equivalent to `False`):
+                is_closed = bool(getattr(typed_dict_cls, '__closed__', False))
+                extra_items = getattr(typed_dict_cls, '__extra_items__', typing_extensions.NoExtraItems)
+                if is_closed:
+                    extra_behavior = 'forbid'
+                    extras_schema = None
+                elif not typing_objects.is_noextraitems(extra_items):
+                    extra_behavior = 'allow'
+                    extras_schema = self.generate_schema(replace_types(extra_items, typevars_map))
+
+                if (config_extra := self._config_wrapper.extra) in ('allow', 'forbid'):
+                    if is_closed and config_extra == 'allow':
+                        warnings.warn(
+                            f"TypedDict class {typed_dict_cls.__qualname__!r} is closed, but 'extra' configuration "
+                            "is set to `'allow'`. The 'extra' configuration value will be ignored.",
+                            category=TypedDictExtraConfigWarning,
+                        )
+                    elif not typing_objects.is_noextraitems(extra_items) and config_extra == 'forbid':
+                        warnings.warn(
+                            f"TypedDict class {typed_dict_cls.__qualname__!r} allows extra items, but 'extra' "
+                            "configuration is set to `'forbid'`. The 'extra' configuration value will be ignored.",
+                            category=TypedDictExtraConfigWarning,
+                        )
+                    else:
+                        extra_behavior = config_extra
+
+                td_schema = core_schema.typed_dict_schema(
+                    fields,
+                    cls=typed_dict_cls,
+                    computed_fields=[
+                        self._computed_field_schema(d, decorators.field_serializers)
+                        for d in decorators.computed_fields.values()
+                    ],
+                    extra_behavior=extra_behavior,
+                    extras_schema=extras_schema,
+                    ref=typed_dict_ref,
+                    config=core_config,
+                )
+
+                schema = self._apply_model_serializers(td_schema, decorators.model_serializers.values())
+                schema = apply_model_validators(schema, decorators.model_validators.values(), 'all')
+                return self.defs.create_definition_reference_schema(schema)
+
+    def _namedtuple_schema(self, namedtuple_cls: Any, origin: Any) -> core_schema.CoreSchema:
+        """Generate schema for a NamedTuple."""
+        with (
+            self.model_type_stack.push(namedtuple_cls),
+            self.defs.get_schema_or_ref(namedtuple_cls) as (
+                namedtuple_ref,
+                maybe_schema,
+            ),
+        ):
+            if maybe_schema is not None:
+                return maybe_schema
+            typevars_map = get_standard_typevars_map(namedtuple_cls)
+            if origin is not None:
+                namedtuple_cls = origin
+
+            try:
+                annotations = _typing_extra.get_cls_type_hints(namedtuple_cls, ns_resolver=self._ns_resolver)
+            except NameError as e:
+                raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+            # Filter annotations to only include fields that are actually in the NamedTuple
+            # (as subclassing an existing NamedTuple is not supported yet - see https://github.com/python/typing/issues/427)
+            # and use `Any` if no annotation exists (i.e. when using `collections.namedtuple()`).
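+            # For example (illustrative): `collections.namedtuple('Point', ['x', 'y'])` carries no
+            # annotations at all, so both `x` and `y` fall back to `Any` below.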
+            annotations = {field_name: annotations.get(field_name, Any) for field_name in namedtuple_cls._fields}
+
+            if typevars_map:
+                annotations = {
+                    field_name: replace_types(annotation, typevars_map)
+                    for field_name, annotation in annotations.items()
+                }
+
+            arguments_schema = core_schema.arguments_schema(
+                [
+                    self._generate_parameter_schema(
+                        field_name,
+                        annotation,
+                        source=AnnotationSource.NAMED_TUPLE,
+                        default=namedtuple_cls._field_defaults.get(field_name, Parameter.empty),
+                    )
+                    for field_name, annotation in annotations.items()
+                ],
+                metadata={'pydantic_js_prefer_positional_arguments': True},
+            )
+            schema = core_schema.call_schema(arguments_schema, namedtuple_cls, ref=namedtuple_ref)
+            return self.defs.create_definition_reference_schema(schema)
+
+    def _generate_parameter_schema(
+        self,
+        name: str,
+        annotation: type[Any],
+        source: AnnotationSource,
+        default: Any = Parameter.empty,
+        mode: Literal['positional_only', 'positional_or_keyword', 'keyword_only'] | None = None,
+    ) -> core_schema.ArgumentsParameter:
+        """Generate the definition of a field in a namedtuple or a parameter in a function signature.
+
+        This definition is meant to be used for the `'arguments'` core schema, which will be replaced
+        in V3 by the `'arguments-v3'` schema.
+        """
+        FieldInfo = import_cached_field_info()
+
+        if default is Parameter.empty:
+            field = FieldInfo.from_annotation(annotation, _source=source)
+        else:
+            field = FieldInfo.from_annotated_attribute(annotation, default, _source=source)
+
+        assert field.annotation is not None, 'field.annotation should not be None when generating a schema'
+        update_field_from_config(self._config_wrapper, name, field)
+
+        with self.field_name_stack.push(name):
+            schema = self._apply_annotations(
+                field.annotation,
+                [field],
+                # Because we pass `field` as metadata above (required for attributes relevant for
+                # JSON Schema generation), we need to ignore the potential warnings about `FieldInfo`
+                # attributes that will not be used:
+                check_unsupported_field_info_attributes=False,
+            )
+
+        if not field.is_required():
+            schema = wrap_default(field, schema)
+
+        parameter_schema = core_schema.arguments_parameter(
+            name,
+            schema,
+            mode=mode,
+            alias=_convert_to_aliases(field.validation_alias),
+        )
+
+        return parameter_schema
+
+    def _generate_parameter_v3_schema(
+        self,
+        name: str,
+        annotation: Any,
+        source: AnnotationSource,
+        mode: Literal[
+            'positional_only',
+            'positional_or_keyword',
+            'keyword_only',
+            'var_args',
+            'var_kwargs_uniform',
+            'var_kwargs_unpacked_typed_dict',
+        ],
+        default: Any = Parameter.empty,
+    ) -> core_schema.ArgumentsV3Parameter:
+        """Generate the definition of a parameter in a function signature.
+
+        This definition is meant to be used for the `'arguments-v3'` core schema, which will replace
+        the `'arguments'` schema in V3.
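+
+        For example (illustrative), for `def f(a, /, b, *args, c, **kwargs): ...`, the parameters map
+        to the modes `'positional_only'`, `'positional_or_keyword'`, `'var_args'`, `'keyword_only'`
+        and `'var_kwargs_uniform'`, respectively.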
+        """
+        FieldInfo = import_cached_field_info()
+
+        if default is Parameter.empty:
+            field = FieldInfo.from_annotation(annotation, _source=source)
+        else:
+            field = FieldInfo.from_annotated_attribute(annotation, default, _source=source)
+        update_field_from_config(self._config_wrapper, name, field)
+
+        with self.field_name_stack.push(name):
+            schema = self._apply_annotations(
+                field.annotation,
+                [field],
+                # Because we pass `field` as metadata above (required for attributes relevant for
+                # JSON Schema generation), we need to ignore the potential warnings about `FieldInfo`
+                # attributes that will not be used:
+                check_unsupported_field_info_attributes=False,
+            )
+
+        if not field.is_required():
+            schema = wrap_default(field, schema)
+
+        parameter_schema = core_schema.arguments_v3_parameter(
+            name=name,
+            schema=schema,
+            mode=mode,
+            alias=_convert_to_aliases(field.validation_alias),
+        )
+
+        return parameter_schema
+
+    def _tuple_schema(self, tuple_type: Any) -> core_schema.CoreSchema:
+        """Generate schema for a Tuple, e.g. `tuple[int, str]` or `tuple[int, ...]`."""
+        # TODO: do we really need to resolve type vars here?
+        typevars_map = get_standard_typevars_map(tuple_type)
+        params = self._get_args_resolving_forward_refs(tuple_type)
+
+        if typevars_map and params:
+            params = tuple(replace_types(param, typevars_map) for param in params)
+
+        # NOTE: subtle difference: `tuple[()]` gives `params=()`, whereas `typing.Tuple[()]` gives `params=((),)`
+        # This is only true for <3.11, on Python 3.11+ `typing.Tuple[()]` gives `params=()`
+        if not params:
+            if tuple_type in TUPLE_TYPES:
+                return core_schema.tuple_schema([core_schema.any_schema()], variadic_item_index=0)
+            else:
+                # special case for `tuple[()]` which means `tuple[]` - an empty tuple
+                return core_schema.tuple_schema([])
+        elif params[-1] is Ellipsis:
+            if len(params) == 2:
+                return core_schema.tuple_schema([self.generate_schema(params[0])], variadic_item_index=0)
+            else:
+                # TODO: something like https://github.com/pydantic/pydantic/issues/5952
+                raise ValueError('Variable tuples can only have one type')
+        elif len(params) == 1 and params[0] == ():
+            # special case for `tuple[()]` which means `tuple[]` - an empty tuple
+            # NOTE: This conditional can be removed when we drop support for Python 3.10.
+            return core_schema.tuple_schema([])
+        else:
+            return core_schema.tuple_schema([self.generate_schema(param) for param in params])
+
+    def _type_schema(self) -> core_schema.CoreSchema:
+        return core_schema.custom_error_schema(
+            core_schema.is_instance_schema(type),
+            custom_error_type='is_type',
+            custom_error_message='Input should be a type',
+        )
+
+    def _zoneinfo_schema(self) -> core_schema.CoreSchema:
+        """Generate schema for a `zoneinfo.ZoneInfo` object."""
+        from ._validators import validate_str_is_valid_iana_tz
+
+        metadata = {'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': 'zoneinfo'}]}
+        return core_schema.no_info_plain_validator_function(
+            validate_str_is_valid_iana_tz,
+            serialization=core_schema.to_string_ser_schema(),
+            metadata=metadata,
+        )
+
+    def _union_is_subclass_schema(self, union_type: Any) -> core_schema.CoreSchema:
+        """Generate schema for `type[Union[X, ...]]`."""
+        args = self._get_args_resolving_forward_refs(union_type, required=True)
+        return core_schema.union_schema([self.generate_schema(type[arg]) for arg in args])
+
+    def _subclass_schema(self, type_: Any) -> core_schema.CoreSchema:
+        """Generate schema for a type, e.g.
`type[int]`.""" + type_param = self._get_first_arg_or_any(type_) + + # Assume `type[Annotated[, ...]]` is equivalent to `type[]`: + type_param = _typing_extra.annotated_type(type_param) or type_param + + if typing_objects.is_any(type_param): + return self._type_schema() + elif typing_objects.is_typealiastype(type_param): + return self.generate_schema(type[type_param.__value__]) + elif typing_objects.is_typevar(type_param): + if type_param.__bound__: + if is_union_origin(get_origin(type_param.__bound__)): + return self._union_is_subclass_schema(type_param.__bound__) + return core_schema.is_subclass_schema(type_param.__bound__) + elif type_param.__constraints__: + return core_schema.union_schema([self.generate_schema(type[c]) for c in type_param.__constraints__]) + else: + return self._type_schema() + elif is_union_origin(get_origin(type_param)): + return self._union_is_subclass_schema(type_param) + else: + if typing_objects.is_self(type_param): + type_param = self._resolve_self_type(type_param) + if _typing_extra.is_generic_alias(type_param): + raise PydanticUserError( + 'Subscripting `type[]` with an already parametrized type is not supported. ' + f'Instead of using type[{type_param!r}], use type[{_repr.display_as_type(get_origin(type_param))}].', + code=None, + ) + if not inspect.isclass(type_param): + # when using type[None], this doesn't type convert to type[NoneType], and None isn't a class + # so we handle it manually here + if type_param is None: + return core_schema.is_subclass_schema(_typing_extra.NoneType) + raise TypeError(f'Expected a class, got {type_param!r}') + return core_schema.is_subclass_schema(type_param) + + def _sequence_schema(self, items_type: Any) -> core_schema.CoreSchema: + """Generate schema for a Sequence, e.g. `Sequence[int]`.""" + from ._serializers import serialize_sequence_via_list + + item_type_schema = self.generate_schema(items_type) + list_schema = core_schema.list_schema(item_type_schema) + + json_schema = smart_deepcopy(list_schema) + python_schema = core_schema.is_instance_schema(typing.Sequence, cls_repr='Sequence') + if not typing_objects.is_any(items_type): + from ._validators import sequence_validator + + python_schema = core_schema.chain_schema( + [python_schema, core_schema.no_info_wrap_validator_function(sequence_validator, list_schema)], + ) + + serialization = core_schema.wrap_serializer_function_ser_schema( + serialize_sequence_via_list, schema=item_type_schema, info_arg=True + ) + return core_schema.json_or_python_schema( + json_schema=json_schema, python_schema=python_schema, serialization=serialization + ) + + def _iterable_schema(self, type_: Any) -> core_schema.GeneratorSchema: + """Generate a schema for an `Iterable`.""" + item_type = self._get_first_arg_or_any(type_) + + return core_schema.generator_schema(self.generate_schema(item_type)) + + def _pattern_schema(self, pattern_type: Any) -> core_schema.CoreSchema: + from . 
import _validators + + metadata = {'pydantic_js_functions': [lambda _1, _2: {'type': 'string', 'format': 'regex'}]} + ser = core_schema.plain_serializer_function_ser_schema( + attrgetter('pattern'), when_used='json', return_schema=core_schema.str_schema() + ) + if pattern_type is typing.Pattern or pattern_type is re.Pattern: + # bare type + return core_schema.no_info_plain_validator_function( + _validators.pattern_either_validator, serialization=ser, metadata=metadata + ) + + param = self._get_args_resolving_forward_refs( + pattern_type, + required=True, + )[0] + if param is str: + return core_schema.no_info_plain_validator_function( + _validators.pattern_str_validator, serialization=ser, metadata=metadata + ) + elif param is bytes: + return core_schema.no_info_plain_validator_function( + _validators.pattern_bytes_validator, serialization=ser, metadata=metadata + ) + else: + raise PydanticSchemaGenerationError(f'Unable to generate pydantic-core schema for {pattern_type!r}.') + + def _hashable_schema(self) -> core_schema.CoreSchema: + return core_schema.custom_error_schema( + schema=core_schema.json_or_python_schema( + json_schema=core_schema.chain_schema( + [core_schema.any_schema(), core_schema.is_instance_schema(collections.abc.Hashable)] + ), + python_schema=core_schema.is_instance_schema(collections.abc.Hashable), + ), + custom_error_type='is_hashable', + custom_error_message='Input should be hashable', + ) + + def _dataclass_schema( + self, dataclass: type[StandardDataclass], origin: type[StandardDataclass] | None + ) -> core_schema.CoreSchema: + """Generate schema for a dataclass.""" + with ( + self.model_type_stack.push(dataclass), + self.defs.get_schema_or_ref(dataclass) as ( + dataclass_ref, + maybe_schema, + ), + ): + if maybe_schema is not None: + return maybe_schema + + schema = dataclass.__dict__.get('__pydantic_core_schema__') + if schema is not None and not isinstance(schema, MockCoreSchema): + if schema['type'] == 'definitions': + schema = self.defs.unpack_definitions(schema) + ref = get_ref(schema) + if ref: + return self.defs.create_definition_reference_schema(schema) + else: + return schema + + typevars_map = get_standard_typevars_map(dataclass) + if origin is not None: + dataclass = origin + + # if (plain) dataclass doesn't have config, we use the parent's config, hence a default of `None` + # (Pydantic dataclasses have an empty dict config by default). + # see https://github.com/pydantic/pydantic/issues/10917 + config = getattr(dataclass, '__pydantic_config__', None) + + from ..dataclasses import is_pydantic_dataclass + + with self._ns_resolver.push(dataclass), self._config_wrapper_stack.push(config): + if is_pydantic_dataclass(dataclass): + if dataclass.__pydantic_fields_complete__(): + # Copy the field info instances to avoid mutating the `FieldInfo` instances + # of the generic dataclass generic origin (e.g. `apply_typevars_map` below). 
+ # Note that we don't apply `deepcopy` on `__pydantic_fields__` because we + # don't want to copy the `FieldInfo` attributes: + fields = { + f_name: copy(field_info) for f_name, field_info in dataclass.__pydantic_fields__.items() + } + if typevars_map: + for field in fields.values(): + field.apply_typevars_map(typevars_map, *self._types_namespace) + else: + try: + fields = rebuild_dataclass_fields( + dataclass, + config_wrapper=self._config_wrapper, + ns_resolver=self._ns_resolver, + typevars_map=typevars_map or {}, + ) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + else: + fields = collect_dataclass_fields( + dataclass, + typevars_map=typevars_map, + config_wrapper=self._config_wrapper, + ) + + if self._config_wrapper.extra == 'allow': + # disallow combination of init=False on a dataclass field and extra='allow' on a dataclass + for field_name, field in fields.items(): + if field.init is False: + raise PydanticUserError( + f'Field {field_name} has `init=False` and dataclass has config setting `extra="allow"`. ' + f'This combination is not allowed.', + code='dataclass-init-false-extra-allow', + ) + + decorators = dataclass.__dict__.get('__pydantic_decorators__') + if decorators is None: + decorators = DecoratorInfos.build(dataclass, replace_wrapped_methods=False) + decorators.update_from_config(self._config_wrapper) + # Move kw_only=False args to the start of the list, as this is how vanilla dataclasses work. + # Note that when kw_only is missing or None, it is treated as equivalent to kw_only=True + args = sorted( + (self._generate_dc_field_schema(k, v, decorators) for k, v in fields.items()), + key=lambda a: a.get('kw_only') is not False, + ) + has_post_init = hasattr(dataclass, '__post_init__') + has_slots = hasattr(dataclass, '__slots__') + + args_schema = core_schema.dataclass_args_schema( + dataclass.__name__, + args, + computed_fields=[ + self._computed_field_schema(d, decorators.field_serializers) + for d in decorators.computed_fields.values() + ], + collect_init_only=has_post_init, + ) + + inner_schema = apply_validators(args_schema, decorators.root_validators.values()) + + model_validators = decorators.model_validators.values() + inner_schema = apply_model_validators(inner_schema, model_validators, 'inner') + + core_config = self._config_wrapper.core_config(title=dataclass.__name__) + + dc_schema = core_schema.dataclass_schema( + dataclass, + inner_schema, + generic_origin=origin, + post_init=has_post_init, + ref=dataclass_ref, + fields=[field.name for field in dataclasses.fields(dataclass)], + slots=has_slots, + config=core_config, + # we don't use a custom __setattr__ for dataclasses, so we must + # pass along the frozen config setting to the pydantic-core schema + frozen=self._config_wrapper_stack.tail.frozen, + ) + schema = self._apply_model_serializers(dc_schema, decorators.model_serializers.values()) + schema = apply_model_validators(schema, model_validators, 'outer') + return self.defs.create_definition_reference_schema(schema) + + def _call_schema(self, function: ValidateCallSupportedTypes) -> core_schema.CallSchema: + """Generate schema for a Callable. 
+ + TODO support functional validators once we support them in Config + """ + arguments_schema = self._arguments_schema(function) + + return_schema: core_schema.CoreSchema | None = None + config_wrapper = self._config_wrapper + if config_wrapper.validate_return: + sig = _typing_extra.signature_no_eval(function) + return_hint = sig.return_annotation + if return_hint is not sig.empty: + globalns, localns = self._types_namespace + type_hints = _typing_extra.get_function_type_hints( + function, globalns=globalns, localns=localns, include_keys={'return'} + ) + return_schema = self.generate_schema(type_hints['return']) + + return core_schema.call_schema( + arguments_schema, + function, + return_schema=return_schema, + ) + + def _arguments_schema( + self, function: ValidateCallSupportedTypes, parameters_callback: ParametersCallback | None = None + ) -> core_schema.ArgumentsSchema: + """Generate schema for a Signature.""" + mode_lookup: dict[_ParameterKind, Literal['positional_only', 'positional_or_keyword', 'keyword_only']] = { + Parameter.POSITIONAL_ONLY: 'positional_only', + Parameter.POSITIONAL_OR_KEYWORD: 'positional_or_keyword', + Parameter.KEYWORD_ONLY: 'keyword_only', + } + + sig = _typing_extra.signature_no_eval(function) + globalns, localns = self._types_namespace + type_hints = _typing_extra.get_function_type_hints(function, globalns=globalns, localns=localns) + + arguments_list: list[core_schema.ArgumentsParameter] = [] + var_args_schema: core_schema.CoreSchema | None = None + var_kwargs_schema: core_schema.CoreSchema | None = None + var_kwargs_mode: core_schema.VarKwargsMode | None = None + + for i, (name, p) in enumerate(sig.parameters.items()): + if p.annotation is sig.empty: + annotation = typing.cast(Any, Any) + else: + annotation = type_hints[name] + + if parameters_callback is not None: + result = parameters_callback(i, name, annotation) + if result == 'skip': + continue + + parameter_mode = mode_lookup.get(p.kind) + if parameter_mode is not None: + arg_schema = self._generate_parameter_schema( + name, annotation, AnnotationSource.FUNCTION, p.default, parameter_mode + ) + arguments_list.append(arg_schema) + elif p.kind == Parameter.VAR_POSITIONAL: + var_args_schema = self.generate_schema(annotation) + else: + assert p.kind == Parameter.VAR_KEYWORD, p.kind + + unpack_type = _typing_extra.unpack_type(annotation) + if unpack_type is not None: + origin = get_origin(unpack_type) or unpack_type + if not is_typeddict(origin): + raise PydanticUserError( + f'Expected a `TypedDict` class inside `Unpack[...]`, got {unpack_type!r}', + code='unpack-typed-dict', + ) + non_pos_only_param_names = { + name for name, p in sig.parameters.items() if p.kind != Parameter.POSITIONAL_ONLY + } + overlapping_params = non_pos_only_param_names.intersection(origin.__annotations__) + if overlapping_params: + raise PydanticUserError( + f'Typed dictionary {origin.__name__!r} overlaps with parameter' + f'{"s" if len(overlapping_params) >= 2 else ""} ' + f'{", ".join(repr(p) for p in sorted(overlapping_params))}', + code='overlapping-unpack-typed-dict', + ) + + var_kwargs_mode = 'unpacked-typed-dict' + var_kwargs_schema = self._typed_dict_schema(unpack_type, get_origin(unpack_type)) + else: + var_kwargs_mode = 'uniform' + var_kwargs_schema = self.generate_schema(annotation) + + return core_schema.arguments_schema( + arguments_list, + var_args_schema=var_args_schema, + var_kwargs_mode=var_kwargs_mode, + var_kwargs_schema=var_kwargs_schema, + validate_by_name=self._config_wrapper.validate_by_name, + ) + + def 
_arguments_v3_schema( + self, function: ValidateCallSupportedTypes, parameters_callback: ParametersCallback | None = None + ) -> core_schema.ArgumentsV3Schema: + mode_lookup: dict[ + _ParameterKind, Literal['positional_only', 'positional_or_keyword', 'var_args', 'keyword_only'] + ] = { + Parameter.POSITIONAL_ONLY: 'positional_only', + Parameter.POSITIONAL_OR_KEYWORD: 'positional_or_keyword', + Parameter.VAR_POSITIONAL: 'var_args', + Parameter.KEYWORD_ONLY: 'keyword_only', + } + + sig = _typing_extra.signature_no_eval(function) + globalns, localns = self._types_namespace + type_hints = _typing_extra.get_function_type_hints(function, globalns=globalns, localns=localns) + + parameters_list: list[core_schema.ArgumentsV3Parameter] = [] + + for i, (name, p) in enumerate(sig.parameters.items()): + if parameters_callback is not None: + result = parameters_callback(i, name, p.annotation) + if result == 'skip': + continue + + if p.annotation is Parameter.empty: + annotation = typing.cast(Any, Any) + else: + annotation = type_hints[name] + + parameter_mode = mode_lookup.get(p.kind) + if parameter_mode is None: + assert p.kind == Parameter.VAR_KEYWORD, p.kind + + unpack_type = _typing_extra.unpack_type(annotation) + if unpack_type is not None: + origin = get_origin(unpack_type) or unpack_type + if not is_typeddict(origin): + raise PydanticUserError( + f'Expected a `TypedDict` class inside `Unpack[...]`, got {unpack_type!r}', + code='unpack-typed-dict', + ) + non_pos_only_param_names = { + name for name, p in sig.parameters.items() if p.kind != Parameter.POSITIONAL_ONLY + } + overlapping_params = non_pos_only_param_names.intersection(origin.__annotations__) + if overlapping_params: + raise PydanticUserError( + f'Typed dictionary {origin.__name__!r} overlaps with parameter' + f'{"s" if len(overlapping_params) >= 2 else ""} ' + f'{", ".join(repr(p) for p in sorted(overlapping_params))}', + code='overlapping-unpack-typed-dict', + ) + parameter_mode = 'var_kwargs_unpacked_typed_dict' + annotation = unpack_type + else: + parameter_mode = 'var_kwargs_uniform' + + parameters_list.append( + self._generate_parameter_v3_schema( + name, annotation, AnnotationSource.FUNCTION, parameter_mode, default=p.default + ) + ) + + return core_schema.arguments_v3_schema( + parameters_list, + validate_by_name=self._config_wrapper.validate_by_name, + ) + + def _unsubstituted_typevar_schema(self, typevar: typing.TypeVar) -> core_schema.CoreSchema: + try: + has_default = typevar.has_default() # pyright: ignore[reportAttributeAccessIssue] + except AttributeError: + # Happens if using `typing.TypeVar` (and not `typing_extensions`) on Python < 3.13 + pass + else: + if has_default: + return self.generate_schema(typevar.__default__) # pyright: ignore[reportAttributeAccessIssue] + + if constraints := typevar.__constraints__: + return self._union_schema(typing.Union[constraints]) + + if bound := typevar.__bound__: + schema = self.generate_schema(bound) + schema['serialization'] = core_schema.simple_ser_schema('any') + return schema + + return core_schema.any_schema() + + def _computed_field_schema( + self, + d: Decorator[ComputedFieldInfo], + field_serializers: dict[str, Decorator[FieldSerializerDecoratorInfo]], + ) -> core_schema.ComputedField: + if d.info.return_type is not PydanticUndefined: + return_type = d.info.return_type + else: + try: + # Do not pass in globals as the function could be defined in a different module. 
+                # Instead, let `get_callable_return_type` infer the globals to use, but still pass
+                # in locals that may contain a parent/rebuild namespace:
+                return_type = _decorators.get_callable_return_type(d.func, localns=self._types_namespace.locals)
+            except NameError as e:
+                raise PydanticUndefinedAnnotation.from_name_error(e) from e
+            if return_type is PydanticUndefined:
+                raise PydanticUserError(
+                    'Computed field is missing return type annotation or specifying `return_type`'
+                    ' to the `@computed_field` decorator (e.g. `@computed_field(return_type=int | str)`)',
+                    code='model-field-missing-annotation',
+                )
+
+        return_type = replace_types(return_type, self._typevars_map)
+        # Create a new ComputedFieldInfo so that different type parametrizations of the same
+        # generic model's computed field can have different return types.
+        d.info = dataclasses.replace(d.info, return_type=return_type)
+        return_type_schema = self.generate_schema(return_type)
+        # Apply serializers to the computed field if any exist:
+        return_type_schema = self._apply_field_serializers(
+            return_type_schema,
+            filter_field_decorator_info_by_field(field_serializers.values(), d.cls_var_name),
+        )
+
+        pydantic_js_updates, pydantic_js_extra = _extract_json_schema_info_from_field_info(d.info)
+        core_metadata: dict[str, Any] = {}
+        update_core_metadata(
+            core_metadata,
+            pydantic_js_updates={'readOnly': True, **(pydantic_js_updates if pydantic_js_updates else {})},
+            pydantic_js_extra=pydantic_js_extra,
+        )
+        exclude_if = d.info.exclude_if
+        # TODO: Should we support exclude_if from annotations?
+        return core_schema.computed_field(
+            d.cls_var_name,
+            return_schema=return_type_schema,
+            alias=d.info.alias,
+            serialization_exclude_if=exclude_if,
+            metadata=core_metadata,
+        )
+
+    def _annotated_schema(self, annotated_type: Any) -> core_schema.CoreSchema:
+        """Generate schema for an Annotated type, e.g. `Annotated[int, Field(...)]` or `Annotated[int, Gt(0)]`."""
+        FieldInfo = import_cached_field_info()
+        source_type, *annotations = self._get_args_resolving_forward_refs(
+            annotated_type,
+            required=True,
+        )
+        schema = self._apply_annotations(source_type, annotations)
+        # put the default validator last so that TypeAdapter.get_default_value() works
+        # even if there are function validators involved
+        for annotation in annotations:
+            if isinstance(annotation, FieldInfo):
+                schema = wrap_default(annotation, schema)
+        return schema
+
+    def _apply_annotations(
+        self,
+        source_type: Any,
+        annotations: list[Any],
+        transform_inner_schema: Callable[[CoreSchema], CoreSchema] = lambda x: x,
+        check_unsupported_field_info_attributes: bool = True,
+    ) -> CoreSchema:
+        """Apply arguments from `Annotated` or from `FieldInfo` to a schema.
+
+        This gets called by `GenerateSchema._annotated_schema` but differs from it in that it does
+        not expect `source_type` to be an `Annotated` object, it expects it to be the first argument of that
+        (in other words, `GenerateSchema._annotated_schema` just unpacks `Annotated`, this processes it).
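+
+        For example (illustrative), for `Annotated[int, Gt(0)]`, `_annotated_schema` unpacks the
+        `Annotated` form and calls this method with `source_type=int` and `annotations=[Gt(0)]`.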
+ """ + annotations = list(_known_annotated_metadata.expand_grouped_metadata(annotations)) + + pydantic_js_annotation_functions: list[GetJsonSchemaFunction] = [] + + def inner_handler(obj: Any) -> CoreSchema: + schema = self._generate_schema_from_get_schema_method(obj, source_type) + + if schema is None: + schema = self._generate_schema_inner(obj) + + metadata_js_function = _extract_get_pydantic_json_schema(obj) + if metadata_js_function is not None: + metadata_schema = resolve_original_schema(schema, self.defs) + if metadata_schema is not None: + self._add_js_function(metadata_schema, metadata_js_function) + return transform_inner_schema(schema) + + get_inner_schema = CallbackGetCoreSchemaHandler(inner_handler, self) + + for annotation in annotations: + if annotation is None: + continue + get_inner_schema = self._get_wrapped_inner_schema( + get_inner_schema, + annotation, + pydantic_js_annotation_functions, + check_unsupported_field_info_attributes=check_unsupported_field_info_attributes, + ) + + schema = get_inner_schema(source_type) + if pydantic_js_annotation_functions: + core_metadata = schema.setdefault('metadata', {}) + update_core_metadata(core_metadata, pydantic_js_annotation_functions=pydantic_js_annotation_functions) + return _add_custom_serialization_from_json_encoders(self._config_wrapper.json_encoders, source_type, schema) + + def _apply_single_annotation( + self, + schema: core_schema.CoreSchema, + metadata: Any, + check_unsupported_field_info_attributes: bool = True, + ) -> core_schema.CoreSchema: + FieldInfo = import_cached_field_info() + + if isinstance(metadata, FieldInfo): + if ( + check_unsupported_field_info_attributes + # HACK: we don't want to emit the warning for `FieldInfo` subclasses, because FastAPI does weird manipulations + # with its subclasses and their annotations: + and type(metadata) is FieldInfo + ): + for attr, value in (unsupported_attributes := self._get_unsupported_field_info_attributes(metadata)): + warnings.warn( + f'The {attr!r} attribute with value {value!r} was provided to the `Field()` function, ' + f'which has no effect in the context it was used. {attr!r} is field-specific metadata, ' + 'and can only be attached to a model field using `Annotated` metadata or by assignment. 
'
+                        'This may have happened because an `Annotated` type alias using the `type` statement was '
+                        'used, or if the `Field()` function was attached to a single member of a union type.',
+                        category=UnsupportedFieldAttributeWarning,
+                    )
+
+                if (
+                    metadata.default_factory_takes_validated_data
+                    and self.model_type_stack.get() is None
+                    and 'default_factory' not in dict(unsupported_attributes)
+                ):
+                    warnings.warn(
+                        "A 'default_factory' taking validated data as an argument was provided to the `Field()` "
+                        'function, but no validated data is available in the context it was used.',
+                        category=UnsupportedFieldAttributeWarning,
+                    )
+
+            for field_metadata in metadata.metadata:
+                schema = self._apply_single_annotation(schema, field_metadata)
+
+            if metadata.discriminator is not None:
+                schema = self._apply_discriminator_to_union(schema, metadata.discriminator)
+            return schema
+
+        if schema['type'] == 'nullable':
+            # for nullable schemas, metadata is automatically applied to the inner schema
+            inner = schema.get('schema', core_schema.any_schema())
+            inner = self._apply_single_annotation(inner, metadata)
+            if inner:
+                schema['schema'] = inner
+            return schema
+
+        if schema['type'] == 'union' and any(
+            choice['type'] == 'missing-sentinel' for choice in core_schema.iter_union_choices(schema)
+        ):
+            # Same behavior as for nullable schemas. This is a bit gross, but we have to support the same pattern.
+            filtered_choices = [
+                choice
+                for choice in schema['choices']
+                if (choice[0] if isinstance(choice, tuple) else choice)['type'] != 'missing-sentinel'
+            ]
+            if len(filtered_choices) >= 2:
+                # e.g. `Annotated[int | str | MISSING, Constraint(...)]`. We apply `Constraint(...)` to `int | str`,
+                # and create a new union semantically equivalent to `Annotated[int | str, Constraint(...)] | MISSING`:
+                filtered_union = core_schema.union_schema(filtered_choices)
+                filtered_union = self._apply_single_annotation(filtered_union, metadata)
+                new_union = schema.copy()
+                new_union['choices'] = [
+                    filtered_union,
+                    next(
+                        choice
+                        for choice in schema['choices']
+                        if (choice[0] if isinstance(choice, tuple) else choice)['type'] == 'missing-sentinel'
+                    ),
+                ]
+                return new_union
+            elif len(filtered_choices) == 1:
+                # e.g. `Annotated[int | MISSING, Constraint(...)]`. We apply `Constraint(...)` to `int`, and
+                # reconstruct a new union preserving the order.
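+                # Note: union choices may be plain schemas or `(schema, tag)` tuples (for tagged
+                # unions), hence the tuple checks below.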
+ inner = filtered_choices[0][0] if isinstance(filtered_choices[0], tuple) else filtered_choices[0] + inner = self._apply_single_annotation(inner, metadata) + + # Create a new union schema, preserving the order of the union: + new_union = schema.copy() + new_union['choices'] = [ + (inner, choice[1]) + if isinstance(choice, tuple) and choice[0]['type'] != 'missing-sentinel' + else inner + if not isinstance(choice, tuple) and choice['type'] != 'missing-sentinel' + else choice + for choice in schema['choices'] + ] + return new_union + + original_schema = schema + ref = schema.get('ref') + if ref is not None: + schema = schema.copy() + new_ref = ref + f'_{repr(metadata)}' + if (existing := self.defs.get_schema_from_ref(new_ref)) is not None: + return existing + schema['ref'] = new_ref # pyright: ignore[reportGeneralTypeIssues] + elif schema['type'] == 'definition-ref': + ref = schema['schema_ref'] + if (referenced_schema := self.defs.get_schema_from_ref(ref)) is not None: + schema = referenced_schema.copy() + new_ref = ref + f'_{repr(metadata)}' + if (existing := self.defs.get_schema_from_ref(new_ref)) is not None: + return existing + schema['ref'] = new_ref # pyright: ignore[reportGeneralTypeIssues] + + maybe_updated_schema = _known_annotated_metadata.apply_known_metadata(metadata, schema) + + if maybe_updated_schema is not None: + return maybe_updated_schema + return original_schema + + def _apply_single_annotation_json_schema( + self, schema: core_schema.CoreSchema, metadata: Any + ) -> core_schema.CoreSchema: + FieldInfo = import_cached_field_info() + + if isinstance(metadata, FieldInfo): + for field_metadata in metadata.metadata: + schema = self._apply_single_annotation_json_schema(schema, field_metadata) + + pydantic_js_updates, pydantic_js_extra = _extract_json_schema_info_from_field_info(metadata) + core_metadata = schema.setdefault('metadata', {}) + update_core_metadata( + core_metadata, pydantic_js_updates=pydantic_js_updates, pydantic_js_extra=pydantic_js_extra + ) + return schema + + def _get_unsupported_field_info_attributes(self, field_info: FieldInfo) -> list[tuple[str, Any]]: + """Get the list of unsupported `FieldInfo` attributes when not directly used in `Annotated` for field annotations.""" + unused_metadata: list[tuple[str, Any]] = [] + for unused_metadata_name, unset_value in UNSUPPORTED_STANDALONE_FIELDINFO_ATTRIBUTES: + if ( + (unused_metadata_value := getattr(field_info, unused_metadata_name)) is not unset_value + # `default` and `default_factory` can still be used with a type adapter, so only include them + # if used with a model-like class: + and ( + unused_metadata_name not in ('default', 'default_factory') + or self.model_type_stack.get() is not None + ) + # Setting `alias` will set `validation/serialization_alias` as well, so we want to avoid duplicate warnings: + and ( + unused_metadata_name not in ('validation_alias', 'serialization_alias') + or 'alias' not in field_info._attributes_set + ) + ): + unused_metadata.append((unused_metadata_name, unused_metadata_value)) + + return unused_metadata + + def _get_wrapped_inner_schema( + self, + get_inner_schema: GetCoreSchemaHandler, + annotation: Any, + pydantic_js_annotation_functions: list[GetJsonSchemaFunction], + check_unsupported_field_info_attributes: bool = False, + ) -> CallbackGetCoreSchemaHandler: + annotation_get_schema: GetCoreSchemaFunction | None = getattr(annotation, '__get_pydantic_core_schema__', None) + + def new_handler(source: Any) -> core_schema.CoreSchema: + if annotation_get_schema is not None: + 
schema = annotation_get_schema(source, get_inner_schema) + else: + schema = get_inner_schema(source) + schema = self._apply_single_annotation( + schema, + annotation, + check_unsupported_field_info_attributes=check_unsupported_field_info_attributes, + ) + schema = self._apply_single_annotation_json_schema(schema, annotation) + + metadata_js_function = _extract_get_pydantic_json_schema(annotation) + if metadata_js_function is not None: + pydantic_js_annotation_functions.append(metadata_js_function) + return schema + + return CallbackGetCoreSchemaHandler(new_handler, self) + + def _apply_field_serializers( + self, + schema: core_schema.CoreSchema, + serializers: list[Decorator[FieldSerializerDecoratorInfo]], + ) -> core_schema.CoreSchema: + """Apply field serializers to a schema.""" + if serializers: + schema = copy(schema) + if schema['type'] == 'definitions': + inner_schema = schema['schema'] + schema['schema'] = self._apply_field_serializers(inner_schema, serializers) + return schema + elif 'ref' in schema: + schema = self.defs.create_definition_reference_schema(schema) + + # use the last serializer to make it easy to override a serializer set on a parent model + serializer = serializers[-1] + is_field_serializer, info_arg = inspect_field_serializer(serializer.func, serializer.info.mode) + + if serializer.info.return_type is not PydanticUndefined: + return_type = serializer.info.return_type + else: + try: + # Do not pass in globals as the function could be defined in a different module. + # Instead, let `get_callable_return_type` infer the globals to use, but still pass + # in locals that may contain a parent/rebuild namespace: + return_type = _decorators.get_callable_return_type( + serializer.func, localns=self._types_namespace.locals + ) + except NameError as e: + raise PydanticUndefinedAnnotation.from_name_error(e) from e + + if return_type is PydanticUndefined: + return_schema = None + else: + return_schema = self.generate_schema(return_type) + + if serializer.info.mode == 'wrap': + schema['serialization'] = core_schema.wrap_serializer_function_ser_schema( + serializer.func, + is_field_serializer=is_field_serializer, + info_arg=info_arg, + return_schema=return_schema, + when_used=serializer.info.when_used, + ) + else: + assert serializer.info.mode == 'plain' + schema['serialization'] = core_schema.plain_serializer_function_ser_schema( + serializer.func, + is_field_serializer=is_field_serializer, + info_arg=info_arg, + return_schema=return_schema, + when_used=serializer.info.when_used, + ) + return schema + + def _apply_model_serializers( + self, schema: core_schema.CoreSchema, serializers: Iterable[Decorator[ModelSerializerDecoratorInfo]] + ) -> core_schema.CoreSchema: + """Apply model serializers to a schema.""" + ref: str | None = schema.pop('ref', None) # type: ignore + if serializers: + serializer = list(serializers)[-1] + info_arg = inspect_model_serializer(serializer.func, serializer.info.mode) + + if serializer.info.return_type is not PydanticUndefined: + return_type = serializer.info.return_type + else: + try: + # Do not pass in globals as the function could be defined in a different module. 
+                    # Instead, let `get_callable_return_type` infer the globals to use, but still pass
+                    # in locals that may contain a parent/rebuild namespace:
+                    return_type = _decorators.get_callable_return_type(
+                        serializer.func, localns=self._types_namespace.locals
+                    )
+                except NameError as e:
+                    raise PydanticUndefinedAnnotation.from_name_error(e) from e
+
+            if return_type is PydanticUndefined:
+                return_schema = None
+            else:
+                return_schema = self.generate_schema(return_type)
+
+            if serializer.info.mode == 'wrap':
+                ser_schema: core_schema.SerSchema = core_schema.wrap_serializer_function_ser_schema(
+                    serializer.func,
+                    info_arg=info_arg,
+                    return_schema=return_schema,
+                    when_used=serializer.info.when_used,
+                )
+            else:
+                # plain
+                ser_schema = core_schema.plain_serializer_function_ser_schema(
+                    serializer.func,
+                    info_arg=info_arg,
+                    return_schema=return_schema,
+                    when_used=serializer.info.when_used,
+                )
+            schema['serialization'] = ser_schema
+        if ref:
+            schema['ref'] = ref  # type: ignore
+        return schema
+
+
+_VALIDATOR_F_MATCH: Mapping[
+    tuple[FieldValidatorModes, Literal['no-info', 'with-info']],
+    Callable[[Callable[..., Any], core_schema.CoreSchema], core_schema.CoreSchema],
+] = {
+    ('before', 'no-info'): lambda f, schema: core_schema.no_info_before_validator_function(f, schema),
+    ('after', 'no-info'): lambda f, schema: core_schema.no_info_after_validator_function(f, schema),
+    ('plain', 'no-info'): lambda f, _: core_schema.no_info_plain_validator_function(f),
+    ('wrap', 'no-info'): lambda f, schema: core_schema.no_info_wrap_validator_function(f, schema),
+    ('before', 'with-info'): lambda f, schema: core_schema.with_info_before_validator_function(f, schema),
+    ('after', 'with-info'): lambda f, schema: core_schema.with_info_after_validator_function(f, schema),
+    ('plain', 'with-info'): lambda f, _: core_schema.with_info_plain_validator_function(f),
+    ('wrap', 'with-info'): lambda f, schema: core_schema.with_info_wrap_validator_function(f, schema),
+}
+
+
+# TODO V3: this function is only used for deprecated decorators. It should
+# be removed once we drop support for those.
+def apply_validators(
+    schema: core_schema.CoreSchema,
+    validators: Iterable[Decorator[RootValidatorDecoratorInfo]]
+    | Iterable[Decorator[ValidatorDecoratorInfo]]
+    | Iterable[Decorator[FieldValidatorDecoratorInfo]],
+) -> core_schema.CoreSchema:
+    """Apply validators to a schema.
+
+    Args:
+        schema: The schema to apply validators on.
+        validators: An iterable of validators.
+
+    Returns:
+        The updated schema.
+    """
+    for validator in validators:
+        # Actually, type could be 'field' or 'model', but this is only used for deprecated
+        # decorators, so let's not worry about it.
+        info_arg = inspect_validator(validator.func, mode=validator.info.mode, type='field')
+        val_type = 'with-info' if info_arg else 'no-info'
+
+        schema = _VALIDATOR_F_MATCH[(validator.info.mode, val_type)](validator.func, schema)
+    return schema
+
+
+def _validators_require_validate_default(validators: Iterable[Decorator[ValidatorDecoratorInfo]]) -> bool:
+    """In v1, if any of the validators for a field had `always=True`, the default value would be validated.
+
+    This serves as an auxiliary function for re-implementing that logic, by looping over a provided
+    collection of (v1-style) ValidatorDecoratorInfo's and checking if any of them have `always=True`.
+ + We should be able to drop this function and the associated logic calling it once we drop support + for v1-style validator decorators. (Or we can extend it and keep it if we add something equivalent + to the v1-validator `always` kwarg to `field_validator`.) + """ + for validator in validators: + if validator.info.always: + return True + return False + + +def _convert_to_aliases( + alias: str | AliasChoices | AliasPath | None, +) -> str | list[str | int] | list[list[str | int]] | None: + if isinstance(alias, (AliasChoices, AliasPath)): + return alias.convert_to_aliases() + else: + return alias + + +def apply_model_validators( + schema: core_schema.CoreSchema, + validators: Iterable[Decorator[ModelValidatorDecoratorInfo]], + mode: Literal['inner', 'outer', 'all'], +) -> core_schema.CoreSchema: + """Apply model validators to a schema. + + If mode == 'inner', only "before" validators are applied + If mode == 'outer', validators other than "before" are applied + If mode == 'all', all validators are applied + + Args: + schema: The schema to apply validators on. + validators: An iterable of validators. + mode: The validator mode. + + Returns: + The updated schema. + """ + ref: str | None = schema.pop('ref', None) # type: ignore + for validator in validators: + if mode == 'inner' and validator.info.mode != 'before': + continue + if mode == 'outer' and validator.info.mode == 'before': + continue + info_arg = inspect_validator(validator.func, mode=validator.info.mode, type='model') + if validator.info.mode == 'wrap': + if info_arg: + schema = core_schema.with_info_wrap_validator_function(function=validator.func, schema=schema) + else: + schema = core_schema.no_info_wrap_validator_function(function=validator.func, schema=schema) + elif validator.info.mode == 'before': + if info_arg: + schema = core_schema.with_info_before_validator_function(function=validator.func, schema=schema) + else: + schema = core_schema.no_info_before_validator_function(function=validator.func, schema=schema) + else: + assert validator.info.mode == 'after' + if info_arg: + schema = core_schema.with_info_after_validator_function(function=validator.func, schema=schema) + else: + schema = core_schema.no_info_after_validator_function(function=validator.func, schema=schema) + if ref: + schema['ref'] = ref # type: ignore + return schema + + +def wrap_default(field_info: FieldInfo, schema: core_schema.CoreSchema) -> core_schema.CoreSchema: + """Wrap schema with default schema if default value or `default_factory` are available. + + Args: + field_info: The field info object. + schema: The schema to apply default on. + + Returns: + Updated schema by default value or `default_factory`. 
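+
+    For example (illustrative), a model field declared as `x: int = 0` has its `int` schema
+    wrapped in a `'default'` core schema carrying `default=0`.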
+    """
+    if field_info.default_factory:
+        return core_schema.with_default_schema(
+            schema,
+            default_factory=field_info.default_factory,
+            default_factory_takes_data=takes_validated_data_argument(field_info.default_factory),
+            validate_default=field_info.validate_default,
+        )
+    elif field_info.default is not PydanticUndefined:
+        return core_schema.with_default_schema(
+            schema, default=field_info.default, validate_default=field_info.validate_default
+        )
+    else:
+        return schema
+
+
+def _extract_get_pydantic_json_schema(tp: Any) -> GetJsonSchemaFunction | None:
+    """Extract `__get_pydantic_json_schema__` from a type, handling the deprecated `__modify_schema__`."""
+    js_modify_function = getattr(tp, '__get_pydantic_json_schema__', None)
+
+    if hasattr(tp, '__modify_schema__'):
+        BaseModel = import_cached_base_model()
+
+        has_custom_v2_modify_js_func = (
+            js_modify_function is not None
+            and BaseModel.__get_pydantic_json_schema__.__func__  # type: ignore
+            not in (js_modify_function, getattr(js_modify_function, '__func__', None))
+        )
+
+        if not has_custom_v2_modify_js_func:
+            cls_name = getattr(tp, '__name__', None)
+            raise PydanticUserError(
+                f'The `__modify_schema__` method is not supported in Pydantic v2. '
+                f'Use `__get_pydantic_json_schema__` instead{f" in class `{cls_name}`" if cls_name else ""}.',
+                code='custom-json-schema',
+            )
+
+    if (origin := get_origin(tp)) is not None:
+        # Generic aliases proxy attribute access to the origin, *except* dunder attributes,
+        # such as `__get_pydantic_json_schema__`, hence the explicit check.
+        return _extract_get_pydantic_json_schema(origin)
+
+    if js_modify_function is None:
+        return None
+
+    return js_modify_function
+
+
+def resolve_original_schema(schema: CoreSchema, definitions: _Definitions) -> CoreSchema | None:
+    if schema['type'] == 'definition-ref':
+        return definitions.get_schema_from_ref(schema['schema_ref'])
+    elif schema['type'] == 'definitions':
+        return schema['schema']
+    else:
+        return schema
+
+
+def _inlining_behavior(
+    def_ref: core_schema.DefinitionReferenceSchema,
+) -> Literal['inline', 'keep', 'preserve_metadata']:
+    """Determine the inlining behavior of the `'definition-ref'` schema.
+
+    - If no `'serialization'` schema and no metadata is attached, the schema can safely be inlined.
+    - If it has metadata but only related to the deferred discriminator application, it can be inlined
+      provided that such metadata is kept.
+    - Otherwise, the schema should not be inlined. Doing so would remove the `'serialization'` schema or metadata.
+    """
+    if 'serialization' in def_ref:
+        return 'keep'
+    metadata = def_ref.get('metadata')
+    if not metadata:
+        return 'inline'
+    if len(metadata) == 1 and 'pydantic_internal_union_discriminator' in metadata:
+        return 'preserve_metadata'
+    return 'keep'
+
+
+class _Definitions:
+    """Keeps track of references and definitions."""
+
+    _recursively_seen: set[str]
+    """A set of recursively seen references.
+
+    When a referenceable type is encountered, the `get_schema_or_ref` context manager is
+    entered to compute the reference. If the type references itself in some way (e.g. for
+    a dataclass or a Pydantic model, the class can be referenced as a field annotation),
+    entering the context manager again will yield a `'definition-ref'` schema that should
+    short-circuit the normal generation process, as the reference was already in this set.
+    """
+
+    _definitions: dict[str, core_schema.CoreSchema]
+    """A mapping of references to their corresponding schema.
+
+    When a schema for a referenceable type is generated, it is stored in this mapping. If the
+    same type is encountered again, the reference is yielded by the `get_schema_or_ref` context
+    manager.
+    """
+
+    def __init__(self) -> None:
+        self._recursively_seen = set()
+        self._definitions = {}
+
+    @contextmanager
+    def get_schema_or_ref(self, tp: Any, /) -> Generator[tuple[str, core_schema.DefinitionReferenceSchema | None]]:
+        """Get a definition for `tp` if one exists.
+
+        If a definition exists, a tuple of `(ref_string, CoreSchema)` is returned.
+        If no definition exists yet, a tuple of `(ref_string, None)` is returned.
+
+        Note that the returned `CoreSchema` will always be a `DefinitionReferenceSchema`,
+        not the actual definition itself.
+
+        This should be called for any type that can be identified by reference.
+        This includes any recursive types.
+
+        At present the following types can be named/recursive:
+
+        - Pydantic model
+        - Pydantic and stdlib dataclasses
+        - Typed dictionaries
+        - Named tuples
+        - `TypeAliasType` instances
+        - Enums
+        """
+        ref = get_type_ref(tp)
+        # return the reference if we're either (1) in a cycle or (2) the reference was already encountered:
+        if ref in self._recursively_seen or ref in self._definitions:
+            yield (ref, core_schema.definition_reference_schema(ref))
+        else:
+            self._recursively_seen.add(ref)
+            try:
+                yield (ref, None)
+            finally:
+                self._recursively_seen.discard(ref)
+
+    def get_schema_from_ref(self, ref: str) -> CoreSchema | None:
+        """Resolve the schema from the given reference."""
+        return self._definitions.get(ref)
+
+    def create_definition_reference_schema(self, schema: CoreSchema) -> core_schema.DefinitionReferenceSchema:
+        """Store the schema as a definition and return a `'definition-ref'` schema pointing to it.
+
+        The schema must have a reference attached to it.
+        """
+        ref = schema['ref']  # pyright: ignore
+        self._definitions[ref] = schema
+        return core_schema.definition_reference_schema(ref)
+
+    def unpack_definitions(self, schema: core_schema.DefinitionsSchema) -> CoreSchema:
+        """Store the definitions of the `'definitions'` core schema and return the inner core schema."""
+        for def_schema in schema['definitions']:
+            self._definitions[def_schema['ref']] = def_schema  # pyright: ignore
+        return schema['schema']
+
+    def finalize_schema(self, schema: CoreSchema) -> CoreSchema:
+        """Finalize the core schema.
+
+        This traverses the core schema and referenced definitions, replaces `'definition-ref'` schemas
+        by the referenced definition if possible, and applies deferred discriminators.
+        """
+        definitions = self._definitions
+        try:
+            gather_result = gather_schemas_for_cleaning(
+                schema,
+                definitions=definitions,
+            )
+        except MissingDefinitionError as e:
+            raise InvalidSchemaError from e
+
+        remaining_defs: dict[str, CoreSchema] = {}
+
+        # Note: this logic doesn't play well when core schemas with deferred discriminator metadata
+        # and references are encountered. See the `test_deferred_discriminated_union_and_references()` test.
+        for ref, inlinable_def_ref in gather_result['collected_references'].items():
+            if inlinable_def_ref is not None and (inlining_behavior := _inlining_behavior(inlinable_def_ref)) != 'keep':
+                if inlining_behavior == 'inline':
+                    # `ref` was encountered, and only once:
+                    # - `inlinable_def_ref` is a `'definition-ref'` schema and is guaranteed to be
+                    #   the only one. Transform it into the definition it points to.
+                    # - Do not store the definition in the `remaining_defs`.
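+                    # (The in-place `clear()`/`update()` below mutates the existing dict object, so
+                    # any parent schema already holding a reference to it sees the inlined definition.)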
+ inlinable_def_ref.clear() # pyright: ignore[reportAttributeAccessIssue] + inlinable_def_ref.update(self._resolve_definition(ref, definitions)) # pyright: ignore + elif inlining_behavior == 'preserve_metadata': + # `ref` was encountered, and only once, but contains discriminator metadata. + # We will do the same thing as if `inlining_behavior` was `'inline'`, but make + # sure to keep the metadata for the deferred discriminator application logic below. + meta = inlinable_def_ref.pop('metadata') + inlinable_def_ref.clear() # pyright: ignore[reportAttributeAccessIssue] + inlinable_def_ref.update(self._resolve_definition(ref, definitions)) # pyright: ignore + inlinable_def_ref['metadata'] = meta + else: + # `ref` was encountered, at least two times (or only once, but with metadata or a serialization schema): + # - Do not inline the `'definition-ref'` schemas (they are not provided in the gather result anyway). + # - Store the definition in the `remaining_defs` + remaining_defs[ref] = self._resolve_definition(ref, definitions) + + for cs in gather_result['deferred_discriminator_schemas']: + discriminator: str | None = cs['metadata'].pop('pydantic_internal_union_discriminator', None) # pyright: ignore[reportTypedDictNotRequiredAccess] + if discriminator is None: + # This can happen in rare scenarios, when a deferred schema is present multiple times in the + # gather result (e.g. when using the `Sequence` type -- see `test_sequence_discriminated_union()`). + # In this case, a previous loop iteration applied the discriminator and so we can just skip it here. + continue + applied = _discriminated_union.apply_discriminator(cs.copy(), discriminator, remaining_defs) + # Mutate the schema directly to have the discriminator applied + cs.clear() # pyright: ignore[reportAttributeAccessIssue] + cs.update(applied) # pyright: ignore + + if remaining_defs: + schema = core_schema.definitions_schema(schema=schema, definitions=[*remaining_defs.values()]) + return schema + + def _resolve_definition(self, ref: str, definitions: dict[str, CoreSchema]) -> CoreSchema: + definition = definitions[ref] + if definition['type'] != 'definition-ref': + return definition + + # Some `'definition-ref'` schemas might act as "intermediate" references (e.g. 
when using
+        # a PEP 695 type alias (which is referenceable) that references another PEP 695 type alias):
+        visited: set[str] = set()
+        while definition['type'] == 'definition-ref' and _inlining_behavior(definition) == 'inline':
+            schema_ref = definition['schema_ref']
+            if schema_ref in visited:
+                raise PydanticUserError(
+                    f'{ref} contains a circular reference to itself.', code='circular-reference-schema'
+                )
+            visited.add(schema_ref)
+            definition = definitions[schema_ref]
+        return {**definition, 'ref': ref}  # pyright: ignore[reportReturnType]
+
+
+class _FieldNameStack:
+    __slots__ = ('_stack',)
+
+    def __init__(self) -> None:
+        self._stack: list[str] = []
+
+    @contextmanager
+    def push(self, field_name: str) -> Iterator[None]:
+        self._stack.append(field_name)
+        yield
+        self._stack.pop()
+
+    def get(self) -> str | None:
+        if self._stack:
+            return self._stack[-1]
+        else:
+            return None
+
+
+class _ModelTypeStack:
+    __slots__ = ('_stack',)
+
+    def __init__(self) -> None:
+        self._stack: list[type] = []
+
+    @contextmanager
+    def push(self, type_obj: type) -> Iterator[None]:
+        self._stack.append(type_obj)
+        yield
+        self._stack.pop()
+
+    def get(self) -> type | None:
+        if self._stack:
+            return self._stack[-1]
+        else:
+            return None
diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3e927cb0697f162039ebf6371e1b4903d8417a5
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_generics.py
@@ -0,0 +1,530 @@
+from __future__ import annotations
+
+import operator
+import sys
+import types
+import typing
+from collections import ChainMap
+from collections.abc import Iterator, Mapping
+from contextlib import contextmanager
+from contextvars import ContextVar
+from functools import reduce
+from itertools import zip_longest
+from types import prepare_class
+from typing import TYPE_CHECKING, Annotated, Any, TypedDict, TypeVar, cast
+from weakref import WeakValueDictionary
+
+import typing_extensions
+from typing_inspection import typing_objects
+from typing_inspection.introspection import is_union_origin
+
+from . import _typing_extra
+from ._core_utils import get_type_ref
+from ._forward_ref import PydanticRecursiveRef
+from ._utils import all_identical, is_model_class
+
+if TYPE_CHECKING:
+    from ..main import BaseModel
+
+GenericTypesCacheKey = tuple[Any, Any, tuple[Any, ...]]
+
+# Note: We want to remove LimitedDict, but to do this, we'd need to improve the handling of generics caching.
+# Right now, to handle recursive generics, some types must remain cached for brief periods without references.
+# By chaining the WeakValueDictionary with a LimitedDict, we have a way to retain caching for all types with references,
+# while also retaining a limited number of types even without references. This is generally enough to build
+# specific recursive generic models without losing required items out of the cache.
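+#
+# As an illustrative sketch of the layering described above (hedged; it mirrors the
+# commented-out `DeepChainMap` wiring further down and is not executed here):
+#
+#     _cache = DeepChainMap(GenericTypesCache(), LimitedDict())
+#     _cache[key] = concrete_model  # written to both layers
+#     # If `concrete_model` loses all strong references, the weak layer may drop it,
+#     # but the `LimitedDict` layer still retains it for a while.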
+ +KT = TypeVar('KT') +VT = TypeVar('VT') +_LIMITED_DICT_SIZE = 100 + + +class LimitedDict(dict[KT, VT]): + def __init__(self, size_limit: int = _LIMITED_DICT_SIZE) -> None: + self.size_limit = size_limit + super().__init__() + + def __setitem__(self, key: KT, value: VT, /) -> None: + super().__setitem__(key, value) + if len(self) > self.size_limit: + excess = len(self) - self.size_limit + self.size_limit // 10 + to_remove = list(self.keys())[:excess] + for k in to_remove: + del self[k] + + +# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected +# once they are no longer referenced by the caller. +GenericTypesCache = WeakValueDictionary[GenericTypesCacheKey, 'type[BaseModel]'] + +if TYPE_CHECKING: + + class DeepChainMap(ChainMap[KT, VT]): # type: ignore + ... + +else: + + class DeepChainMap(ChainMap): + """Variant of ChainMap that allows direct updates to inner scopes. + + Taken from https://docs.python.org/3/library/collections.html#collections.ChainMap, + with some light modifications for this use case. + """ + + def clear(self) -> None: + for mapping in self.maps: + mapping.clear() + + def __setitem__(self, key: KT, value: VT) -> None: + for mapping in self.maps: + mapping[key] = value + + def __delitem__(self, key: KT) -> None: + hit = False + for mapping in self.maps: + if key in mapping: + del mapping[key] + hit = True + if not hit: + raise KeyError(key) + + +# Despite the fact that LimitedDict _seems_ no longer necessary, I'm very nervous to actually remove it +# and discover later on that we need to re-add all this infrastructure... +# _GENERIC_TYPES_CACHE = DeepChainMap(GenericTypesCache(), LimitedDict()) + +_GENERIC_TYPES_CACHE = GenericTypesCache() + + +class PydanticGenericMetadata(TypedDict): + origin: type[BaseModel] | None # analogous to typing._GenericAlias.__origin__ + args: tuple[Any, ...] # analogous to typing._GenericAlias.__args__ + parameters: tuple[TypeVar, ...] # analogous to typing.Generic.__parameters__ + + +def create_generic_submodel( + model_name: str, origin: type[BaseModel], args: tuple[Any, ...], params: tuple[Any, ...] +) -> type[BaseModel]: + """Dynamically create a submodel of a provided (generic) BaseModel. + + This is used when producing concrete parametrizations of generic models. This function + only *creates* the new subclass; the schema/validators/serialization must be updated to + reflect a concrete parametrization elsewhere. + + Args: + model_name: The name of the newly created model. + origin: The base class for the new model to inherit from. + args: A tuple of generic metadata arguments. + params: A tuple of generic metadata parameters. + + Returns: + The created submodel. 
+ """ + namespace: dict[str, Any] = {'__module__': origin.__module__} + bases = (origin,) + meta, ns, kwds = prepare_class(model_name, bases) + namespace.update(ns) + created_model = meta( + model_name, + bases, + namespace, + __pydantic_generic_metadata__={ + 'origin': origin, + 'args': args, + 'parameters': params, + }, + __pydantic_reset_parent_namespace__=False, + **kwds, + ) + + model_module, called_globally = _get_caller_frame_info(depth=3) + if called_globally: # create global reference and therefore allow pickling + object_by_reference = None + reference_name = model_name + reference_module_globals = sys.modules[created_model.__module__].__dict__ + while object_by_reference is not created_model: + object_by_reference = reference_module_globals.setdefault(reference_name, created_model) + reference_name += '_' + + return created_model + + +def _get_caller_frame_info(depth: int = 2) -> tuple[str | None, bool]: + """Used inside a function to check whether it was called globally. + + Args: + depth: The depth to get the frame. + + Returns: + A tuple contains `module_name` and `called_globally`. + + Raises: + RuntimeError: If the function is not called inside a function. + """ + try: + previous_caller_frame = sys._getframe(depth) + except ValueError as e: + raise RuntimeError('This function must be used inside another function') from e + except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it + return None, False + frame_globals = previous_caller_frame.f_globals + return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals + + +DictValues: type[Any] = {}.values().__class__ + + +def iter_contained_typevars(v: Any) -> Iterator[TypeVar]: + """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found. + + This is inspired as an alternative to directly accessing the `__parameters__` attribute of a GenericAlias, + since __parameters__ of (nested) generic BaseModel subclasses won't show up in that list. + """ + if isinstance(v, TypeVar): + yield v + elif is_model_class(v): + yield from v.__pydantic_generic_metadata__['parameters'] + elif isinstance(v, (DictValues, list)): + for var in v: + yield from iter_contained_typevars(var) + else: + args = get_args(v) + for arg in args: + yield from iter_contained_typevars(arg) + + +def get_args(v: Any) -> Any: + pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None) + if pydantic_generic_metadata: + return pydantic_generic_metadata.get('args') + return typing_extensions.get_args(v) + + +def get_origin(v: Any) -> Any: + pydantic_generic_metadata: PydanticGenericMetadata | None = getattr(v, '__pydantic_generic_metadata__', None) + if pydantic_generic_metadata: + return pydantic_generic_metadata.get('origin') + return typing_extensions.get_origin(v) + + +def get_standard_typevars_map(cls: Any) -> dict[TypeVar, Any] | None: + """Package a generic type's typevars and parametrization (if present) into a dictionary compatible with the + `replace_types` function. Specifically, this works with standard typing generics and typing._GenericAlias. + """ + origin = get_origin(cls) + if origin is None: + return None + if not hasattr(origin, '__parameters__'): + return None + + # In this case, we know that cls is a _GenericAlias, and origin is the generic type + # So it is safe to access cls.__args__ and origin.__parameters__ + args: tuple[Any, ...] 
+    parameters: tuple[TypeVar, ...] = origin.__parameters__
+    return dict(zip(parameters, args))
+
+
+def get_model_typevars_map(cls: type[BaseModel]) -> dict[TypeVar, Any]:
+    """Package a generic BaseModel's typevars and concrete parametrization (if present) into a dictionary compatible
+    with the `replace_types` function.
+
+    Since BaseModel.__class_getitem__ does not produce a typing._GenericAlias, and the BaseModel generic info is
+    stored in the __pydantic_generic_metadata__ attribute, we need special handling here.
+    """
+    # TODO: This could be unified with `get_standard_typevars_map` if we stored the generic metadata
+    # in the __origin__, __args__, and __parameters__ attributes of the model.
+    generic_metadata = cls.__pydantic_generic_metadata__
+    origin = generic_metadata['origin']
+    args = generic_metadata['args']
+    if not args:
+        # No need to go into `iter_contained_typevars`:
+        return {}
+    return dict(zip(iter_contained_typevars(origin), args))
+
+
+def replace_types(type_: Any, type_map: Mapping[TypeVar, Any] | None) -> Any:
+    """Return type with all occurrences of `type_map` keys recursively replaced with their values.
+
+    Args:
+        type_: The class or generic alias.
+        type_map: Mapping from `TypeVar` instance to concrete types.
+
+    Returns:
+        A new type representing the basic structure of `type_` with all
+        `type_map` keys recursively replaced.
+
+    Example:
+        ```python
+        from typing import Union
+
+        from pydantic._internal._generics import replace_types
+
+        replace_types(tuple[str, Union[list[str], float]], {str: int})
+        #> tuple[int, Union[list[int], float]]
+        ```
+    """
+    if not type_map:
+        return type_
+
+    type_args = get_args(type_)
+    origin_type = get_origin(type_)
+
+    if typing_objects.is_annotated(origin_type):
+        annotated_type, *annotations = type_args
+        annotated_type = replace_types(annotated_type, type_map)
+        # TODO remove parentheses when we drop support for Python 3.10:
+        return Annotated[(annotated_type, *annotations)]
+
+    # Having type args is a good indicator that this is a typing special form
+    # instance or a generic alias of some sort.
+    if type_args:
+        resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args)
+        if all_identical(type_args, resolved_type_args):
+            # If all arguments are the same, there is no need to modify the
+            # type or create a new object at all
+            return type_
+
+        if (
+            origin_type is not None
+            and isinstance(type_, _typing_extra.typing_base)
+            and not isinstance(origin_type, _typing_extra.typing_base)
+            and getattr(type_, '_name', None) is not None
+        ):
+            # In python < 3.9 generic aliases don't exist so any of these like `list`,
+            # `type` or `collections.abc.Callable` need to be translated.
+            # See: https://www.python.org/dev/peps/pep-0585
+            origin_type = getattr(typing, type_._name)
+            assert origin_type is not None
+
+        if is_union_origin(origin_type):
+            if any(typing_objects.is_any(arg) for arg in resolved_type_args):
+                # `Any | T` ~ `Any`:
+                resolved_type_args = (Any,)
+            # `Never | T` ~ `T`:
+            resolved_type_args = tuple(
+                arg
+                for arg in resolved_type_args
+                if not (typing_objects.is_noreturn(arg) or typing_objects.is_never(arg))
+            )
+
+        # PEP-604 syntax (e.g. `list | str`) is represented with a types.UnionType object that does not
+        # implement `__getitem__()`. In Python 3.14+, `typing.Union` and `types.UnionType` are the same,
+        # and we instead rely on `typing.Union` as it implicitly converts string annotations to `ForwardRef`
+        # instances (this is to avoid type errors as per https://github.com/python/cpython/pull/105366).
+        # TODO remove type ignore comment when we drop support for Python 3.9 (https://github.com/microsoft/pyright/issues/11241):
+        if (3, 10) <= sys.version_info < (3, 14) and origin_type is types.UnionType:  # pyright: ignore[reportAttributeAccessIssue]
+            return reduce(operator.or_, resolved_type_args)
+        # NotRequired[T] and Required[T] don't support tuple type resolved_type_args, hence the condition below
+        return origin_type[resolved_type_args[0] if len(resolved_type_args) == 1 else resolved_type_args]
+
+    # We handle pydantic generic models separately as they don't have the same
+    # semantics as "typing" classes or generic aliases
+
+    if not origin_type and is_model_class(type_):
+        parameters = type_.__pydantic_generic_metadata__['parameters']
+        if not parameters:
+            return type_
+        resolved_type_args = tuple(replace_types(t, type_map) for t in parameters)
+        if all_identical(parameters, resolved_type_args):
+            return type_
+        return type_[resolved_type_args]
+
+    # Handle special case for typehints that can have lists as arguments.
+    # `typing.Callable[[int, str], int]` is an example for this.
+    if isinstance(type_, list):
+        resolved_list = [replace_types(element, type_map) for element in type_]
+        if all_identical(type_, resolved_list):
+            return type_
+        return resolved_list
+
+    # If all else fails, we try to resolve the type directly and otherwise just
+    # return the input with no modifications.
+    return type_map.get(type_, type_)
+
+
+def map_generic_model_arguments(cls: type[BaseModel], args: tuple[Any, ...]) -> dict[TypeVar, Any]:
+    """Return a mapping between the parameters of a generic model and the provided arguments during parameterization.
+
+    Raises:
+        TypeError: If the number of arguments does not match the parameters (i.e. if providing too few or too many arguments).
+
+    Example:
+        ```python {test="skip" lint="skip"}
+        class Model[T, U, V = int](BaseModel): ...
+
+        map_generic_model_arguments(Model, (str, bytes))
+        #> {T: str, U: bytes, V: int}
+
+        map_generic_model_arguments(Model, (str,))
+        #> TypeError: Too few arguments for <class '__main__.Model'>; actual 1, expected at least 2
+
+        map_generic_model_arguments(Model, (str, bytes, int, complex))
+        #> TypeError: Too many arguments for <class '__main__.Model'>; actual 4, expected 3
+        ```
+
+    Note:
+        This function is analogous to the private `typing._check_generic_specialization` function.
+    """
+    parameters = cls.__pydantic_generic_metadata__['parameters']
+    expected_len = len(parameters)
+    typevars_map: dict[TypeVar, Any] = {}
+
+    _missing = object()
+    for parameter, argument in zip_longest(parameters, args, fillvalue=_missing):
+        if parameter is _missing:
+            raise TypeError(f'Too many arguments for {cls}; actual {len(args)}, expected {expected_len}')
+
+        if argument is _missing:
+            param = cast(TypeVar, parameter)
+            try:
+                has_default = param.has_default()  # pyright: ignore[reportAttributeAccessIssue]
+            except AttributeError:
+                # Happens if using `typing.TypeVar` (and not `typing_extensions`) on Python < 3.13.
+                has_default = False
+            if has_default:
+                # The default might refer to other type parameters. For an example, see:
+                # https://typing.python.org/en/latest/spec/generics.html#type-parameters-as-parameters-to-generics
+                typevars_map[param] = replace_types(param.__default__, typevars_map)  # pyright: ignore[reportAttributeAccessIssue]
+            else:
+                expected_len -= sum(hasattr(p, 'has_default') and p.has_default() for p in parameters)  # pyright: ignore[reportAttributeAccessIssue]
+                raise TypeError(f'Too few arguments for {cls}; actual {len(args)}, expected at least {expected_len}')
+        else:
+            param = cast(TypeVar, parameter)
+            typevars_map[param] = argument
+
+    return typevars_map
+
+
+_generic_recursion_cache: ContextVar[set[str] | None] = ContextVar('_generic_recursion_cache', default=None)
+
+
+@contextmanager
+def generic_recursion_self_type(
+    origin: type[BaseModel], args: tuple[Any, ...]
+) -> Iterator[PydanticRecursiveRef | None]:
+    """This contextmanager should be placed around the recursive calls used to build a generic type,
+    and accept as arguments the generic origin type and the type arguments being passed to it.
+
+    If the same origin and arguments are observed twice, it implies that a self-reference placeholder
+    can be used while building the core schema, and will produce a schema_ref that will be valid in the
+    final parent schema.
+    """
+    previously_seen_type_refs = _generic_recursion_cache.get()
+    if previously_seen_type_refs is None:
+        previously_seen_type_refs = set()
+        token = _generic_recursion_cache.set(previously_seen_type_refs)
+    else:
+        token = None
+
+    try:
+        type_ref = get_type_ref(origin, args_override=args)
+        if type_ref in previously_seen_type_refs:
+            self_type = PydanticRecursiveRef(type_ref=type_ref)
+            yield self_type
+        else:
+            previously_seen_type_refs.add(type_ref)
+            yield
+            previously_seen_type_refs.remove(type_ref)
+    finally:
+        if token:
+            _generic_recursion_cache.reset(token)
+
+
+def recursively_defined_type_refs() -> set[str]:
+    visited = _generic_recursion_cache.get()
+    if not visited:
+        return set()  # not in a generic recursion, so there are no types
+
+    return visited.copy()  # don't allow modifications
+
+
+def get_cached_generic_type_early(parent: type[BaseModel], typevar_values: Any) -> type[BaseModel] | None:
+    """The use of a two-stage cache lookup approach was necessary to have the highest performance possible for
+    repeated calls to `__class_getitem__` on generic types (which may happen in tighter loops during runtime),
+    while still ensuring that certain alternative parametrizations ultimately resolve to the same type.
+
+    As a concrete example, this approach was necessary to make Model[List[T]][int] equal to Model[List[int]].
+    The approach could be modified to not use two different cache keys at different points, but the
+    _early_cache_key is optimized to be as quick to compute as possible (for repeated-access speed), and the
+    _late_cache_key is optimized to be as "correct" as possible, so that two types that will ultimately be the
+    same after resolving the type arguments will always produce cache hits.
+
+    If we wanted to move to only using a single cache key per type, we would either need to always use the
+    slower/more computationally intensive logic associated with _late_cache_key, or would need to accept
+    that Model[List[T]][int] is a different type than Model[List[int]]. Because we rely on subclass relationships
+    during validation, I think it is worthwhile to ensure that types that are functionally equivalent are actually
+    equal.
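+
+    As an illustrative sketch of that guarantee (`Model` is a hypothetical generic model,
+    not part of this module):
+
+    ```python {test="skip" lint="skip"}
+    from typing import Generic, List, TypeVar
+
+    from pydantic import BaseModel
+
+    T = TypeVar('T')
+
+    class Model(BaseModel, Generic[T]):
+        x: T
+
+    # Both parametrization orders hit the same cache entry and return the same class:
+    assert Model[List[T]][int] is Model[List[int]]
+    ```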
+ """ + return _GENERIC_TYPES_CACHE.get(_early_cache_key(parent, typevar_values)) + + +def get_cached_generic_type_late( + parent: type[BaseModel], typevar_values: Any, origin: type[BaseModel], args: tuple[Any, ...] +) -> type[BaseModel] | None: + """See the docstring of `get_cached_generic_type_early` for more information about the two-stage cache lookup.""" + cached = _GENERIC_TYPES_CACHE.get(_late_cache_key(origin, args, typevar_values)) + if cached is not None: + set_cached_generic_type(parent, typevar_values, cached, origin, args) + return cached + + +def set_cached_generic_type( + parent: type[BaseModel], + typevar_values: tuple[Any, ...], + type_: type[BaseModel], + origin: type[BaseModel] | None = None, + args: tuple[Any, ...] | None = None, +) -> None: + """See the docstring of `get_cached_generic_type_early` for more information about why items are cached with + two different keys. + """ + _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values)] = type_ + if len(typevar_values) == 1: + _GENERIC_TYPES_CACHE[_early_cache_key(parent, typevar_values[0])] = type_ + if origin and args: + _GENERIC_TYPES_CACHE[_late_cache_key(origin, args, typevar_values)] = type_ + + +def _union_orderings_key(typevar_values: Any) -> Any: + """This is intended to help differentiate between Union types with the same arguments in different order. + + Thanks to caching internal to the `typing` module, it is not possible to distinguish between + List[Union[int, float]] and List[Union[float, int]] (and similarly for other "parent" origins besides List) + because `typing` considers Union[int, float] to be equal to Union[float, int]. + + However, you _can_ distinguish between (top-level) Union[int, float] vs. Union[float, int]. + Because we parse items as the first Union type that is successful, we get slightly more consistent behavior + if we make an effort to distinguish the ordering of items in a union. It would be best if we could _always_ + get the exact-correct order of items in the union, but that would require a change to the `typing` module itself. + (See https://github.com/python/cpython/issues/86483 for reference.) + """ + if isinstance(typevar_values, tuple): + return tuple(_union_orderings_key(value) for value in typevar_values) + elif typing_objects.is_union(typing_extensions.get_origin(typevar_values)): + return get_args(typevar_values) + else: + return () + + +def _early_cache_key(cls: type[BaseModel], typevar_values: Any) -> GenericTypesCacheKey: + """This is intended for minimal computational overhead during lookups of cached types. + + Note that this is overly simplistic, and it's possible that two different cls/typevar_values + inputs would ultimately result in the same type being created in BaseModel.__class_getitem__. + To handle this, we have a fallback _late_cache_key that is checked later if the _early_cache_key + lookup fails, and should result in a cache hit _precisely_ when the inputs to __class_getitem__ + would result in the same type. + """ + return cls, typevar_values, _union_orderings_key(typevar_values) + + +def _late_cache_key(origin: type[BaseModel], args: tuple[Any, ...], typevar_values: Any) -> GenericTypesCacheKey: + """This is intended for use later in the process of creating a new type, when we have more information + about the exact args that will be passed. 
If it turns out that a different set of inputs to + __class_getitem__ resulted in the same inputs to the generic type creation process, we can still + return the cached type, and update the cache with the _early_cache_key as well. + """ + # The _union_orderings_key is placed at the start here to ensure there cannot be a collision with an + # _early_cache_key, as that function will always produce a BaseModel subclass as the first item in the key, + # whereas this function will always produce a tuple as the first item in the key. + return _union_orderings_key(typevar_values), origin, args diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_git.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_git.py new file mode 100644 index 0000000000000000000000000000000000000000..96dcda28f801cd19989838b029f6f2b250dedc52 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_git.py @@ -0,0 +1,27 @@ +"""Git utilities, adopted from mypy's git utilities (https://github.com/python/mypy/blob/master/mypy/git.py).""" + +from __future__ import annotations + +import subprocess +from pathlib import Path + + +def is_git_repo(dir: Path) -> bool: + """Is the given directory version-controlled with git?""" + return dir.joinpath('.git').exists() + + +def have_git() -> bool: # pragma: no cover + """Can we run the git executable?""" + try: + subprocess.check_output(['git', '--help']) + return True + except subprocess.CalledProcessError: + return False + except OSError: + return False + + +def git_revision(dir: Path) -> str: + """Get the SHA-1 of the HEAD of a git repository.""" + return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD'], cwd=dir).decode('utf-8').strip() diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..638102f771dd674799255cc6ef69cacb2c184d13 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_import_utils.py @@ -0,0 +1,20 @@ +from functools import cache +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pydantic import BaseModel + from pydantic.fields import FieldInfo + + +@cache +def import_cached_base_model() -> type['BaseModel']: + from pydantic import BaseModel + + return BaseModel + + +@cache +def import_cached_field_info() -> type['FieldInfo']: + from pydantic.fields import FieldInfo + + return FieldInfo diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py new file mode 100644 index 0000000000000000000000000000000000000000..33e152cc8d178486ef016285855495a86d5991d8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_internal_dataclass.py @@ -0,0 +1,7 @@ +import sys + +# `slots` is available on Python >= 3.10 +if sys.version_info >= (3, 10): + slots_true = {'slots': True} +else: + slots_true = {} diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..5954dba7017edd9b55ddb0eb1831fa449a8edf7c --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_known_annotated_metadata.py @@ -0,0 +1,403 @@ +from __future__ import annotations + +from collections import defaultdict +from collections.abc import Iterable 
+from copy import copy +from functools import lru_cache, partial +from typing import TYPE_CHECKING, Any + +from pydantic_core import CoreSchema, PydanticCustomError, ValidationError, to_jsonable_python +from pydantic_core import core_schema as cs + +from ._fields import PydanticMetadata +from ._import_utils import import_cached_field_info + +if TYPE_CHECKING: + pass + +STRICT = {'strict'} +FAIL_FAST = {'fail_fast'} +LENGTH_CONSTRAINTS = {'min_length', 'max_length'} +INEQUALITY = {'le', 'ge', 'lt', 'gt'} +NUMERIC_CONSTRAINTS = {'multiple_of', *INEQUALITY} +ALLOW_INF_NAN = {'allow_inf_nan'} + +STR_CONSTRAINTS = { + *LENGTH_CONSTRAINTS, + *STRICT, + 'strip_whitespace', + 'to_lower', + 'to_upper', + 'pattern', + 'coerce_numbers_to_str', + 'ascii_only', +} +BYTES_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT} + +LIST_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST} +TUPLE_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST} +SET_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT, *FAIL_FAST} +DICT_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT} +GENERATOR_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *STRICT} +SEQUENCE_CONSTRAINTS = {*LENGTH_CONSTRAINTS, *FAIL_FAST} + +FLOAT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *ALLOW_INF_NAN, *STRICT} +DECIMAL_CONSTRAINTS = {'max_digits', 'decimal_places', *FLOAT_CONSTRAINTS} +INT_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *ALLOW_INF_NAN, *STRICT} +BOOL_CONSTRAINTS = STRICT +UUID_CONSTRAINTS = STRICT + +DATE_TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} +TIMEDELTA_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} +TIME_CONSTRAINTS = {*NUMERIC_CONSTRAINTS, *STRICT} +LAX_OR_STRICT_CONSTRAINTS = STRICT +ENUM_CONSTRAINTS = STRICT +COMPLEX_CONSTRAINTS = STRICT + +UNION_CONSTRAINTS = {'union_mode'} +URL_CONSTRAINTS = { + 'max_length', + 'allowed_schemes', + 'host_required', + 'default_host', + 'default_port', + 'default_path', +} + +TEXT_SCHEMA_TYPES = ('str', 'bytes', 'url', 'multi-host-url') +SEQUENCE_SCHEMA_TYPES = ('list', 'tuple', 'set', 'frozenset', 'generator', *TEXT_SCHEMA_TYPES) +NUMERIC_SCHEMA_TYPES = ('float', 'int', 'date', 'time', 'timedelta', 'datetime') + +CONSTRAINTS_TO_ALLOWED_SCHEMAS: dict[str, set[str]] = defaultdict(set) + +constraint_schema_pairings: list[tuple[set[str], tuple[str, ...]]] = [ + (STR_CONSTRAINTS, TEXT_SCHEMA_TYPES), + (BYTES_CONSTRAINTS, ('bytes',)), + (LIST_CONSTRAINTS, ('list',)), + (TUPLE_CONSTRAINTS, ('tuple',)), + (SET_CONSTRAINTS, ('set', 'frozenset')), + (DICT_CONSTRAINTS, ('dict',)), + (GENERATOR_CONSTRAINTS, ('generator',)), + (FLOAT_CONSTRAINTS, ('float',)), + (INT_CONSTRAINTS, ('int',)), + (DATE_TIME_CONSTRAINTS, ('date', 'time', 'datetime', 'timedelta')), + # TODO: this is a bit redundant, we could probably avoid some of these + (STRICT, (*TEXT_SCHEMA_TYPES, *SEQUENCE_SCHEMA_TYPES, *NUMERIC_SCHEMA_TYPES, 'typed-dict', 'model')), + (UNION_CONSTRAINTS, ('union',)), + (URL_CONSTRAINTS, ('url', 'multi-host-url')), + (BOOL_CONSTRAINTS, ('bool',)), + (UUID_CONSTRAINTS, ('uuid',)), + (LAX_OR_STRICT_CONSTRAINTS, ('lax-or-strict',)), + (ENUM_CONSTRAINTS, ('enum',)), + (DECIMAL_CONSTRAINTS, ('decimal',)), + (COMPLEX_CONSTRAINTS, ('complex',)), +] + +for constraints, schemas in constraint_schema_pairings: + for c in constraints: + CONSTRAINTS_TO_ALLOWED_SCHEMAS[c].update(schemas) + + +def as_jsonable_value(v: Any) -> Any: + if type(v) not in (int, str, float, bytes, bool, type(None)): + return to_jsonable_python(v) + return v + + +def expand_grouped_metadata(annotations: Iterable[Any]) -> Iterable[Any]: + """Expand the annotations. 
+ + Args: + annotations: An iterable of annotations. + + Returns: + An iterable of expanded annotations. + + Example: + ```python + from annotated_types import Ge, Len + + from pydantic._internal._known_annotated_metadata import expand_grouped_metadata + + print(list(expand_grouped_metadata([Ge(4), Len(5)]))) + #> [Ge(ge=4), MinLen(min_length=5)] + ``` + """ + import annotated_types as at + + FieldInfo = import_cached_field_info() + + for annotation in annotations: + if isinstance(annotation, at.GroupedMetadata): + yield from annotation + elif isinstance(annotation, FieldInfo): + yield from annotation.metadata + # this is a bit problematic in that it results in duplicate metadata + # all of our "consumers" can handle it, but it is not ideal + # we probably should split up FieldInfo into: + # - annotated types metadata + # - individual metadata known only to Pydantic + annotation = copy(annotation) + annotation.metadata = [] + yield annotation + else: + yield annotation + + +@lru_cache +def _get_at_to_constraint_map() -> dict[type, str]: + """Return a mapping of annotated types to constraints. + + Normally, we would define a mapping like this in the module scope, but we can't do that + because we don't permit module level imports of `annotated_types`, in an attempt to speed up + the import time of `pydantic`. We still only want to have this dictionary defined in one place, + so we use this function to cache the result. + """ + import annotated_types as at + + return { + at.Gt: 'gt', + at.Ge: 'ge', + at.Lt: 'lt', + at.Le: 'le', + at.MultipleOf: 'multiple_of', + at.MinLen: 'min_length', + at.MaxLen: 'max_length', + } + + +def apply_known_metadata(annotation: Any, schema: CoreSchema) -> CoreSchema | None: # noqa: C901 + """Apply `annotation` to `schema` if it is an annotation we know about (Gt, Le, etc.). + Otherwise return `None`. + + This does not handle all known annotations. If / when it does, it can always + return a CoreSchema and return the unmodified schema if the annotation should be ignored. + + Assumes that GroupedMetadata has already been expanded via `expand_grouped_metadata`. + + Args: + annotation: The annotation. + schema: The schema. + + Returns: + An updated schema with annotation if it is an annotation we know about, `None` otherwise. + + Raises: + RuntimeError: If a constraint can't be applied to a specific schema type. + ValueError: If an unknown constraint is encountered. + """ + import annotated_types as at + + from ._validators import NUMERIC_VALIDATOR_LOOKUP, forbid_inf_nan_check + + schema = schema.copy() + schema_update, other_metadata = collect_known_metadata([annotation]) + schema_type = schema['type'] + + chain_schema_constraints: set[str] = { + 'pattern', + 'strip_whitespace', + 'to_lower', + 'to_upper', + 'coerce_numbers_to_str', + 'ascii_only', + } + chain_schema_steps: list[CoreSchema] = [] + + for constraint, value in schema_update.items(): + if constraint not in CONSTRAINTS_TO_ALLOWED_SCHEMAS: + raise ValueError(f'Unknown constraint {constraint}') + allowed_schemas = CONSTRAINTS_TO_ALLOWED_SCHEMAS[constraint] + + # if it becomes necessary to handle more than one constraint + # in this recursive case with function-after or function-wrap, we should refactor + # this is a bit challenging because we sometimes want to apply constraints to the inner schema, + # whereas other times we want to wrap the existing schema with a new one that enforces a new constraint. 
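+        # As an illustrative sketch of the two situations (hedged, not exhaustive): a constraint
+        # such as `le` can be written directly onto an `'int'` schema, while a constraint like
+        # `pattern` has to be enforced by appending an extra validation step (see
+        # `chain_schema_steps` below):
+        #
+        #     apply_known_metadata(annotated_types.Le(5), cs.int_schema())
+        #     #> {'type': 'int', 'le': 5}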
+        if schema_type in {'function-before', 'function-wrap', 'function-after'} and constraint == 'strict':
+            schema['schema'] = apply_known_metadata(annotation, schema['schema'])  # type: ignore  # schema is function schema
+            return schema
+
+        # if we're allowed to apply constraint directly to the schema, like le to int, do that
+        if schema_type in allowed_schemas:
+            if constraint == 'union_mode' and schema_type == 'union':
+                schema['mode'] = value  # type: ignore  # schema is UnionSchema
+            else:
+                schema[constraint] = value
+            continue
+
+        # else, apply a function after validator to the schema to enforce the corresponding constraint
+        if constraint in chain_schema_constraints:
+
+            def _apply_constraint_with_incompatibility_info(
+                value: Any, handler: cs.ValidatorFunctionWrapHandler
+            ) -> Any:
+                try:
+                    x = handler(value)
+                except ValidationError as ve:
+                    # if the error is about the type, it's likely that the constraint is incompatible with the type of the field
+                    # for example, the following invalid schema wouldn't be caught during schema build, but rather at this point
+                    # with a cryptic 'string_type' error coming from the string validator,
+                    # that we'd rather express as a constraint incompatibility error (TypeError):
+                    # Annotated[list[int], Field(pattern='abc')]
+                    if 'type' in ve.errors()[0]['type']:
+                        raise TypeError(
+                            f"Unable to apply constraint '{constraint}' to supplied value {value} for schema of type '{schema_type}'"  # noqa: B023
+                        )
+                    raise ve
+                return x
+
+            chain_schema_steps.append(
+                cs.no_info_wrap_validator_function(
+                    _apply_constraint_with_incompatibility_info, cs.str_schema(**{constraint: value})
+                )
+            )
+        elif constraint in NUMERIC_VALIDATOR_LOOKUP:
+            if constraint in LENGTH_CONSTRAINTS:
+                inner_schema = schema
+                while inner_schema['type'] in {'function-before', 'function-wrap', 'function-after'}:
+                    inner_schema = inner_schema['schema']  # type: ignore
+                inner_schema_type = inner_schema['type']
+                if inner_schema_type == 'list' or (
+                    inner_schema_type == 'json-or-python' and inner_schema['json_schema']['type'] == 'list'  # type: ignore
+                ):
+                    js_constraint_key = 'minItems' if constraint == 'min_length' else 'maxItems'
+                else:
+                    js_constraint_key = 'minLength' if constraint == 'min_length' else 'maxLength'
+            else:
+                js_constraint_key = constraint
+
+            schema = cs.no_info_after_validator_function(
+                partial(NUMERIC_VALIDATOR_LOOKUP[constraint], **{constraint: value}), schema
+            )
+            metadata = schema.get('metadata', {})
+            if (existing_json_schema_updates := metadata.get('pydantic_js_updates')) is not None:
+                metadata['pydantic_js_updates'] = {
+                    **existing_json_schema_updates,
+                    **{js_constraint_key: as_jsonable_value(value)},
+                }
+            else:
+                metadata['pydantic_js_updates'] = {js_constraint_key: as_jsonable_value(value)}
+            schema['metadata'] = metadata
+        elif constraint == 'allow_inf_nan' and value is False:
+            schema = cs.no_info_after_validator_function(
+                forbid_inf_nan_check,
+                schema,
+            )
+        else:
+            # It's rare that we'd get here, but it's possible if we add a new constraint and forget to handle it
+            # Most constraint errors are caught at runtime during attempted application
+            raise RuntimeError(f"Unable to apply constraint '{constraint}' to schema of type '{schema_type}'")
+
+    for annotation in other_metadata:
+        if (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
+            constraint = at_to_constraint_map[annotation_type]
+            validator = NUMERIC_VALIDATOR_LOOKUP.get(constraint)
+            if validator is None:
+                raise ValueError(f'Unknown constraint {constraint}')
+            schema = cs.no_info_after_validator_function(
+                partial(validator, **{constraint: getattr(annotation, constraint)}), schema
+            )
+            continue
+        elif isinstance(annotation, (at.Predicate, at.Not)):
+            predicate_name = f'{annotation.func.__qualname__!r} ' if hasattr(annotation.func, '__qualname__') else ''
+
+            # Note: B023 is ignored because even though we iterate over `other_metadata`, it is guaranteed
+            # to be of length 1. `apply_known_metadata()` is called from `GenerateSchema`, where annotations
+            # were already expanded via `expand_grouped_metadata()`. Confusing, but this falls into the annotations
+            # refactor.
+            if isinstance(annotation, at.Predicate):
+
+                def val_func(v: Any) -> Any:
+                    predicate_satisfied = annotation.func(v)  # noqa: B023
+                    if not predicate_satisfied:
+                        raise PydanticCustomError(
+                            'predicate_failed',
+                            f'Predicate {predicate_name}failed',  # pyright: ignore[reportArgumentType]  # noqa: B023
+                        )
+                    return v
+
+            else:
+
+                def val_func(v: Any) -> Any:
+                    predicate_satisfied = annotation.func(v)  # noqa: B023
+                    if predicate_satisfied:
+                        raise PydanticCustomError(
+                            'not_operation_failed',
+                            f'Not of {predicate_name}failed',  # pyright: ignore[reportArgumentType]  # noqa: B023
+                        )
+                    return v
+
+            schema = cs.no_info_after_validator_function(val_func, schema)
+        else:
+            # ignore any other unknown metadata
+            return None
+
+    if chain_schema_steps:
+        chain_schema_steps = [schema] + chain_schema_steps
+        return cs.chain_schema(chain_schema_steps)
+
+    return schema
+
+
+def collect_known_metadata(annotations: Iterable[Any]) -> tuple[dict[str, Any], list[Any]]:
+    """Split `annotations` into known metadata and unknown annotations.
+
+    Args:
+        annotations: An iterable of annotations.
+
+    Returns:
+        A tuple containing a dict of known metadata and a list of unknown annotations.
+
+    Example:
+        ```python
+        from annotated_types import Gt, Len
+
+        from pydantic._internal._known_annotated_metadata import collect_known_metadata
+
+        print(collect_known_metadata([Gt(1), Len(42), ...]))
+        #> ({'gt': 1, 'min_length': 42}, [Ellipsis])
+        ```
+    """
+    annotations = expand_grouped_metadata(annotations)
+
+    res: dict[str, Any] = {}
+    remaining: list[Any] = []
+
+    for annotation in annotations:
+        # isinstance(annotation, PydanticMetadata) also covers ._fields:_PydanticGeneralMetadata
+        if isinstance(annotation, PydanticMetadata):
+            res.update(annotation.__dict__)
+        # we don't use dataclasses.asdict because that recursively calls asdict on the field values
+        elif (annotation_type := type(annotation)) in (at_to_constraint_map := _get_at_to_constraint_map()):
+            constraint = at_to_constraint_map[annotation_type]
+            res[constraint] = getattr(annotation, constraint)
+        elif isinstance(annotation, type) and issubclass(annotation, PydanticMetadata):
+            # also support PydanticMetadata classes being used without initialisation,
+            # e.g. `Annotated[int, Strict]` as well as `Annotated[int, Strict()]`
+            res.update({k: v for k, v in vars(annotation).items() if not k.startswith('_')})
+        else:
+            remaining.append(annotation)
+    # Nones can sneak in but pydantic-core will reject them
+    # it'd be nice to clean things up so we don't put in None (we probably don't _need_ to, it was just easier)
+    # but this is simple enough to kick that can down the road
+    res = {k: v for k, v in res.items() if v is not None}
+    return res, remaining
+
+
+def check_metadata(metadata: dict[str, Any], allowed: Iterable[str], source_type: Any) -> None:
+    """A small utility function to validate that the given metadata can be applied to the target.
+    More than saving lines of code, this gives us a consistent error message for all of our internal implementations.
+
+    Args:
+        metadata: A dict of metadata.
+        allowed: An iterable of allowed metadata.
+        source_type: The source type.
+
+    Raises:
+        TypeError: If there is metadata that can't be applied to the source type.
+    """
+    unknown = metadata.keys() - set(allowed)
+    if unknown:
+        raise TypeError(
+            f'The following constraints cannot be applied to {source_type!r}: {", ".join([f"{k!r}" for k in unknown])}'
+        )
diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py
new file mode 100644
index 0000000000000000000000000000000000000000..9125ab32e5348b6e873c4b1939a887ebde35d97a
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_mock_val_ser.py
@@ -0,0 +1,228 @@
+from __future__ import annotations
+
+from collections.abc import Iterator, Mapping
+from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, TypeVar, Union
+
+from pydantic_core import CoreSchema, SchemaSerializer, SchemaValidator
+
+from ..errors import PydanticErrorCodes, PydanticUserError
+from ..plugin._schema_validator import PluggableSchemaValidator
+
+if TYPE_CHECKING:
+    from ..dataclasses import PydanticDataclass
+    from ..main import BaseModel
+    from ..type_adapter import TypeAdapter
+
+
+ValSer = TypeVar('ValSer', bound=Union[SchemaValidator, PluggableSchemaValidator, SchemaSerializer])
+T = TypeVar('T')
+
+
+class MockCoreSchema(Mapping[str, Any]):
+    """Mocker for `pydantic_core.CoreSchema` which optionally attempts to
+    rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails.
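+
+    As an illustrative sketch (`MyModel` and `Undefined` are hypothetical names):
+
+    ```python {test="skip" lint="skip"}
+    class MyModel(BaseModel):
+        x: 'Undefined'
+
+    # While `Undefined` is missing, `MyModel.__pydantic_core_schema__` is a `MockCoreSchema`;
+    # any mapping access triggers a rebuild attempt and, if that fails, raises roughly:
+    MyModel.__pydantic_core_schema__['type']
+    #> PydanticUserError: `MyModel` is not fully defined; you should define `Undefined`,
+    #> then call `MyModel.model_rebuild()`.
+    ```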
+ """ + + __slots__ = '_error_message', '_code', '_attempt_rebuild', '_built_memo' + + def __init__( + self, + error_message: str, + *, + code: PydanticErrorCodes, + attempt_rebuild: Callable[[], CoreSchema | None] | None = None, + ) -> None: + self._error_message = error_message + self._code: PydanticErrorCodes = code + self._attempt_rebuild = attempt_rebuild + self._built_memo: CoreSchema | None = None + + def __getitem__(self, key: str) -> Any: + return self._get_built().__getitem__(key) + + def __len__(self) -> int: + return self._get_built().__len__() + + def __iter__(self) -> Iterator[str]: + return self._get_built().__iter__() + + def _get_built(self) -> CoreSchema: + if self._built_memo is not None: + return self._built_memo + + if self._attempt_rebuild: + schema = self._attempt_rebuild() + if schema is not None: + self._built_memo = schema + return schema + raise PydanticUserError(self._error_message, code=self._code) + + def rebuild(self) -> CoreSchema | None: + self._built_memo = None + if self._attempt_rebuild: + schema = self._attempt_rebuild() + if schema is not None: + return schema + else: + raise PydanticUserError(self._error_message, code=self._code) + return None + + +class MockValSer(Generic[ValSer]): + """Mocker for `pydantic_core.SchemaValidator` or `pydantic_core.SchemaSerializer` which optionally attempts to + rebuild the thing it's mocking when one of its methods is accessed and raises an error if that fails. + """ + + __slots__ = '_error_message', '_code', '_val_or_ser', '_attempt_rebuild' + + def __init__( + self, + error_message: str, + *, + code: PydanticErrorCodes, + val_or_ser: Literal['validator', 'serializer'], + attempt_rebuild: Callable[[], ValSer | None] | None = None, + ) -> None: + self._error_message = error_message + self._val_or_ser = SchemaValidator if val_or_ser == 'validator' else SchemaSerializer + self._code: PydanticErrorCodes = code + self._attempt_rebuild = attempt_rebuild + + def __getattr__(self, item: str) -> None: + __tracebackhide__ = True + if self._attempt_rebuild: + val_ser = self._attempt_rebuild() + if val_ser is not None: + return getattr(val_ser, item) + + # raise an AttributeError if `item` doesn't exist + getattr(self._val_or_ser, item) + raise PydanticUserError(self._error_message, code=self._code) + + def rebuild(self) -> ValSer | None: + if self._attempt_rebuild: + val_ser = self._attempt_rebuild() + if val_ser is not None: + return val_ser + else: + raise PydanticUserError(self._error_message, code=self._code) + return None + + +def set_type_adapter_mocks(adapter: TypeAdapter) -> None: + """Set `core_schema`, `validator` and `serializer` to mock core types on a type adapter instance. + + Args: + adapter: The type adapter instance to set the mocks on + """ + type_repr = str(adapter._type) + undefined_type_error_message = ( + f'`TypeAdapter[{type_repr}]` is not fully defined; you should define `{type_repr}` and all referenced types,' + f' then call `.rebuild()` on the instance.' 
+ ) + + def attempt_rebuild_fn(attr_fn: Callable[[TypeAdapter], T]) -> Callable[[], T | None]: + def handler() -> T | None: + if adapter.rebuild(raise_errors=False, _parent_namespace_depth=5) is not False: + return attr_fn(adapter) + return None + + return handler + + adapter.core_schema = MockCoreSchema( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + attempt_rebuild=attempt_rebuild_fn(lambda ta: ta.core_schema), + ) + adapter.validator = MockValSer( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + val_or_ser='validator', + attempt_rebuild=attempt_rebuild_fn(lambda ta: ta.validator), + ) + adapter.serializer = MockValSer( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + val_or_ser='serializer', + attempt_rebuild=attempt_rebuild_fn(lambda ta: ta.serializer), + ) + + +def set_model_mocks(cls: type[BaseModel], undefined_name: str = 'all referenced types') -> None: + """Set `__pydantic_core_schema__`, `__pydantic_validator__` and `__pydantic_serializer__` to mock core types on a model. + + Args: + cls: The model class to set the mocks on + undefined_name: Name of the undefined thing, used in error messages + """ + undefined_type_error_message = ( + f'`{cls.__name__}` is not fully defined; you should define {undefined_name},' + f' then call `{cls.__name__}.model_rebuild()`.' + ) + + def attempt_rebuild_fn(attr_fn: Callable[[type[BaseModel]], T]) -> Callable[[], T | None]: + def handler() -> T | None: + if cls.model_rebuild(raise_errors=False, _parent_namespace_depth=5) is not False: + return attr_fn(cls) + return None + + return handler + + cls.__pydantic_core_schema__ = MockCoreSchema( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_core_schema__), + ) + cls.__pydantic_validator__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + val_or_ser='validator', + attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_validator__), + ) + cls.__pydantic_serializer__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + val_or_ser='serializer', + attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_serializer__), + ) + + +def set_dataclass_mocks(cls: type[PydanticDataclass], undefined_name: str = 'all referenced types') -> None: + """Set `__pydantic_validator__` and `__pydantic_serializer__` to `MockValSer`s on a dataclass. + + Args: + cls: The model class to set the mocks on + undefined_name: Name of the undefined thing, used in error messages + """ + from ..dataclasses import rebuild_dataclass + + undefined_type_error_message = ( + f'`{cls.__name__}` is not fully defined; you should define {undefined_name},' + f' then call `pydantic.dataclasses.rebuild_dataclass({cls.__name__})`.' 
+ ) + + def attempt_rebuild_fn(attr_fn: Callable[[type[PydanticDataclass]], T]) -> Callable[[], T | None]: + def handler() -> T | None: + if rebuild_dataclass(cls, raise_errors=False, _parent_namespace_depth=5) is not False: + return attr_fn(cls) + return None + + return handler + + cls.__pydantic_core_schema__ = MockCoreSchema( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_core_schema__), + ) + cls.__pydantic_validator__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + val_or_ser='validator', + attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_validator__), + ) + cls.__pydantic_serializer__ = MockValSer( # pyright: ignore[reportAttributeAccessIssue] + undefined_type_error_message, + code='class-not-fully-defined', + val_or_ser='serializer', + attempt_rebuild=attempt_rebuild_fn(lambda c: c.__pydantic_serializer__), + ) diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py new file mode 100644 index 0000000000000000000000000000000000000000..b7945442fb20d23ae16a2603130ffe566fb0989f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_model_construction.py @@ -0,0 +1,868 @@ +"""Private logic for creating models.""" + +from __future__ import annotations as _annotations + +import operator +import sys +import typing +import warnings +import weakref +from abc import ABCMeta +from functools import cache, partial, wraps +from types import FunctionType +from typing import TYPE_CHECKING, Any, Callable, Generic, Literal, NoReturn, TypeVar, cast + +from pydantic_core import PydanticUndefined, SchemaSerializer +from typing_extensions import TypeAliasType, dataclass_transform, deprecated, get_args, get_origin +from typing_inspection import typing_objects + +from ..errors import PydanticUndefinedAnnotation, PydanticUserError +from ..plugin._schema_validator import create_schema_validator +from ..warnings import GenericBeforeBaseModelWarning, PydanticDeprecatedSince20 +from ._config import ConfigWrapper +from ._decorators import DecoratorInfos, PydanticDescriptorProxy, get_attribute_from_bases, unwrap_wrapped_function +from ._fields import collect_model_fields, is_valid_field_name, is_valid_privateattr_name, rebuild_model_fields +from ._generate_schema import GenerateSchema, InvalidSchemaError +from ._generics import PydanticGenericMetadata, get_model_typevars_map +from ._import_utils import import_cached_base_model, import_cached_field_info +from ._mock_val_ser import set_model_mocks +from ._namespace_utils import NsResolver +from ._signature import generate_pydantic_signature +from ._typing_extra import ( + _make_forward_ref, + eval_type_backport, + is_classvar_annotation, + parent_frame_namespace, +) +from ._utils import LazyClassAttribute, SafeGetItemProxy + +if TYPE_CHECKING: + from ..fields import Field as PydanticModelField + from ..fields import FieldInfo, ModelPrivateAttr + from ..fields import PrivateAttr as PydanticModelPrivateAttr + from ..main import BaseModel + from ._fields import PydanticExtraInfo +else: + PydanticModelField = object() + PydanticModelPrivateAttr = object() + +object_setattr = object.__setattr__ + + +class _ModelNamespaceDict(dict): + """A dictionary subclass that intercepts attribute setting on model classes and + warns about 
overriding of decorators. + """ + + def __setitem__(self, k: str, v: object) -> None: + existing: Any = self.get(k, None) + if existing and v is not existing and isinstance(existing, PydanticDescriptorProxy): + warnings.warn( + f'`{k}` overrides an existing Pydantic `{existing.decorator_info.decorator_repr}` decorator', + stacklevel=2, + ) + + return super().__setitem__(k, v) + + +def NoInitField( + *, + init: Literal[False] = False, +) -> Any: + """Only for typing purposes. Used as default value of `__pydantic_fields_set__`, + `__pydantic_extra__`, `__pydantic_private__`, so they could be ignored when + synthesizing the `__init__` signature. + """ + + +# For ModelMetaclass.register(): +_T = TypeVar('_T') + + +@dataclass_transform(kw_only_default=True, field_specifiers=(PydanticModelField, PydanticModelPrivateAttr, NoInitField)) +class ModelMetaclass(ABCMeta): + def __new__( + mcs, + cls_name: str, + bases: tuple[type[Any], ...], + namespace: dict[str, Any], + __pydantic_generic_metadata__: PydanticGenericMetadata | None = None, + __pydantic_reset_parent_namespace__: bool = True, + _create_model_module: str | None = None, + **kwargs: Any, + ) -> type: + """Metaclass for creating Pydantic models. + + Args: + cls_name: The name of the class to be created. + bases: The base classes of the class to be created. + namespace: The attribute dictionary of the class to be created. + __pydantic_generic_metadata__: Metadata for generic models. + __pydantic_reset_parent_namespace__: Reset parent namespace. + _create_model_module: The module of the class to be created, if created by `create_model`. + **kwargs: Catch-all for any other keyword arguments. + + Returns: + The new class created by the metaclass. + """ + # Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we rely on the fact + # that `BaseModel` itself won't have any bases, but any subclass of it will, to determine whether the `__new__` + # call we're in the middle of is for the `BaseModel` class. + if bases: + raw_annotations: dict[str, Any] + if sys.version_info >= (3, 14): + if ( + '__annotations__' in namespace + ): # `from __future__ import annotations` was used in the model's module + raw_annotations = namespace['__annotations__'] + else: + # See https://docs.python.org/3.14/library/annotationlib.html#using-annotations-in-a-metaclass: + from annotationlib import Format, call_annotate_function, get_annotate_from_class_namespace + + if annotate := get_annotate_from_class_namespace(namespace): + raw_annotations = call_annotate_function(annotate, format=Format.FORWARDREF) + else: + raw_annotations = {} + else: + raw_annotations = namespace.get('__annotations__', {}) + + base_field_names, class_vars, base_private_attributes = mcs._collect_bases_data(bases) + + config_wrapper = ConfigWrapper.for_model(bases, namespace, raw_annotations, kwargs) + namespace['model_config'] = config_wrapper.config_dict + private_attributes = inspect_namespace( + namespace, raw_annotations, config_wrapper.ignored_types, class_vars, base_field_names + ) + if private_attributes or base_private_attributes: + original_model_post_init = get_model_post_init(namespace, bases) + if original_model_post_init is not None: + # if there are private attributes and a model_post_init function, we handle both + + @wraps(original_model_post_init) + def wrapped_model_post_init(self: BaseModel, context: Any, /) -> None: + """We need to both initialize private attributes and call the user-defined model_post_init + method. 
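+
+                    As an illustrative sketch (`Model` is hypothetical), the wrapper guarantees
+                    that private attributes are already usable inside the user hook:
+
+                    ```python {test="skip" lint="skip"}
+                    from typing import Any
+
+                    from pydantic import BaseModel, PrivateAttr
+
+                    class Model(BaseModel):
+                        _secret: str = PrivateAttr(default='hidden')
+
+                        def model_post_init(self, context: Any, /) -> None:
+                            assert self._secret == 'hidden'  # already initialized here
+                    ```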
+ """ + init_private_attributes(self, context) + original_model_post_init(self, context) + + namespace['model_post_init'] = wrapped_model_post_init + else: + namespace['model_post_init'] = init_private_attributes + + namespace['__class_vars__'] = class_vars + namespace['__private_attributes__'] = {**base_private_attributes, **private_attributes} + + cls = cast('type[BaseModel]', super().__new__(mcs, cls_name, bases, namespace, **kwargs)) + BaseModel_ = import_cached_base_model() + + mro = cls.__mro__ + if Generic in mro and mro.index(Generic) < mro.index(BaseModel_): + warnings.warn( + GenericBeforeBaseModelWarning( + 'Classes should inherit from `BaseModel` before generic classes (e.g. `typing.Generic[T]`) ' + 'for pydantic generics to work properly.' + ), + stacklevel=2, + ) + + cls.__pydantic_custom_init__ = not getattr(cls.__init__, '__pydantic_base_init__', False) + cls.__pydantic_post_init__ = ( + None if cls.model_post_init is BaseModel_.model_post_init else 'model_post_init' + ) + + cls.__pydantic_setattr_handlers__ = {} + + cls.__pydantic_decorators__ = DecoratorInfos.build(cls, replace_wrapped_methods=True) + cls.__pydantic_decorators__.update_from_config(config_wrapper) + + # Use the getattr below to grab the __parameters__ from the `typing.Generic` parent class + if __pydantic_generic_metadata__: + cls.__pydantic_generic_metadata__ = __pydantic_generic_metadata__ + else: + parent_parameters = getattr(cls, '__pydantic_generic_metadata__', {}).get('parameters', ()) + parameters = getattr(cls, '__parameters__', None) or parent_parameters + if parameters and parent_parameters and not all(x in parameters for x in parent_parameters): + from ..root_model import RootModelRootType + + missing_parameters = tuple(x for x in parameters if x not in parent_parameters) + if RootModelRootType in parent_parameters and RootModelRootType not in parameters: + # This is a special case where the user has subclassed RootModel, but has not parameterized + # RootModel with the generic type identifiers being used. Ex: + # class MyModel(RootModel, Generic[T]): + # root: T + # Should instead just be: + # class MyModel(RootModel[T]): + # root: T + parameters_str = ', '.join([x.__name__ for x in missing_parameters]) + error_message = ( + f'{cls.__name__} is a subclass of `RootModel`, but does not include the generic type identifier(s) ' + f'{parameters_str} in its parameters. ' + f'You should parametrize RootModel directly, e.g., `class {cls.__name__}(RootModel[{parameters_str}]): ...`.' + ) + else: + combined_parameters = parent_parameters + missing_parameters + parameters_str = ', '.join([str(x) for x in combined_parameters]) + generic_type_label = f'typing.Generic[{parameters_str}]' + error_message = ( + f'All parameters must be present on typing.Generic;' + f' you should inherit from {generic_type_label}.' + ) + if Generic not in bases: # pragma: no cover + # We raise an error here not because it is desirable, but because some cases are mishandled. + # It would be nice to remove this error and still have things behave as expected, it's just + # challenging because we are using a custom `__class_getitem__` to parametrize generic models, + # and not returning a typing._GenericAlias from it. 
+ bases_str = ', '.join([x.__name__ for x in bases] + [generic_type_label]) + error_message += ( + f' Note: `typing.Generic` must go last: `class {cls.__name__}({bases_str}): ...`)' + ) + raise TypeError(error_message) + + cls.__pydantic_generic_metadata__ = { + 'origin': None, + 'args': (), + 'parameters': parameters, + } + + cls.__pydantic_complete__ = False # Ensure this specific class gets completed + + # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 + # for attributes not in `new_namespace` (e.g. private attributes) + for name, obj in private_attributes.items(): + obj.__set_name__(cls, name) + + if __pydantic_reset_parent_namespace__: + cls.__pydantic_parent_namespace__ = build_lenient_weakvaluedict(parent_frame_namespace()) + parent_namespace: dict[str, Any] | None = getattr(cls, '__pydantic_parent_namespace__', None) + if isinstance(parent_namespace, dict): + parent_namespace = unpack_lenient_weakvaluedict(parent_namespace) + + ns_resolver = NsResolver(parent_namespace=parent_namespace) + + set_model_fields(cls, config_wrapper=config_wrapper, ns_resolver=ns_resolver) + + # This is also set in `complete_model_class()`, after schema gen because they are recreated. + # We set them here as well for backwards compatibility: + cls.__pydantic_computed_fields__ = { + k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items() + } + + if config_wrapper.defer_build: + set_model_mocks(cls) + else: + # Any operation that requires accessing the field infos instances should be put inside + # `complete_model_class()`: + complete_model_class( + cls, + config_wrapper, + ns_resolver, + raise_errors=False, + create_model_module=_create_model_module, + ) + + if config_wrapper.frozen and '__hash__' not in namespace: + set_default_hash_func(cls, bases) + + # using super(cls, cls) on the next line ensures we only call the parent class's __pydantic_init_subclass__ + # I believe the `type: ignore` is only necessary because mypy doesn't realize that this code branch is + # only hit for _proper_ subclasses of BaseModel + super(cls, cls).__pydantic_init_subclass__(**kwargs) # type: ignore[misc] + return cls + else: + # These are instance variables, but have been assigned to `NoInitField` to trick the type checker. + for instance_slot in '__pydantic_fields_set__', '__pydantic_extra__', '__pydantic_private__': + namespace.pop( + instance_slot, + None, # In case the metaclass is used with a class other than `BaseModel`. + ) + namespace.get('__annotations__', {}).clear() + return super().__new__(mcs, cls_name, bases, namespace, **kwargs) + + if not TYPE_CHECKING: # pragma: no branch + # We put `__getattr__` in a non-TYPE_CHECKING block because otherwise, mypy allows arbitrary attribute access + + def __getattr__(self, item: str) -> Any: + """This is necessary to keep attribute access working for class attribute access.""" + private_attributes = self.__dict__.get('__private_attributes__') + if private_attributes and item in private_attributes: + return private_attributes[item] + raise AttributeError(item) + + @classmethod + def __prepare__(cls, *args: Any, **kwargs: Any) -> dict[str, object]: + return _ModelNamespaceDict() + + # Due to performance and memory issues, in the ABCMeta.__subclasscheck__ implementation, we don't support + # registered virtual subclasses. See https://github.com/python/cpython/issues/92810#issuecomment-2762454345. + # This may change once CPython is fixed (possibly in 3.15), in which case we should conditionally + # define `register()`. 
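+    # Illustrative consequence (a sketch, not from the original source): after
+    # `SomeModel.register(Foo)` for a class `Foo` that is not a real subclass,
+    # `isinstance(Foo(), SomeModel)` still evaluates to `False`, because the plain
+    # `type.__instancecheck__`/`type.__subclasscheck__` assigned below bypass the ABC registry.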
+ def register(self, subclass: type[_T]) -> type[_T]: + warnings.warn( + f"For performance reasons, virtual subclasses registered using '{self.__qualname__}.register()' " + "are not supported in 'isinstance()' and 'issubclass()' checks.", + stacklevel=2, + ) + return super().register(subclass) + + __instancecheck__ = type.__instancecheck__ # pyright: ignore[reportAssignmentType] + __subclasscheck__ = type.__subclasscheck__ # pyright: ignore[reportAssignmentType] + + @staticmethod + def _collect_bases_data(bases: tuple[type[Any], ...]) -> tuple[set[str], set[str], dict[str, ModelPrivateAttr]]: + BaseModel = import_cached_base_model() + + field_names: set[str] = set() + class_vars: set[str] = set() + private_attributes: dict[str, ModelPrivateAttr] = {} + for base in bases: + if issubclass(base, BaseModel) and base is not BaseModel: + # model_fields might not be defined yet in the case of generics, so we use getattr here: + field_names.update(getattr(base, '__pydantic_fields__', {}).keys()) + class_vars.update(base.__class_vars__) + private_attributes.update(base.__private_attributes__) + return field_names, class_vars, private_attributes + + @property + @deprecated( + 'The `__fields__` attribute is deprecated, use the `model_fields` class property instead.', category=None + ) + def __fields__(self) -> dict[str, FieldInfo]: + warnings.warn( + 'The `__fields__` attribute is deprecated, use the `model_fields` class property instead.', + PydanticDeprecatedSince20, + stacklevel=2, + ) + return getattr(self, '__pydantic_fields__', {}) + + @property + def __pydantic_fields_complete__(self) -> bool: + """Whether the fields were successfully collected (i.e. type hints were successfully resolved). + + This is a private attribute, not meant to be used outside Pydantic. + """ + if '__pydantic_fields__' not in self.__dict__: + return False + + field_infos = cast('dict[str, FieldInfo]', self.__pydantic_fields__) # pyright: ignore[reportAttributeAccessIssue] + + pydantic_extra_info = cast('PydanticExtraInfo | None', self.__pydantic_extra_info__) # pyright: ignore[reportAttributeAccessIssue] + if pydantic_extra_info is not None: + extra_complete = pydantic_extra_info.complete + else: + extra_complete = True + + return all(field_info._complete for field_info in field_infos.values()) and extra_complete + + def __dir__(self) -> list[str]: + attributes = list(super().__dir__()) + if '__fields__' in attributes: + attributes.remove('__fields__') + return attributes + + +def init_private_attributes(self: BaseModel, context: Any, /) -> None: + """This function is meant to behave like a BaseModel method to initialize private attributes. + + It takes context as an argument since that's what pydantic-core passes when calling it. + + Args: + self: The BaseModel instance. + context: The context. 
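+
+    Example:
+        A minimal sketch (illustrative, not part of the original source; `Model` and
+        `_token` are made-up names):
+
+        ```python {lint="skip" test="skip"}
+        from pydantic import BaseModel, PrivateAttr
+
+        class Model(BaseModel):
+            _token: str = PrivateAttr(default='abc')
+
+        m = Model()  # this hook populates `__pydantic_private__` during validation
+        m._token
+        #> 'abc'
+        ```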
+ """ + if getattr(self, '__pydantic_private__', None) is None: + pydantic_private = {} + for name, private_attr in self.__private_attributes__.items(): + # Avoid needlessly creating a new dict for the validated data: + if private_attr.default_factory_takes_validated_data: + default = private_attr.get_default( + call_default_factory=True, validated_data={**self.__dict__, **pydantic_private} + ) + else: + default = private_attr.get_default(call_default_factory=True) + if default is not PydanticUndefined: + pydantic_private[name] = default + object_setattr(self, '__pydantic_private__', pydantic_private) + + +def get_model_post_init(namespace: dict[str, Any], bases: tuple[type[Any], ...]) -> Callable[..., Any] | None: + """Get the `model_post_init` method from the namespace or the class bases, or `None` if not defined.""" + if 'model_post_init' in namespace: + return namespace['model_post_init'] + + BaseModel = import_cached_base_model() + + model_post_init = get_attribute_from_bases(bases, 'model_post_init') + if model_post_init is not BaseModel.model_post_init: + return model_post_init + + +def inspect_namespace( # noqa C901 + namespace: dict[str, Any], + raw_annotations: dict[str, Any], + ignored_types: tuple[type[Any], ...], + base_class_vars: set[str], + base_class_fields: set[str], +) -> dict[str, ModelPrivateAttr]: + """Iterate over the namespace and: + * gather private attributes + * check for items which look like fields but are not (e.g. have no annotation) and warn. + + Args: + namespace: The attribute dictionary of the class to be created. + raw_annotations: The (non-evaluated) annotations of the model. + ignored_types: A tuple of ignore types. + base_class_vars: A set of base class class variables. + base_class_fields: A set of base class fields. + + Returns: + A dict containing private attributes info. + + Raises: + TypeError: If there is a `__root__` field in model. + NameError: If private attribute name is invalid. + PydanticUserError: + - If a field does not have a type annotation. + - If a field on base class was overridden by a non-annotated attribute. + """ + from ..fields import ModelPrivateAttr, PrivateAttr + + FieldInfo = import_cached_field_info() + + all_ignored_types = ignored_types + default_ignored_types() + + private_attributes: dict[str, ModelPrivateAttr] = {} + + if '__root__' in raw_annotations or '__root__' in namespace: + raise TypeError("To define root models, use `pydantic.RootModel` rather than a field called '__root__'") + + ignored_names: set[str] = set() + for var_name, value in list(namespace.items()): + if var_name == 'model_config' or var_name == '__pydantic_extra__': + continue + elif ( + isinstance(value, type) + and value.__module__ == namespace['__module__'] + and '__qualname__' in namespace + and value.__qualname__.startswith(f'{namespace["__qualname__"]}.') + ): + # `value` is a nested type defined in this namespace; don't error + continue + elif isinstance(value, all_ignored_types) or value.__class__.__module__ == 'functools': + ignored_names.add(var_name) + continue + elif isinstance(value, ModelPrivateAttr): + if var_name.startswith('__'): + raise NameError( + 'Private attributes must not use dunder names;' + f' use a single underscore prefix instead of {var_name!r}.' + ) + elif is_valid_field_name(var_name): + raise NameError( + 'Private attributes must not use valid field names;' + f' use sunder names, e.g. {"_" + var_name!r} instead of {var_name!r}.' 
+ ) + private_attributes[var_name] = value + del namespace[var_name] + elif isinstance(value, FieldInfo) and not is_valid_field_name(var_name): + suggested_name = var_name.lstrip('_') or 'my_field' # don't suggest '' for all-underscore name + raise NameError( + f'Fields must not use names with leading underscores;' + f' e.g., use {suggested_name!r} instead of {var_name!r}.' + ) + + elif var_name.startswith('__'): + continue + elif is_valid_privateattr_name(var_name): + if var_name not in raw_annotations or not is_classvar_annotation(raw_annotations[var_name]): + private_attributes[var_name] = cast(ModelPrivateAttr, PrivateAttr(default=value)) + del namespace[var_name] + elif var_name in base_class_vars: + continue + elif var_name not in raw_annotations: + if var_name in base_class_fields: + raise PydanticUserError( + f'Field {var_name!r} defined on a base class was overridden by a non-annotated attribute. ' + f'All field definitions, including overrides, require a type annotation.', + code='model-field-overridden', + ) + elif isinstance(value, FieldInfo): + raise PydanticUserError( + f'Field {var_name!r} requires a type annotation', code='model-field-missing-annotation' + ) + else: + raise PydanticUserError( + f'A non-annotated attribute was detected: `{var_name} = {value!r}`. All model fields require a ' + f'type annotation; if `{var_name}` is not meant to be a field, you may be able to resolve this ' + f"error by annotating it as a `ClassVar` or updating `model_config['ignored_types']`.", + code='model-field-missing-annotation', + ) + + for ann_name, ann_type in raw_annotations.items(): + if ( + is_valid_privateattr_name(ann_name) + and ann_name not in private_attributes + and ann_name not in ignored_names + # This condition can be a false negative when `ann_type` is stringified, + # but it is handled in most cases in `set_model_fields`: + and not is_classvar_annotation(ann_type) + and ann_type not in all_ignored_types + and getattr(ann_type, '__module__', None) != 'functools' + ): + if isinstance(ann_type, str): + # Walking up the frames to get the module namespace where the model is defined + # (as the model class wasn't created yet, we unfortunately can't use `cls.__module__`): + frame = sys._getframe(2) + if frame is not None: + try: + ann_type = eval_type_backport( + _make_forward_ref(ann_type, is_argument=False, is_class=True), + globalns=frame.f_globals, + localns=frame.f_locals, + ) + except (NameError, TypeError): + pass + + if typing_objects.is_annotated(get_origin(ann_type)): + _, *metadata = get_args(ann_type) + private_attr = next((v for v in metadata if isinstance(v, ModelPrivateAttr)), None) + if private_attr is not None: + private_attributes[ann_name] = private_attr + continue + private_attributes[ann_name] = PrivateAttr() + + return private_attributes + + +def set_default_hash_func(cls: type[BaseModel], bases: tuple[type[Any], ...]) -> None: + base_hash_func = get_attribute_from_bases(bases, '__hash__') + new_hash_func = make_hash_func(cls) + if base_hash_func in {None, object.__hash__} or getattr(base_hash_func, '__code__', None) == new_hash_func.__code__: + # If `__hash__` is some default, we generate a hash function. + # It will be `None` if not overridden from BaseModel. + # It may be `object.__hash__` if there is another + # parent class earlier in the bases which doesn't override `__hash__` (e.g. `typing.Generic`). + # It may be a value set by `set_default_hash_func` if `cls` is a subclass of another frozen model. 
+ # In the last case we still need a new hash function to account for new `model_fields`. + cls.__hash__ = new_hash_func + + +def make_hash_func(cls: type[BaseModel]) -> Any: + getter = operator.itemgetter(*cls.__pydantic_fields__.keys()) if cls.__pydantic_fields__ else lambda _: 0 + + def hash_func(self: Any) -> int: + try: + return hash(getter(self.__dict__)) + except KeyError: + # In rare cases (such as when using the deprecated copy method), the __dict__ may not contain + # all model fields, which is how we can get here. + # getter(self.__dict__) is much faster than any 'safe' method that accounts for missing keys, + # and wrapping it in a `try` doesn't slow things down much in the common case. + return hash(getter(SafeGetItemProxy(self.__dict__))) + + return hash_func + + +def set_model_fields( + cls: type[BaseModel], + config_wrapper: ConfigWrapper, + ns_resolver: NsResolver, +) -> None: + """Collect and set `cls.__pydantic_fields__` and `cls.__class_vars__`. + + Args: + cls: BaseModel or dataclass. + config_wrapper: The config wrapper instance. + ns_resolver: Namespace resolver to use when getting model annotations. + """ + typevars_map = get_model_typevars_map(cls) + fields, pydantic_extra_info, class_vars = collect_model_fields( + cls, config_wrapper, ns_resolver, typevars_map=typevars_map + ) + + cls.__pydantic_fields__ = fields + cls.__pydantic_extra_info__ = pydantic_extra_info + cls.__class_vars__.update(class_vars) + + for k in class_vars: + # Class vars should not be private attributes + # We remove them _here_ and not earlier because we rely on inspecting the class to determine its classvars, + # but private attributes are determined by inspecting the namespace _prior_ to class creation. + # In the case that a classvar with a leading-'_' is defined via a ForwardRef (e.g., when using + # `__future__.annotations`), we want to remove the private attribute which was detected _before_ we knew it + # evaluated to a classvar + + value = cls.__private_attributes__.pop(k, None) + if value is not None and value.default is not PydanticUndefined: + setattr(cls, k, value.default) + + +def complete_model_class( + cls: type[BaseModel], + config_wrapper: ConfigWrapper, + ns_resolver: NsResolver, + *, + raise_errors: bool = True, + call_on_complete_hook: bool = True, + create_model_module: str | None = None, + is_force_rebuild: bool = False, +) -> bool: + """Finish building a model class. + + This logic must be called after class has been created since validation functions must be bound + and `get_type_hints` requires a class object. + + Args: + cls: BaseModel or dataclass. + config_wrapper: The config wrapper instance. + ns_resolver: The namespace resolver instance to use during schema building. + raise_errors: Whether to raise errors. + call_on_complete_hook: Whether to call the `__pydantic_on_complete__` hook. + create_model_module: The module of the class to be created, if created by `create_model`. + is_force_rebuild: Whether the model is being force-rebuilt (if True, pre-built serializers and + validators are not used, to avoid stale references). + + Returns: + `True` if the model is successfully completed, else `False`. + + Raises: + PydanticUndefinedAnnotation: If PydanticUndefinedAnnotation occurs in __get_pydantic_core_schema__ + and `raise_errors=True`. + """ + typevars_map = get_model_typevars_map(cls) + + if not cls.__pydantic_fields_complete__: + # Note: when coming from `ModelMetaclass.__new__()`, this results in fields being built twice. 
+ # We do so a second time here so that we can get the ``NameError`` for the specific undefined annotation. + # Alternatively, we could let `GenerateSchema()` raise the error, but there are cases where incomplete + # fields are inherited in `collect_model_fields()` and can actually have their annotation resolved in the + # generate schema process. As we want to avoid having `__pydantic_fields_complete__` set to `False` + # when `__pydantic_complete__` is `True`, we rebuild here: + try: + cls.__pydantic_fields__, cls.__pydantic_extra_info__ = rebuild_model_fields( + cls, + config_wrapper=config_wrapper, + ns_resolver=ns_resolver, + typevars_map=typevars_map, + ) + except NameError as e: + exc = PydanticUndefinedAnnotation.from_name_error(e) + set_model_mocks(cls, f'`{exc.name}`') + if raise_errors: + raise exc from e + + if not raise_errors and not cls.__pydantic_fields_complete__: + # No need to continue with schema gen, it is guaranteed to fail + return False + + assert cls.__pydantic_fields_complete__ + + gen_schema = GenerateSchema( + config_wrapper, + ns_resolver, + typevars_map, + ) + + try: + schema = gen_schema.generate_schema(cls) + except PydanticUndefinedAnnotation as e: + if raise_errors: + raise + set_model_mocks(cls, f'`{e.name}`') + return False + + core_config = config_wrapper.core_config(title=cls.__name__) + + try: + schema = gen_schema.clean_schema(schema) + except InvalidSchemaError: + set_model_mocks(cls) + return False + + # This needs to happen *after* model schema generation, as the return types + # of the properties are evaluated and the `ComputedFieldInfo` are recreated: + cls.__pydantic_computed_fields__ = {k: v.info for k, v in cls.__pydantic_decorators__.computed_fields.items()} + + set_deprecated_descriptors(cls) + + cls.__pydantic_core_schema__ = schema + + cls.__pydantic_validator__ = create_schema_validator( + schema, + cls, + create_model_module or cls.__module__, + cls.__qualname__, + 'create_model' if create_model_module else 'BaseModel', + core_config, + config_wrapper.plugin_settings, + _use_prebuilt=not is_force_rebuild, + ) + cls.__pydantic_serializer__ = SchemaSerializer(schema, core_config, _use_prebuilt=not is_force_rebuild) + + # set __signature__ attr only for model class, but not for its instances + # (because instances can define `__call__`, and `inspect.signature` shouldn't + # use the `__signature__` attribute and instead generate from `__call__`). 
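+    # Illustrative (not from the original source): for `class Model(BaseModel): x: int = 1`,
+    # `inspect.signature(Model)` resolves this lazy attribute to `(*, x: int = 1) -> None`.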
+    cls.__signature__ = LazyClassAttribute(
+        '__signature__',
+        partial(
+            generate_pydantic_signature,
+            init=cls.__init__,
+            fields=cls.__pydantic_fields__,
+            validate_by_name=config_wrapper.validate_by_name,
+            extra=config_wrapper.extra,
+        ),
+    )
+
+    cls.__pydantic_complete__ = True
+
+    if call_on_complete_hook:
+        cls.__pydantic_on_complete__()
+
+    return True
+
+
+def set_deprecated_descriptors(cls: type[BaseModel]) -> None:
+    """Set data descriptors on the class for deprecated fields."""
+    for field, field_info in cls.__pydantic_fields__.items():
+        if (msg := field_info.deprecation_message) is not None:
+            desc = _DeprecatedFieldDescriptor(msg)
+            desc.__set_name__(cls, field)
+            setattr(cls, field, desc)
+
+    for field, computed_field_info in cls.__pydantic_computed_fields__.items():
+        if (
+            (msg := computed_field_info.deprecation_message) is not None
+            # Avoid having two warnings emitted:
+            and not hasattr(unwrap_wrapped_function(computed_field_info.wrapped_property), '__deprecated__')
+        ):
+            desc = _DeprecatedFieldDescriptor(msg, computed_field_info.wrapped_property)
+            desc.__set_name__(cls, field)
+            setattr(cls, field, desc)
+
+
+class _DeprecatedFieldDescriptor:
+    """Read-only data descriptor used to emit a runtime deprecation warning before accessing a deprecated field.
+
+    Attributes:
+        msg: The deprecation message to be emitted.
+        wrapped_property: The property instance if the deprecated field is a computed field, or `None`.
+        field_name: The name of the field being deprecated.
+    """
+
+    field_name: str
+
+    def __init__(self, msg: str, wrapped_property: property | None = None) -> None:
+        self.msg = msg
+        self.wrapped_property = wrapped_property
+
+    def __set_name__(self, cls: type[BaseModel], name: str) -> None:
+        self.field_name = name
+
+    def __get__(self, obj: BaseModel | None, obj_type: type[BaseModel] | None = None) -> Any:
+        if obj is None:
+            if self.wrapped_property is not None:
+                return self.wrapped_property.__get__(None, obj_type)
+            raise AttributeError(self.field_name)
+
+        warnings.warn(self.msg, DeprecationWarning, stacklevel=2)
+
+        if self.wrapped_property is not None:
+            return self.wrapped_property.__get__(obj, obj_type)
+        return obj.__dict__[self.field_name]
+
+    # Defined to make it a data descriptor and take precedence over the instance's dictionary.
+    # Note that it will not be called when setting a value on a model instance
+    # as `BaseModel.__setattr__` is defined and takes priority.
+    def __set__(self, obj: Any, value: Any) -> NoReturn:
+        raise AttributeError(self.field_name)
+
+
+class _PydanticWeakRef:
+    """Wrapper for `weakref.ref` that enables `pickle` serialization.
+
+    Cloudpickle fails to serialize weakref.ref objects due to an arcane error related
+    to abstract base classes (`abc.ABC`). This class works around the issue by wrapping
+    `weakref.ref` instead of subclassing it.
+
+    See https://github.com/pydantic/pydantic/issues/6763 for context.
+
+    Semantics:
+        - If not pickled, behaves the same as a `weakref.ref`.
+        - If pickled along with the referenced object, the same `weakref.ref` behavior
+          will be maintained between them after unpickling.
+        - If pickled without the referenced object, after unpickling the underlying
+          reference will be cleared (`__call__` will always return `None`).
+    """
+
+    def __init__(self, obj: Any):
+        if obj is None:
+            # The object will be `None` upon deserialization if the serialized weakref
+            # had lost its underlying object.
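+            # `__call__` below will then return `None`, mirroring a dead `weakref.ref`.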
+ self._wr = None + else: + self._wr = weakref.ref(obj) + + def __call__(self) -> Any: + if self._wr is None: + return None + else: + return self._wr() + + def __reduce__(self) -> tuple[Callable, tuple[weakref.ReferenceType | None]]: + return _PydanticWeakRef, (self(),) + + +def build_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None: + """Takes an input dictionary, and produces a new value that (invertibly) replaces the values with weakrefs. + + We can't just use a WeakValueDictionary because many types (including int, str, etc.) can't be stored as values + in a WeakValueDictionary. + + The `unpack_lenient_weakvaluedict` function can be used to reverse this operation. + """ + if d is None: + return None + result = {} + for k, v in d.items(): + try: + proxy = _PydanticWeakRef(v) + except TypeError: + proxy = v + result[k] = proxy + return result + + +def unpack_lenient_weakvaluedict(d: dict[str, Any] | None) -> dict[str, Any] | None: + """Inverts the transform performed by `build_lenient_weakvaluedict`.""" + if d is None: + return None + + result = {} + for k, v in d.items(): + if isinstance(v, _PydanticWeakRef): + v = v() + if v is not None: + result[k] = v + else: + result[k] = v + return result + + +@cache +def default_ignored_types() -> tuple[type[Any], ...]: + from ..fields import ComputedFieldInfo + + ignored_types = [ + FunctionType, + property, + classmethod, + staticmethod, + PydanticDescriptorProxy, + ComputedFieldInfo, + TypeAliasType, # from `typing_extensions` + ] + + if sys.version_info >= (3, 12): + ignored_types.append(typing.TypeAliasType) + + return tuple(ignored_types) diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..af0cddb03d6700b5f292c7231b32d9a92a8472eb --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_namespace_utils.py @@ -0,0 +1,293 @@ +from __future__ import annotations + +import sys +from collections.abc import Generator, Iterator, Mapping +from contextlib import contextmanager +from functools import cached_property +from typing import Any, Callable, NamedTuple, TypeVar + +from typing_extensions import ParamSpec, TypeAlias, TypeAliasType, TypeVarTuple + +GlobalsNamespace: TypeAlias = 'dict[str, Any]' +"""A global namespace. + +In most cases, this is a reference to the `__dict__` attribute of a module. +This namespace type is expected as the `globals` argument during annotations evaluation. +""" + +MappingNamespace: TypeAlias = Mapping[str, Any] +"""Any kind of namespace. + +In most cases, this is a local namespace (e.g. the `__dict__` attribute of a class, +the [`f_locals`][frame.f_locals] attribute of a frame object, when dealing with types +defined inside functions). +This namespace type is expected as the `locals` argument during annotations evaluation. +""" + +_TypeVarLike: TypeAlias = 'TypeVar | ParamSpec | TypeVarTuple' + + +class NamespacesTuple(NamedTuple): + """A tuple of globals and locals to be used during annotations evaluation. 
+ + This datastructure is defined as a named tuple so that it can easily be unpacked: + + ```python {lint="skip" test="skip"} + def eval_type(typ: type[Any], ns: NamespacesTuple) -> None: + return eval(typ, *ns) + ``` + """ + + globals: GlobalsNamespace + """The namespace to be used as the `globals` argument during annotations evaluation.""" + + locals: MappingNamespace + """The namespace to be used as the `locals` argument during annotations evaluation.""" + + +def get_module_ns_of(obj: Any) -> dict[str, Any]: + """Get the namespace of the module where the object is defined. + + Caution: this function does not return a copy of the module namespace, so the result + should not be mutated. The burden of enforcing this is on the caller. + """ + module_name = getattr(obj, '__module__', None) + if module_name: + try: + return sys.modules[module_name].__dict__ + except KeyError: + # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 + return {} + return {} + + +# Note that this class is almost identical to `collections.ChainMap`, but need to enforce +# immutable mappings here: +class LazyLocalNamespace(Mapping[str, Any]): + """A lazily evaluated mapping, to be used as the `locals` argument during annotations evaluation. + + While the [`eval`][eval] function expects a mapping as the `locals` argument, it only + performs `__getitem__` calls. The [`Mapping`][collections.abc.Mapping] abstract base class + is fully implemented only for type checking purposes. + + Args: + *namespaces: The namespaces to consider, in ascending order of priority. + + Example: + ```python {lint="skip" test="skip"} + ns = LazyLocalNamespace({'a': 1, 'b': 2}, {'a': 3}) + ns['a'] + #> 3 + ns['b'] + #> 2 + ``` + """ + + def __init__(self, *namespaces: MappingNamespace) -> None: + self._namespaces = namespaces + + @cached_property + def data(self) -> dict[str, Any]: + return {k: v for ns in self._namespaces for k, v in ns.items()} + + def __len__(self) -> int: + return len(self.data) + + def __getitem__(self, key: str) -> Any: + return self.data[key] + + def __contains__(self, key: object) -> bool: + return key in self.data + + def __iter__(self) -> Iterator[str]: + return iter(self.data) + + +def ns_for_function(obj: Callable[..., Any], parent_namespace: MappingNamespace | None = None) -> NamespacesTuple: + """Return the global and local namespaces to be used when evaluating annotations for the provided function. + + The global namespace will be the `__dict__` attribute of the module the function was defined in. + The local namespace will contain the `__type_params__` introduced by PEP 695. + + Args: + obj: The object to use when building namespaces. + parent_namespace: Optional namespace to be added with the lowest priority in the local namespace. + If the passed function is a method, the `parent_namespace` will be the namespace of the class + the method is defined in. Thus, we also fetch type `__type_params__` from there (i.e. the + class-scoped type variables). + """ + locals_list: list[MappingNamespace] = [] + if parent_namespace is not None: + locals_list.append(parent_namespace) + + # Get the `__type_params__` attribute introduced by PEP 695. + # Note that the `typing._eval_type` function expects type params to be + # passed as a separate argument. However, internally, `_eval_type` calls + # `ForwardRef._evaluate` which will merge type params with the localns, + # essentially mimicking what we do here. + type_params: tuple[_TypeVarLike, ...] 
= getattr(obj, '__type_params__', ())
+    if parent_namespace is not None:
+        # We also fetch type params from the parent namespace. If present, it probably
+        # means the function was defined in a class. This is to support the following:
+        # https://github.com/python/cpython/issues/124089.
+        type_params += parent_namespace.get('__type_params__', ())
+
+    locals_list.append({t.__name__: t for t in type_params})
+
+    # What about short-circuiting to `obj.__globals__`?
+    globalns = get_module_ns_of(obj)
+
+    return NamespacesTuple(globalns, LazyLocalNamespace(*locals_list))
+
+
+class NsResolver:
+    """A class responsible for the namespaces resolving logic for annotations evaluation.
+
+    This class handles the namespace logic when evaluating annotations mainly for class objects.
+
+    It holds a stack of classes that are being inspected during the core schema building,
+    and the `types_namespace` property exposes the globals and locals to be used for
+    type annotation evaluation. Additionally -- if no class is present in the stack -- a
+    fallback globals and locals can be provided using the `namespaces_tuple` argument
+    (this is useful when generating a schema for a simple annotation, e.g. when using
+    `TypeAdapter`).
+
+    The namespace creation logic is unfortunately flawed in some cases, for backwards
+    compatibility reasons and to better support valid edge cases. See the description
+    for the `parent_namespace` argument and the example for more details.
+
+    Args:
+        namespaces_tuple: The default globals and locals to use if no class is present
+            on the stack. This can be useful when using the `GenerateSchema` class
+            with `TypeAdapter`, where the "type" being analyzed is a simple annotation.
+        parent_namespace: An optional parent namespace that will be added to the locals
+            with the lowest priority. For a given class defined in a function, the locals
+            of this function are usually used as the parent namespace:
+
+            ```python {lint="skip" test="skip"}
+            from pydantic import BaseModel
+
+            def func() -> None:
+                SomeType = int
+
+                class Model(BaseModel):
+                    f: 'SomeType'
+
+                # when collecting fields, a namespace resolver instance will be created
+                # this way:
+                # ns_resolver = NsResolver(parent_namespace={'SomeType': SomeType})
+            ```
+
+            For backwards compatibility reasons and to support valid edge cases, this parent
+            namespace will be used for *every* type being pushed to the stack. In the future,
+            we might want to be smarter by only doing so when the type being pushed is defined
+            in the same module as the parent namespace.
+
+    Example:
+        ```python {lint="skip" test="skip"}
+        ns_resolver = NsResolver(
+            parent_namespace={'fallback': 1},
+        )
+
+        class Sub:
+            m: 'Model'
+
+        class Model:
+            some_local = 1
+            sub: Sub
+
+        ns_resolver = NsResolver()
+
+        # This is roughly what happens when we build a core schema for `Model`:
+        with ns_resolver.push(Model):
+            ns_resolver.types_namespace
+            #> NamespacesTuple({'Sub': Sub}, {'Model': Model, 'some_local': 1})
+            # First thing to notice here, the model being pushed is added to the locals.
+            # Because `NsResolver` is being used during the model definition, it is not
+            # yet added to the globals. This is useful when resolving self-referencing annotations.
+
+            with ns_resolver.push(Sub):
+                ns_resolver.types_namespace
+                #> NamespacesTuple({'Sub': Sub}, {'Sub': Sub, 'Model': Model})
+                # Second thing to notice: `Sub` is present in both the globals and locals.
+ # This is not an issue, just that as described above, the model being pushed + # is added to the locals, but it happens to be present in the globals as well + # because it is already defined. + # Third thing to notice: `Model` is also added in locals. This is a backwards + # compatibility workaround that allows for `Sub` to be able to resolve `'Model'` + # correctly (as otherwise models would have to be rebuilt even though this + # doesn't look necessary). + ``` + """ + + def __init__( + self, + namespaces_tuple: NamespacesTuple | None = None, + parent_namespace: MappingNamespace | None = None, + ) -> None: + self._base_ns_tuple = namespaces_tuple or NamespacesTuple({}, {}) + self._parent_ns = parent_namespace + self._types_stack: list[type[Any] | TypeAliasType] = [] + + @cached_property + def types_namespace(self) -> NamespacesTuple: + """The current global and local namespaces to be used for annotations evaluation.""" + if not self._types_stack: + # TODO: should we merge the parent namespace here? + # This is relevant for TypeAdapter, where there are no types on the stack, and we might + # need access to the parent_ns. Right now, we sidestep this in `type_adapter.py` by passing + # locals to both parent_ns and the base_ns_tuple, but this is a bit hacky. + # we might consider something like: + # if self._parent_ns is not None: + # # Hacky workarounds, see class docstring: + # # An optional parent namespace that will be added to the locals with the lowest priority + # locals_list: list[MappingNamespace] = [self._parent_ns, self._base_ns_tuple.locals] + # return NamespacesTuple(self._base_ns_tuple.globals, LazyLocalNamespace(*locals_list)) + return self._base_ns_tuple + + typ = self._types_stack[-1] + + globalns = get_module_ns_of(typ) + + locals_list: list[MappingNamespace] = [] + # Hacky workarounds, see class docstring: + # An optional parent namespace that will be added to the locals with the lowest priority + if self._parent_ns is not None: + locals_list.append(self._parent_ns) + if len(self._types_stack) > 1: + first_type = self._types_stack[0] + locals_list.append({first_type.__name__: first_type}) + + # Adding `__type_params__` *before* `vars(typ)`, as the latter takes priority + # (see https://github.com/python/cpython/pull/120272). + # TODO `typ.__type_params__` when we drop support for Python 3.11: + type_params: tuple[_TypeVarLike, ...] = getattr(typ, '__type_params__', ()) + if type_params: + # Adding `__type_params__` is mostly useful for generic classes defined using + # PEP 695 syntax *and* using forward annotations (see the example in + # https://github.com/python/cpython/issues/114053). For TypeAliasType instances, + # it is way less common, but still required if using a string annotation in the alias + # value, e.g. `type A[T] = 'T'` (which is not necessary in most cases). 
+                locals_list.append({t.__name__: t for t in type_params})
+
+        # TypeAliasType instances don't have a `__dict__` attribute, so the check
+        # is necessary:
+        if hasattr(typ, '__dict__'):
+            locals_list.append(vars(typ))
+
+        # The `len(self._types_stack) > 1` check above prevents this from being added twice:
+        locals_list.append({typ.__name__: typ})
+
+        return NamespacesTuple(globalns, LazyLocalNamespace(*locals_list))
+
+    @contextmanager
+    def push(self, typ: type[Any] | TypeAliasType, /) -> Generator[None]:
+        """Push a type to the stack."""
+        self._types_stack.append(typ)
+        # Reset the cached property:
+        self.__dict__.pop('types_namespace', None)
+        try:
+            yield
+        finally:
+            self._types_stack.pop()
+            self.__dict__.pop('types_namespace', None)
diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e80a9c83db0f142fd5ae577a4388aa8ad4d0444
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_repr.py
@@ -0,0 +1,124 @@
+"""Tools to provide pretty/human-readable display of objects."""
+
+from __future__ import annotations as _annotations
+
+import types
+from collections.abc import Callable, Collection, Generator, Iterable
+from typing import TYPE_CHECKING, Any, ForwardRef, cast
+
+import typing_extensions
+from typing_extensions import TypeAlias
+from typing_inspection import typing_objects
+from typing_inspection.introspection import is_union_origin
+
+from . import _typing_extra
+
+if TYPE_CHECKING:
+    # TODO remove type error comments when we drop support for Python 3.9
+    ReprArgs: TypeAlias = Iterable[tuple[str | None, Any]]  # pyright: ignore[reportGeneralTypeIssues]
+    RichReprResult: TypeAlias = Iterable[Any | tuple[Any] | tuple[str, Any] | tuple[str, Any, Any]]  # pyright: ignore[reportGeneralTypeIssues]
+
+
+class PlainRepr(str):
+    """String class where repr doesn't include quotes. Useful with Representation when you want to return a string
+    representation of something that is valid (or pseudo-valid) python.
+    """
+
+    def __repr__(self) -> str:
+        return str(self)
+
+
+class Representation:
+    # Mixin to provide `__str__`, `__repr__`, `__pretty__`, and `__rich_repr__` methods.
+    # `__pretty__` is used by [devtools](https://python-devtools.helpmanual.io/).
+    # `__rich_repr__` is used by [rich](https://rich.readthedocs.io/en/stable/pretty.html).
+    # (this is not a docstring to avoid adding a docstring to classes which inherit from Representation)
+
+    __slots__ = ()
+
+    def __repr_args__(self) -> ReprArgs:
+        """Returns the attributes to show in __str__, __repr__, and __pretty__; this is generally overridden.
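+        (Illustrative, not from the original source: a subclass defining `__slots__ = ('a', 'b')`
+        falls back to `[('a', self.a), ('b', self.b)]`, with `None` values omitted.)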
+
+        Can either return:
+        * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]`
+        * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]`
+        """
+        attrs_names = cast(Collection[str], self.__slots__)
+        if not attrs_names and hasattr(self, '__dict__'):
+            attrs_names = self.__dict__.keys()
+        attrs = ((s, getattr(self, s)) for s in attrs_names)
+        return [(a, v if v is not self else self.__repr_recursion__(v)) for a, v in attrs if v is not None]
+
+    def __repr_name__(self) -> str:
+        """Name of the instance's class, used in __repr__."""
+        return self.__class__.__name__
+
+    def __repr_recursion__(self, object: Any) -> str:
+        """Returns the string representation of a recursive object."""
+        # This is copied over from the stdlib `pprint` module:
+        return f'<Recursion on {type(object).__name__} with id={id(object)}>'
+
+    def __repr_str__(self, join_str: str) -> str:
+        return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__())
+
+    def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any]:
+        """Used by devtools (https://python-devtools.helpmanual.io/) to pretty print objects."""
+        yield self.__repr_name__() + '('
+        yield 1
+        for name, value in self.__repr_args__():
+            if name is not None:
+                yield name + '='
+            yield fmt(value)
+            yield ','
+            yield 0
+        yield -1
+        yield ')'
+
+    def __rich_repr__(self) -> RichReprResult:
+        """Used by Rich (https://rich.readthedocs.io/en/stable/pretty.html) to pretty print objects."""
+        for name, field_repr in self.__repr_args__():
+            if name is None:
+                yield field_repr
+            else:
+                yield name, field_repr
+
+    def __str__(self) -> str:
+        return self.__repr_str__(' ')
+
+    def __repr__(self) -> str:
+        return f'{self.__repr_name__()}({self.__repr_str__(", ")})'
+
+
+def display_as_type(obj: Any) -> str:
+    """Pretty representation of a type, should be as close as possible to the original type definition string.
+
+    Takes some logic from `typing._type_repr`.
+    """
+    if isinstance(obj, (types.FunctionType, types.BuiltinFunctionType)):
+        return obj.__name__
+    elif obj is ...:
+        return '...'
+ elif isinstance(obj, Representation): + return repr(obj) + elif isinstance(obj, ForwardRef) or typing_objects.is_typealiastype(obj): + return str(obj) + + if not isinstance(obj, (_typing_extra.typing_base, _typing_extra.WithArgsTypes, type)): + obj = obj.__class__ + + if is_union_origin(typing_extensions.get_origin(obj)): + args = ', '.join(map(display_as_type, typing_extensions.get_args(obj))) + return f'Union[{args}]' + elif isinstance(obj, _typing_extra.WithArgsTypes): + if typing_objects.is_literal(typing_extensions.get_origin(obj)): + args = ', '.join(map(repr, typing_extensions.get_args(obj))) + else: + args = ', '.join(map(display_as_type, typing_extensions.get_args(obj))) + try: + return f'{obj.__qualname__}[{args}]' + except AttributeError: + return str(obj).replace('typing.', '').replace('typing_extensions.', '') # handles TypeAliasType in 3.12 + elif isinstance(obj, type): + return obj.__qualname__ + else: + return repr(obj).replace('typing.', '').replace('typing_extensions.', '') diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_schema_gather.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_schema_gather.py new file mode 100644 index 0000000000000000000000000000000000000000..8e07b20e70990287a874c8cf5bd932fb57d30d45 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_schema_gather.py @@ -0,0 +1,212 @@ +# pyright: reportTypedDictNotRequiredAccess=false, reportGeneralTypeIssues=false, reportArgumentType=false, reportAttributeAccessIssue=false +from __future__ import annotations + +from dataclasses import dataclass, field +from typing import TypedDict + +from pydantic_core.core_schema import ( + ComputedField, + CoreSchema, + DefinitionReferenceSchema, + SerSchema, + iter_union_choices, +) +from typing_extensions import TypeAlias + +AllSchemas: TypeAlias = 'CoreSchema | SerSchema | ComputedField' + + +class GatherResult(TypedDict): + """Schema traversing result.""" + + collected_references: dict[str, DefinitionReferenceSchema | None] + """The collected definition references. + + If a definition reference schema can be inlined, it means that there is + only one in the whole core schema. As such, it is stored as the value. + Otherwise, the value is set to `None`. + """ + + deferred_discriminator_schemas: list[CoreSchema] + """The list of core schemas having the discriminator application deferred.""" + + +class MissingDefinitionError(LookupError): + """A reference was pointing to a non-existing core schema.""" + + def __init__(self, schema_reference: str, /) -> None: + self.schema_reference = schema_reference + + +@dataclass +class GatherContext: + """The current context used during core schema traversing. + + Context instances should only be used during schema traversing. + """ + + definitions: dict[str, CoreSchema] + """The available definitions.""" + + deferred_discriminator_schemas: list[CoreSchema] = field(init=False, default_factory=list) + """The list of core schemas having the discriminator application deferred. + + Internally, these core schemas have a specific key set in the core metadata dict. + """ + + collected_references: dict[str, DefinitionReferenceSchema | None] = field(init=False, default_factory=dict) + """The collected definition references. + + If a definition reference schema can be inlined, it means that there is + only one in the whole core schema. As such, it is stored as the value. + Otherwise, the value is set to `None`. 
+ + During schema traversing, definition reference schemas can be added as candidates, or removed + (by setting the value to `None`). + """ + + +def traverse_metadata(schema: AllSchemas, ctx: GatherContext) -> None: + meta = schema.get('metadata') + if meta is not None and 'pydantic_internal_union_discriminator' in meta: + ctx.deferred_discriminator_schemas.append(schema) # pyright: ignore[reportArgumentType] + + +def traverse_definition_ref(def_ref_schema: DefinitionReferenceSchema, ctx: GatherContext) -> None: + schema_ref = def_ref_schema['schema_ref'] + + if schema_ref not in ctx.collected_references: + definition = ctx.definitions.get(schema_ref) + if definition is None: + raise MissingDefinitionError(schema_ref) + + # The `'definition-ref'` schema was only encountered once, make it + # a candidate to be inlined: + ctx.collected_references[schema_ref] = def_ref_schema + traverse_schema(definition, ctx) + if 'serialization' in def_ref_schema: + traverse_schema(def_ref_schema['serialization'], ctx) + traverse_metadata(def_ref_schema, ctx) + else: + # The `'definition-ref'` schema was already encountered, meaning + # the previously encountered schema (and this one) can't be inlined: + ctx.collected_references[schema_ref] = None + + +def traverse_schema(schema: AllSchemas, context: GatherContext) -> None: + # TODO When we drop 3.9, use a match statement to get better type checking and remove + # file-level type ignore. + # (the `'type'` could also be fetched in every `if/elif` statement, but this alters performance). + schema_type = schema['type'] + + if schema_type == 'definition-ref': + traverse_definition_ref(schema, context) + # `traverse_definition_ref` handles the possible serialization and metadata schemas: + return + elif schema_type == 'definitions': + traverse_schema(schema['schema'], context) + for definition in schema['definitions']: + traverse_schema(definition, context) + elif schema_type in {'list', 'set', 'frozenset', 'generator'}: + if 'items_schema' in schema: + traverse_schema(schema['items_schema'], context) + elif schema_type == 'tuple': + if 'items_schema' in schema: + for s in schema['items_schema']: + traverse_schema(s, context) + elif schema_type == 'dict': + if 'keys_schema' in schema: + traverse_schema(schema['keys_schema'], context) + if 'values_schema' in schema: + traverse_schema(schema['values_schema'], context) + elif schema_type == 'union': + for choice in iter_union_choices(schema): + traverse_schema(choice, context) + elif schema_type == 'tagged-union': + for v in schema['choices'].values(): + traverse_schema(v, context) + elif schema_type == 'chain': + for step in schema['steps']: + traverse_schema(step, context) + elif schema_type == 'lax-or-strict': + traverse_schema(schema['lax_schema'], context) + traverse_schema(schema['strict_schema'], context) + elif schema_type == 'json-or-python': + traverse_schema(schema['json_schema'], context) + traverse_schema(schema['python_schema'], context) + elif schema_type in {'model-fields', 'typed-dict'}: + if 'extras_schema' in schema: + traverse_schema(schema['extras_schema'], context) + if 'computed_fields' in schema: + for s in schema['computed_fields']: + traverse_schema(s, context) + for s in schema['fields'].values(): + traverse_schema(s, context) + elif schema_type == 'dataclass-args': + if 'computed_fields' in schema: + for s in schema['computed_fields']: + traverse_schema(s, context) + for s in schema['fields']: + traverse_schema(s, context) + elif schema_type == 'arguments': + for s in 
schema['arguments_schema']:
+            traverse_schema(s['schema'], context)
+        if 'var_args_schema' in schema:
+            traverse_schema(schema['var_args_schema'], context)
+        if 'var_kwargs_schema' in schema:
+            traverse_schema(schema['var_kwargs_schema'], context)
+    elif schema_type == 'arguments-v3':
+        for s in schema['arguments_schema']:
+            traverse_schema(s['schema'], context)
+    elif schema_type == 'call':
+        traverse_schema(schema['arguments_schema'], context)
+        if 'return_schema' in schema:
+            traverse_schema(schema['return_schema'], context)
+    elif schema_type == 'computed-field':
+        traverse_schema(schema['return_schema'], context)
+    elif schema_type == 'function-before':
+        if 'schema' in schema:
+            traverse_schema(schema['schema'], context)
+        if 'json_schema_input_schema' in schema:
+            traverse_schema(schema['json_schema_input_schema'], context)
+    elif schema_type == 'function-plain':
+        # TODO duplicate schema types for serializers and validators, needs to be deduplicated.
+        if 'return_schema' in schema:
+            traverse_schema(schema['return_schema'], context)
+        if 'json_schema_input_schema' in schema:
+            traverse_schema(schema['json_schema_input_schema'], context)
+    elif schema_type == 'function-wrap':
+        # TODO duplicate schema types for serializers and validators, needs to be deduplicated.
+        if 'return_schema' in schema:
+            traverse_schema(schema['return_schema'], context)
+        if 'schema' in schema:
+            traverse_schema(schema['schema'], context)
+        if 'json_schema_input_schema' in schema:
+            traverse_schema(schema['json_schema_input_schema'], context)
+    else:
+        if 'schema' in schema:
+            traverse_schema(schema['schema'], context)
+
+    if 'serialization' in schema:
+        traverse_schema(schema['serialization'], context)
+    traverse_metadata(schema, context)
+
+
+def gather_schemas_for_cleaning(schema: CoreSchema, definitions: dict[str, CoreSchema]) -> GatherResult:
+    """Traverse the core schema and definitions and return the necessary information for schema cleaning.
+
+    During the core schema traversing, any `'definition-ref'` schema is:
+
+    - Validated: the reference must point to an existing definition. If this is not the case, a
+      `MissingDefinitionError` exception is raised.
+    - Stored in the context: the actual reference is stored in the context. Depending on whether
+      the `'definition-ref'` schema is encountered more than once, the schema itself is also
+      saved in the context to be inlined (i.e. replaced by the definition it points to).
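+
+    Example:
+        A minimal sketch of the result shape (illustrative, not part of the original source):
+
+        ```python {lint="skip" test="skip"}
+        from pydantic_core import core_schema
+
+        definitions = {'ref1': core_schema.int_schema(ref='ref1')}
+        schema = core_schema.definition_reference_schema('ref1')
+        result = gather_schemas_for_cleaning(schema, definitions)
+        result['collected_references']  # seen only once, so kept as an inlining candidate
+        #> {'ref1': {'type': 'definition-ref', 'schema_ref': 'ref1'}}
+        result['deferred_discriminator_schemas']
+        #> []
+        ```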
+ """ + context = GatherContext(definitions) + traverse_schema(schema, context) + + return { + 'collected_references': context.collected_references, + 'deferred_discriminator_schemas': context.deferred_discriminator_schemas, + } diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py new file mode 100644 index 0000000000000000000000000000000000000000..b231a82e1969822695bb3680217b66e73c81d017 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_schema_generation_shared.py @@ -0,0 +1,125 @@ +"""Types and utility functions used by various other internal tools.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Callable, Literal + +from pydantic_core import core_schema + +from ..annotated_handlers import GetCoreSchemaHandler, GetJsonSchemaHandler + +if TYPE_CHECKING: + from ..json_schema import GenerateJsonSchema, JsonSchemaValue + from ._core_utils import CoreSchemaOrField + from ._generate_schema import GenerateSchema + from ._namespace_utils import NamespacesTuple + + GetJsonSchemaFunction = Callable[[CoreSchemaOrField, GetJsonSchemaHandler], JsonSchemaValue] + HandlerOverride = Callable[[CoreSchemaOrField], JsonSchemaValue] + + +class GenerateJsonSchemaHandler(GetJsonSchemaHandler): + """JsonSchemaHandler implementation that doesn't do ref unwrapping by default. + + This is used for any Annotated metadata so that we don't end up with conflicting + modifications to the definition schema. + + Used internally by Pydantic, please do not rely on this implementation. + See `GetJsonSchemaHandler` for the handler API. + """ + + def __init__(self, generate_json_schema: GenerateJsonSchema, handler_override: HandlerOverride | None) -> None: + self.generate_json_schema = generate_json_schema + self.handler = handler_override or generate_json_schema.generate_inner + self.mode = generate_json_schema.mode + + def __call__(self, core_schema: CoreSchemaOrField, /) -> JsonSchemaValue: + return self.handler(core_schema) + + def resolve_ref_schema(self, maybe_ref_json_schema: JsonSchemaValue) -> JsonSchemaValue: + """Resolves `$ref` in the json schema. + + This returns the input json schema if there is no `$ref` in json schema. + + Args: + maybe_ref_json_schema: The input json schema that may contains `$ref`. + + Returns: + Resolved json schema. + + Raises: + LookupError: If it can't find the definition for `$ref`. + """ + if '$ref' not in maybe_ref_json_schema: + return maybe_ref_json_schema + ref = maybe_ref_json_schema['$ref'] + json_schema = self.generate_json_schema.get_schema_from_definitions(ref) + if json_schema is None: + raise LookupError( + f'Could not find a ref for {ref}.' + ' Maybe you tried to call resolve_ref_schema from within a recursive model?' + ) + return json_schema + + +class CallbackGetCoreSchemaHandler(GetCoreSchemaHandler): + """Wrapper to use an arbitrary function as a `GetCoreSchemaHandler`. + + Used internally by Pydantic, please do not rely on this implementation. + See `GetCoreSchemaHandler` for the handler API. 
+ """ + + def __init__( + self, + handler: Callable[[Any], core_schema.CoreSchema], + generate_schema: GenerateSchema, + ref_mode: Literal['to-def', 'unpack'] = 'to-def', + ) -> None: + self._handler = handler + self._generate_schema = generate_schema + self._ref_mode = ref_mode + + def __call__(self, source_type: Any, /) -> core_schema.CoreSchema: + schema = self._handler(source_type) + if self._ref_mode == 'to-def': + ref = schema.get('ref') + if ref is not None: + return self._generate_schema.defs.create_definition_reference_schema(schema) + return schema + else: # ref_mode = 'unpack' + return self.resolve_ref_schema(schema) + + def _get_types_namespace(self) -> NamespacesTuple: + return self._generate_schema._types_namespace + + def generate_schema(self, source_type: Any, /) -> core_schema.CoreSchema: + return self._generate_schema.generate_schema(source_type) + + @property + def field_name(self) -> str | None: + return self._generate_schema.field_name_stack.get() + + def resolve_ref_schema(self, maybe_ref_schema: core_schema.CoreSchema) -> core_schema.CoreSchema: + """Resolves reference in the core schema. + + Args: + maybe_ref_schema: The input core schema that may contains reference. + + Returns: + Resolved core schema. + + Raises: + LookupError: If it can't find the definition for reference. + """ + if maybe_ref_schema['type'] == 'definition-ref': + ref = maybe_ref_schema['schema_ref'] + definition = self._generate_schema.defs.get_schema_from_ref(ref) + if definition is None: + raise LookupError( + f'Could not find a ref for {ref}.' + ' Maybe you tried to call resolve_ref_schema from within a recursive model?' + ) + return definition + elif maybe_ref_schema['type'] == 'definitions': + return self.resolve_ref_schema(maybe_ref_schema['schema']) + return maybe_ref_schema diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py new file mode 100644 index 0000000000000000000000000000000000000000..a4058e009020e28658e13e30731b2eb29eb0d154 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_serializers.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import collections +import collections.abc +import typing +from typing import Any + +from pydantic_core import PydanticOmit, core_schema + +SEQUENCE_ORIGIN_MAP: dict[Any, Any] = { + typing.Deque: collections.deque, # noqa: UP006 + collections.deque: collections.deque, + list: list, + typing.List: list, # noqa: UP006 + tuple: tuple, + typing.Tuple: tuple, # noqa: UP006 + set: set, + typing.AbstractSet: set, + typing.Set: set, # noqa: UP006 + frozenset: frozenset, + typing.FrozenSet: frozenset, # noqa: UP006 + typing.Sequence: list, + typing.MutableSequence: list, + typing.MutableSet: set, + # this doesn't handle subclasses of these + # parametrized typing.Set creates one of these + collections.abc.MutableSet: set, + collections.abc.Set: frozenset, +} + + +def serialize_sequence_via_list( + v: Any, handler: core_schema.SerializerFunctionWrapHandler, info: core_schema.SerializationInfo +) -> Any: + items: list[Any] = [] + + mapped_origin = SEQUENCE_ORIGIN_MAP.get(type(v), None) + if mapped_origin is None: + # we shouldn't hit this branch, should probably add a serialization error or something + return v + + for index, item in enumerate(v): + try: + v = handler(item, index) + except PydanticOmit: # noqa: PERF203 + pass + else: + items.append(v) + + if info.mode_is_json(): + return items + else: + return 
mapped_origin(items) diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py new file mode 100644 index 0000000000000000000000000000000000000000..3b0c5ae8c40158a28a3b0cf21116c9695208076e --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_signature.py @@ -0,0 +1,189 @@ +from __future__ import annotations + +import dataclasses +from inspect import Parameter, Signature +from typing import TYPE_CHECKING, Any, Callable + +from pydantic_core import PydanticUndefined + +from ._typing_extra import signature_no_eval +from ._utils import is_valid_identifier + +if TYPE_CHECKING: + from ..config import ExtraValues + from ..fields import FieldInfo + + +# Copied over from stdlib dataclasses +class _HAS_DEFAULT_FACTORY_CLASS: + def __repr__(self): + return '<factory>' + + +_HAS_DEFAULT_FACTORY = _HAS_DEFAULT_FACTORY_CLASS() + + +def _field_name_for_signature(field_name: str, field_info: FieldInfo) -> str: + """Extract the correct name to use for the field when generating a signature. + + Assuming the field has a valid alias, this will return the alias. Otherwise, it will return the field name. + First priority is given to the alias, then the validation_alias, then the field name. + + Args: + field_name: The name of the field + field_info: The corresponding FieldInfo object. + + Returns: + The correct name to use when generating a signature. + """ + if isinstance(field_info.alias, str) and is_valid_identifier(field_info.alias): + return field_info.alias + if isinstance(field_info.validation_alias, str) and is_valid_identifier(field_info.validation_alias): + return field_info.validation_alias + + return field_name + + +def _process_param_defaults(param: Parameter) -> Parameter: + """Modify the signature for a parameter in a dataclass where the default value is a FieldInfo instance.
+ + Args: + param (Parameter): The parameter + + Returns: + Parameter: The custom processed parameter + """ + from ..fields import FieldInfo + + param_default = param.default + if isinstance(param_default, FieldInfo): + annotation = param.annotation + # Replace the annotation if appropriate + # inspect does "clever" things to show annotations as strings because we have + # `from __future__ import annotations` in main, we don't want that + if annotation == 'Any': + annotation = Any + + # Replace the field default + default = param_default.default + if default is PydanticUndefined: + if param_default.default_factory is None: + default = Signature.empty + else: + # this is used by dataclasses to indicate a factory exists: + default = dataclasses._HAS_DEFAULT_FACTORY # type: ignore + return param.replace( + annotation=annotation, name=_field_name_for_signature(param.name, param_default), default=default + ) + return param + + +def _generate_signature_parameters( # noqa: C901 (ignore complexity, could use a refactor) + init: Callable[..., None], + fields: dict[str, FieldInfo], + validate_by_name: bool, + extra: ExtraValues | None, +) -> dict[str, Parameter]: + """Generate a mapping of parameter names to Parameter objects for a pydantic BaseModel or dataclass.""" + from itertools import islice + + present_params = signature_no_eval(init).parameters.values() + merged_params: dict[str, Parameter] = {} + var_kw = None + use_var_kw = False + + for param in islice(present_params, 1, None): # skip self arg + # inspect does "clever" things to show annotations as strings because we have + # `from __future__ import annotations` in main, we don't want that + if fields.get(param.name): + # exclude params with init=False + if getattr(fields[param.name], 'init', True) is False: + continue + param = param.replace(name=_field_name_for_signature(param.name, fields[param.name])) + if param.annotation == 'Any': + param = param.replace(annotation=Any) + if param.kind is param.VAR_KEYWORD: + var_kw = param + continue + merged_params[param.name] = param + + if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through + allow_names = validate_by_name + for field_name, field in fields.items(): + # when alias is a str it should be used for signature generation + param_name = _field_name_for_signature(field_name, field) + + if field_name in merged_params or param_name in merged_params: + continue + + if not is_valid_identifier(param_name): + if allow_names: + param_name = field_name + else: + use_var_kw = True + continue + + if field.is_required(): + default = Parameter.empty + elif field.default_factory is not None: + # Mimics stdlib dataclasses: + default = _HAS_DEFAULT_FACTORY + else: + default = field.default + merged_params[param_name] = Parameter( + param_name, + Parameter.KEYWORD_ONLY, + annotation=field.rebuild_annotation(), + default=default, + ) + + if extra == 'allow': + use_var_kw = True + + if var_kw and use_var_kw: + # Make sure the parameter for extra kwargs + # does not have the same name as a field + default_model_signature = [ + ('self', Parameter.POSITIONAL_ONLY), + ('data', Parameter.VAR_KEYWORD), + ] + if [(p.name, p.kind) for p in present_params] == default_model_signature: + # if this is the standard model signature, use extra_data as the extra args name + var_kw_name = 'extra_data' + else: + # else start from var_kw + var_kw_name = var_kw.name + + # generate a name that's definitely unique + while var_kw_name in fields: + var_kw_name += '_' + 
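The alias-priority rule implemented by `_field_name_for_signature` above is easiest to see end to end. A minimal sketch, assuming pydantic is installed; `User` is a hypothetical example model, not part of this diff:

```python
import inspect

from pydantic import BaseModel, Field

class User(BaseModel):
    # the alias is a valid identifier, so it wins over the field name
    # when the __init__ signature is generated
    name: str = Field(alias='full_name')
    age: int = 18

print(inspect.signature(User))
# expected under these assumptions: (*, full_name: str, age: int = 18) -> None
```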
merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) + + return merged_params + + +def generate_pydantic_signature( + init: Callable[..., None], + fields: dict[str, FieldInfo], + validate_by_name: bool, + extra: ExtraValues | None, + is_dataclass: bool = False, +) -> Signature: + """Generate signature for a pydantic BaseModel or dataclass. + + Args: + init: The class init. + fields: The model fields. + validate_by_name: The `validate_by_name` value of the config. + extra: The `extra` value of the config. + is_dataclass: Whether the model is a dataclass. + + Returns: + The dataclass/BaseModel subclass signature. + """ + merged_params = _generate_signature_parameters(init, fields, validate_by_name, extra) + + if is_dataclass: + merged_params = {k: _process_param_defaults(v) for k, v in merged_params.items()} + + return Signature(parameters=list(merged_params.values()), return_annotation=None) diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py new file mode 100644 index 0000000000000000000000000000000000000000..c217f1127d7c71ab47720263384b9564abe69a29 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_typing_extra.py @@ -0,0 +1,785 @@ +"""Logic for interacting with type annotations, mostly extensions, shims and hacks to wrap Python's typing module.""" + +from __future__ import annotations + +import collections.abc +import re +import sys +import types +import typing +from functools import partial +from inspect import Signature, signature +from typing import TYPE_CHECKING, Any, Callable, cast + +import typing_extensions +from typing_extensions import deprecated, get_args, get_origin +from typing_inspection import typing_objects +from typing_inspection.introspection import is_union_origin + +from pydantic.version import version_short + +from ._namespace_utils import GlobalsNamespace, MappingNamespace, NsResolver, get_module_ns_of + +if sys.version_info < (3, 10): + NoneType = type(None) + EllipsisType = type(Ellipsis) +else: + from types import EllipsisType as EllipsisType + from types import NoneType as NoneType + +if sys.version_info >= (3, 14): + import annotationlib + +if TYPE_CHECKING: + from pydantic import BaseModel + from pydantic.fields import FieldInfo + +# As per https://typing-extensions.readthedocs.io/en/latest/#runtime-use-of-types, +# always check for both `typing` and `typing_extensions` variants of a typing construct. +# (this is implemented differently than the suggested approach in the `typing_extensions` +# docs for performance). + + +_t_annotated = typing.Annotated +_te_annotated = typing_extensions.Annotated + + +def is_annotated(tp: Any, /) -> bool: + """Return whether the provided argument is a `Annotated` special form. + + ```python {test="skip" lint="skip"} + is_annotated(Annotated[int, ...]) + #> True + ``` + """ + origin = get_origin(tp) + return origin is _t_annotated or origin is _te_annotated + + +def annotated_type(tp: Any, /) -> Any | None: + """Return the type of the `Annotated` special form, or `None`.""" + return tp.__origin__ if typing_objects.is_annotated(get_origin(tp)) else None + + +def unpack_type(tp: Any, /) -> Any | None: + """Return the type wrapped by the `Unpack` special form, or `None`.""" + return get_args(tp)[0] if typing_objects.is_unpack(get_origin(tp)) else None + + +def is_hashable(tp: Any, /) -> bool: + """Return whether the provided argument is the `Hashable` class. 
+ + ```python {test="skip" lint="skip"} + is_hashable(Hashable) + #> True + ``` + """ + # `get_origin` is documented as normalizing any typing-module aliases to `collections` classes, + # hence the second check: + return tp is collections.abc.Hashable or get_origin(tp) is collections.abc.Hashable + + +def is_callable(tp: Any, /) -> bool: + """Return whether the provided argument is a `Callable`, parametrized or not. + + ```python {test="skip" lint="skip"} + is_callable(Callable[[int], str]) + #> True + is_callable(typing.Callable) + #> True + is_callable(collections.abc.Callable) + #> True + ``` + """ + # `get_origin` is documented as normalizing any typing-module aliases to `collections` classes, + # hence the second check: + return tp is collections.abc.Callable or get_origin(tp) is collections.abc.Callable + + +_classvar_re = re.compile(r'((\w+\.)?Annotated\[)?(\w+\.)?ClassVar\[') + + +def is_classvar_annotation(tp: Any, /) -> bool: + """Return whether the provided argument represents a class variable annotation. + + Although not explicitly stated by the typing specification, `ClassVar` can be used + inside `Annotated` and as such, this function checks for this specific scenario. + + Because this function is used to detect class variables before evaluating forward references + (or because evaluation failed), we also implement a naive regex match implementation. This is + required because class variables are inspected before fields are collected, so we try to be + as accurate as possible. + """ + if typing_objects.is_classvar(tp): + return True + + origin = get_origin(tp) + + if typing_objects.is_classvar(origin): + return True + + if typing_objects.is_annotated(origin): + annotated_type = tp.__origin__ + if typing_objects.is_classvar(annotated_type) or typing_objects.is_classvar(get_origin(annotated_type)): + return True + + str_ann: str | None = None + if isinstance(tp, typing.ForwardRef): + str_ann = tp.__forward_arg__ + if isinstance(tp, str): + str_ann = tp + + if str_ann is not None and _classvar_re.match(str_ann): + # stdlib dataclasses do something similar, although a bit more advanced + # (see `dataclass._is_type`). + return True + + return False + + +_t_final = typing.Final +_te_final = typing_extensions.Final + + +# TODO implement `is_finalvar_annotation` as Final can be wrapped with other special forms: +def is_finalvar(tp: Any, /) -> bool: + """Return whether the provided argument is a `Final` special form, parametrized or not. + + ```python {test="skip" lint="skip"} + is_finalvar(Final[int]) + #> True + is_finalvar(Final) + #> True + ``` + """ + # Final is not necessarily parametrized: + if tp is _t_final or tp is _te_final: + return True + origin = get_origin(tp) + return origin is _t_final or origin is _te_final + + +_NONE_TYPES: tuple[Any, ...] = (None, NoneType, typing.Literal[None], typing_extensions.Literal[None]) + + +def is_none_type(tp: Any, /) -> bool: + """Return whether the argument represents the `None` type as part of an annotation. + + ```python {test="skip" lint="skip"} + is_none_type(None) + #> True + is_none_type(NoneType) + #> True + is_none_type(Literal[None]) + #> True + is_none_type(type[None]) + #> False + ``` + """ + return tp in _NONE_TYPES + + +def is_namedtuple(tp: Any, /) -> bool: + """Return whether the provided argument is a named tuple class. + + The class can be created using `typing.NamedTuple` or `collections.namedtuple`. + Parametrized generic classes are *not* assumed to be named tuples. + """ + from ._utils import lenient_issubclass # circ.
import + + return lenient_issubclass(tp, tuple) and hasattr(tp, '_fields') + + +# TODO In 2.12, delete this export. It is currently defined only to not break +# pydantic-settings which relies on it: +origin_is_union = is_union_origin + + +def is_generic_alias(tp: Any, /) -> bool: + return isinstance(tp, (types.GenericAlias, typing._GenericAlias)) # pyright: ignore[reportAttributeAccessIssue] + + +# TODO: Ideally, we should avoid relying on the private `typing` constructs: + +if sys.version_info < (3, 10): + WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias, types.GenericAlias) # pyright: ignore[reportAttributeAccessIssue] +else: + WithArgsTypes: tuple[Any, ...] = (typing._GenericAlias, types.GenericAlias, types.UnionType) # pyright: ignore[reportAttributeAccessIssue] + + +# Similarly, we shouldn't rely on this `_Final` class, which is even more private than `_GenericAlias`: +typing_base: Any = typing._Final # pyright: ignore[reportAttributeAccessIssue] + + +### Annotation evaluation functions: + + +def parent_frame_namespace(*, parent_depth: int = 2, force: bool = False) -> dict[str, Any] | None: + """Fetch the local namespace of the parent frame where this function is called. + + Using this function is mostly useful to resolve forward annotations pointing to members defined in a local namespace, + such as assignments inside a function. Using the standard library tools, it is currently not possible to resolve + such annotations: + + ```python {lint="skip" test="skip"} + from typing import get_type_hints + + def func() -> None: + Alias = int + + class C: + a: 'Alias' + + # Raises a `NameError: 'Alias' is not defined` + get_type_hints(C) + ``` + + Pydantic uses this function when a Pydantic model is being defined to fetch the parent frame locals. However, + this only allows us to fetch the parent frame namespace and not other parents (e.g. a model defined in a function, + itself defined in another function). Inspecting the next outer frames (using `f_back`) is not reliable enough + (see https://discuss.python.org/t/20659). + + Because this function is mostly used to better resolve forward annotations, nothing is returned if the parent frame's + code object is defined at the module level. In this case, the locals of the frame will be the same as the module + globals where the class is defined (see `_namespace_utils.get_module_ns_of`). However, if you still want to fetch + the module globals (e.g. when rebuilding a model, where the frame where the rebuild call is performed might contain + members that you want to use for forward annotations evaluation), you can use the `force` parameter. + + Args: + parent_depth: The depth at which to get the frame. Defaults to 2, meaning the parent frame where this function + is called will be used. + force: Whether to always return the frame locals, even if the frame's code object is defined at the module level. + + Returns: + The locals of the namespace, or `None` if it was skipped as per the described logic. + """ + frame = sys._getframe(parent_depth) + + if frame.f_code.co_name.startswith('<generic parameters of'): + # The parent frame can be the annotation scope when the PEP 695 generic class syntax is used, + # in which case the code name is set to `<generic parameters of MyClass>`, + # and we need to skip this frame as it is irrelevant. + frame = cast(types.FrameType, frame.f_back) # guaranteed to not be `None` + + # note, we don't copy frame.f_locals here (or during the last return call), because we don't expect the namespace to be + # modified down the line. If this becomes a problem, we could implement some sort of frozen mapping structure to enforce this.
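The frame walking above is plain `sys._getframe` introspection. A self-contained sketch of the idea, using only the stdlib (`outer` and `grab_parent_locals` are made-up names for illustration):

```python
import sys

def outer():
    local_alias = int  # a name that exists only in this function's locals

    def grab_parent_locals():
        # depth 1: the frame of whoever called this helper (here, `outer`)
        return sys._getframe(1).f_locals

    return grab_parent_locals()

assert outer()['local_alias'] is int
```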
+ if force: + return frame.f_locals + + # If either of the following conditions are true, the class is defined at the top module level. + # To better understand why we need both of these checks, see + # https://github.com/pydantic/pydantic/pull/10113#discussion_r1714981531. + if frame.f_back is None or frame.f_code.co_name == '<module>': + return None + + return frame.f_locals + + +def _type_convert(arg: Any) -> Any: + """Convert `None` to `NoneType` and strings to `ForwardRef` instances. + + This is a backport of the private `typing._type_convert` function. When + evaluating a type, `ForwardRef._evaluate` ends up being called, and is + responsible for making this conversion. However, we still have to apply + it for the first argument passed to our type evaluation functions, similarly + to the `typing.get_type_hints` function. + """ + if arg is None: + return NoneType + if isinstance(arg, str): + # Like `typing.get_type_hints`, assume the arg can be in any context, + # hence the proper `is_argument` and `is_class` args: + return _make_forward_ref(arg, is_argument=False, is_class=True) + return arg + + +def safe_get_annotations(obj: Any) -> dict[str, Any]: + """Get the annotations for the provided object, accounting for potential deferred forward references. + + Starting with Python 3.14, accessing the `__annotations__` attribute might raise a `NameError` if + a referenced symbol isn't defined yet. In this case, we return the annotation in the *forward ref* + format. + """ + if sys.version_info >= (3, 14): + return annotationlib.get_annotations(obj, format=annotationlib.Format.FORWARDREF) + else: + # TODO just do getattr(obj, '__annotations__', {}) when dropping support for Python 3.9: + if isinstance(obj, type): + return obj.__dict__.get('__annotations__', {}) + else: + return getattr(obj, '__annotations__', {}) + + +def get_model_type_hints( + model_class: type[BaseModel], + *, + ns_resolver: NsResolver | None = None, +) -> dict[str, tuple[Any, bool]]: + """Collect annotations from a Pydantic model class, including those from parent classes. + + Args: + model_class: The Pydantic model class to inspect. + ns_resolver: A namespace resolver instance to use. Defaults to an empty instance. + + Returns: + A dictionary mapping annotation names to a two-tuple: the first element is the evaluated + type or the original annotation if a `NameError` occurred, the second element is a boolean + indicating whether the evaluation succeeded. + """ + hints: dict[str, Any] | dict[str, tuple[Any, bool]] = {} + ns_resolver = ns_resolver or NsResolver() + + for base in reversed(model_class.__mro__[:-1]): + # For Python 3.14, we could also use `Format.VALUE` and pass the globals/locals + # from the ns_resolver, but we want to be able to know which specific field failed + # to evaluate: + ann = safe_get_annotations(base) + + if not ann: + continue + + with ns_resolver.push(base): + base_model_fields: dict[str, FieldInfo] | None = base.__dict__.get('__pydantic_fields__') + + for name, value in ann.items(): + if name.startswith('_'): + globalns, localns = ns_resolver.types_namespace + + # For private attributes, we only need the annotation to detect the `ClassVar` special form. + # For this reason, we still try to evaluate it, but we also catch any possible exception (on + # top of the `NameError`s caught in `try_eval_type`) that could happen so that users are free + # to use any kind of forward annotation for private fields (e.g. circular imports, new typing + # syntax, etc).
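For orientation, the `(value, succeeded)` contract relied on below: `try_eval_type` (defined later in this module) returns the evaluated type on success, and the original annotation together with `False` when a `NameError` occurs. A hedged sketch of the expected behaviour (this is a private module, so the import is an assumption, not public API):

```python
from pydantic._internal._typing_extra import try_eval_type

tp, ok = try_eval_type('int', {}, {})
assert tp is int and ok is True  # resolvable: the evaluated type is returned

tp, ok = try_eval_type('NotDefinedAnywhere', {}, {})
assert ok is False  # unresolvable: the ForwardRef is returned unchanged
```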
+ try: + hints[name] = try_eval_type(value, globalns, localns) + except Exception: + hints[name] = (value, False) + else: + if base_model_fields is not None and name in base_model_fields: + # Avoid unnecessarily evaluating annotations from parent models, as we'll end up + # copying the `FieldInfo` instance from it anyway if we need to. + # We use the `annotation` attribute here, but in reality could put anything here, + # as we are guaranteed to not make use of it: + hints[name] = (base_model_fields[name].annotation, True) + else: + globalns, localns = ns_resolver.types_namespace + + hints[name] = try_eval_type(value, globalns, localns) + + return hints + + +def get_cls_type_hints( + obj: type[Any], + *, + ns_resolver: NsResolver | None = None, +) -> dict[str, Any]: + """Collect annotations from a class, including those from parent classes. + + Args: + obj: The class to inspect. + ns_resolver: A namespace resolver instance to use. Defaults to an empty instance. + """ + hints: dict[str, Any] = {} + ns_resolver = ns_resolver or NsResolver() + + for base in reversed(obj.__mro__): + # For Python 3.14, we could also use `Format.VALUE` and pass the globals/locals + # from the ns_resolver, but we want to be able to know which specific field failed + # to evaluate: + ann = safe_get_annotations(base) + + if not ann: + continue + + with ns_resolver.push(base): + globalns, localns = ns_resolver.types_namespace + for name, value in ann.items(): + hints[name] = eval_type(value, globalns, localns) + return hints + + +def try_eval_type( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, +) -> tuple[Any, bool]: + """Try evaluating the annotation using the provided namespaces. + + Args: + value: The value to evaluate. If `None`, it will be replaced by `type[None]`. If an instance + of `str`, it will be converted to a `ForwardRef`. + globalns: The global namespace to use during annotation evaluation. + localns: The local namespace to use during annotation evaluation. + + Returns: + A two-tuple containing the possibly evaluated type and a boolean indicating + whether the evaluation succeeded or not. + """ + value = _type_convert(value) + + try: + return eval_type_backport(value, globalns, localns), True + except NameError: + return value, False + + +def eval_type( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, +) -> Any: + """Evaluate the annotation using the provided namespaces. + + Args: + value: The value to evaluate. If `None`, it will be replaced by `type[None]`. If an instance + of `str`, it will be converted to a `ForwardRef`. + globalns: The global namespace to use during annotation evaluation. + localns: The local namespace to use during annotation evaluation. + """ + value = _type_convert(value) + return eval_type_backport(value, globalns, localns) + + +@deprecated( + '`eval_type_lenient` is deprecated, use `try_eval_type` instead.', + category=None, +) +def eval_type_lenient( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, +) -> Any: + ev, _ = try_eval_type(value, globalns, localns) + return ev + + +def eval_type_backport( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, + type_params: tuple[Any, ...]
| None = None, +) -> Any: + """An enhanced version of `typing._eval_type` which will fall back to using the `eval_type_backport` + package if it's installed to let older Python versions use newer typing constructs. + + Specifically, this transforms `X | Y` into `typing.Union[X, Y]` and `list[X]` into `typing.List[X]` + (as well as all the types made generic in PEP 585) if the original syntax is not supported in the + current Python version. + + This function will also display a helpful error if the value passed fails to evaluate. + """ + try: + return _eval_type_backport(value, globalns, localns, type_params) + except TypeError as e: + if 'Unable to evaluate type annotation' in str(e): + raise + + # If it is a `TypeError` and value isn't a `ForwardRef`, it would have failed during annotation definition. + # Thus we assert here for type checking purposes: + assert isinstance(value, typing.ForwardRef) + + message = f'Unable to evaluate type annotation {value.__forward_arg__!r}.' + if sys.version_info >= (3, 11): + e.add_note(message) + raise + else: + raise TypeError(message) from e + except RecursionError as e: + # TODO ideally recursion errors should be checked in `eval_type` above, but `eval_type_backport` + # is used directly in some places. + message = ( + "If you made use of an implicit recursive type alias (e.g. `MyType = list['MyType']`), " + 'consider using PEP 695 type aliases instead. For more details, refer to the documentation: ' + f'https://docs.pydantic.dev/{version_short()}/concepts/types/#named-recursive-types' + ) + if sys.version_info >= (3, 11): + e.add_note(message) + raise + else: + raise RecursionError(f'{e.args[0]}\n{message}') + + +def _eval_type_backport( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, + type_params: tuple[Any, ...] | None = None, +) -> Any: + try: + return _eval_type(value, globalns, localns, type_params) + except TypeError as e: + if not (isinstance(value, typing.ForwardRef) and is_backport_fixable_error(e)): + raise + + try: + from eval_type_backport import eval_type_backport + except ImportError: + raise TypeError( + f'Unable to evaluate type annotation {value.__forward_arg__!r}. If you are making use ' + 'of the new typing syntax (unions using `|` since Python 3.10 or builtins subscripting ' + 'since Python 3.9), you should either replace the use of new syntax with the existing ' + '`typing` constructs or install the `eval_type_backport` package.' + ) from e + + return eval_type_backport( + value, + globalns, + localns, # pyright: ignore[reportArgumentType], waiting on a new `eval_type_backport` release. + try_default=False, + ) + + +def _eval_type( + value: Any, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, + type_params: tuple[Any, ...] | None = None, +) -> Any: + if sys.version_info >= (3, 14): + # Starting in 3.14, `_eval_type()` does *not* apply `_type_convert()` + # anymore. This means the `None` -> `type(None)` conversion does not apply: + evaluated = typing._eval_type( # type: ignore + value, + globalns, + localns, + type_params=type_params, + # This is relevant when evaluating types from `TypedDict` classes, where string annotations + # are automatically converted to `ForwardRef` instances with a module set. In this case, + # our `globalns` is irrelevant and we need to indicate to `typing._eval_type()` that it should + # infer it from the `ForwardRef.__forward_module__` attribute instead (`typing.get_type_hints()` + # does the same).
Note that this would probably be unnecessary if we properly iterated over the + # `__orig_bases__` for TypedDicts in `get_cls_type_hints()`: + prefer_fwd_module=True, + ) + if evaluated is None: + evaluated = type(None) + return evaluated + elif sys.version_info >= (3, 13): + return typing._eval_type( # type: ignore + value, globalns, localns, type_params=type_params + ) + else: + return typing._eval_type( # type: ignore + value, globalns, localns + ) + + +def is_backport_fixable_error(e: TypeError) -> bool: + msg = str(e) + + return sys.version_info < (3, 10) and msg.startswith('unsupported operand type(s) for |: ') + + +def signature_no_eval(f: Callable[..., Any]) -> Signature: + """Get the signature of a callable without evaluating any annotations.""" + if sys.version_info >= (3, 14): + from annotationlib import Format + + return signature(f, annotation_format=Format.FORWARDREF) + else: + return signature(f) + + +def get_function_type_hints( + function: Callable[..., Any], + *, + include_keys: set[str] | None = None, + globalns: GlobalsNamespace | None = None, + localns: MappingNamespace | None = None, +) -> dict[str, Any]: + """Return type hints for a function. + + This is similar to the `typing.get_type_hints` function, with a few differences: + - Support `functools.partial` by using the underlying `func` attribute. + - Do not wrap type annotation of a parameter with `Optional` if it has a default value of `None` + (related bug: https://github.com/python/cpython/issues/90353, only fixed in 3.11+). + """ + if isinstance(function, partial): + annotations = safe_get_annotations(function.func) + else: + annotations = safe_get_annotations(function) + + if globalns is None: + globalns = get_module_ns_of(function) + type_params: tuple[Any, ...] | None = None + if localns is None: + # If localns was specified, it is assumed to already contain type params. This is because + # Pydantic has more advanced logic to do so (see `_namespace_utils.ns_for_function`). + type_params = getattr(function, '__type_params__', ()) + + type_hints = {} + for name, value in annotations.items(): + if include_keys is not None and name not in include_keys: + continue + if value is None: + value = NoneType + elif isinstance(value, str): + value = _make_forward_ref(value) + + type_hints[name] = eval_type_backport(value, globalns, localns, type_params) + + return type_hints + + +# TODO use typing.ForwardRef directly when we stop supporting 3.9: +if sys.version_info < (3, 9, 8) or (3, 10) <= sys.version_info < (3, 10, 1): + + def _make_forward_ref( + arg: Any, + is_argument: bool = True, + *, + is_class: bool = False, + ) -> typing.ForwardRef: + """Wrapper for ForwardRef that accounts for the `is_class` argument missing in older versions. + The `module` argument is omitted as it breaks <3.9.8, =3.10.0 and isn't used in the calls below. + + See https://github.com/python/cpython/pull/28560 for some background. + The backport happened on 3.9.8, see: + https://github.com/pydantic/pydantic/discussions/6244#discussioncomment-6275458, + and on 3.10.1 for the 3.10 branch, see: + https://github.com/pydantic/pydantic/issues/6912 + + Implemented as EAFP with memory. 
+ """ + return typing.ForwardRef(arg, is_argument) # pyright: ignore[reportCallIssue] + +else: + _make_forward_ref = typing.ForwardRef # pyright: ignore[reportAssignmentType] + + +if sys.version_info >= (3, 10): + get_type_hints = typing.get_type_hints + +else: + """ + For older versions of python, we have a custom implementation of `get_type_hints` which is a close as possible to + the implementation in CPython 3.10.8. + """ + + @typing.no_type_check + def get_type_hints( # noqa: C901 + obj: Any, + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, + ) -> dict[str, Any]: # pragma: no cover + """Taken verbatim from python 3.10.8 unchanged, except: + * type annotations of the function definition above. + * prefixing `typing.` where appropriate + * Use `_make_forward_ref` instead of `typing.ForwardRef` to handle the `is_class` argument. + + https://github.com/python/cpython/blob/aaaf5174241496afca7ce4d4584570190ff972fe/Lib/typing.py#L1773-L1875 + + DO NOT CHANGE THIS METHOD UNLESS ABSOLUTELY NECESSARY. + ====================================================== + + Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]' with 'T' (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. For classes, the search + order is globals first then locals. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if getattr(obj, '__no_type_check__', None): + return {} + # Classes require a special treatment. + if isinstance(obj, type): + hints = {} + for base in reversed(obj.__mro__): + if globalns is None: + base_globals = getattr(sys.modules.get(base.__module__, None), '__dict__', {}) + else: + base_globals = globalns + ann = base.__dict__.get('__annotations__', {}) + if isinstance(ann, types.GetSetDescriptorType): + ann = {} + base_locals = dict(vars(base)) if localns is None else localns + if localns is None and globalns is None: + # This is surprising, but required. Before Python 3.10, + # get_type_hints only evaluated the globalns of + # a class. To maintain backwards compatibility, we reverse + # the globalns and localns order so that eval() looks into + # *base_globals* first rather than *base_locals*. + # This only affects ForwardRefs. 
+ base_globals, base_locals = base_locals, base_globals + for name, value in ann.items(): + if value is None: + value = type(None) + if isinstance(value, str): + value = _make_forward_ref(value, is_argument=False, is_class=True) + + value = eval_type_backport(value, base_globals, base_locals) + hints[name] = value + if not include_extras and hasattr(typing, '_strip_annotations'): + return { + k: typing._strip_annotations(t) # type: ignore + for k, t in hints.items() + } + else: + return hints + + if globalns is None: + if isinstance(obj, types.ModuleType): + globalns = obj.__dict__ + else: + nsobj = obj + # Find globalns for the unwrapped object. + while hasattr(nsobj, '__wrapped__'): + nsobj = nsobj.__wrapped__ + globalns = getattr(nsobj, '__globals__', {}) + if localns is None: + localns = globalns + elif localns is None: + localns = globalns + hints = getattr(obj, '__annotations__', None) + if hints is None: + # Return empty annotations for something that _could_ have them. + if isinstance(obj, typing._allowed_types): # type: ignore + return {} + else: + raise TypeError(f'{obj!r} is not a module, class, method, or function.') + defaults = typing._get_defaults(obj) # type: ignore + hints = dict(hints) + for name, value in hints.items(): + if value is None: + value = type(None) + if isinstance(value, str): + # class-level forward refs were handled above, this must be either + # a module-level annotation or a function argument annotation + + value = _make_forward_ref( + value, + is_argument=not isinstance(obj, types.ModuleType), + is_class=False, + ) + value = eval_type_backport(value, globalns, localns) + if name in defaults and defaults[name] is None: + value = typing.Optional[value] + hints[name] = value + return hints if include_extras else {k: typing._strip_annotations(t) for k, t in hints.items()} # type: ignore diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..7a0346d8470c4a4827f785905a596e9e992ec741 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_utils.py @@ -0,0 +1,446 @@ +"""Bucket of reusable internal utilities. + +This should be reduced as much as possible with functions only used in one place, moved to that place. +""" + +from __future__ import annotations as _annotations + +import dataclasses +import keyword +import sys +import warnings +import weakref +from collections import OrderedDict, defaultdict, deque +from collections.abc import Callable, Iterable, Mapping +from collections.abc import Set as AbstractSet +from copy import deepcopy +from functools import cached_property +from inspect import Parameter +from itertools import zip_longest +from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType +from typing import TYPE_CHECKING, Any, Generic, TypeVar, overload + +from pydantic_core import MISSING, PydanticUndefined +from typing_extensions import TypeAlias, TypeGuard, deprecated + +from pydantic import PydanticDeprecatedSince211 + +from . 
import _repr, _typing_extra +from ._import_utils import import_cached_base_model + +if TYPE_CHECKING: + # TODO remove type error comments when we drop support for Python 3.9 + MappingIntStrAny: TypeAlias = Mapping[int, Any] | Mapping[str, Any] # pyright: ignore[reportGeneralTypeIssues] + AbstractSetIntStr: TypeAlias = AbstractSet[int] | AbstractSet[str] # pyright: ignore[reportGeneralTypeIssues] + from ..main import BaseModel + + +# these are types that are returned unchanged by deepcopy +IMMUTABLE_NON_COLLECTIONS_TYPES: set[type[Any]] = { + int, + float, + complex, + str, + bool, + bytes, + type, + _typing_extra.NoneType, + FunctionType, + BuiltinFunctionType, + LambdaType, + weakref.ref, + CodeType, + # note: including ModuleType will differ from behaviour of deepcopy by not producing error. + # It might be not a good idea in general, but considering that this function used only internally + # against default values of fields, this will allow to actually have a field with module as default value + ModuleType, + NotImplemented.__class__, + Ellipsis.__class__, +} + +# these are types that if empty, might be copied with simple copy() instead of deepcopy() +BUILTIN_COLLECTIONS: set[type[Any]] = { + list, + set, + tuple, + frozenset, + dict, + OrderedDict, + defaultdict, + deque, +} + + +def can_be_positional(param: Parameter) -> bool: + """Return whether the parameter accepts a positional argument. + + ```python {test="skip" lint="skip"} + def func(a, /, b, *, c): + pass + + params = inspect.signature(func).parameters + can_be_positional(params['a']) + #> True + can_be_positional(params['b']) + #> True + can_be_positional(params['c']) + #> False + ``` + """ + return param.kind in (Parameter.POSITIONAL_ONLY, Parameter.POSITIONAL_OR_KEYWORD) + + +def sequence_like(v: Any) -> bool: + return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) + + +def lenient_isinstance(o: Any, class_or_tuple: type[Any] | tuple[type[Any], ...] | None) -> bool: # pragma: no cover + try: + return isinstance(o, class_or_tuple) # type: ignore[arg-type] + except TypeError: + return False + + +def lenient_issubclass(cls: Any, class_or_tuple: Any) -> bool: # pragma: no cover + try: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) + except TypeError: + if isinstance(cls, _typing_extra.WithArgsTypes): + return False + raise # pragma: no cover + + +def is_model_class(cls: Any) -> TypeGuard[type[BaseModel]]: + """Returns true if cls is a _proper_ subclass of BaseModel, and provides proper type-checking, + unlike raw calls to lenient_issubclass. + """ + BaseModel = import_cached_base_model() + + return lenient_issubclass(cls, BaseModel) and cls is not BaseModel + + +def is_valid_identifier(identifier: str) -> bool: + """Checks that a string is a valid identifier and not a Python keyword. + :param identifier: The identifier to test. + :return: True if the identifier is valid. 
+ """ + return identifier.isidentifier() and not keyword.iskeyword(identifier) + + +KeyType = TypeVar('KeyType') + + +def deep_update(mapping: dict[KeyType, Any], *updating_mappings: dict[KeyType, Any]) -> dict[KeyType, Any]: + updated_mapping = mapping.copy() + for updating_mapping in updating_mappings: + for k, v in updating_mapping.items(): + if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): + updated_mapping[k] = deep_update(updated_mapping[k], v) + else: + updated_mapping[k] = v + return updated_mapping + + +def update_not_none(mapping: dict[Any, Any], **update: Any) -> None: + mapping.update({k: v for k, v in update.items() if v is not None}) + + +T = TypeVar('T') + + +def unique_list( + input_list: list[T] | tuple[T, ...], + *, + name_factory: Callable[[T], str] = str, +) -> list[T]: + """Make a list unique while maintaining order. + We update the list if another one with the same name is set + (e.g. model validator overridden in subclass). + """ + result: list[T] = [] + result_names: list[str] = [] + for v in input_list: + v_name = name_factory(v) + if v_name not in result_names: + result_names.append(v_name) + result.append(v) + else: + result[result_names.index(v_name)] = v + + return result + + +class ValueItems(_repr.Representation): + """Class for more convenient calculation of excluded or included fields on values.""" + + __slots__ = ('_items', '_type') + + def __init__(self, value: Any, items: AbstractSetIntStr | MappingIntStrAny) -> None: + items = self._coerce_items(items) + + if isinstance(value, (list, tuple)): + items = self._normalize_indexes(items, len(value)) # type: ignore + + self._items: MappingIntStrAny = items # type: ignore + + def is_excluded(self, item: Any) -> bool: + """Check if item is fully excluded. + + :param item: key or index of a value + """ + return self.is_true(self._items.get(item)) + + def is_included(self, item: Any) -> bool: + """Check if value is contained in self._items. 
+ + :param item: key or index of value + """ + return item in self._items + + def for_element(self, e: int | str) -> AbstractSetIntStr | MappingIntStrAny | None: + """:param e: key or index of element on value + :return: raw values for element if self._items is dict and contain needed element + """ + item = self._items.get(e) # type: ignore + return item if not self.is_true(item) else None + + def _normalize_indexes(self, items: MappingIntStrAny, v_length: int) -> dict[int | str, Any]: + """:param items: dict or set of indexes which will be normalized + :param v_length: length of sequence indexes of which will be + + >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) + {0: True, 2: True, 3: True} + >>> self._normalize_indexes({'__all__': True}, 4) + {0: True, 1: True, 2: True, 3: True} + """ + normalized_items: dict[int | str, Any] = {} + all_items = None + for i, v in items.items(): + if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): + raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') + if i == '__all__': + all_items = self._coerce_value(v) + continue + if not isinstance(i, int): + raise TypeError( + 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' + 'expected integer keys or keyword "__all__"' + ) + normalized_i = v_length + i if i < 0 else i + normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) + + if not all_items: + return normalized_items + if self.is_true(all_items): + for i in range(v_length): + normalized_items.setdefault(i, ...) + return normalized_items + for i in range(v_length): + normalized_item = normalized_items.setdefault(i, {}) + if not self.is_true(normalized_item): + normalized_items[i] = self.merge(all_items, normalized_item) + return normalized_items + + @classmethod + def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: + """Merge a `base` item with an `override` item. + + Both `base` and `override` are converted to dictionaries if possible. + Sets are converted to dictionaries with the sets entries as keys and + Ellipsis as values. + + Each key-value pair existing in `base` is merged with `override`, + while the rest of the key-value pairs are updated recursively with this function. + + Merging takes place based on the "union" of keys if `intersect` is + set to `False` (default) and on the intersection of keys if + `intersect` is set to `True`. + """ + override = cls._coerce_value(override) + base = cls._coerce_value(base) + if override is None: + return base + if cls.is_true(base) or base is None: + return override + if cls.is_true(override): + return base if intersect else override + + # intersection or union of keys while preserving ordering: + if intersect: + merge_keys = [k for k in base if k in override] + [k for k in override if k in base] + else: + merge_keys = list(base) + [k for k in override if k not in base] + + merged: dict[int | str, Any] = {} + for k in merge_keys: + merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) + if merged_item is not None: + merged[k] = merged_item + + return merged + + @staticmethod + def _coerce_items(items: AbstractSetIntStr | MappingIntStrAny) -> MappingIntStrAny: + if isinstance(items, Mapping): + pass + elif isinstance(items, AbstractSet): + items = dict.fromkeys(items, ...) 
# type: ignore + else: + class_name = getattr(items, '__class__', '???') + raise TypeError(f'Unexpected type of exclude value {class_name}') + return items # type: ignore + + @classmethod + def _coerce_value(cls, value: Any) -> Any: + if value is None or cls.is_true(value): + return value + return cls._coerce_items(value) + + @staticmethod + def is_true(v: Any) -> bool: + return v is True or v is ... + + def __repr_args__(self) -> _repr.ReprArgs: + return [(None, self._items)] + + +if TYPE_CHECKING: + + def LazyClassAttribute(name: str, get_value: Callable[[], T]) -> T: ... + +else: + + class LazyClassAttribute: + """A descriptor exposing an attribute only accessible on a class (hidden from instances). + + The attribute is lazily computed and cached during the first access. + """ + + def __init__(self, name: str, get_value: Callable[[], Any]) -> None: + self.name = name + self.get_value = get_value + + @cached_property + def value(self) -> Any: + return self.get_value() + + def __get__(self, instance: Any, owner: type[Any]) -> None: + if instance is None: + return self.value + raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') + + +Obj = TypeVar('Obj') + + +def smart_deepcopy(obj: Obj) -> Obj: + """Return type as is for immutable built-in types + Use obj.copy() for built-in empty collections + Use copy.deepcopy() for non-empty collections and unknown objects. + """ + if obj is MISSING or obj is PydanticUndefined: + return obj # pyright: ignore[reportReturnType] + obj_type = obj.__class__ + if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: + return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway + try: + if not obj and obj_type in BUILTIN_COLLECTIONS: + # faster way for empty collections, no need to copy its members + return obj if obj_type is tuple else obj.copy() # tuple doesn't have copy method # type: ignore + except (TypeError, ValueError, RuntimeError): + # do we really dare to catch ALL errors? Seems a bit risky + pass + + return deepcopy(obj) # slowest way when we actually might need a deepcopy + + +_SENTINEL = object() + + +def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: + """Check that the items of `left` are the same objects as those in `right`. 
+ + >>> a, b = object(), object() + >>> all_identical([a, b, a], [a, b, a]) + True + >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" + False + """ + for left_item, right_item in zip_longest(left, right, fillvalue=_SENTINEL): + if left_item is not right_item: + return False + return True + + +def get_first_not_none(a: Any, b: Any) -> Any: + """Return the first argument if it is not `None`, otherwise return the second argument.""" + return a if a is not None else b + + +@dataclasses.dataclass(frozen=True) +class SafeGetItemProxy: + """Wrapper redirecting `__getitem__` to `get` with a sentinel value as default. + + This makes it safe to use in `operator.itemgetter` when some keys may be missing. + """ + + # Define __slots__ manually for performance + # @dataclasses.dataclass() only supports slots=True in python>=3.10 + __slots__ = ('wrapped',) + + wrapped: Mapping[str, Any] + + def __getitem__(self, key: str, /) -> Any: + return self.wrapped.get(key, _SENTINEL) + + # required to pass the object to operator.itemgetter() instances due to a quirk of typeshed + # https://github.com/python/mypy/issues/13713 + # https://github.com/python/typeshed/pull/8785 + # Since this is typing-only, hide it in a typing.TYPE_CHECKING block + if TYPE_CHECKING: + + def __contains__(self, key: str, /) -> bool: + return self.wrapped.__contains__(key) + + +_ModelT = TypeVar('_ModelT', bound='BaseModel') +_RT = TypeVar('_RT') + + +class deprecated_instance_property(Generic[_ModelT, _RT]): + """A decorator exposing the decorated class method as a property, with a warning on instance access. + + This decorator takes a class method defined on the `BaseModel` class and transforms it into + an attribute. The attribute can be accessed on both the class and instances of the class. If accessed + via an instance, a deprecation warning is emitted stating that instance access will be removed in V3. + """ + + def __init__(self, fget: Callable[[type[_ModelT]], _RT], /) -> None: + # Note: fget should be a classmethod: + self.fget = fget + + @overload + def __get__(self, instance: None, objtype: type[_ModelT]) -> _RT: ... + @overload + @deprecated( + 'Accessing this attribute on the instance is deprecated, and will be removed in Pydantic V3. ' + 'Instead, you should access this attribute from the model class.', + category=None, + ) + def __get__(self, instance: _ModelT, objtype: type[_ModelT]) -> _RT: ... + def __get__(self, instance: _ModelT | None, objtype: type[_ModelT]) -> _RT: + if instance is not None: + # fmt: off + attr_name = ( + self.fget.__name__ + if sys.version_info >= (3, 10) + else self.fget.__func__.__name__ # pyright: ignore[reportFunctionMemberAccess] + ) + # fmt: on + warnings.warn( + f'Accessing the {attr_name!r} attribute on the instance is deprecated.
' + 'Instead, you should access this attribute from the model class.', + category=PydanticDeprecatedSince211, + stacklevel=2, + ) + return self.fget.__get__(instance, objtype)() diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py new file mode 100644 index 0000000000000000000000000000000000000000..49ccd4f2d321128dbabb02a2eef2616875da0b95 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_validate_call.py @@ -0,0 +1,141 @@ +from __future__ import annotations as _annotations + +import functools +import inspect +from collections.abc import Awaitable +from functools import partial +from typing import Any, Callable + +import pydantic_core + +from ..config import ConfigDict +from ..plugin._schema_validator import create_schema_validator +from ._config import ConfigWrapper +from ._generate_schema import GenerateSchema, ValidateCallSupportedTypes +from ._namespace_utils import MappingNamespace, NsResolver, ns_for_function +from ._typing_extra import signature_no_eval + + +def extract_function_name(func: ValidateCallSupportedTypes) -> str: + """Extract the name of a `ValidateCallSupportedTypes` object.""" + return f'partial({func.func.__name__})' if isinstance(func, functools.partial) else func.__name__ + + +def extract_function_qualname(func: ValidateCallSupportedTypes) -> str: + """Extract the qualname of a `ValidateCallSupportedTypes` object.""" + return f'partial({func.func.__qualname__})' if isinstance(func, functools.partial) else func.__qualname__ + + +def update_wrapper_attributes(wrapped: ValidateCallSupportedTypes, wrapper: Callable[..., Any]): + """Update the `wrapper` function with the attributes of the `wrapped` function. Return the updated function.""" + if inspect.iscoroutinefunction(wrapped): + + @functools.wraps(wrapped) + async def wrapper_function(*args, **kwargs): # type: ignore + return await wrapper(*args, **kwargs) + else: + + @functools.wraps(wrapped) + def wrapper_function(*args, **kwargs): + return wrapper(*args, **kwargs) + + # We need to manually update this because `partial` object has no `__name__` and `__qualname__`. 
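The comment above points at a stdlib quirk worth spelling out: `functools.partial` objects expose the wrapped callable via `.func` but carry no `__name__` or `__qualname__` of their own, which is why the attributes are set manually below. A quick demonstration:

```python
import functools

def greet(name: str) -> str:
    return f'hello {name}'

p = functools.partial(greet, 'world')
assert not hasattr(p, '__name__')   # the partial itself has no name
assert p.func.__name__ == 'greet'   # but the underlying function does
```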
+ wrapper_function.__name__ = extract_function_name(wrapped) + wrapper_function.__qualname__ = extract_function_qualname(wrapped) + wrapper_function.raw_function = wrapped # type: ignore + + return wrapper_function + + +class ValidateCallWrapper: + """This is a wrapper around a function that validates the arguments passed to it, and optionally the return value.""" + + __slots__ = ( + 'function', + 'validate_return', + 'schema_type', + 'module', + 'qualname', + 'ns_resolver', + 'config_wrapper', + '__pydantic_complete__', + '__pydantic_validator__', + '__return_pydantic_validator__', + ) + + def __init__( + self, + function: ValidateCallSupportedTypes, + config: ConfigDict | None, + validate_return: bool, + parent_namespace: MappingNamespace | None, + ) -> None: + self.function = function + self.validate_return = validate_return + if isinstance(function, partial): + self.schema_type = function.func + self.module = function.func.__module__ + else: + self.schema_type = function + self.module = function.__module__ + self.qualname = extract_function_qualname(function) + + self.ns_resolver = NsResolver( + namespaces_tuple=ns_for_function(self.schema_type, parent_namespace=parent_namespace) + ) + self.config_wrapper = ConfigWrapper(config) + if not self.config_wrapper.defer_build: + self._create_validators() + else: + self.__pydantic_complete__ = False + + def _create_validators(self) -> None: + gen_schema = GenerateSchema(self.config_wrapper, self.ns_resolver) + schema = gen_schema.clean_schema(gen_schema.generate_schema(self.function)) + core_config = self.config_wrapper.core_config(title=self.qualname) + + self.__pydantic_validator__ = create_schema_validator( + schema, + self.schema_type, + self.module, + self.qualname, + 'validate_call', + core_config, + self.config_wrapper.plugin_settings, + ) + if self.validate_return: + signature = signature_no_eval(self.function) + return_type = signature.return_annotation if signature.return_annotation is not signature.empty else Any + gen_schema = GenerateSchema(self.config_wrapper, self.ns_resolver) + schema = gen_schema.clean_schema(gen_schema.generate_schema(return_type)) + validator = create_schema_validator( + schema, + self.schema_type, + self.module, + self.qualname, + 'validate_call', + core_config, + self.config_wrapper.plugin_settings, + ) + if inspect.iscoroutinefunction(self.function): + + async def return_val_wrapper(aw: Awaitable[Any]) -> None: + return validator.validate_python(await aw) + + self.__return_pydantic_validator__ = return_val_wrapper + else: + self.__return_pydantic_validator__ = validator.validate_python + else: + self.__return_pydantic_validator__ = None + + self.__pydantic_complete__ = True + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + if not self.__pydantic_complete__: + self._create_validators() + + res = self.__pydantic_validator__.validate_python(pydantic_core.ArgsKwargs(args, kwargs)) + if self.__return_pydantic_validator__: + return self.__return_pydantic_validator__(res) + else: + return res diff --git a/venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py b/venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..3ad6c92790404b7b63cafed02e9dccb5c4fb7a3a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/_internal/_validators.py @@ -0,0 +1,534 @@ +"""Validator functions for standard library types. + +Import of this module is deferred since it contains imports of many standard library modules. 
+""" + +from __future__ import annotations as _annotations + +import collections.abc +import math +import re +import typing +from collections.abc import Sequence +from decimal import Decimal +from fractions import Fraction +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from typing import Any, Callable, TypeVar, Union, cast +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError + +import typing_extensions +from pydantic_core import PydanticCustomError, PydanticKnownError, core_schema +from typing_extensions import get_args, get_origin +from typing_inspection import typing_objects + +from pydantic._internal._import_utils import import_cached_field_info +from pydantic.errors import PydanticSchemaGenerationError + + +def sequence_validator( + input_value: Sequence[Any], + /, + validator: core_schema.ValidatorFunctionWrapHandler, +) -> Sequence[Any]: + """Validator for `Sequence` types, isinstance(v, Sequence) has already been called.""" + value_type = type(input_value) + + # We don't accept any plain string as a sequence + # Relevant issue: https://github.com/pydantic/pydantic/issues/5595 + if issubclass(value_type, (str, bytes)): + raise PydanticCustomError( + 'sequence_str', + "'{type_name}' instances are not allowed as a Sequence value", + {'type_name': value_type.__name__}, + ) + + # TODO: refactor sequence validation to validate with either a list or a tuple + # schema, depending on the type of the value. + # Additionally, we should be able to remove one of either this validator or the + # SequenceValidator in _std_types_schema.py (preferably this one, while porting over some logic). + # Effectively, a refactor for sequence validation is needed. + if value_type is tuple: + input_value = list(input_value) + + v_list = validator(input_value) + + # the rest of the logic is just re-creating the original type from `v_list` + if value_type is list: + return v_list + elif issubclass(value_type, range): + # return the list as we probably can't re-create the range + return v_list + elif value_type is tuple: + return tuple(v_list) + else: + # best guess at how to re-create the original type, more custom construction logic might be required + return value_type(v_list) # type: ignore[call-arg] + + +def import_string(value: Any) -> Any: + if isinstance(value, str): + try: + return _import_string_logic(value) + except ImportError as e: + raise PydanticCustomError('import_error', 'Invalid python path: {error}', {'error': str(e)}) from e + else: + # otherwise we just return the value and let the next validator do the rest of the work + return value + + +def _import_string_logic(dotted_path: str) -> Any: + """Inspired by uvicorn — dotted paths should include a colon before the final item if that item is not a module. + (This is necessary to distinguish between a submodule and an attribute when there is a conflict.). + + If the dotted path does not include a colon and the final item is not a valid module, importing as an attribute + rather than a submodule will be attempted automatically. 
+ + So, for example, the following values of `dotted_path` result in the following returned values: + * 'collections': <module 'collections'> + * 'collections.abc': <module 'collections.abc'> + * 'collections.abc:Mapping': <class 'collections.abc.Mapping'> + * `collections.abc.Mapping`: <class 'collections.abc.Mapping'> (though this is a bit slower than the previous line) + + An error will be raised under any of the following scenarios: + * `dotted_path` contains more than one colon (e.g., 'collections:abc:Mapping') + * the substring of `dotted_path` before the colon is not a valid module in the environment (e.g., '123:Mapping') + * the substring of `dotted_path` after the colon is not an attribute of the module (e.g., 'collections:abc123') + """ + from importlib import import_module + + components = dotted_path.strip().split(':') + if len(components) > 2: + raise ImportError(f"Import strings should have at most one ':'; received {dotted_path!r}") + attribute = None + if len(components) == 2: + attribute = components[1] + module_path = components[0] + if not module_path: + raise ImportError(f'Import strings should have a nonempty module name; received {dotted_path!r}') + + try: + module = import_module(module_path) + except ModuleNotFoundError: + if attribute is None and '.' in module_path: + # Try interpreting the final dotted segment as an attribute, not a submodule + maybe_module_path, maybe_attribute = module_path.rsplit('.', 1) + + try: + return _import_string_logic(f'{maybe_module_path}:{maybe_attribute}') + except ImportError: + pass + raise + + if attribute is not None: + try: + return getattr(module, attribute) + except AttributeError as e: + raise ImportError(f'cannot import name {attribute!r} from {module_path!r}') from e + else: + return module + + +def pattern_either_validator(input_value: Any, /) -> re.Pattern[Any]: + if isinstance(input_value, re.Pattern): + return input_value + elif isinstance(input_value, (str, bytes)): + # todo strict mode + return compile_pattern(input_value) # type: ignore + else: + raise PydanticCustomError('pattern_type', 'Input should be a valid pattern') + + +def pattern_str_validator(input_value: Any, /) -> re.Pattern[str]: + if isinstance(input_value, re.Pattern): + if isinstance(input_value.pattern, str): + return input_value + else: + raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern') + elif isinstance(input_value, str): + return compile_pattern(input_value) + elif isinstance(input_value, bytes): + raise PydanticCustomError('pattern_str_type', 'Input should be a string pattern') + else: + raise PydanticCustomError('pattern_type', 'Input should be a valid pattern') + + +def pattern_bytes_validator(input_value: Any, /) -> re.Pattern[bytes]: + if isinstance(input_value, re.Pattern): + if isinstance(input_value.pattern, bytes): + return input_value + else: + raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern') + elif isinstance(input_value, bytes): + return compile_pattern(input_value) + elif isinstance(input_value, str): + raise PydanticCustomError('pattern_bytes_type', 'Input should be a bytes pattern') + else: + raise PydanticCustomError('pattern_type', 'Input should be a valid pattern') + + +PatternType = TypeVar('PatternType', str, bytes) + + +def compile_pattern(pattern: PatternType) -> re.Pattern[PatternType]: + try: + return re.compile(pattern) + except re.error: + raise PydanticCustomError('pattern_regex', 'Input should be a valid regular expression') + + +def ip_v4_address_validator(input_value: Any, /) -> IPv4Address: + if isinstance(input_value, IPv4Address): + return input_value + + try: + 
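# (illustrative) IPv4Address also accepts the integer and packed-bytes forms, + # e.g. IPv4Address(3232235521) == IPv4Address('192.168.0.1'); invalid input raises ValueError + 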
return IPv4Address(input_value) + except ValueError: + raise PydanticCustomError('ip_v4_address', 'Input is not a valid IPv4 address') + + +def ip_v6_address_validator(input_value: Any, /) -> IPv6Address: + if isinstance(input_value, IPv6Address): + return input_value + + try: + return IPv6Address(input_value) + except ValueError: + raise PydanticCustomError('ip_v6_address', 'Input is not a valid IPv6 address') + + +def ip_v4_network_validator(input_value: Any, /) -> IPv4Network: + """Assume IPv4Network initialised with a default `strict` argument. + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network + """ + if isinstance(input_value, IPv4Network): + return input_value + + try: + return IPv4Network(input_value) + except ValueError: + raise PydanticCustomError('ip_v4_network', 'Input is not a valid IPv4 network') + + +def ip_v6_network_validator(input_value: Any, /) -> IPv6Network: + """Assume IPv6Network initialised with a default `strict` argument. + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network + """ + if isinstance(input_value, IPv6Network): + return input_value + + try: + return IPv6Network(input_value) + except ValueError: + raise PydanticCustomError('ip_v6_network', 'Input is not a valid IPv6 network') + + +def ip_v4_interface_validator(input_value: Any, /) -> IPv4Interface: + if isinstance(input_value, IPv4Interface): + return input_value + + try: + return IPv4Interface(input_value) + except ValueError: + raise PydanticCustomError('ip_v4_interface', 'Input is not a valid IPv4 interface') + + +def ip_v6_interface_validator(input_value: Any, /) -> IPv6Interface: + if isinstance(input_value, IPv6Interface): + return input_value + + try: + return IPv6Interface(input_value) + except ValueError: + raise PydanticCustomError('ip_v6_interface', 'Input is not a valid IPv6 interface') + + +def fraction_validator(input_value: Any, /) -> Fraction: + if isinstance(input_value, Fraction): + return input_value + + try: + return Fraction(input_value) + except ValueError: + raise PydanticCustomError('fraction_parsing', 'Input is not a valid fraction') + + +def forbid_inf_nan_check(x: Any) -> Any: + if not math.isfinite(x): + raise PydanticKnownError('finite_number') + return x + + +def _safe_repr(v: Any) -> int | float | str: + """The context argument for `PydanticKnownError` requires a number or str type, so we do a simple repr() coercion for types like timedelta. + + See tests/test_types.py::test_annotated_metadata_any_order for some context. 
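+ + For example, ints, floats and strs pass through unchanged, while a value such as a timedelta is coerced to its repr() string.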
+ """ + if isinstance(v, (int, float, str)): + return v + return repr(v) + + +def greater_than_validator(x: Any, gt: Any) -> Any: + try: + if not (x > gt): + raise PydanticKnownError('greater_than', {'gt': _safe_repr(gt)}) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'gt' to supplied value {x}") + + +def greater_than_or_equal_validator(x: Any, ge: Any) -> Any: + try: + if not (x >= ge): + raise PydanticKnownError('greater_than_equal', {'ge': _safe_repr(ge)}) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'ge' to supplied value {x}") + + +def less_than_validator(x: Any, lt: Any) -> Any: + try: + if not (x < lt): + raise PydanticKnownError('less_than', {'lt': _safe_repr(lt)}) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'lt' to supplied value {x}") + + +def less_than_or_equal_validator(x: Any, le: Any) -> Any: + try: + if not (x <= le): + raise PydanticKnownError('less_than_equal', {'le': _safe_repr(le)}) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'le' to supplied value {x}") + + +def multiple_of_validator(x: Any, multiple_of: Any) -> Any: + try: + if x % multiple_of: + raise PydanticKnownError('multiple_of', {'multiple_of': _safe_repr(multiple_of)}) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'multiple_of' to supplied value {x}") + + +def min_length_validator(x: Any, min_length: Any) -> Any: + try: + if not (len(x) >= min_length): + raise PydanticKnownError( + 'too_short', {'field_type': 'Value', 'min_length': min_length, 'actual_length': len(x)} + ) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'min_length' to supplied value {x}") + + +def max_length_validator(x: Any, max_length: Any) -> Any: + try: + if len(x) > max_length: + raise PydanticKnownError( + 'too_long', + {'field_type': 'Value', 'max_length': max_length, 'actual_length': len(x)}, + ) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'max_length' to supplied value {x}") + + +def _extract_decimal_digits_info(decimal: Decimal) -> tuple[int, int]: + """Compute the total number of digits and decimal places for a given [`Decimal`][decimal.Decimal] instance. + + This function handles both normalized and non-normalized Decimal instances. + Example: Decimal('1.230') -> 4 digits, 3 decimal places + + Args: + decimal (Decimal): The decimal number to analyze. + + Returns: + tuple[int, int]: A tuple containing the number of decimal places and total digits. + + Though this could be divided into two separate functions, the logic is easier to follow if we couple the computation + of the number of decimals and digits together. + """ + try: + decimal_tuple = decimal.as_tuple() + + assert isinstance(decimal_tuple.exponent, int) + + exponent = decimal_tuple.exponent + num_digits = len(decimal_tuple.digits) + + if exponent >= 0: + # A positive exponent adds that many trailing zeros + # Ex: digit_tuple=(1, 2, 3), exponent=2 -> 12300 -> 0 decimal places, 5 digits + num_digits += exponent + decimal_places = 0 + else: + # If the absolute value of the negative exponent is larger than the + # number of digits, then it's the same as the number of digits, + # because it'll consume all the digits in digit_tuple and then + # add abs(exponent) - len(digit_tuple) leading zeros after the decimal point. 
+ # Ex: digit_tuple=(1, 2, 3), exponent=-2 -> 1.23 -> 2 decimal places, 3 digits + # Ex: digit_tuple=(1, 2, 3), exponent=-4 -> 0.0123 -> 4 decimal places, 4 digits + decimal_places = abs(exponent) + num_digits = max(num_digits, decimal_places) + + return decimal_places, num_digits + except (AssertionError, AttributeError): + raise TypeError(f'Unable to extract decimal digits info from supplied value {decimal}') + + +def max_digits_validator(x: Any, max_digits: Any) -> Any: + try: + _, num_digits = _extract_decimal_digits_info(x) + _, normalized_num_digits = _extract_decimal_digits_info(x.normalize()) + if (num_digits > max_digits) and (normalized_num_digits > max_digits): + raise PydanticKnownError( + 'decimal_max_digits', + {'max_digits': max_digits}, + ) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'max_digits' to supplied value {x}") + + +def decimal_places_validator(x: Any, decimal_places: Any) -> Any: + try: + decimal_places_, _ = _extract_decimal_digits_info(x) + if decimal_places_ > decimal_places: + normalized_decimal_places, _ = _extract_decimal_digits_info(x.normalize()) + if normalized_decimal_places > decimal_places: + raise PydanticKnownError( + 'decimal_max_places', + {'decimal_places': decimal_places}, + ) + return x + except TypeError: + raise TypeError(f"Unable to apply constraint 'decimal_places' to supplied value {x}") + + +def deque_validator(input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler) -> collections.deque[Any]: + return collections.deque(handler(input_value), maxlen=getattr(input_value, 'maxlen', None)) + + +def defaultdict_validator( + input_value: Any, handler: core_schema.ValidatorFunctionWrapHandler, default_default_factory: Callable[[], Any] +) -> collections.defaultdict[Any, Any]: + if isinstance(input_value, collections.defaultdict): + default_factory = input_value.default_factory + return collections.defaultdict(default_factory, handler(input_value)) + else: + return collections.defaultdict(default_default_factory, handler(input_value)) + + +def get_defaultdict_default_default_factory(values_source_type: Any) -> Callable[[], Any]: + FieldInfo = import_cached_field_info() + + values_type_origin = get_origin(values_source_type) + + def infer_default() -> Callable[[], Any]: + allowed_default_types: dict[Any, Any] = { + tuple: tuple, + collections.abc.Sequence: tuple, + collections.abc.MutableSequence: list, + list: list, + typing.Sequence: list, + set: set, + typing.MutableSet: set, + collections.abc.MutableSet: set, + collections.abc.Set: frozenset, + typing.MutableMapping: dict, + typing.Mapping: dict, + collections.abc.Mapping: dict, + collections.abc.MutableMapping: dict, + float: float, + int: int, + str: str, + bool: bool, + } + values_type = values_type_origin or values_source_type + instructions = 'set using `DefaultDict[..., Annotated[..., Field(default_factory=...)]]`' + if typing_objects.is_typevar(values_type): + + def type_var_default_factory() -> None: + raise RuntimeError( + 'Generic defaultdict cannot be used without a concrete value type or an' + ' explicit default factory, ' + instructions + ) + + return type_var_default_factory + elif values_type not in allowed_default_types: + # a somewhat subjective set of types that have reasonable default values + allowed_msg = ', '.join([t.__name__ for t in set(allowed_default_types.values())]) + raise PydanticSchemaGenerationError( + f'Unable to infer a default factory for keys of type {values_source_type}.' 
+ f' Only {allowed_msg} are supported, other types require an explicit default factory' + ' ' + instructions + ) + return allowed_default_types[values_type] + + # Assume Annotated[..., Field(...)] + if typing_objects.is_annotated(values_type_origin): + field_info = next((v for v in get_args(values_source_type) if isinstance(v, FieldInfo)), None) + else: + field_info = None + if field_info and field_info.default_factory: + # Assume the default factory does not take any argument: + default_default_factory = cast(Callable[[], Any], field_info.default_factory) + else: + default_default_factory = infer_default() + return default_default_factory + + +def validate_str_is_valid_iana_tz(value: Any, /) -> ZoneInfo: + if isinstance(value, ZoneInfo): + return value + try: + return ZoneInfo(value) + except (ZoneInfoNotFoundError, ValueError, TypeError): + raise PydanticCustomError('zoneinfo_str', 'invalid timezone: {value}', {'value': value}) + + +NUMERIC_VALIDATOR_LOOKUP: dict[str, Callable] = { + 'gt': greater_than_validator, + 'ge': greater_than_or_equal_validator, + 'lt': less_than_validator, + 'le': less_than_or_equal_validator, + 'multiple_of': multiple_of_validator, + 'min_length': min_length_validator, + 'max_length': max_length_validator, + 'max_digits': max_digits_validator, + 'decimal_places': decimal_places_validator, +} + +IpType = Union[IPv4Address, IPv6Address, IPv4Network, IPv6Network, IPv4Interface, IPv6Interface] + +IP_VALIDATOR_LOOKUP: dict[type[IpType], Callable] = { + IPv4Address: ip_v4_address_validator, + IPv6Address: ip_v6_address_validator, + IPv4Network: ip_v4_network_validator, + IPv6Network: ip_v6_network_validator, + IPv4Interface: ip_v4_interface_validator, + IPv6Interface: ip_v6_interface_validator, +} + +MAPPING_ORIGIN_MAP: dict[Any, Any] = { + typing.DefaultDict: collections.defaultdict, # noqa: UP006 + collections.defaultdict: collections.defaultdict, + typing.OrderedDict: collections.OrderedDict, # noqa: UP006 + collections.OrderedDict: collections.OrderedDict, + typing_extensions.OrderedDict: collections.OrderedDict, + typing.Counter: collections.Counter, + collections.Counter: collections.Counter, + # this doesn't handle subclasses of these + typing.Mapping: dict, + typing.MutableMapping: dict, + # parametrized typing.{Mutable}Mapping creates one of these + collections.abc.Mapping: dict, + collections.abc.MutableMapping: dict, +} diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__init__.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2553f1d6baa12d455b49b892851b920c9dba4f41 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/class_validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/class_validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..28c082df39a50b4d85617c27ea70052cdd38d57b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/class_validators.cpython-312.pyc differ diff 
--git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3aa71d2a5b8bdcf5ae8ac70a8d1ebc5fe002f1d6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/copy_internals.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/copy_internals.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bb7b1c66d79200435e91ff582a3d259b941c2983 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/copy_internals.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/decorator.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/decorator.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f2cb1f3219c74b4716fa05e17f5de9c67322746 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/decorator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/json.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/json.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d3b856854f9dc42134cdd06e49cb0ce3b0e991af Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/json.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/parse.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/parse.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..65c770d3d790d090201cb3f78ad4468bf600bd0b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/parse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/tools.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/tools.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee12c441f157954f753e1c71f727a0ba5628ba14 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/deprecated/__pycache__/tools.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..e6ba195064646af6b128c6be769790d8fa06c925 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/class_validators.py @@ -0,0 +1,256 @@ +"""Old `@validator` and `@root_validator` function validators from V1.""" + +from __future__ import annotations as _annotations + +from functools import partial, partialmethod +from types import FunctionType +from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, Union, overload +from warnings import warn + +from typing_extensions import Protocol, TypeAlias, deprecated + +from .._internal import _decorators, _decorators_v1 +from ..errors import PydanticUserError +from ..warnings import PydanticDeprecatedSince20 + +_ALLOW_REUSE_WARNING_MESSAGE = '`allow_reuse` is 
deprecated and will be ignored; it should no longer be necessary' + + +if TYPE_CHECKING: + + class _OnlyValueValidatorClsMethod(Protocol): + def __call__(self, __cls: Any, __value: Any) -> Any: ... + + class _V1ValidatorWithValuesClsMethod(Protocol): + def __call__(self, __cls: Any, __value: Any, values: dict[str, Any]) -> Any: ... + + class _V1ValidatorWithValuesKwOnlyClsMethod(Protocol): + def __call__(self, __cls: Any, __value: Any, *, values: dict[str, Any]) -> Any: ... + + class _V1ValidatorWithKwargsClsMethod(Protocol): + def __call__(self, __cls: Any, **kwargs: Any) -> Any: ... + + class _V1ValidatorWithValuesAndKwargsClsMethod(Protocol): + def __call__(self, __cls: Any, values: dict[str, Any], **kwargs: Any) -> Any: ... + + class _V1RootValidatorClsMethod(Protocol): + def __call__( + self, __cls: Any, __values: _decorators_v1.RootValidatorValues + ) -> _decorators_v1.RootValidatorValues: ... + + V1Validator = Union[ + _OnlyValueValidatorClsMethod, + _V1ValidatorWithValuesClsMethod, + _V1ValidatorWithValuesKwOnlyClsMethod, + _V1ValidatorWithKwargsClsMethod, + _V1ValidatorWithValuesAndKwargsClsMethod, + _decorators_v1.V1ValidatorWithValues, + _decorators_v1.V1ValidatorWithValuesKwOnly, + _decorators_v1.V1ValidatorWithKwargs, + _decorators_v1.V1ValidatorWithValuesAndKwargs, + ] + + V1RootValidator = Union[ + _V1RootValidatorClsMethod, + _decorators_v1.V1RootValidatorFunction, + ] + + _PartialClsOrStaticMethod: TypeAlias = Union[classmethod[Any, Any, Any], staticmethod[Any, Any], partialmethod[Any]] + + # Allow both a V1 (assumed pre=False) or V2 (assumed mode='after') validator + # We lie to type checkers and say we return the same thing we get + # but in reality we return a proxy object that _mostly_ behaves like the wrapped thing + _V1ValidatorType = TypeVar('_V1ValidatorType', V1Validator, _PartialClsOrStaticMethod) + _V1RootValidatorFunctionType = TypeVar( + '_V1RootValidatorFunctionType', + _decorators_v1.V1RootValidatorFunction, + _V1RootValidatorClsMethod, + _PartialClsOrStaticMethod, + ) +else: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + + +@deprecated( + 'Pydantic V1 style `@validator` validators are deprecated.' + ' You should migrate to Pydantic V2 style `@field_validator` validators,' + ' see the migration guide for more details', + category=None, +) +def validator( + __field: str, + *fields: str, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool | None = None, + allow_reuse: bool = False, +) -> Callable[[_V1ValidatorType], _V1ValidatorType]: + """Decorate methods on the class indicating that they should be used to validate fields. + + Args: + __field (str): The first field the validator should be called on; this is separate + from `fields` to ensure an error is raised if you don't pass at least one. + *fields (str): Additional field(s) the validator should be called on. + pre (bool, optional): Whether this validator should be called before the standard + validators (else after). Defaults to False. + each_item (bool, optional): For complex objects (sets, lists etc.) whether to validate + individual elements rather than the whole object. Defaults to False. + always (bool, optional): Whether this method and other validators should be called even if + the value is missing. Defaults to False. + check_fields (bool | None, optional): Whether to check that the fields actually exist on the model. 
+ Defaults to None. + allow_reuse (bool, optional): Whether to track and raise an error if another validator refers to + the decorated function. Defaults to False. + + Returns: + Callable: A decorator that can be used to decorate a + function to be used as a validator. + """ + warn( + 'Pydantic V1 style `@validator` validators are deprecated.' + ' You should migrate to Pydantic V2 style `@field_validator` validators,' + ' see the migration guide for more details', + DeprecationWarning, + stacklevel=2, + ) + + if allow_reuse is True: # pragma: no cover + warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning, stacklevel=2) + fields = __field, *fields + if isinstance(fields[0], FunctionType): + raise PydanticUserError( + '`@validator` should be used with fields and keyword arguments, not bare. ' + "E.g. usage should be `@validator('<field_name>', ...)`", + code='decorator-missing-arguments', + ) + elif not all(isinstance(field, str) for field in fields): + raise PydanticUserError( + '`@validator` fields should be passed as separate string args. ' + "E.g. usage should be `@validator('<field_name_1>', '<field_name_2>', ...)`", + code='decorator-invalid-fields', + ) + + mode: Literal['before', 'after'] = 'before' if pre is True else 'after' + + def dec(f: Any) -> _decorators.PydanticDescriptorProxy[Any]: + if _decorators.is_instance_method_from_sig(f): + raise PydanticUserError( + '`@validator` cannot be applied to instance methods', code='validator-instance-method' + ) + # auto apply the @classmethod decorator + f = _decorators.ensure_classmethod_based_on_signature(f) + wrap = _decorators_v1.make_generic_v1_field_validator + validator_wrapper_info = _decorators.ValidatorDecoratorInfo( + fields=fields, + mode=mode, + each_item=each_item, + always=always, + check_fields=check_fields, + ) + return _decorators.PydanticDescriptorProxy(f, validator_wrapper_info, shim=wrap) + + return dec # type: ignore[return-value] + + +@overload +def root_validator( + *, + # if you don't specify `pre` the default is `pre=False` + # which means you need to specify `skip_on_failure=True` + skip_on_failure: Literal[True], + allow_reuse: bool = ..., +) -> Callable[ + [_V1RootValidatorFunctionType], + _V1RootValidatorFunctionType, +]: ... + + +@overload +def root_validator( + *, + # if you specify `pre=True` then you don't need to specify + # `skip_on_failure`, in fact it is not allowed as an argument! + pre: Literal[True], + allow_reuse: bool = ..., +) -> Callable[ + [_V1RootValidatorFunctionType], + _V1RootValidatorFunctionType, +]: ... + + +@overload +def root_validator( + *, + # if you explicitly specify `pre=False` then you + # MUST specify `skip_on_failure=True` + pre: Literal[False], + skip_on_failure: Literal[True], + allow_reuse: bool = ..., +) -> Callable[ + [_V1RootValidatorFunctionType], + _V1RootValidatorFunctionType, +]: ... + + +@deprecated( + 'Pydantic V1 style `@root_validator` validators are deprecated.' + ' You should migrate to Pydantic V2 style `@model_validator` validators,' + ' see the migration guide for more details', + category=None, +) +def root_validator( + *__args, + pre: bool = False, + skip_on_failure: bool = False, + allow_reuse: bool = False, +) -> Any: + """Decorate methods on a model indicating that they should be used to validate (and perhaps + modify) data either before or after standard model parsing/validation is performed. + + Args: + pre (bool, optional): Whether this validator should be called before the standard + validators (else after). Defaults to False.
+ skip_on_failure (bool, optional): Whether to stop validation and return as soon as a + failure is encountered. Defaults to False. + allow_reuse (bool, optional): Whether to track and raise an error if another validator + refers to the decorated function. Defaults to False. + + Returns: + Any: A decorator that can be used to decorate a function to be used as a root_validator. + """ + warn( + 'Pydantic V1 style `@root_validator` validators are deprecated.' + ' You should migrate to Pydantic V2 style `@model_validator` validators,' + ' see the migration guide for more details', + DeprecationWarning, + stacklevel=2, + ) + + if __args: + # Ensure a nice error is raised if someone attempts to use the bare decorator + return root_validator()(*__args) # type: ignore + + if allow_reuse is True: # pragma: no cover + warn(_ALLOW_REUSE_WARNING_MESSAGE, DeprecationWarning, stacklevel=2) + mode: Literal['before', 'after'] = 'before' if pre is True else 'after' + if pre is False and skip_on_failure is not True: + raise PydanticUserError( + 'If you use `@root_validator` with pre=False (the default) you MUST specify `skip_on_failure=True`.' + ' Note that `@root_validator` is deprecated and should be replaced with `@model_validator`.', + code='root-validator-pre-skip', + ) + + wrap = partial(_decorators_v1.make_v1_generic_root_validator, pre=pre) + + def dec(f: Callable[..., Any] | classmethod[Any, Any, Any] | staticmethod[Any, Any]) -> Any: + if _decorators.is_instance_method_from_sig(f): + raise TypeError('`@root_validator` cannot be applied to instance methods') + # auto apply the @classmethod decorator + res = _decorators.ensure_classmethod_based_on_signature(f) + dec_info = _decorators.RootValidatorDecoratorInfo(mode=mode) + return _decorators.PydanticDescriptorProxy(res, dec_info, shim=wrap) + + return dec diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/config.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/config.py new file mode 100644 index 0000000000000000000000000000000000000000..bd4692ace9b4594799974d6ecf8d44357d1d02c5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/config.py @@ -0,0 +1,72 @@ +from __future__ import annotations as _annotations + +import warnings +from typing import TYPE_CHECKING, Any, Literal + +from typing_extensions import deprecated + +from .._internal import _config +from ..warnings import PydanticDeprecatedSince20 + +if not TYPE_CHECKING: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + +__all__ = 'BaseConfig', 'Extra' + + +class _ConfigMetaclass(type): + def __getattr__(self, item: str) -> Any: + try: + obj = _config.config_defaults[item] + warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning) + return obj + except KeyError as exc: + raise AttributeError(f"type object '{self.__name__}' has no attribute {exc}") from exc + + +@deprecated('BaseConfig is deprecated. Use the `pydantic.ConfigDict` instead.', category=PydanticDeprecatedSince20) +class BaseConfig(metaclass=_ConfigMetaclass): + """This class is only retained for backwards compatibility. + + !!! Warning "Deprecated" + BaseConfig is deprecated. Use the [`pydantic.ConfigDict`][pydantic.ConfigDict] instead. 
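+ + For example, an inner `class Config(BaseConfig)` that sets `extra = 'forbid'` maps to `model_config = ConfigDict(extra='forbid')` on a V2 model.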
+ """ + + def __getattr__(self, item: str) -> Any: + try: + obj = super().__getattribute__(item) + warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning) + return obj + except AttributeError as exc: + try: + return getattr(type(self), item) + except AttributeError: + # re-raising changes the displayed text to reflect that `self` is not a type + raise AttributeError(str(exc)) from exc + + def __init_subclass__(cls, **kwargs: Any) -> None: + warnings.warn(_config.DEPRECATION_MESSAGE, DeprecationWarning) + return super().__init_subclass__(**kwargs) + + +class _ExtraMeta(type): + def __getattribute__(self, __name: str) -> Any: + # The @deprecated decorator accesses other attributes, so we only emit a warning for the expected ones + if __name in {'allow', 'ignore', 'forbid'}: + warnings.warn( + "`pydantic.config.Extra` is deprecated, use literal values instead (e.g. `extra='allow'`)", + DeprecationWarning, + stacklevel=2, + ) + return super().__getattribute__(__name) + + +@deprecated( + "Extra is deprecated. Use literal values instead (e.g. `extra='allow'`)", category=PydanticDeprecatedSince20 +) +class Extra(metaclass=_ExtraMeta): + allow: Literal['allow'] = 'allow' + ignore: Literal['ignore'] = 'ignore' + forbid: Literal['forbid'] = 'forbid' diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py new file mode 100644 index 0000000000000000000000000000000000000000..0170dc08bc13d29ef4b23476877fc22dc81bdfff --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/copy_internals.py @@ -0,0 +1,224 @@ +from __future__ import annotations as _annotations + +import typing +from copy import deepcopy +from enum import Enum +from typing import Any + +import typing_extensions + +from .._internal import ( + _model_construction, + _typing_extra, + _utils, +) + +if typing.TYPE_CHECKING: + from .. import BaseModel + from .._internal._utils import AbstractSetIntStr, MappingIntStrAny + + AnyClassMethod = classmethod[Any, Any, Any] + TupleGenerator = typing.Generator[tuple[str, Any], None, None] + Model = typing.TypeVar('Model', bound='BaseModel') + # should be `set[int] | set[str] | dict[int, IncEx] | dict[str, IncEx] | None`, but mypy can't cope + IncEx: typing_extensions.TypeAlias = 'set[int] | set[str] | dict[int, Any] | dict[str, Any] | None' + +_object_setattr = _model_construction.object_setattr + + +def _iter( + self: BaseModel, + to_dict: bool = False, + by_alias: bool = False, + include: AbstractSetIntStr | MappingIntStrAny | None = None, + exclude: AbstractSetIntStr | MappingIntStrAny | None = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, +) -> TupleGenerator: + # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. + # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. 
+ if exclude is not None: + exclude = _utils.ValueItems.merge( + {k: v.exclude for k, v in self.__pydantic_fields__.items() if v.exclude is not None}, exclude + ) + + if include is not None: + include = _utils.ValueItems.merge(dict.fromkeys(self.__pydantic_fields__, True), include, intersect=True) + + allowed_keys = _calculate_keys(self, include=include, exclude=exclude, exclude_unset=exclude_unset) # type: ignore + if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): + # huge boost for plain _iter() + yield from self.__dict__.items() + if self.__pydantic_extra__: + yield from self.__pydantic_extra__.items() + return + + value_exclude = _utils.ValueItems(self, exclude) if exclude is not None else None + value_include = _utils.ValueItems(self, include) if include is not None else None + + if self.__pydantic_extra__ is None: + items = self.__dict__.items() + else: + items = list(self.__dict__.items()) + list(self.__pydantic_extra__.items()) + + for field_key, v in items: + if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): + continue + + if exclude_defaults: + try: + field = self.__pydantic_fields__[field_key] + except KeyError: + pass + else: + if not field.is_required() and field.default == v: + continue + + if by_alias and field_key in self.__pydantic_fields__: + dict_key = self.__pydantic_fields__[field_key].alias or field_key + else: + dict_key = field_key + + if to_dict or value_include or value_exclude: + v = _get_value( + type(self), + v, + to_dict=to_dict, + by_alias=by_alias, + include=value_include and value_include.for_element(field_key), + exclude=value_exclude and value_exclude.for_element(field_key), + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + yield dict_key, v + + +def _copy_and_set_values( + self: Model, + values: dict[str, Any], + fields_set: set[str], + extra: dict[str, Any] | None = None, + private: dict[str, Any] | None = None, + *, + deep: bool, # UP006 +) -> Model: + if deep: + # chances of having empty dict here are quite low for using smart_deepcopy + values = deepcopy(values) + extra = deepcopy(extra) + private = deepcopy(private) + + cls = self.__class__ + m = cls.__new__(cls) + _object_setattr(m, '__dict__', values) + _object_setattr(m, '__pydantic_extra__', extra) + _object_setattr(m, '__pydantic_fields_set__', fields_set) + _object_setattr(m, '__pydantic_private__', private) + + return m + + +@typing.no_type_check +def _get_value( + cls: type[BaseModel], + v: Any, + to_dict: bool, + by_alias: bool, + include: AbstractSetIntStr | MappingIntStrAny | None, + exclude: AbstractSetIntStr | MappingIntStrAny | None, + exclude_unset: bool, + exclude_defaults: bool, + exclude_none: bool, +) -> Any: + from .. 
import BaseModel + + if isinstance(v, BaseModel): + if to_dict: + return v.model_dump( + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=include, # type: ignore + exclude=exclude, # type: ignore + exclude_none=exclude_none, + ) + else: + return v.copy(include=include, exclude=exclude) + + value_exclude = _utils.ValueItems(v, exclude) if exclude else None + value_include = _utils.ValueItems(v, include) if include else None + + if isinstance(v, dict): + return { + k_: _get_value( + cls, + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(k_), + exclude=value_exclude and value_exclude.for_element(k_), + exclude_none=exclude_none, + ) + for k_, v_ in v.items() + if (not value_exclude or not value_exclude.is_excluded(k_)) + and (not value_include or value_include.is_included(k_)) + } + + elif _utils.sequence_like(v): + seq_args = ( + _get_value( + cls, + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(i), + exclude=value_exclude and value_exclude.for_element(i), + exclude_none=exclude_none, + ) + for i, v_ in enumerate(v) + if (not value_exclude or not value_exclude.is_excluded(i)) + and (not value_include or value_include.is_included(i)) + ) + + return v.__class__(*seq_args) if _typing_extra.is_namedtuple(v.__class__) else v.__class__(seq_args) + + elif isinstance(v, Enum) and getattr(cls.model_config, 'use_enum_values', False): + return v.value + + else: + return v + + +def _calculate_keys( + self: BaseModel, + include: MappingIntStrAny | None, + exclude: MappingIntStrAny | None, + exclude_unset: bool, + update: dict[str, Any] | None = None, # noqa UP006 +) -> typing.AbstractSet[str] | None: + if include is None and exclude is None and exclude_unset is False: + return None + + keys: typing.AbstractSet[str] + if exclude_unset: + keys = self.__pydantic_fields_set__.copy() + else: + keys = set(self.__dict__.keys()) + keys = keys | (self.__pydantic_extra__ or {}).keys() + + if include is not None: + keys &= include.keys() + + if update: + keys -= update.keys() + + if exclude: + keys -= {k for k, v in exclude.items() if _utils.ValueItems.is_true(v)} + + return keys diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py new file mode 100644 index 0000000000000000000000000000000000000000..e73ad209ae86b26d6fa892f1eba39cfc28980970 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/decorator.py @@ -0,0 +1,284 @@ +import warnings +from collections.abc import Mapping +from functools import wraps +from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union, overload + +from typing_extensions import deprecated + +from .._internal import _config, _typing_extra +from ..alias_generators import to_pascal +from ..errors import PydanticUserError +from ..functional_validators import field_validator +from ..main import BaseModel, create_model +from ..warnings import PydanticDeprecatedSince20 + +if not TYPE_CHECKING: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + +__all__ = ('validate_arguments',) + +if TYPE_CHECKING: + AnyCallable = Callable[..., Any] + + AnyCallableT = 
TypeVar('AnyCallableT', bound=AnyCallable) + ConfigType = Union[None, type[Any], dict[str, Any]] + + +@overload +def validate_arguments( + func: None = None, *, config: 'ConfigType' = None +) -> Callable[['AnyCallableT'], 'AnyCallableT']: ... + + +@overload +def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT': ... + + +@deprecated( + 'The `validate_arguments` method is deprecated; use `validate_call` instead.', + category=None, +) +def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any: + """Decorator to validate the arguments passed to a function.""" + warnings.warn( + 'The `validate_arguments` method is deprecated; use `validate_call` instead.', + PydanticDeprecatedSince20, + stacklevel=2, + ) + + def validate(_func: 'AnyCallable') -> 'AnyCallable': + vd = ValidatedFunction(_func, config) + + @wraps(_func) + def wrapper_function(*args: Any, **kwargs: Any) -> Any: + return vd.call(*args, **kwargs) + + wrapper_function.vd = vd # type: ignore + wrapper_function.validate = vd.init_model_instance # type: ignore + wrapper_function.raw_function = vd.raw_function # type: ignore + wrapper_function.model = vd.model # type: ignore + return wrapper_function + + if func: + return validate(func) + else: + return validate + + +ALT_V_ARGS = 'v__args' +ALT_V_KWARGS = 'v__kwargs' +V_POSITIONAL_ONLY_NAME = 'v__positional_only' +V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' + + +class ValidatedFunction: + def __init__(self, function: 'AnyCallable', config: 'ConfigType'): + from inspect import Parameter, signature + + parameters: Mapping[str, Parameter] = signature(function).parameters + + if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: + raise PydanticUserError( + f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' + f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator', + code=None, + ) + + self.raw_function = function + self.arg_mapping: dict[int, str] = {} + self.positional_only_args: set[str] = set() + self.v_args_name = 'args' + self.v_kwargs_name = 'kwargs' + + type_hints = _typing_extra.get_type_hints(function, include_extras=True) + takes_args = False + takes_kwargs = False + fields: dict[str, tuple[Any, Any]] = {} + for i, (name, p) in enumerate(parameters.items()): + if p.annotation is p.empty: + annotation = Any + else: + annotation = type_hints[name] + + default = ... 
if p.default is p.empty else p.default + if p.kind == Parameter.POSITIONAL_ONLY: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_POSITIONAL_ONLY_NAME] = list[str], None + self.positional_only_args.add(name) + elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_DUPLICATE_KWARGS] = list[str], None + elif p.kind == Parameter.KEYWORD_ONLY: + fields[name] = annotation, default + elif p.kind == Parameter.VAR_POSITIONAL: + self.v_args_name = name + fields[name] = tuple[annotation, ...], None + takes_args = True + else: + assert p.kind == Parameter.VAR_KEYWORD, p.kind + self.v_kwargs_name = name + fields[name] = dict[str, annotation], None + takes_kwargs = True + + # these checks avoid a clash between "args" and a field with that name + if not takes_args and self.v_args_name in fields: + self.v_args_name = ALT_V_ARGS + + # same with "kwargs" + if not takes_kwargs and self.v_kwargs_name in fields: + self.v_kwargs_name = ALT_V_KWARGS + + if not takes_args: + # we add the field so validation below can raise the correct exception + fields[self.v_args_name] = list[Any], None + + if not takes_kwargs: + # same with kwargs + fields[self.v_kwargs_name] = dict[Any, Any], None + + self.create_model(fields, takes_args, takes_kwargs, config) + + def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: + values = self.build_values(args, kwargs) + return self.model(**values) + + def call(self, *args: Any, **kwargs: Any) -> Any: + m = self.init_model_instance(*args, **kwargs) + return self.execute(m) + + def build_values(self, args: tuple[Any, ...], kwargs: dict[str, Any]) -> dict[str, Any]: + values: dict[str, Any] = {} + if args: + arg_iter = enumerate(args) + while True: + try: + i, a = next(arg_iter) + except StopIteration: + break + arg_name = self.arg_mapping.get(i) + if arg_name is not None: + values[arg_name] = a + else: + values[self.v_args_name] = [a] + [a for _, a in arg_iter] + break + + var_kwargs: dict[str, Any] = {} + wrong_positional_args = [] + duplicate_kwargs = [] + fields_alias = [ + field.alias + for name, field in self.model.__pydantic_fields__.items() + if name not in (self.v_args_name, self.v_kwargs_name) + ] + non_var_fields = set(self.model.__pydantic_fields__) - {self.v_args_name, self.v_kwargs_name} + for k, v in kwargs.items(): + if k in non_var_fields or k in fields_alias: + if k in self.positional_only_args: + wrong_positional_args.append(k) + if k in values: + duplicate_kwargs.append(k) + values[k] = v + else: + var_kwargs[k] = v + + if var_kwargs: + values[self.v_kwargs_name] = var_kwargs + if wrong_positional_args: + values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args + if duplicate_kwargs: + values[V_DUPLICATE_KWARGS] = duplicate_kwargs + return values + + def execute(self, m: BaseModel) -> Any: + d = { + k: v + for k, v in m.__dict__.items() + if k in m.__pydantic_fields_set__ or m.__pydantic_fields__[k].default_factory + } + var_kwargs = d.pop(self.v_kwargs_name, {}) + + if self.v_args_name in d: + args_: list[Any] = [] + in_kwargs = False + kwargs = {} + for name, value in d.items(): + if in_kwargs: + kwargs[name] = value + elif name == self.v_args_name: + args_ += value + in_kwargs = True + else: + args_.append(value) + return self.raw_function(*args_, **kwargs, **var_kwargs) + elif self.positional_only_args: + args_ = [] + kwargs = {} + for name, value in d.items(): + if name in self.positional_only_args: + args_.append(value) + else: + kwargs[name] = value 
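+ # positional-only parameters must be supplied positionally; the remaining fields can be passed as keywords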
+ return self.raw_function(*args_, **kwargs, **var_kwargs) + else: + return self.raw_function(**d, **var_kwargs) + + def create_model(self, fields: dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: + pos_args = len(self.arg_mapping) + + config_wrapper = _config.ConfigWrapper(config) + + if config_wrapper.alias_generator: + raise PydanticUserError( + 'Setting the "alias_generator" property on custom Config for ' + '@validate_arguments is not yet supported, please remove.', + code=None, + ) + if config_wrapper.extra is None: + config_wrapper.config_dict['extra'] = 'forbid' + + class DecoratorBaseModel(BaseModel): + @field_validator(self.v_args_name, check_fields=False) + @classmethod + def check_args(cls, v: Optional[list[Any]]) -> Optional[list[Any]]: + if takes_args or v is None: + return v + + raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') + + @field_validator(self.v_kwargs_name, check_fields=False) + @classmethod + def check_kwargs(cls, v: Optional[dict[str, Any]]) -> Optional[dict[str, Any]]: + if takes_kwargs or v is None: + return v + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v.keys())) + raise TypeError(f'unexpected keyword argument{plural}: {keys}') + + @field_validator(V_POSITIONAL_ONLY_NAME, check_fields=False) + @classmethod + def check_positional_only(cls, v: Optional[list[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') + + @field_validator(V_DUPLICATE_KWARGS, check_fields=False) + @classmethod + def check_duplicate_kwargs(cls, v: Optional[list[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'multiple values for argument{plural}: {keys}') + + model_config = config_wrapper.config_dict + + self.model = create_model(to_pascal(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/json.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/json.py new file mode 100644 index 0000000000000000000000000000000000000000..1e216a765d15d5dd9e379f9de4f8f91ab8063877 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/json.py @@ -0,0 +1,141 @@ +import datetime +import warnings +from collections import deque +from decimal import Decimal +from enum import Enum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from re import Pattern +from types import GeneratorType +from typing import TYPE_CHECKING, Any, Callable, Union +from uuid import UUID + +from typing_extensions import deprecated + +from .._internal._import_utils import import_cached_base_model +from ..color import Color +from ..networks import NameEmail +from ..types import SecretBytes, SecretStr +from ..warnings import PydanticDeprecatedSince20 + +if not TYPE_CHECKING: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + +__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat' + + +def isoformat(o: Union[datetime.date, datetime.time]) -> str: + return o.isoformat() + + +def decimal_encoder(dec_value: Decimal) -> Union[int, float]: + """Encodes a Decimal as int of 
there's no exponent, otherwise float. + + This is useful when we use ConstrainedDecimal to represent Numeric(x,0) + where an integer (but not int typed) is used. Encoding this as a float + results in failed round-tripping between encode and parse. + Our Id type is a prime example of this. + + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + exponent = dec_value.as_tuple().exponent + if isinstance(exponent, int) and exponent >= 0: + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: dict[type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, +} + + +@deprecated( + '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.', + category=None, +) +def pydantic_encoder(obj: Any) -> Any: + warnings.warn( + '`pydantic_encoder` is deprecated, use `pydantic_core.to_jsonable_python` instead.', + category=PydanticDeprecatedSince20, + stacklevel=2, + ) + from dataclasses import asdict, is_dataclass + + BaseModel = import_cached_base_model() + + if isinstance(obj, BaseModel): + return obj.model_dump() + elif is_dataclass(obj): + return asdict(obj) # type: ignore + + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = ENCODERS_BY_TYPE[base] + except KeyError: + continue + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") + + +# TODO: Add a suggested migration path once there is a way to use custom encoders +@deprecated( + '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.', + category=None, +) +def custom_pydantic_encoder(type_encoders: dict[Any, Callable[[type[Any]], Any]], obj: Any) -> Any: + warnings.warn( + '`custom_pydantic_encoder` is deprecated, use `BaseModel.model_dump` instead.', + category=PydanticDeprecatedSince20, + stacklevel=2, + ) + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = type_encoders[base] + except KeyError: + continue + + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + return pydantic_encoder(obj) + + +@deprecated('`timedelta_isoformat` is deprecated.', category=None) +def timedelta_isoformat(td: datetime.timedelta) -> str: + """ISO 8601 encoding for Python timedelta object.""" + warnings.warn('`timedelta_isoformat` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2) + minutes, seconds = divmod(td.seconds, 60) + hours, minutes = divmod(minutes, 60) + return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py new file mode 100644 index 
0000000000000000000000000000000000000000..2a92e62b7b2e8dab77cbe0c2dbb79c810af7f452 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/parse.py @@ -0,0 +1,80 @@ +from __future__ import annotations + +import json +import pickle +import warnings +from enum import Enum +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable + +from typing_extensions import deprecated + +from ..warnings import PydanticDeprecatedSince20 + +if not TYPE_CHECKING: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + + +class Protocol(str, Enum): + json = 'json' + pickle = 'pickle' + + +@deprecated('`load_str_bytes` is deprecated.', category=None) +def load_str_bytes( + b: str | bytes, + *, + content_type: str | None = None, + encoding: str = 'utf8', + proto: Protocol | None = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + warnings.warn('`load_str_bytes` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2) + if proto is None and content_type: + if content_type.endswith(('json', 'javascript')): + pass + elif allow_pickle and content_type.endswith('pickle'): + proto = Protocol.pickle + else: + raise TypeError(f'Unknown content-type: {content_type}') + + proto = proto or Protocol.json + + if proto == Protocol.json: + if isinstance(b, bytes): + b = b.decode(encoding) + return json_loads(b) # type: ignore + elif proto == Protocol.pickle: + if not allow_pickle: + raise RuntimeError('Trying to decode with pickle with allow_pickle=False') + bb = b if isinstance(b, bytes) else b.encode() # type: ignore + return pickle.loads(bb) + else: + raise TypeError(f'Unknown protocol: {proto}') + + +@deprecated('`load_file` is deprecated.', category=None) +def load_file( + path: str | Path, + *, + content_type: str | None = None, + encoding: str = 'utf8', + proto: Protocol | None = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + warnings.warn('`load_file` is deprecated.', category=PydanticDeprecatedSince20, stacklevel=2) + path = Path(path) + b = path.read_bytes() + if content_type is None: + if path.suffix in ('.js', '.json'): + proto = Protocol.json + elif path.suffix == '.pkl': + proto = Protocol.pickle + + return load_str_bytes( + b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads + ) diff --git a/venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py b/venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..5ad7faef2ec69eaa737a58d3587770a612a510d8 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/deprecated/tools.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +import json +import warnings +from typing import TYPE_CHECKING, Any, Callable, TypeVar, Union + +from typing_extensions import deprecated + +from ..json_schema import DEFAULT_REF_TEMPLATE, GenerateJsonSchema +from ..type_adapter import TypeAdapter +from ..warnings import PydanticDeprecatedSince20 + +if not TYPE_CHECKING: + # See PyCharm issues https://youtrack.jetbrains.com/issue/PY-21915 + # and https://youtrack.jetbrains.com/issue/PY-51428 + DeprecationWarning = PydanticDeprecatedSince20 + +__all__ = 'parse_obj_as', 'schema_of', 'schema_json_of' + +NameFactory = Union[str, Callable[[type[Any]], str]] + + +T = TypeVar('T') + + 
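+# Example: parse_obj_as(list[int], ['1', '2']) returns [1, 2]; the non-deprecated +# equivalent is TypeAdapter(list[int]).validate_python(['1', '2']). + 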
+@deprecated( + '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.', + category=None, +) +def parse_obj_as(type_: type[T], obj: Any, type_name: NameFactory | None = None) -> T: + warnings.warn( + '`parse_obj_as` is deprecated. Use `pydantic.TypeAdapter.validate_python` instead.', + category=PydanticDeprecatedSince20, + stacklevel=2, + ) + if type_name is not None: # pragma: no cover + warnings.warn( + 'The type_name parameter is deprecated. parse_obj_as no longer creates temporary models', + DeprecationWarning, + stacklevel=2, + ) + return TypeAdapter(type_).validate_python(obj) + + +@deprecated( + '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.', + category=None, +) +def schema_of( + type_: Any, + *, + title: NameFactory | None = None, + by_alias: bool = True, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, +) -> dict[str, Any]: + """Generate a JSON schema (as dict) for the passed model or dynamically generated one.""" + warnings.warn( + '`schema_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.', + category=PydanticDeprecatedSince20, + stacklevel=2, + ) + res = TypeAdapter(type_).json_schema( + by_alias=by_alias, + schema_generator=schema_generator, + ref_template=ref_template, + ) + if title is not None: + if isinstance(title, str): + res['title'] = title + else: + warnings.warn( + 'Passing a callable for the `title` parameter is deprecated and no longer supported', + DeprecationWarning, + stacklevel=2, + ) + res['title'] = title(type_) + return res + + +@deprecated( + '`schema_json_of` is deprecated. Use `pydantic.TypeAdapter.json_schema` instead.', + category=None, +) +def schema_json_of( + type_: Any, + *, + title: NameFactory | None = None, + by_alias: bool = True, + ref_template: str = DEFAULT_REF_TEMPLATE, + schema_generator: type[GenerateJsonSchema] = GenerateJsonSchema, + **dumps_kwargs: Any, +) -> str: + """Generate a JSON schema (as JSON) for the passed model or dynamically generated one.""" + warnings.warn( + '`schema_json_of` is deprecated. 
Use `pydantic.TypeAdapter.json_schema` instead.', + category=PydanticDeprecatedSince20, + stacklevel=2, + ) + return json.dumps( + schema_of(type_, title=title, by_alias=by_alias, ref_template=ref_template, schema_generator=schema_generator), + **dumps_kwargs, + ) diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/__init__.py b/venv/lib/python3.12/site-packages/pydantic/experimental/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5b5add10921077da0928529592a75e7ada7ecee5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/experimental/__init__.py @@ -0,0 +1 @@ +"""The "experimental" module of pydantic contains potential new features that are subject to change.""" diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d158548f3380398adfa9c362d2d7af007a14770b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/arguments_schema.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/arguments_schema.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24aa327bd2e2a84ca0ac0e739c043fb668e1ef70 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/arguments_schema.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/missing_sentinel.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/missing_sentinel.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5359425754a8d5b6400dec299e8a7adffae08674 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/missing_sentinel.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/pipeline.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/pipeline.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef575cd50cb72fa69555135b16f1423413855943 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/experimental/__pycache__/pipeline.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/arguments_schema.py b/venv/lib/python3.12/site-packages/pydantic/experimental/arguments_schema.py new file mode 100644 index 0000000000000000000000000000000000000000..af4a8f3be3242519b529668e321dd8393241f7ea --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/experimental/arguments_schema.py @@ -0,0 +1,44 @@ +"""Experimental module exposing a function to generate a core schema that validates callable arguments.""" + +from __future__ import annotations + +from collections.abc import Callable +from typing import Any, Literal + +from pydantic_core import CoreSchema + +from pydantic import ConfigDict +from pydantic._internal import _config, _generate_schema, _namespace_utils + + +def generate_arguments_schema( + func: Callable[..., Any], + schema_type: Literal['arguments', 'arguments-v3'] = 'arguments-v3', + parameters_callback: Callable[[int, str, Any], Literal['skip'] | None] | None = None, + config: ConfigDict | None = 
None, +) -> CoreSchema: + """Generate the schema for the arguments of a function. + + Args: + func: The function to generate the schema for. + schema_type: The type of schema to generate. + parameters_callback: A callable that will be invoked for each parameter. The callback + should take three required arguments: the index, the name and the type annotation + (or [`Parameter.empty`][inspect.Parameter.empty] if not annotated) of the parameter. + The callback can optionally return `'skip'`, so that the parameter gets excluded + from the resulting schema. + config: The configuration to use. + + Returns: + The generated schema. + """ + generate_schema = _generate_schema.GenerateSchema( + _config.ConfigWrapper(config), + ns_resolver=_namespace_utils.NsResolver(namespaces_tuple=_namespace_utils.ns_for_function(func)), + ) + + if schema_type == 'arguments': + schema = generate_schema._arguments_schema(func, parameters_callback) # pyright: ignore[reportArgumentType] + else: + schema = generate_schema._arguments_v3_schema(func, parameters_callback) # pyright: ignore[reportArgumentType] + return generate_schema.clean_schema(schema) diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/missing_sentinel.py b/venv/lib/python3.12/site-packages/pydantic/experimental/missing_sentinel.py new file mode 100644 index 0000000000000000000000000000000000000000..3e7f820ceda3b8d984f01ad0aaa1e150f06beda3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/experimental/missing_sentinel.py @@ -0,0 +1,5 @@ +"""Experimental module exposing the `MISSING` sentinel.""" + +from pydantic_core import MISSING + +__all__ = ('MISSING',) diff --git a/venv/lib/python3.12/site-packages/pydantic/experimental/pipeline.py b/venv/lib/python3.12/site-packages/pydantic/experimental/pipeline.py new file mode 100644 index 0000000000000000000000000000000000000000..9ead658f06ff547613803ac852be85e643bfc584 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/experimental/pipeline.py @@ -0,0 +1,663 @@ +"""Experimental pipeline API functionality.
Be careful with this API, it's subject to change.""" + +from __future__ import annotations + +import datetime +import operator +import re +import sys +from collections import deque +from collections.abc import Container +from dataclasses import dataclass +from decimal import Decimal +from functools import cached_property, partial +from re import Pattern +from typing import TYPE_CHECKING, Annotated, Any, Callable, Generic, Protocol, TypeVar, Union, overload + +import annotated_types + +if TYPE_CHECKING: + from pydantic import GetCoreSchemaHandler + +from pydantic_core import PydanticCustomError +from pydantic_core import core_schema as cs + +from pydantic import Strict +from pydantic._internal._internal_dataclass import slots_true as _slots_true + +if sys.version_info < (3, 10): + EllipsisType = type(Ellipsis) +else: + from types import EllipsisType + +__all__ = ['validate_as', 'validate_as_deferred', 'transform'] + +_slots_frozen = {**_slots_true, 'frozen': True} + + +@dataclass(**_slots_frozen) +class _ValidateAs: + tp: type[Any] + strict: bool = False + + +@dataclass +class _ValidateAsDefer: + func: Callable[[], type[Any]] + + @cached_property + def tp(self) -> type[Any]: + return self.func() + + +@dataclass(**_slots_frozen) +class _Transform: + func: Callable[[Any], Any] + + +@dataclass(**_slots_frozen) +class _PipelineOr: + left: _Pipeline[Any, Any] + right: _Pipeline[Any, Any] + + +@dataclass(**_slots_frozen) +class _PipelineAnd: + left: _Pipeline[Any, Any] + right: _Pipeline[Any, Any] + + +@dataclass(**_slots_frozen) +class _Eq: + value: Any + + +@dataclass(**_slots_frozen) +class _NotEq: + value: Any + + +@dataclass(**_slots_frozen) +class _In: + values: Container[Any] + + +@dataclass(**_slots_frozen) +class _NotIn: + values: Container[Any] + + +_ConstraintAnnotation = Union[ + annotated_types.Le, + annotated_types.Ge, + annotated_types.Lt, + annotated_types.Gt, + annotated_types.Len, + annotated_types.MultipleOf, + annotated_types.Timezone, + annotated_types.Interval, + annotated_types.Predicate, + # common predicates not included in annotated_types + _Eq, + _NotEq, + _In, + _NotIn, + # regular expressions + Pattern[str], +] + + +@dataclass(**_slots_frozen) +class _Constraint: + constraint: _ConstraintAnnotation + + +_Step = Union[_ValidateAs, _ValidateAsDefer, _Transform, _PipelineOr, _PipelineAnd, _Constraint] + +_InT = TypeVar('_InT') +_OutT = TypeVar('_OutT') +_NewOutT = TypeVar('_NewOutT') + + +class _FieldTypeMarker: + pass + + +# TODO: ultimately, make this public, see https://github.com/pydantic/pydantic/pull/9459#discussion_r1628197626 +# Also, make this frozen eventually, but that doesn't work right now because of the generic base +# Which attempts to modify __orig_base__ and such. +# We could go with a manual freeze, but that seems overkill for now. +@dataclass(**_slots_true) +class _Pipeline(Generic[_InT, _OutT]): + """Abstract representation of a chain of validation, transformation, and parsing steps.""" + + _steps: tuple[_Step, ...] + + def transform( + self, + func: Callable[[_OutT], _NewOutT], + ) -> _Pipeline[_InT, _NewOutT]: + """Transform the output of the previous step. + + If used as the first step in a pipeline, the type of the field is used. + That is, the transformation is applied after the value is parsed to the field's type. + """ + return _Pipeline[_InT, _NewOutT](self._steps + (_Transform(func),)) + + @overload + def validate_as(self, tp: type[_NewOutT], *, strict: bool = False) -> _Pipeline[_InT, _NewOutT]: ...
+ + @overload + def validate_as( + self, + tp: ellipsis, # noqa: F821 # TODO: use `_typing_extra.EllipsisType` when we drop Py3.9 + *, + strict: bool = False, + ) -> _Pipeline[_InT, Any]: ... + + # TODO PEP 747: use TypeForm to properly type Annotated aliases (e.g. NewPath, FilePath). + # This fallback accepts any type expression but loses generic type inference. + @overload + def validate_as(self, tp: Any, *, strict: bool = ...) -> _Pipeline[_InT, Any]: ... + + def validate_as(self, tp: type[_NewOutT] | EllipsisType | Any, *, strict: bool = False) -> _Pipeline[_InT, Any]: # type: ignore + """Validate / parse the input into a new type. + + If no type is provided, the type of the field is used. + + Types are parsed in Pydantic's `lax` mode by default, + but you can enable `strict` mode by passing `strict=True`. + """ + if isinstance(tp, EllipsisType): + return _Pipeline[_InT, Any](self._steps + (_ValidateAs(_FieldTypeMarker, strict=strict),)) + return _Pipeline[_InT, _NewOutT](self._steps + (_ValidateAs(tp, strict=strict),)) + + def validate_as_deferred(self, func: Callable[[], type[_NewOutT]]) -> _Pipeline[_InT, _NewOutT]: + """Parse the input into a new type, deferring resolution of the type until the current class + is fully defined. + + This is useful when you need to reference the class in its own type annotations. + """ + return _Pipeline[_InT, _NewOutT](self._steps + (_ValidateAsDefer(func),)) + + # constraints + @overload + def constrain(self: _Pipeline[_InT, _NewOutGe], constraint: annotated_types.Ge) -> _Pipeline[_InT, _NewOutGe]: ... + + @overload + def constrain(self: _Pipeline[_InT, _NewOutGt], constraint: annotated_types.Gt) -> _Pipeline[_InT, _NewOutGt]: ... + + @overload + def constrain(self: _Pipeline[_InT, _NewOutLe], constraint: annotated_types.Le) -> _Pipeline[_InT, _NewOutLe]: ... + + @overload + def constrain(self: _Pipeline[_InT, _NewOutLt], constraint: annotated_types.Lt) -> _Pipeline[_InT, _NewOutLt]: ... + + @overload + def constrain( + self: _Pipeline[_InT, _NewOutLen], constraint: annotated_types.Len + ) -> _Pipeline[_InT, _NewOutLen]: ... + + @overload + def constrain( + self: _Pipeline[_InT, _NewOutT], constraint: annotated_types.MultipleOf + ) -> _Pipeline[_InT, _NewOutT]: ... + + @overload + def constrain( + self: _Pipeline[_InT, _NewOutDatetime], constraint: annotated_types.Timezone + ) -> _Pipeline[_InT, _NewOutDatetime]: ... + + @overload + def constrain(self: _Pipeline[_InT, _OutT], constraint: annotated_types.Predicate) -> _Pipeline[_InT, _OutT]: ... + + @overload + def constrain( + self: _Pipeline[_InT, _NewOutInterval], constraint: annotated_types.Interval + ) -> _Pipeline[_InT, _NewOutInterval]: ... + + @overload + def constrain(self: _Pipeline[_InT, _OutT], constraint: _Eq) -> _Pipeline[_InT, _OutT]: ... + + @overload + def constrain(self: _Pipeline[_InT, _OutT], constraint: _NotEq) -> _Pipeline[_InT, _OutT]: ... + + @overload + def constrain(self: _Pipeline[_InT, _OutT], constraint: _In) -> _Pipeline[_InT, _OutT]: ... + + @overload + def constrain(self: _Pipeline[_InT, _OutT], constraint: _NotIn) -> _Pipeline[_InT, _OutT]: ... + + @overload + def constrain(self: _Pipeline[_InT, _NewOutT], constraint: Pattern[str]) -> _Pipeline[_InT, _NewOutT]: ... + + def constrain(self, constraint: _ConstraintAnnotation) -> Any: + """Constrain a value to meet a certain condition. + + We support most conditions from `annotated_types`, as well as regular expressions.
+ + Most of the time you'll be calling a shortcut method like `gt`, `lt`, `len`, etc + so you don't need to call this directly. + """ + return _Pipeline[_InT, _OutT](self._steps + (_Constraint(constraint),)) + + def predicate(self: _Pipeline[_InT, _NewOutT], func: Callable[[_NewOutT], bool]) -> _Pipeline[_InT, _NewOutT]: + """Constrain a value to meet a certain predicate.""" + return self.constrain(annotated_types.Predicate(func)) + + def gt(self: _Pipeline[_InT, _NewOutGt], gt: _NewOutGt) -> _Pipeline[_InT, _NewOutGt]: + """Constrain a value to be greater than a certain value.""" + return self.constrain(annotated_types.Gt(gt)) + + def lt(self: _Pipeline[_InT, _NewOutLt], lt: _NewOutLt) -> _Pipeline[_InT, _NewOutLt]: + """Constrain a value to be less than a certain value.""" + return self.constrain(annotated_types.Lt(lt)) + + def ge(self: _Pipeline[_InT, _NewOutGe], ge: _NewOutGe) -> _Pipeline[_InT, _NewOutGe]: + """Constrain a value to be greater than or equal to a certain value.""" + return self.constrain(annotated_types.Ge(ge)) + + def le(self: _Pipeline[_InT, _NewOutLe], le: _NewOutLe) -> _Pipeline[_InT, _NewOutLe]: + """Constrain a value to be less than or equal to a certain value.""" + return self.constrain(annotated_types.Le(le)) + + def len(self: _Pipeline[_InT, _NewOutLen], min_len: int, max_len: int | None = None) -> _Pipeline[_InT, _NewOutLen]: + """Constrain a value to have a certain length.""" + return self.constrain(annotated_types.Len(min_len, max_len)) + + @overload + def multiple_of(self: _Pipeline[_InT, _NewOutDiv], multiple_of: _NewOutDiv) -> _Pipeline[_InT, _NewOutDiv]: ... + + @overload + def multiple_of(self: _Pipeline[_InT, _NewOutMod], multiple_of: _NewOutMod) -> _Pipeline[_InT, _NewOutMod]: ... + + def multiple_of(self: _Pipeline[_InT, Any], multiple_of: Any) -> _Pipeline[_InT, Any]: + """Constrain a value to be a multiple of a certain number.""" + return self.constrain(annotated_types.MultipleOf(multiple_of)) + + def eq(self: _Pipeline[_InT, _OutT], value: _OutT) -> _Pipeline[_InT, _OutT]: + """Constrain a value to be equal to a certain value.""" + return self.constrain(_Eq(value)) + + def not_eq(self: _Pipeline[_InT, _OutT], value: _OutT) -> _Pipeline[_InT, _OutT]: + """Constrain a value to not be equal to a certain value.""" + return self.constrain(_NotEq(value)) + + def in_(self: _Pipeline[_InT, _OutT], values: Container[_OutT]) -> _Pipeline[_InT, _OutT]: + """Constrain a value to be in a certain set.""" + return self.constrain(_In(values)) + + def not_in(self: _Pipeline[_InT, _OutT], values: Container[_OutT]) -> _Pipeline[_InT, _OutT]: + """Constrain a value to not be in a certain set.""" + return self.constrain(_NotIn(values)) + + # timezone methods + def datetime_tz_naive(self: _Pipeline[_InT, datetime.datetime]) -> _Pipeline[_InT, datetime.datetime]: + return self.constrain(annotated_types.Timezone(None)) + + def datetime_tz_aware(self: _Pipeline[_InT, datetime.datetime]) -> _Pipeline[_InT, datetime.datetime]: + return self.constrain(annotated_types.Timezone(...)) + + def datetime_tz( + self: _Pipeline[_InT, datetime.datetime], tz: datetime.tzinfo + ) -> _Pipeline[_InT, datetime.datetime]: + return self.constrain(annotated_types.Timezone(tz)) # type: ignore + + def datetime_with_tz( + self: _Pipeline[_InT, datetime.datetime], tz: datetime.tzinfo | None + ) -> _Pipeline[_InT, datetime.datetime]: + return self.transform(partial(datetime.datetime.replace, tzinfo=tz)) + + # string methods + def str_lower(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]: + 
return self.transform(str.lower) + + def str_upper(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]: + return self.transform(str.upper) + + def str_title(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]: + return self.transform(str.title) + + def str_strip(self: _Pipeline[_InT, str]) -> _Pipeline[_InT, str]: + return self.transform(str.strip) + + def str_pattern(self: _Pipeline[_InT, str], pattern: str) -> _Pipeline[_InT, str]: + return self.constrain(re.compile(pattern)) + + def str_contains(self: _Pipeline[_InT, str], substring: str) -> _Pipeline[_InT, str]: + return self.predicate(lambda v: substring in v) + + def str_starts_with(self: _Pipeline[_InT, str], prefix: str) -> _Pipeline[_InT, str]: + return self.predicate(lambda v: v.startswith(prefix)) + + def str_ends_with(self: _Pipeline[_InT, str], suffix: str) -> _Pipeline[_InT, str]: + return self.predicate(lambda v: v.endswith(suffix)) + + # operators + def otherwise(self, other: _Pipeline[_OtherIn, _OtherOut]) -> _Pipeline[_InT | _OtherIn, _OutT | _OtherOut]: + """Combine two validation chains, returning the result of the first chain if it succeeds, and the second chain if it fails.""" + return _Pipeline((_PipelineOr(self, other),)) + + __or__ = otherwise + + def then(self, other: _Pipeline[_OutT, _OtherOut]) -> _Pipeline[_InT, _OtherOut]: + """Pipe the result of one validation chain into another.""" + return _Pipeline((_PipelineAnd(self, other),)) + + __and__ = then + + def __get_pydantic_core_schema__(self, source_type: Any, handler: GetCoreSchemaHandler) -> cs.CoreSchema: + queue = deque(self._steps) + + s = None + + while queue: + step = queue.popleft() + s = _apply_step(step, s, handler, source_type) + + s = s or cs.any_schema() + return s + + def __supports_type__(self, _: _OutT) -> bool: + raise NotImplementedError + + +validate_as = _Pipeline[Any, Any](()).validate_as +validate_as_deferred = _Pipeline[Any, Any](()).validate_as_deferred +transform = _Pipeline[Any, Any]((_ValidateAs(_FieldTypeMarker),)).transform + + +def _check_func( + func: Callable[[Any], bool], predicate_err: str | Callable[[], str], s: cs.CoreSchema | None +) -> cs.CoreSchema: + def handler(v: Any) -> Any: + if func(v): + return v + raise ValueError(f'Expected {predicate_err if isinstance(predicate_err, str) else predicate_err()}') + + if s is None: + return cs.no_info_plain_validator_function(handler) + else: + return cs.no_info_after_validator_function(handler, s) + + +def _apply_step(step: _Step, s: cs.CoreSchema | None, handler: GetCoreSchemaHandler, source_type: Any) -> cs.CoreSchema: + if isinstance(step, _ValidateAs): + s = _apply_parse(s, step.tp, step.strict, handler, source_type) + elif isinstance(step, _ValidateAsDefer): + s = _apply_parse(s, step.tp, False, handler, source_type) + elif isinstance(step, _Transform): + s = _apply_transform(s, step.func, handler) + elif isinstance(step, _Constraint): + s = _apply_constraint(s, step.constraint) + elif isinstance(step, _PipelineOr): + s = cs.union_schema([handler(step.left), handler(step.right)]) + else: + assert isinstance(step, _PipelineAnd) + s = cs.chain_schema([handler(step.left), handler(step.right)]) + return s + + +def _apply_parse( + s: cs.CoreSchema | None, + tp: type[Any], + strict: bool, + handler: GetCoreSchemaHandler, + source_type: Any, +) -> cs.CoreSchema: + if tp is _FieldTypeMarker: + return cs.chain_schema([s, handler(source_type)]) if s else handler(source_type) + + if strict: + tp = Annotated[tp, Strict()] # type: ignore + + if s and s['type'] == 'any': + return 
handler(tp) + else: + return cs.chain_schema([s, handler(tp)]) if s else handler(tp) + + +def _apply_transform( + s: cs.CoreSchema | None, func: Callable[[Any], Any], handler: GetCoreSchemaHandler +) -> cs.CoreSchema: + if s is None: + return cs.no_info_plain_validator_function(func) + + if s['type'] == 'str': + if func is str.strip: + s = s.copy() + s['strip_whitespace'] = True + return s + elif func is str.lower: + s = s.copy() + s['to_lower'] = True + return s + elif func is str.upper: + s = s.copy() + s['to_upper'] = True + return s + + return cs.no_info_after_validator_function(func, s) + + +def _apply_constraint( # noqa: C901 + s: cs.CoreSchema | None, constraint: _ConstraintAnnotation +) -> cs.CoreSchema: + """Apply a single constraint to a schema.""" + if isinstance(constraint, annotated_types.Gt): + gt = constraint.gt + if s and s['type'] in {'int', 'float', 'decimal'}: + s = s.copy() + if s['type'] == 'int' and isinstance(gt, int): + s['gt'] = gt + elif s['type'] == 'float' and isinstance(gt, float): + s['gt'] = gt + elif s['type'] == 'decimal' and isinstance(gt, Decimal): + s['gt'] = gt + else: + + def check_gt(v: Any) -> bool: + return v > gt + + s = _check_func(check_gt, f'> {gt}', s) + elif isinstance(constraint, annotated_types.Ge): + ge = constraint.ge + if s and s['type'] in {'int', 'float', 'decimal'}: + s = s.copy() + if s['type'] == 'int' and isinstance(ge, int): + s['ge'] = ge + elif s['type'] == 'float' and isinstance(ge, float): + s['ge'] = ge + elif s['type'] == 'decimal' and isinstance(ge, Decimal): + s['ge'] = ge + + def check_ge(v: Any) -> bool: + return v >= ge + + s = _check_func(check_ge, f'>= {ge}', s) + elif isinstance(constraint, annotated_types.Lt): + lt = constraint.lt + if s and s['type'] in {'int', 'float', 'decimal'}: + s = s.copy() + if s['type'] == 'int' and isinstance(lt, int): + s['lt'] = lt + elif s['type'] == 'float' and isinstance(lt, float): + s['lt'] = lt + elif s['type'] == 'decimal' and isinstance(lt, Decimal): + s['lt'] = lt + + def check_lt(v: Any) -> bool: + return v < lt + + s = _check_func(check_lt, f'< {lt}', s) + elif isinstance(constraint, annotated_types.Le): + le = constraint.le + if s and s['type'] in {'int', 'float', 'decimal'}: + s = s.copy() + if s['type'] == 'int' and isinstance(le, int): + s['le'] = le + elif s['type'] == 'float' and isinstance(le, float): + s['le'] = le + elif s['type'] == 'decimal' and isinstance(le, Decimal): + s['le'] = le + + def check_le(v: Any) -> bool: + return v <= le + + s = _check_func(check_le, f'<= {le}', s) + elif isinstance(constraint, annotated_types.Len): + min_len = constraint.min_length + max_len = constraint.max_length + + if s and s['type'] in {'str', 'list', 'tuple', 'set', 'frozenset', 'dict'}: + assert ( + s['type'] == 'str' + or s['type'] == 'list' + or s['type'] == 'tuple' + or s['type'] == 'set' + or s['type'] == 'dict' + or s['type'] == 'frozenset' + ) + s = s.copy() + if min_len != 0: + s['min_length'] = min_len + if max_len is not None: + s['max_length'] = max_len + + def check_len(v: Any) -> bool: + if max_len is not None: + return (min_len <= len(v)) and (len(v) <= max_len) + return min_len <= len(v) + + s = _check_func(check_len, f'length >= {min_len} and length <= {max_len}', s) + elif isinstance(constraint, annotated_types.MultipleOf): + multiple_of = constraint.multiple_of + if s and s['type'] in {'int', 'float', 'decimal'}: + s = s.copy() + if s['type'] == 'int' and isinstance(multiple_of, int): + s['multiple_of'] = multiple_of + elif s['type'] == 'float' and
isinstance(multiple_of, float): + s['multiple_of'] = multiple_of + elif s['type'] == 'decimal' and isinstance(multiple_of, Decimal): + s['multiple_of'] = multiple_of + + def check_multiple_of(v: Any) -> bool: + return v % multiple_of == 0 + + s = _check_func(check_multiple_of, f'% {multiple_of} == 0', s) + elif isinstance(constraint, annotated_types.Timezone): + tz = constraint.tz + + if tz is ...: + if s and s['type'] == 'datetime': + s = s.copy() + s['tz_constraint'] = 'aware' + else: + + def check_tz_aware(v: object) -> bool: + assert isinstance(v, datetime.datetime) + return v.tzinfo is not None + + s = _check_func(check_tz_aware, 'timezone aware', s) + elif tz is None: + if s and s['type'] == 'datetime': + s = s.copy() + s['tz_constraint'] = 'naive' + else: + + def check_tz_naive(v: object) -> bool: + assert isinstance(v, datetime.datetime) + return v.tzinfo is None + + s = _check_func(check_tz_naive, 'timezone naive', s) + else: + raise NotImplementedError('Constraining to a specific timezone is not yet supported') + elif isinstance(constraint, annotated_types.Interval): + if constraint.ge: + s = _apply_constraint(s, annotated_types.Ge(constraint.ge)) + if constraint.gt: + s = _apply_constraint(s, annotated_types.Gt(constraint.gt)) + if constraint.le: + s = _apply_constraint(s, annotated_types.Le(constraint.le)) + if constraint.lt: + s = _apply_constraint(s, annotated_types.Lt(constraint.lt)) + assert s is not None + elif isinstance(constraint, annotated_types.Predicate): + func = constraint.func + # Same logic as in `_known_annotated_metadata.apply_known_metadata()`: + predicate_name = f'{func.__qualname__!r} ' if hasattr(func, '__qualname__') else '' + + def predicate_func(v: Any) -> Any: + if not func(v): + raise PydanticCustomError( + 'predicate_failed', + f'Predicate {predicate_name}failed', # pyright: ignore[reportArgumentType] + ) + return v + + if s is None: + s = cs.no_info_plain_validator_function(predicate_func) + else: + s = cs.no_info_after_validator_function(predicate_func, s) + elif isinstance(constraint, _NotEq): + value = constraint.value + + def check_not_eq(v: Any) -> bool: + return operator.__ne__(v, value) + + s = _check_func(check_not_eq, f'!= {value}', s) + elif isinstance(constraint, _Eq): + value = constraint.value + + def check_eq(v: Any) -> bool: + return operator.__eq__(v, value) + + s = _check_func(check_eq, f'== {value}', s) + elif isinstance(constraint, _In): + values = constraint.values + + def check_in(v: Any) -> bool: + return operator.__contains__(values, v) + + s = _check_func(check_in, f'in {values}', s) + elif isinstance(constraint, _NotIn): + values = constraint.values + + def check_not_in(v: Any) -> bool: + return operator.__not__(operator.__contains__(values, v)) + + s = _check_func(check_not_in, f'not in {values}', s) + else: + assert isinstance(constraint, Pattern) + if s and s['type'] == 'str': + s = s.copy() + s['pattern'] = constraint.pattern + else: + + def check_pattern(v: object) -> bool: + assert isinstance(v, str) + return constraint.match(v) is not None + + s = _check_func(check_pattern, f'~ {constraint.pattern}', s) + return s + + +class _SupportsRange(annotated_types.SupportsLe, annotated_types.SupportsGe, Protocol): + pass + + +class _SupportsLen(Protocol): + def __len__(self) -> int: ... 
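Taken together, the fluent methods above compose into `Annotated` metadata, and `__get_pydantic_core_schema__` folds each step into the core schema. A minimal usage sketch (assumes a pydantic version that ships `pydantic.experimental.pipeline`; the `User` model and its fields are illustrative):

from typing import Annotated

from pydantic import BaseModel
from pydantic.experimental.pipeline import validate_as

class User(BaseModel):
    # parse as str, strip whitespace (folded into the str schema), then require length >= 1
    name: Annotated[str, validate_as(str).str_strip().len(1)]
    # lax-parse as int ('42' is accepted), then constrain to a positive value
    age: Annotated[int, validate_as(int).gt(0)]

user = User(name='  alice ', age='42')
assert user.name == 'alice' and user.age == 42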
+ + +_NewOutGt = TypeVar('_NewOutGt', bound=annotated_types.SupportsGt) +_NewOutGe = TypeVar('_NewOutGe', bound=annotated_types.SupportsGe) +_NewOutLt = TypeVar('_NewOutLt', bound=annotated_types.SupportsLt) +_NewOutLe = TypeVar('_NewOutLe', bound=annotated_types.SupportsLe) +_NewOutLen = TypeVar('_NewOutLen', bound=_SupportsLen) +_NewOutDiv = TypeVar('_NewOutDiv', bound=annotated_types.SupportsDiv) +_NewOutMod = TypeVar('_NewOutMod', bound=annotated_types.SupportsMod) +_NewOutDatetime = TypeVar('_NewOutDatetime', bound=datetime.datetime) +_NewOutInterval = TypeVar('_NewOutInterval', bound=_SupportsRange) +_OtherIn = TypeVar('_OtherIn') +_OtherOut = TypeVar('_OtherOut') diff --git a/venv/lib/python3.12/site-packages/pydantic/plugin/__init__.py b/venv/lib/python3.12/site-packages/pydantic/plugin/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..840d20a09ef27b084440ad16e221403bddd2ac3a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/plugin/__init__.py @@ -0,0 +1,193 @@ +"""!!! abstract "Usage Documentation" + [Build a Plugin](../concepts/plugins.md#build-a-plugin) + +Plugin interface for Pydantic plugins, and related types. +""" + +from __future__ import annotations + +from typing import Any, Callable, Literal, NamedTuple + +from pydantic_core import CoreConfig, CoreSchema, ValidationError +from typing_extensions import Protocol, TypeAlias + +from pydantic.config import ExtraValues + +__all__ = ( + 'PydanticPluginProtocol', + 'BaseValidateHandlerProtocol', + 'ValidatePythonHandlerProtocol', + 'ValidateJsonHandlerProtocol', + 'ValidateStringsHandlerProtocol', + 'NewSchemaReturns', + 'SchemaTypePath', + 'SchemaKind', +) + +NewSchemaReturns: TypeAlias = 'tuple[ValidatePythonHandlerProtocol | None, ValidateJsonHandlerProtocol | None, ValidateStringsHandlerProtocol | None]' + + +class SchemaTypePath(NamedTuple): + """Path defining where `schema_type` was defined, or where `TypeAdapter` was called.""" + + module: str + name: str + + +SchemaKind: TypeAlias = Literal['BaseModel', 'TypeAdapter', 'dataclass', 'create_model', 'validate_call'] + + +class PydanticPluginProtocol(Protocol): + """Protocol defining the interface for Pydantic plugins.""" + + def new_schema_validator( + self, + schema: CoreSchema, + schema_type: Any, + schema_type_path: SchemaTypePath, + schema_kind: SchemaKind, + config: CoreConfig | None, + plugin_settings: dict[str, object], + ) -> tuple[ + ValidatePythonHandlerProtocol | None, ValidateJsonHandlerProtocol | None, ValidateStringsHandlerProtocol | None + ]: + """This method is called for each plugin every time a new [`SchemaValidator`][pydantic_core.SchemaValidator] + is created. + + It should return an event handler for each of the three validation methods, or `None` if the plugin does not + implement that method. + + Args: + schema: The schema to validate against. + schema_type: The original type which the schema was created from, e.g. the model class. + schema_type_path: Path defining where `schema_type` was defined, or where `TypeAdapter` was called. + schema_kind: The kind of schema to validate against. + config: The config to use for validation. + plugin_settings: Any plugin settings. + + Returns: + A tuple of optional event handlers for each of the three validation methods - + `validate_python`, `validate_json`, `validate_strings`. + """ + raise NotImplementedError('Pydantic plugins should implement `new_schema_validator`.') + + +class BaseValidateHandlerProtocol(Protocol): + """Base class for plugin callbacks protocols. 
+ + You shouldn't implement this protocol directly, instead use one of the subclasses, which adds the correctly + typed `on_enter` method. + """ + + on_enter: Callable[..., None] + """`on_enter` is changed to be more specific on all subclasses""" + + def on_success(self, result: Any) -> None: + """Callback to be notified of successful validation. + + Args: + result: The result of the validation. + """ + return + + def on_error(self, error: ValidationError) -> None: + """Callback to be notified of validation errors. + + Args: + error: The validation error. + """ + return + + def on_exception(self, exception: Exception) -> None: + """Callback to be notified of validation exceptions. + + Args: + exception: The exception raised during validation. + """ + return + + +class ValidatePythonHandlerProtocol(BaseValidateHandlerProtocol, Protocol): + """Event handler for `SchemaValidator.validate_python`.""" + + def on_enter( + self, + input: Any, + *, + strict: bool | None = None, + extra: ExtraValues | None = None, + from_attributes: bool | None = None, + context: Any | None = None, + self_instance: Any | None = None, + by_alias: bool | None = None, + by_name: bool | None = None, + ) -> None: + """Callback to be notified of validation start, and create an instance of the event handler. + + Args: + input: The input to be validated. + strict: Whether to validate the object in strict mode. + extra: Whether to ignore, allow, or forbid extra data during model validation. + from_attributes: Whether to validate objects as inputs by extracting attributes. + context: The context to use for validation, this is passed to functional validators. + self_instance: An instance of a model to set attributes on from validation, this is used when running + validation from the `__init__` method of a model. + by_alias: Whether to use the field's alias to match the input data to an attribute. + by_name: Whether to use the field's name to match the input data to an attribute. + """ + + +class ValidateJsonHandlerProtocol(BaseValidateHandlerProtocol, Protocol): + """Event handler for `SchemaValidator.validate_json`.""" + + def on_enter( + self, + input: str | bytes | bytearray, + *, + strict: bool | None = None, + extra: ExtraValues | None = None, + context: Any | None = None, + self_instance: Any | None = None, + by_alias: bool | None = None, + by_name: bool | None = None, + ) -> None: + """Callback to be notified of validation start, and create an instance of the event handler. + + Args: + input: The JSON data to be validated. + strict: Whether to validate the object in strict mode. + extra: Whether to ignore, allow, or forbid extra data during model validation. + context: The context to use for validation, this is passed to functional validators. + self_instance: An instance of a model to set attributes on from validation, this is used when running + validation from the `__init__` method of a model. + by_alias: Whether to use the field's alias to match the input data to an attribute. + by_name: Whether to use the field's name to match the input data to an attribute.
+ """ + + +StringInput: TypeAlias = 'dict[str, StringInput]' + + +class ValidateStringsHandlerProtocol(BaseValidateHandlerProtocol, Protocol): + """Event handler for `SchemaValidator.validate_strings`.""" + + def on_enter( + self, + input: StringInput, + *, + strict: bool | None = None, + extra: ExtraValues | None = None, + context: Any | None = None, + by_alias: bool | None = None, + by_name: bool | None = None, + ) -> None: + """Callback to be notified of validation start, and create an instance of the event handler. + + Args: + input: The string data to be validated. + strict: Whether to validate the object in strict mode. + extra: Whether to ignore, allow, or forbid extra data during model validation. + context: The context to use for validation, this is passed to functional validators. + by_alias: Whether to use the field's alias to match the input data to an attribute. + by_name: Whether to use the field's name to match the input data to an attribute. + """ diff --git a/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ed0d734d4956b33d45be399cc2dbda27bdd0d9b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/_loader.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/_loader.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..20a8f2a615de3b828f779c41c8f8625dfd748a59 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/_loader.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8bc1325ec04edef049e9a7ac011d6c7c1874b02 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/plugin/__pycache__/_schema_validator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/plugin/_loader.py b/venv/lib/python3.12/site-packages/pydantic/plugin/_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..e8226fafe98131afa3bee9b70d0324d694c72da5 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/plugin/_loader.py @@ -0,0 +1,58 @@ +from __future__ import annotations + +import importlib.metadata as importlib_metadata +import os +import warnings +from collections.abc import Iterable +from typing import TYPE_CHECKING, Final + +if TYPE_CHECKING: + from . import PydanticPluginProtocol + + +PYDANTIC_ENTRY_POINT_GROUP: Final[str] = 'pydantic' + +# cache of plugins +_plugins: dict[str, PydanticPluginProtocol] | None = None +# return no plugins while loading plugins to avoid recursion and errors while importing plugins +# this means that if plugins use pydantic +_loading_plugins: bool = False + + +def get_plugins() -> Iterable[PydanticPluginProtocol]: + """Load plugins for Pydantic. 
+ + Inspired by: https://github.com/pytest-dev/pluggy/blob/1.3.0/src/pluggy/_manager.py#L376-L402 + """ + disabled_plugins = os.getenv('PYDANTIC_DISABLE_PLUGINS') + global _plugins, _loading_plugins + if _loading_plugins: + # this happens when plugins themselves use pydantic, we return no plugins + return () + elif disabled_plugins in ('__all__', '1', 'true'): + return () + elif _plugins is None: + _plugins = {} + # set _loading_plugins so any plugins that use pydantic don't themselves use plugins + _loading_plugins = True + try: + for dist in importlib_metadata.distributions(): + for entry_point in dist.entry_points: + if entry_point.group != PYDANTIC_ENTRY_POINT_GROUP: + continue + if entry_point.value in _plugins: + continue + if disabled_plugins is not None and entry_point.name in disabled_plugins.split(','): + continue + try: + _plugins[entry_point.value] = entry_point.load() + except (ImportError, AttributeError) as e: + warnings.warn( + f'{e.__class__.__name__} while loading the `{entry_point.name}` Pydantic plugin, ' + f'this plugin will not be installed.\n\n{e!r}', + stacklevel=2, + ) + finally: + _loading_plugins = False + + return _plugins.values() diff --git a/venv/lib/python3.12/site-packages/pydantic/plugin/_schema_validator.py b/venv/lib/python3.12/site-packages/pydantic/plugin/_schema_validator.py new file mode 100644 index 0000000000000000000000000000000000000000..492cc39cba7f1244b8c152df244064067270e2d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/plugin/_schema_validator.py @@ -0,0 +1,143 @@ +"""Pluggable schema validator for pydantic.""" + +from __future__ import annotations + +import functools +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar + +from pydantic_core import CoreConfig, CoreSchema, SchemaValidator, ValidationError +from typing_extensions import ParamSpec + +if TYPE_CHECKING: + from . import BaseValidateHandlerProtocol, PydanticPluginProtocol, SchemaKind, SchemaTypePath + + +P = ParamSpec('P') +R = TypeVar('R') +Event = Literal['on_validate_python', 'on_validate_json', 'on_validate_strings'] +events: list[Event] = list(Event.__args__) # type: ignore + + +def create_schema_validator( + schema: CoreSchema, + schema_type: Any, + schema_type_module: str, + schema_type_name: str, + schema_kind: SchemaKind, + config: CoreConfig | None = None, + plugin_settings: dict[str, Any] | None = None, + _use_prebuilt: bool = True, +) -> SchemaValidator | PluggableSchemaValidator: + """Create a `SchemaValidator` or `PluggableSchemaValidator` if plugins are installed. + + Returns: + If plugins are installed then return `PluggableSchemaValidator`, otherwise return `SchemaValidator`. + """ + from . 
import SchemaTypePath + from ._loader import get_plugins + + plugins = get_plugins() + if plugins: + return PluggableSchemaValidator( + schema, + schema_type, + SchemaTypePath(schema_type_module, schema_type_name), + schema_kind, + config, + plugins, + plugin_settings or {}, + _use_prebuilt=_use_prebuilt, + ) + else: + return SchemaValidator(schema, config, _use_prebuilt=_use_prebuilt) + + +class PluggableSchemaValidator: + """Pluggable schema validator.""" + + __slots__ = '_schema_validator', 'validate_json', 'validate_python', 'validate_strings' + + def __init__( + self, + schema: CoreSchema, + schema_type: Any, + schema_type_path: SchemaTypePath, + schema_kind: SchemaKind, + config: CoreConfig | None, + plugins: Iterable[PydanticPluginProtocol], + plugin_settings: dict[str, Any], + _use_prebuilt: bool = True, + ) -> None: + self._schema_validator = SchemaValidator(schema, config, _use_prebuilt=_use_prebuilt) + + python_event_handlers: list[BaseValidateHandlerProtocol] = [] + json_event_handlers: list[BaseValidateHandlerProtocol] = [] + strings_event_handlers: list[BaseValidateHandlerProtocol] = [] + for plugin in plugins: + try: + p, j, s = plugin.new_schema_validator( + schema, schema_type, schema_type_path, schema_kind, config, plugin_settings + ) + except TypeError as e: # pragma: no cover + raise TypeError(f'Error using plugin `{plugin.__module__}:{plugin.__class__.__name__}`: {e}') from e + if p is not None: + python_event_handlers.append(p) + if j is not None: + json_event_handlers.append(j) + if s is not None: + strings_event_handlers.append(s) + + self.validate_python = build_wrapper(self._schema_validator.validate_python, python_event_handlers) + self.validate_json = build_wrapper(self._schema_validator.validate_json, json_event_handlers) + self.validate_strings = build_wrapper(self._schema_validator.validate_strings, strings_event_handlers) + + def __getattr__(self, name: str) -> Any: + return getattr(self._schema_validator, name) + + +def build_wrapper(func: Callable[P, R], event_handlers: list[BaseValidateHandlerProtocol]) -> Callable[P, R]: + if not event_handlers: + return func + else: + on_enters = tuple(h.on_enter for h in event_handlers if filter_handlers(h, 'on_enter')) + on_successes = tuple(h.on_success for h in event_handlers if filter_handlers(h, 'on_success')) + on_errors = tuple(h.on_error for h in event_handlers if filter_handlers(h, 'on_error')) + on_exceptions = tuple(h.on_exception for h in event_handlers if filter_handlers(h, 'on_exception')) + + @functools.wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> R: + for on_enter_handler in on_enters: + on_enter_handler(*args, **kwargs) + + try: + result = func(*args, **kwargs) + except ValidationError as error: + for on_error_handler in on_errors: + on_error_handler(error) + raise + except Exception as exception: + for on_exception_handler in on_exceptions: + on_exception_handler(exception) + raise + else: + for on_success_handler in on_successes: + on_success_handler(result) + return result + + return wrapper + + +def filter_handlers(handler_cls: BaseValidateHandlerProtocol, method_name: str) -> bool: + """Filter out handler methods which are not implemented by the plugin directly - e.g. those that are missing + or are inherited from the protocol. 
+ """ + handler = getattr(handler_cls, method_name, None) + if handler is None: + return False + elif handler.__module__ == 'pydantic.plugin': + # this is the original handler, from the protocol due to runtime inheritance + # we don't want to call it + return False + else: + return True diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__init__.py b/venv/lib/python3.12/site-packages/pydantic/v1/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6ad3f466684df5c377480a8e181971a1b2f0016a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/__init__.py @@ -0,0 +1,131 @@ +# flake8: noqa +from pydantic.v1 import dataclasses +from pydantic.v1.annotated_types import create_model_from_namedtuple, create_model_from_typeddict +from pydantic.v1.class_validators import root_validator, validator +from pydantic.v1.config import BaseConfig, ConfigDict, Extra +from pydantic.v1.decorator import validate_arguments +from pydantic.v1.env_settings import BaseSettings +from pydantic.v1.error_wrappers import ValidationError +from pydantic.v1.errors import * +from pydantic.v1.fields import Field, PrivateAttr, Required +from pydantic.v1.main import * +from pydantic.v1.networks import * +from pydantic.v1.parse import Protocol +from pydantic.v1.tools import * +from pydantic.v1.types import * +from pydantic.v1.version import VERSION, compiled + +__version__ = VERSION + +# WARNING __all__ from pydantic.errors is not included here, it will be removed as an export here in v2 +# please use "from pydantic.v1.errors import ..." instead +__all__ = [ + # annotated types utils + 'create_model_from_namedtuple', + 'create_model_from_typeddict', + # dataclasses + 'dataclasses', + # class_validators + 'root_validator', + 'validator', + # config + 'BaseConfig', + 'ConfigDict', + 'Extra', + # decorator + 'validate_arguments', + # env_settings + 'BaseSettings', + # error_wrappers + 'ValidationError', + # fields + 'Field', + 'Required', + # main + 'BaseModel', + 'create_model', + 'validate_model', + # network + 'AnyUrl', + 'AnyHttpUrl', + 'FileUrl', + 'HttpUrl', + 'stricturl', + 'EmailStr', + 'NameEmail', + 'IPvAnyAddress', + 'IPvAnyInterface', + 'IPvAnyNetwork', + 'PostgresDsn', + 'CockroachDsn', + 'AmqpDsn', + 'RedisDsn', + 'MongoDsn', + 'KafkaDsn', + 'validate_email', + # parse + 'Protocol', + # tools + 'parse_file_as', + 'parse_obj_as', + 'parse_raw_as', + 'schema_of', + 'schema_json_of', + # types + 'NoneStr', + 'NoneBytes', + 'StrBytes', + 'NoneStrBytes', + 'StrictStr', + 'ConstrainedBytes', + 'conbytes', + 'ConstrainedList', + 'conlist', + 'ConstrainedSet', + 'conset', + 'ConstrainedFrozenSet', + 'confrozenset', + 'ConstrainedStr', + 'constr', + 'PyObject', + 'ConstrainedInt', + 'conint', + 'PositiveInt', + 'NegativeInt', + 'NonNegativeInt', + 'NonPositiveInt', + 'ConstrainedFloat', + 'confloat', + 'PositiveFloat', + 'NegativeFloat', + 'NonNegativeFloat', + 'NonPositiveFloat', + 'FiniteFloat', + 'ConstrainedDecimal', + 'condecimal', + 'ConstrainedDate', + 'condate', + 'UUID1', + 'UUID3', + 'UUID4', + 'UUID5', + 'FilePath', + 'DirectoryPath', + 'Json', + 'JsonWrapper', + 'SecretField', + 'SecretStr', + 'SecretBytes', + 'StrictBool', + 'StrictBytes', + 'StrictInt', + 'StrictFloat', + 'PaymentCardNumber', + 'PrivateAttr', + 'ByteSize', + 'PastDate', + 'FutureDate', + # version + 'compiled', + 'VERSION', +] diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/__init__.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29e130d43e43cdbb17cef9a31e449d21fa69c31c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/_hypothesis_plugin.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/_hypothesis_plugin.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..868d35e845e47ede6eebac91116aca8f9d326bbf Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/_hypothesis_plugin.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24060d60c6d2a392be178b9a1bb58a3cedd5acc3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/annotated_types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/class_validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/class_validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..34efa4a9f6caf2944fee41c23decf7044fff564b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/class_validators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/color.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/color.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b41aba7ecafe3650f7cec04f85a8a471337a36b1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/color.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/config.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/config.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4a00098f32645f3259686aeb893f99702d485393 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/config.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d1a60e43fc2e1b973388571b768e8ee5a81221e0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/dataclasses.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc00ad38e7f17aab952b746ec5611ccb0e6e0899 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/datetime_parse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/decorator.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/decorator.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..eb29d795e1cd36134e935b43ea17ff07c379b15a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/decorator.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/env_settings.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/env_settings.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8d4ded19f2c8481830ccad4e36f221a10368d5d4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/env_settings.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4509d09e50249d52b908559f0596c13aadf24123 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/error_wrappers.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/errors.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..adf88b17895b36dc95c288d97bf8442632935b9b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/errors.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/fields.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/fields.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d423400540d6253b834987f3e74b4317ff623726 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/fields.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/generics.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/generics.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0ba04050823f0d3f8dc64c9c5253cf93fe83839a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/generics.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/json.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/json.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a008c7fe259f7160d768ab55b8c0d703acfa3e09 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/json.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/main.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/main.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..38a61e1ad4cb7dff26b38e39a8df390420e00166 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/main.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/mypy.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/mypy.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1645b7e5e185f858dcd14bdab45e649f7cd8a37a Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/mypy.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/networks.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/networks.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ef43589ca3cf1ad7246b9a137802ce0b7cebfb01 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/networks.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/parse.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/parse.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..eff9d9ce8674273c00bc0f7edf2957f4dfe159f5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/parse.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/schema.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/schema.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6004fe07a31b25d2b6d907653b6c2672ed8eecb9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/schema.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/tools.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/tools.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b8a6f9fe68fb10f3d8d155d26b85e9e4ee6f41c Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/tools.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/types.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ede959c5f0c0c79fe0ba4eb7f248ca637fa4c4ec Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/types.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/typing.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/typing.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c76a63531564539f780b91522c80e344cdda89e5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/typing.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e69e22bb1f20f3d62fb98cd407222993a82295b Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/validators.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/validators.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cade31139a9cc53a24296cc6b82a14e89cea96e8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/validators.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/version.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/version.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b64e91cd651e94de541df82b0f368be190cc819 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/pydantic/v1/__pycache__/version.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py b/venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py new file mode 100644 index 0000000000000000000000000000000000000000..b62234d50750cbcf6d3ebba02d2ee54afb8c7347 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/_hypothesis_plugin.py @@ -0,0 +1,391 @@ +""" +Register Hypothesis strategies for Pydantic custom types. + +This enables fully-automatic generation of test data for most Pydantic classes. + +Note that this module has *no* runtime impact on Pydantic itself; instead it +is registered as a setuptools entry point and Hypothesis will import it if +Pydantic is installed. See also: + +https://hypothesis.readthedocs.io/en/latest/strategies.html#registering-strategies-via-setuptools-entry-points +https://hypothesis.readthedocs.io/en/latest/data.html#hypothesis.strategies.register_type_strategy +https://hypothesis.readthedocs.io/en/latest/strategies.html#interaction-with-pytest-cov +https://docs.pydantic.dev/usage/types/#pydantic-types + +Note that because our motivation is to *improve user experience*, the strategies +are always sound (never generate invalid data) but sacrifice completeness for +maintainability (ie may be unable to generate some tricky but valid data). + +Finally, this module makes liberal use of `# type: ignore[<code>]` pragmas. +This is because Hypothesis annotates `register_type_strategy()` with +`(T, SearchStrategy[T])`, but in most cases we register e.g. `ConstrainedInt` +to generate instances of the builtin `int` type which match the constraints. +""" + +import contextlib +import datetime +import ipaddress +import json +import math +from fractions import Fraction +from typing import Callable, Dict, Type, Union, cast, overload + +import hypothesis.strategies as st + +import pydantic +import pydantic.color +import pydantic.types +from pydantic.v1.utils import lenient_issubclass + +# FilePath and DirectoryPath are explicitly unsupported, as we'd have to create +# them on-disk, and that's unsafe in general without being told *where* to do so. +# +# URLs are unsupported because it's easy for users to define their own strategy for +# "normal" URLs, and hard for us to define a general strategy which includes "weird" +# URLs but doesn't also have unpredictable performance problems. +# +# conlist() and conset() are unsupported for now, because the workarounds for +# Cython and Hypothesis to handle parametrized generic types are incompatible. +# We are rethinking Hypothesis compatibility in Pydantic v2. + +# Emails +try: + import email_validator +except ImportError: # pragma: no cover + pass +else: + + def is_valid_email(s: str) -> bool: + # Hypothesis' st.emails() occasionally generates emails like 0@A0--0.ac + # that are invalid according to email-validator, so we filter those out. + try: + email_validator.validate_email(s, check_deliverability=False) + return True + except email_validator.EmailNotValidError: # pragma: no cover + return False + + # Note that these strategies deliberately stay away from any tricky Unicode + # or other encoding issues; we're just trying to generate *something* valid.
+ st.register_type_strategy(pydantic.EmailStr, st.emails().filter(is_valid_email)) # type: ignore[arg-type] + st.register_type_strategy( + pydantic.NameEmail, + st.builds( + '{} <{}>'.format, # type: ignore[arg-type] + st.from_regex('[A-Za-z0-9_]+( [A-Za-z0-9_]+){0,5}', fullmatch=True), + st.emails().filter(is_valid_email), + ), + ) + +# PyObject - dotted names, in this case taken from the math module. +st.register_type_strategy( + pydantic.PyObject, # type: ignore[arg-type] + st.sampled_from( + [cast(pydantic.PyObject, f'math.{name}') for name in sorted(vars(math)) if not name.startswith('_')] + ), +) + +# CSS3 Colors; as name, hex, rgb(a) tuples or strings, or hsl strings +_color_regexes = ( + '|'.join( + ( + pydantic.color.r_hex_short, + pydantic.color.r_hex_long, + pydantic.color.r_rgb, + pydantic.color.r_rgba, + pydantic.color.r_hsl, + pydantic.color.r_hsla, + ) + ) + # Use more precise regex patterns to avoid value-out-of-range errors + .replace(pydantic.color._r_sl, r'(?:(\d\d?(?:\.\d+)?|100(?:\.0+)?)%)') + .replace(pydantic.color._r_alpha, r'(?:(0(?:\.\d+)?|1(?:\.0+)?|\.\d+|\d{1,2}%))') + .replace(pydantic.color._r_255, r'(?:((?:\d|\d\d|[01]\d\d|2[0-4]\d|25[0-4])(?:\.\d+)?|255(?:\.0+)?))') +) +st.register_type_strategy( + pydantic.color.Color, + st.one_of( + st.sampled_from(sorted(pydantic.color.COLORS_BY_NAME)), + st.tuples( + st.integers(0, 255), + st.integers(0, 255), + st.integers(0, 255), + st.none() | st.floats(0, 1) | st.floats(0, 100).map('{}%'.format), + ), + st.from_regex(_color_regexes, fullmatch=True), + ), +) + + +# Card numbers, valid according to the Luhn algorithm + + +def add_luhn_digit(card_number: str) -> str: + # See https://en.wikipedia.org/wiki/Luhn_algorithm + for digit in '0123456789': + with contextlib.suppress(Exception): + pydantic.PaymentCardNumber.validate_luhn_check_digit(card_number + digit) + return card_number + digit + raise AssertionError('Unreachable') # pragma: no cover + + +card_patterns = ( + # Note that these patterns omit the Luhn check digit; that's added by the function above + '4[0-9]{14}', # Visa + '5[12345][0-9]{13}', # Mastercard + '3[47][0-9]{12}', # American Express + '[0-26-9][0-9]{10,17}', # other (incomplete to avoid overlap) +) +st.register_type_strategy( + pydantic.PaymentCardNumber, + st.from_regex('|'.join(card_patterns), fullmatch=True).map(add_luhn_digit), # type: ignore[arg-type] +) + +# UUIDs +st.register_type_strategy(pydantic.UUID1, st.uuids(version=1)) +st.register_type_strategy(pydantic.UUID3, st.uuids(version=3)) +st.register_type_strategy(pydantic.UUID4, st.uuids(version=4)) +st.register_type_strategy(pydantic.UUID5, st.uuids(version=5)) + +# Secrets +st.register_type_strategy(pydantic.SecretBytes, st.binary().map(pydantic.SecretBytes)) +st.register_type_strategy(pydantic.SecretStr, st.text().map(pydantic.SecretStr)) + +# IP addresses, networks, and interfaces +st.register_type_strategy(pydantic.IPvAnyAddress, st.ip_addresses()) # type: ignore[arg-type] +st.register_type_strategy( + pydantic.IPvAnyInterface, + st.from_type(ipaddress.IPv4Interface) | st.from_type(ipaddress.IPv6Interface), # type: ignore[arg-type] +) +st.register_type_strategy( + pydantic.IPvAnyNetwork, + st.from_type(ipaddress.IPv4Network) | st.from_type(ipaddress.IPv6Network), # type: ignore[arg-type] +) + +# We hook into the con***() functions and the ConstrainedNumberMeta metaclass, +# so here we only have to register subclasses for other constrained types which +# don't go via those mechanisms. Then there are the registration hooks below. 
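+#
+# A minimal sketch of what this enables (hypothetical test code, comment only;
+# the `User` model and test name are ours):
+#
+#     from hypothesis import given, strategies as st
+#
+#     class User(pydantic.BaseModel):
+#         email: pydantic.EmailStr
+#         card: pydantic.PaymentCardNumber
+#
+#     @given(st.from_type(User))
+#     def test_user_is_valid(user: User) -> None:
+#         assert isinstance(user, User)  # construction implies validation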
+st.register_type_strategy(pydantic.StrictBool, st.booleans()) +st.register_type_strategy(pydantic.StrictStr, st.text()) + + +# FutureDate, PastDate +st.register_type_strategy(pydantic.FutureDate, st.dates(min_value=datetime.date.today() + datetime.timedelta(days=1))) +st.register_type_strategy(pydantic.PastDate, st.dates(max_value=datetime.date.today() - datetime.timedelta(days=1))) + + +# Constrained-type resolver functions +# +# For these ones, we actually want to inspect the type in order to work out a +# satisfying strategy. First up, the machinery for tracking resolver functions: + +RESOLVERS: Dict[type, Callable[[type], st.SearchStrategy]] = {} # type: ignore[type-arg] + + +@overload +def _registered(typ: Type[pydantic.types.T]) -> Type[pydantic.types.T]: + pass + + +@overload +def _registered(typ: pydantic.types.ConstrainedNumberMeta) -> pydantic.types.ConstrainedNumberMeta: + pass + + +def _registered( + typ: Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta] +) -> Union[Type[pydantic.types.T], pydantic.types.ConstrainedNumberMeta]: + # This function replaces the version in `pydantic.types`, in order to + # effect the registration of new constrained types so that Hypothesis + # can generate valid examples. + pydantic.types._DEFINED_TYPES.add(typ) + for supertype, resolver in RESOLVERS.items(): + if issubclass(typ, supertype): + st.register_type_strategy(typ, resolver(typ)) # type: ignore + return typ + raise NotImplementedError(f'Unknown type {typ!r} has no resolver to register') # pragma: no cover + + +def resolves( + typ: Union[type, pydantic.types.ConstrainedNumberMeta] +) -> Callable[[Callable[..., st.SearchStrategy]], Callable[..., st.SearchStrategy]]: # type: ignore[type-arg] + def inner(f): # type: ignore + assert f not in RESOLVERS + RESOLVERS[typ] = f + return f + + return inner + + +# Type-to-strategy resolver functions + + +@resolves(pydantic.JsonWrapper) +def resolve_json(cls): # type: ignore[no-untyped-def] + try: + inner = st.none() if cls.inner_type is None else st.from_type(cls.inner_type) + except Exception: # pragma: no cover + finite = st.floats(allow_infinity=False, allow_nan=False) + inner = st.recursive( + base=st.one_of(st.none(), st.booleans(), st.integers(), finite, st.text()), + extend=lambda x: st.lists(x) | st.dictionaries(st.text(), x), # type: ignore + ) + inner_type = getattr(cls, 'inner_type', None) + return st.builds( + cls.inner_type.json if lenient_issubclass(inner_type, pydantic.BaseModel) else json.dumps, + inner, + ensure_ascii=st.booleans(), + indent=st.none() | st.integers(0, 16), + sort_keys=st.booleans(), + ) + + +@resolves(pydantic.ConstrainedBytes) +def resolve_conbytes(cls): # type: ignore[no-untyped-def] # pragma: no cover + min_size = cls.min_length or 0 + max_size = cls.max_length + if not cls.strip_whitespace: + return st.binary(min_size=min_size, max_size=max_size) + # Fun with regex to ensure we neither start nor end with whitespace + repeats = '{{{},{}}}'.format( + min_size - 2 if min_size > 2 else 0, + max_size - 2 if (max_size or 0) > 2 else '', + ) + if min_size >= 2: + pattern = rf'\W.{repeats}\W' + elif min_size == 1: + pattern = rf'\W(.{repeats}\W)?' + else: + assert min_size == 0 + pattern = rf'(\W(.{repeats}\W)?)?' 
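+    # Worked example (our numbers, comment only): min_length=4, max_length=6
+    # gives repeats == '{2,4}', i.e. one anchored byte at each end plus 2-4
+    # bytes in between, keeping the total length within [4, 6].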
+ return st.from_regex(pattern.encode(), fullmatch=True) + + +@resolves(pydantic.ConstrainedDecimal) +def resolve_condecimal(cls): # type: ignore[no-untyped-def] + min_value = cls.ge + max_value = cls.le + if cls.gt is not None: + assert min_value is None, 'Set `gt` or `ge`, but not both' + min_value = cls.gt + if cls.lt is not None: + assert max_value is None, 'Set `lt` or `le`, but not both' + max_value = cls.lt + s = st.decimals(min_value, max_value, allow_nan=False, places=cls.decimal_places) + if cls.lt is not None: + s = s.filter(lambda d: d < cls.lt) + if cls.gt is not None: + s = s.filter(lambda d: cls.gt < d) + return s + + +@resolves(pydantic.ConstrainedFloat) +def resolve_confloat(cls): # type: ignore[no-untyped-def] + min_value = cls.ge + max_value = cls.le + exclude_min = False + exclude_max = False + + if cls.gt is not None: + assert min_value is None, 'Set `gt` or `ge`, but not both' + min_value = cls.gt + exclude_min = True + if cls.lt is not None: + assert max_value is None, 'Set `lt` or `le`, but not both' + max_value = cls.lt + exclude_max = True + + if cls.multiple_of is None: + return st.floats(min_value, max_value, exclude_min=exclude_min, exclude_max=exclude_max, allow_nan=False) + + if min_value is not None: + min_value = math.ceil(min_value / cls.multiple_of) + if exclude_min: + min_value = min_value + 1 + if max_value is not None: + assert max_value >= cls.multiple_of, 'Cannot build model with max value smaller than multiple of' + max_value = math.floor(max_value / cls.multiple_of) + if exclude_max: + max_value = max_value - 1 + + return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of) + + +@resolves(pydantic.ConstrainedInt) +def resolve_conint(cls): # type: ignore[no-untyped-def] + min_value = cls.ge + max_value = cls.le + if cls.gt is not None: + assert min_value is None, 'Set `gt` or `ge`, but not both' + min_value = cls.gt + 1 + if cls.lt is not None: + assert max_value is None, 'Set `lt` or `le`, but not both' + max_value = cls.lt - 1 + + if cls.multiple_of is None or cls.multiple_of == 1: + return st.integers(min_value, max_value) + + # These adjustments and the .map handle integer-valued multiples, while the + # .filter handles trickier cases as for confloat. 
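+    # Worked example (our numbers, comment only): ge=10, le=50, multiple_of=7
+    # adjusts to st.integers(ceil(10/7), floor(50/7)) == st.integers(2, 7),
+    # whose draws map to {14, 21, 28, 35, 42, 49}.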
+ if min_value is not None: + min_value = math.ceil(Fraction(min_value) / Fraction(cls.multiple_of)) + if max_value is not None: + max_value = math.floor(Fraction(max_value) / Fraction(cls.multiple_of)) + return st.integers(min_value, max_value).map(lambda x: x * cls.multiple_of) + + +@resolves(pydantic.ConstrainedDate) +def resolve_condate(cls): # type: ignore[no-untyped-def] + if cls.ge is not None: + assert cls.gt is None, 'Set `gt` or `ge`, but not both' + min_value = cls.ge + elif cls.gt is not None: + min_value = cls.gt + datetime.timedelta(days=1) + else: + min_value = datetime.date.min + if cls.le is not None: + assert cls.lt is None, 'Set `lt` or `le`, but not both' + max_value = cls.le + elif cls.lt is not None: + max_value = cls.lt - datetime.timedelta(days=1) + else: + max_value = datetime.date.max + return st.dates(min_value, max_value) + + +@resolves(pydantic.ConstrainedStr) +def resolve_constr(cls): # type: ignore[no-untyped-def] # pragma: no cover + min_size = cls.min_length or 0 + max_size = cls.max_length + + if cls.regex is None and not cls.strip_whitespace: + return st.text(min_size=min_size, max_size=max_size) + + if cls.regex is not None: + strategy = st.from_regex(cls.regex) + if cls.strip_whitespace: + strategy = strategy.filter(lambda s: s == s.strip()) + elif cls.strip_whitespace: + repeats = '{{{},{}}}'.format( + min_size - 2 if min_size > 2 else 0, + max_size - 2 if (max_size or 0) > 2 else '', + ) + if min_size >= 2: + strategy = st.from_regex(rf'\W.{repeats}\W') + elif min_size == 1: + strategy = st.from_regex(rf'\W(.{repeats}\W)?') + else: + assert min_size == 0 + strategy = st.from_regex(rf'(\W(.{repeats}\W)?)?') + + if min_size == 0 and max_size is None: + return strategy + elif max_size is None: + return strategy.filter(lambda s: min_size <= len(s)) + return strategy.filter(lambda s: min_size <= len(s) <= max_size) + + +# Finally, register all previously-defined types, and patch in our new function +for typ in list(pydantic.types._DEFINED_TYPES): + _registered(typ) +pydantic.types._registered = _registered +st.register_type_strategy(pydantic.Json, resolve_json) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py b/venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py new file mode 100644 index 0000000000000000000000000000000000000000..d9eaaafd5b6b2617baf3b3c889659c5bac807ff2 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/annotated_types.py @@ -0,0 +1,72 @@ +import sys +from typing import TYPE_CHECKING, Any, Dict, FrozenSet, NamedTuple, Type + +from pydantic.v1.fields import Required +from pydantic.v1.main import BaseModel, create_model +from pydantic.v1.typing import is_typeddict, is_typeddict_special + +if TYPE_CHECKING: + from typing_extensions import TypedDict + +if sys.version_info < (3, 11): + + def is_legacy_typeddict(typeddict_cls: Type['TypedDict']) -> bool: # type: ignore[valid-type] + return is_typeddict(typeddict_cls) and type(typeddict_cls).__module__ == 'typing' + +else: + + def is_legacy_typeddict(_: Any) -> Any: + return False + + +def create_model_from_typeddict( + # Mypy bug: `Type[TypedDict]` is resolved as `Any` https://github.com/python/mypy/issues/11030 + typeddict_cls: Type['TypedDict'], # type: ignore[valid-type] + **kwargs: Any, +) -> Type['BaseModel']: + """ + Create a `BaseModel` based on the fields of a `TypedDict`. 
+ Since `typing.TypedDict` in Python 3.8 does not store runtime information about optional keys, + we raise an error if this happens (see https://bugs.python.org/issue38834). + """ + field_definitions: Dict[str, Any] + + # Best case scenario: with python 3.9+ or when `TypedDict` is imported from `typing_extensions` + if not hasattr(typeddict_cls, '__required_keys__'): + raise TypeError( + 'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.9.2. ' + 'Without it, there is no way to differentiate required and optional fields when subclassed.' + ) + + if is_legacy_typeddict(typeddict_cls) and any( + is_typeddict_special(t) for t in typeddict_cls.__annotations__.values() + ): + raise TypeError( + 'You should use `typing_extensions.TypedDict` instead of `typing.TypedDict` with Python < 3.11. ' + 'Without it, there is no way to reflect Required/NotRequired keys.' + ) + + required_keys: FrozenSet[str] = typeddict_cls.__required_keys__ # type: ignore[attr-defined] + field_definitions = { + field_name: (field_type, Required if field_name in required_keys else None) + for field_name, field_type in typeddict_cls.__annotations__.items() + } + + return create_model(typeddict_cls.__name__, **kwargs, **field_definitions) + + +def create_model_from_namedtuple(namedtuple_cls: Type['NamedTuple'], **kwargs: Any) -> Type['BaseModel']: + """ + Create a `BaseModel` based on the fields of a named tuple. + A named tuple can be created with `typing.NamedTuple` and declared annotations + but also with `collections.namedtuple`, in this case we consider all fields + to have type `Any`. + """ + # With python 3.10+, `__annotations__` always exists but can be empty hence the `getattr... or...` logic + namedtuple_annotations: Dict[str, Type[Any]] = getattr(namedtuple_cls, '__annotations__', None) or { + k: Any for k in namedtuple_cls._fields + } + field_definitions: Dict[str, Any] = { + field_name: (field_type, Required) for field_name, field_type in namedtuple_annotations.items() + } + return create_model(namedtuple_cls.__name__, **kwargs, **field_definitions) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py b/venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py new file mode 100644 index 0000000000000000000000000000000000000000..2f68fc8600038d8de10017b0d02a3fde77a06ba6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/class_validators.py @@ -0,0 +1,361 @@ +import warnings +from collections import ChainMap +from functools import partial, partialmethod, wraps +from itertools import chain +from types import FunctionType +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, Type, Union, overload + +from pydantic.v1.errors import ConfigError +from pydantic.v1.typing import AnyCallable +from pydantic.v1.utils import ROOT_KEY, in_ipython + +if TYPE_CHECKING: + from pydantic.v1.typing import AnyClassMethod + + +class Validator: + __slots__ = 'func', 'pre', 'each_item', 'always', 'check_fields', 'skip_on_failure' + + def __init__( + self, + func: AnyCallable, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool = False, + skip_on_failure: bool = False, + ): + self.func = func + self.pre = pre + self.each_item = each_item + self.always = always + self.check_fields = check_fields + self.skip_on_failure = skip_on_failure + + +if TYPE_CHECKING: + from inspect import Signature + + from pydantic.v1.config import BaseConfig + from pydantic.v1.fields import 
ModelField + from pydantic.v1.types import ModelOrDc + + ValidatorCallable = Callable[[Optional[ModelOrDc], Any, Dict[str, Any], ModelField, Type[BaseConfig]], Any] + ValidatorsList = List[ValidatorCallable] + ValidatorListDict = Dict[str, List[Validator]] + +_FUNCS: Set[str] = set() +VALIDATOR_CONFIG_KEY = '__validator_config__' +ROOT_VALIDATOR_CONFIG_KEY = '__root_validator_config__' + + +def validator( + *fields: str, + pre: bool = False, + each_item: bool = False, + always: bool = False, + check_fields: bool = True, + whole: Optional[bool] = None, + allow_reuse: bool = False, +) -> Callable[[AnyCallable], 'AnyClassMethod']: + """ + Decorate methods on the class indicating that they should be used to validate fields + :param fields: which field(s) the method should be called on + :param pre: whether or not this validator should be called before the standard validators (else after) + :param each_item: for complex objects (sets, lists etc.) whether to validate individual elements rather than the + whole object + :param always: whether this method and other validators should be called even if the value is missing + :param check_fields: whether to check that the fields actually exist on the model + :param allow_reuse: whether to track and raise an error if another validator refers to the decorated function + """ + if not fields: + raise ConfigError('validator with no fields specified') + elif isinstance(fields[0], FunctionType): + raise ConfigError( + "validators should be used with fields and keyword arguments, not bare. " # noqa: Q000 + "E.g. usage should be `@validator('', ...)`" + ) + elif not all(isinstance(field, str) for field in fields): + raise ConfigError( + "validator fields should be passed as separate string args. " # noqa: Q000 + "E.g. usage should be `@validator('', '', ...)`" + ) + + if whole is not None: + warnings.warn( + 'The "whole" keyword argument is deprecated, use "each_item" (inverse meaning, default False) instead', + DeprecationWarning, + ) + assert each_item is False, '"each_item" and "whole" conflict, remove "whole"' + each_item = not whole + + def dec(f: AnyCallable) -> 'AnyClassMethod': + f_cls = _prepare_validator(f, allow_reuse) + setattr( + f_cls, + VALIDATOR_CONFIG_KEY, + ( + fields, + Validator(func=f_cls.__func__, pre=pre, each_item=each_item, always=always, check_fields=check_fields), + ), + ) + return f_cls + + return dec + + +@overload +def root_validator(_func: AnyCallable) -> 'AnyClassMethod': + ... + + +@overload +def root_validator( + *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False +) -> Callable[[AnyCallable], 'AnyClassMethod']: + ... + + +def root_validator( + _func: Optional[AnyCallable] = None, *, pre: bool = False, allow_reuse: bool = False, skip_on_failure: bool = False +) -> Union['AnyClassMethod', Callable[[AnyCallable], 'AnyClassMethod']]: + """ + Decorate methods on a model indicating that they should be used to validate (and perhaps modify) data either + before or after standard model parsing/validation is performed. 
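+
+    A minimal usage sketch (ours; the field names are hypothetical):
+
+        @root_validator
+        def check_passwords_match(cls, values):
+            if values.get('password1') != values.get('password2'):
+                raise ValueError('passwords do not match')
+            return values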
+ """ + if _func: + f_cls = _prepare_validator(_func, allow_reuse) + setattr( + f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure) + ) + return f_cls + + def dec(f: AnyCallable) -> 'AnyClassMethod': + f_cls = _prepare_validator(f, allow_reuse) + setattr( + f_cls, ROOT_VALIDATOR_CONFIG_KEY, Validator(func=f_cls.__func__, pre=pre, skip_on_failure=skip_on_failure) + ) + return f_cls + + return dec + + +def _prepare_validator(function: AnyCallable, allow_reuse: bool) -> 'AnyClassMethod': + """ + Avoid validators with duplicated names since without this, validators can be overwritten silently + which generally isn't the intended behaviour, don't run in ipython (see #312) or if allow_reuse is False. + """ + f_cls = function if isinstance(function, classmethod) else classmethod(function) + if not in_ipython() and not allow_reuse: + ref = ( + getattr(f_cls.__func__, '__module__', '') + + '.' + + getattr(f_cls.__func__, '__qualname__', f'') + ) + if ref in _FUNCS: + raise ConfigError(f'duplicate validator function "{ref}"; if this is intended, set `allow_reuse=True`') + _FUNCS.add(ref) + return f_cls + + +class ValidatorGroup: + def __init__(self, validators: 'ValidatorListDict') -> None: + self.validators = validators + self.used_validators = {'*'} + + def get_validators(self, name: str) -> Optional[Dict[str, Validator]]: + self.used_validators.add(name) + validators = self.validators.get(name, []) + if name != ROOT_KEY: + validators += self.validators.get('*', []) + if validators: + return {getattr(v.func, '__name__', f''): v for v in validators} + else: + return None + + def check_for_unused(self) -> None: + unused_validators = set( + chain.from_iterable( + ( + getattr(v.func, '__name__', f'') + for v in self.validators[f] + if v.check_fields + ) + for f in (self.validators.keys() - self.used_validators) + ) + ) + if unused_validators: + fn = ', '.join(unused_validators) + raise ConfigError( + f"Validators defined with incorrect fields: {fn} " # noqa: Q000 + f"(use check_fields=False if you're inheriting from the model and intended this)" + ) + + +def extract_validators(namespace: Dict[str, Any]) -> Dict[str, List[Validator]]: + validators: Dict[str, List[Validator]] = {} + for var_name, value in namespace.items(): + validator_config = getattr(value, VALIDATOR_CONFIG_KEY, None) + if validator_config: + fields, v = validator_config + for field in fields: + if field in validators: + validators[field].append(v) + else: + validators[field] = [v] + return validators + + +def extract_root_validators(namespace: Dict[str, Any]) -> Tuple[List[AnyCallable], List[Tuple[bool, AnyCallable]]]: + from inspect import signature + + pre_validators: List[AnyCallable] = [] + post_validators: List[Tuple[bool, AnyCallable]] = [] + for name, value in namespace.items(): + validator_config: Optional[Validator] = getattr(value, ROOT_VALIDATOR_CONFIG_KEY, None) + if validator_config: + sig = signature(validator_config.func) + args = list(sig.parameters.keys()) + if args[0] == 'self': + raise ConfigError( + f'Invalid signature for root validator {name}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, values).' 
+ ) + if len(args) != 2: + raise ConfigError(f'Invalid signature for root validator {name}: {sig}, should be: (cls, values).') + # check function signature + if validator_config.pre: + pre_validators.append(validator_config.func) + else: + post_validators.append((validator_config.skip_on_failure, validator_config.func)) + return pre_validators, post_validators + + +def inherit_validators(base_validators: 'ValidatorListDict', validators: 'ValidatorListDict') -> 'ValidatorListDict': + for field, field_validators in base_validators.items(): + if field not in validators: + validators[field] = [] + validators[field] += field_validators + return validators + + +def make_generic_validator(validator: AnyCallable) -> 'ValidatorCallable': + """ + Make a generic function which calls a validator with the right arguments. + + Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow, + hence this laborious way of doing things. + + It's done like this so validators don't all need **kwargs in their signature, eg. any combination of + the arguments "values", "fields" and/or "config" are permitted. + """ + from inspect import signature + + if not isinstance(validator, (partial, partialmethod)): + # This should be the default case, so overhead is reduced + sig = signature(validator) + args = list(sig.parameters.keys()) + else: + # Fix the generated argument lists of partial methods + sig = signature(validator.func) + args = [ + k + for k in signature(validator.func).parameters.keys() + if k not in validator.args | validator.keywords.keys() + ] + + first_arg = args.pop(0) + if first_arg == 'self': + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, ' + f'should be: (cls, value, values, config, field), "values", "config" and "field" are all optional.' + ) + elif first_arg == 'cls': + # assume the second argument is value + return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:]))) + else: + # assume the first argument was value which has already been removed + return wraps(validator)(_generic_validator_basic(validator, sig, set(args))) + + +def prep_validators(v_funcs: Iterable[AnyCallable]) -> 'ValidatorsList': + return [make_generic_validator(f) for f in v_funcs if f] + + +all_kwargs = {'values', 'field', 'config'} + + +def _generic_validator_cls(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': + # assume the first argument is value + has_kwargs = False + if 'kwargs' in args: + has_kwargs = True + args -= {'kwargs'} + + if not args.issubset(all_kwargs): + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, should be: ' + f'(cls, value, values, config, field), "values", "config" and "field" are all optional.' 
+ ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(cls, v) + elif args == {'values'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values) + elif args == {'field'}: + return lambda cls, v, values, field, config: validator(cls, v, field=field) + elif args == {'config'}: + return lambda cls, v, values, field, config: validator(cls, v, config=config) + elif args == {'values', 'field'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field) + elif args == {'values', 'config'}: + return lambda cls, v, values, field, config: validator(cls, v, values=values, config=config) + elif args == {'field', 'config'}: + return lambda cls, v, values, field, config: validator(cls, v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(cls, v, values=values, field=field, config=config) + + +def _generic_validator_basic(validator: AnyCallable, sig: 'Signature', args: Set[str]) -> 'ValidatorCallable': + has_kwargs = False + if 'kwargs' in args: + has_kwargs = True + args -= {'kwargs'} + + if not args.issubset(all_kwargs): + raise ConfigError( + f'Invalid signature for validator {validator}: {sig}, should be: ' + f'(value, values, config, field), "values", "config" and "field" are all optional.' + ) + + if has_kwargs: + return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) + elif args == set(): + return lambda cls, v, values, field, config: validator(v) + elif args == {'values'}: + return lambda cls, v, values, field, config: validator(v, values=values) + elif args == {'field'}: + return lambda cls, v, values, field, config: validator(v, field=field) + elif args == {'config'}: + return lambda cls, v, values, field, config: validator(v, config=config) + elif args == {'values', 'field'}: + return lambda cls, v, values, field, config: validator(v, values=values, field=field) + elif args == {'values', 'config'}: + return lambda cls, v, values, field, config: validator(v, values=values, config=config) + elif args == {'field', 'config'}: + return lambda cls, v, values, field, config: validator(v, field=field, config=config) + else: + # args == {'values', 'field', 'config'} + return lambda cls, v, values, field, config: validator(v, values=values, field=field, config=config) + + +def gather_all_validators(type_: 'ModelOrDc') -> Dict[str, 'AnyClassMethod']: + all_attributes = ChainMap(*[cls.__dict__ for cls in type_.__mro__]) # type: ignore[arg-type,var-annotated] + return { + k: v + for k, v in all_attributes.items() + if hasattr(v, VALIDATOR_CONFIG_KEY) or hasattr(v, ROOT_VALIDATOR_CONFIG_KEY) + } diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/color.py b/venv/lib/python3.12/site-packages/pydantic/v1/color.py new file mode 100644 index 0000000000000000000000000000000000000000..b0bbf78f4367c3e13a5aac0df09b5824d8f7e6ea --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/color.py @@ -0,0 +1,494 @@ +""" +Color definitions are used as per CSS3 specification: +http://www.w3.org/TR/css3-color/#svg-color + +A few colors have multiple names referring to the sames colors, eg. `grey` and `gray` or `aqua` and `cyan`. + +In these cases the LAST color when sorted alphabetically takes preferences, +eg. 
Color((0, 255, 255)).as_named() == 'cyan' because "cyan" comes after "aqua". +""" +import math +import re +from colorsys import hls_to_rgb, rgb_to_hls +from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union, cast + +from pydantic.v1.errors import ColorError +from pydantic.v1.utils import Representation, almost_equal_floats + +if TYPE_CHECKING: + from pydantic.v1.typing import CallableGenerator, ReprArgs + +ColorTuple = Union[Tuple[int, int, int], Tuple[int, int, int, float]] +ColorType = Union[ColorTuple, str] +HslColorTuple = Union[Tuple[float, float, float], Tuple[float, float, float, float]] + + +class RGBA: + """ + Internal use only as a representation of a color. + """ + + __slots__ = 'r', 'g', 'b', 'alpha', '_tuple' + + def __init__(self, r: float, g: float, b: float, alpha: Optional[float]): + self.r = r + self.g = g + self.b = b + self.alpha = alpha + + self._tuple: Tuple[float, float, float, Optional[float]] = (r, g, b, alpha) + + def __getitem__(self, item: Any) -> Any: + return self._tuple[item] + + +# these are not compiled here to avoid import slowdown, they'll be compiled the first time they're used, then cached +r_hex_short = r'\s*(?:#|0x)?([0-9a-f])([0-9a-f])([0-9a-f])([0-9a-f])?\s*' +r_hex_long = r'\s*(?:#|0x)?([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})([0-9a-f]{2})?\s*' +_r_255 = r'(\d{1,3}(?:\.\d+)?)' +_r_comma = r'\s*,\s*' +r_rgb = fr'\s*rgb\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}\)\s*' +_r_alpha = r'(\d(?:\.\d+)?|\.\d+|\d{1,2}%)' +r_rgba = fr'\s*rgba\(\s*{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_255}{_r_comma}{_r_alpha}\s*\)\s*' +_r_h = r'(-?\d+(?:\.\d+)?|-?\.\d+)(deg|rad|turn)?' +_r_sl = r'(\d{1,3}(?:\.\d+)?)%' +r_hsl = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}\s*\)\s*' +r_hsla = fr'\s*hsl\(\s*{_r_h}{_r_comma}{_r_sl}{_r_comma}{_r_sl}{_r_comma}{_r_alpha}\s*\)\s*' + +# colors where the two hex characters are the same, if all colors match this the short version of hex colors can be used +repeat_colors = {int(c * 2, 16) for c in '0123456789abcdef'} +rads = 2 * math.pi + + +class Color(Representation): + __slots__ = '_original', '_rgba' + + def __init__(self, value: ColorType) -> None: + self._rgba: RGBA + self._original: ColorType + if isinstance(value, (tuple, list)): + self._rgba = parse_tuple(value) + elif isinstance(value, str): + self._rgba = parse_str(value) + elif isinstance(value, Color): + self._rgba = value._rgba + value = value._original + else: + raise ColorError(reason='value must be a tuple, list or string') + + # if we've got here value must be a valid color + self._original = value + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='color') + + def original(self) -> ColorType: + """ + Original value passed to Color + """ + return self._original + + def as_named(self, *, fallback: bool = False) -> str: + if self._rgba.alpha is None: + rgb = cast(Tuple[int, int, int], self.as_rgb_tuple()) + try: + return COLORS_BY_VALUE[rgb] + except KeyError as e: + if fallback: + return self.as_hex() + else: + raise ValueError('no named color found, use fallback=True, as_hex() or as_rgb()') from e + else: + return self.as_hex() + + def as_hex(self) -> str: + """ + Hex string representing the color can be 3, 4, 6 or 8 characters depending on whether the string + a "short" representation of the color is possible and whether there's an alpha channel. 
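+
+        e.g. (our examples) Color('red').as_hex() == '#f00', since every
+        channel byte repeats its hex digit, while Color((1, 2, 3)).as_hex()
+        returns the long form '#010203'.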
+ """ + values = [float_to_255(c) for c in self._rgba[:3]] + if self._rgba.alpha is not None: + values.append(float_to_255(self._rgba.alpha)) + + as_hex = ''.join(f'{v:02x}' for v in values) + if all(c in repeat_colors for c in values): + as_hex = ''.join(as_hex[c] for c in range(0, len(as_hex), 2)) + return '#' + as_hex + + def as_rgb(self) -> str: + """ + Color as an rgb(, , ) or rgba(, , , ) string. + """ + if self._rgba.alpha is None: + return f'rgb({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)})' + else: + return ( + f'rgba({float_to_255(self._rgba.r)}, {float_to_255(self._rgba.g)}, {float_to_255(self._rgba.b)}, ' + f'{round(self._alpha_float(), 2)})' + ) + + def as_rgb_tuple(self, *, alpha: Optional[bool] = None) -> ColorTuple: + """ + Color as an RGB or RGBA tuple; red, green and blue are in the range 0 to 255, alpha if included is + in the range 0 to 1. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, + """ + r, g, b = (float_to_255(c) for c in self._rgba[:3]) + if alpha is None: + if self._rgba.alpha is None: + return r, g, b + else: + return r, g, b, self._alpha_float() + elif alpha: + return r, g, b, self._alpha_float() + else: + # alpha is False + return r, g, b + + def as_hsl(self) -> str: + """ + Color as an hsl(, , ) or hsl(, , , ) string. + """ + if self._rgba.alpha is None: + h, s, li = self.as_hsl_tuple(alpha=False) # type: ignore + return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%})' + else: + h, s, li, a = self.as_hsl_tuple(alpha=True) # type: ignore + return f'hsl({h * 360:0.0f}, {s:0.0%}, {li:0.0%}, {round(a, 2)})' + + def as_hsl_tuple(self, *, alpha: Optional[bool] = None) -> HslColorTuple: + """ + Color as an HSL or HSLA tuple, e.g. hue, saturation, lightness and optionally alpha; all elements are in + the range 0 to 1. + + NOTE: this is HSL as used in HTML and most other places, not HLS as used in python's colorsys. + + :param alpha: whether to include the alpha channel, options are + None - (default) include alpha only if it's set (e.g. not None) + True - always include alpha, + False - always omit alpha, + """ + h, l, s = rgb_to_hls(self._rgba.r, self._rgba.g, self._rgba.b) + if alpha is None: + if self._rgba.alpha is None: + return h, s, l + else: + return h, s, l, self._alpha_float() + if alpha: + return h, s, l, self._alpha_float() + else: + # alpha is False + return h, s, l + + def _alpha_float(self) -> float: + return 1 if self._rgba.alpha is None else self._rgba.alpha + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls + + def __str__(self) -> str: + return self.as_named(fallback=True) + + def __repr_args__(self) -> 'ReprArgs': + return [(None, self.as_named(fallback=True))] + [('rgb', self.as_rgb_tuple())] # type: ignore + + def __eq__(self, other: Any) -> bool: + return isinstance(other, Color) and self.as_rgb_tuple() == other.as_rgb_tuple() + + def __hash__(self) -> int: + return hash(self.as_rgb_tuple()) + + +def parse_tuple(value: Tuple[Any, ...]) -> RGBA: + """ + Parse a tuple or list as a color. 
+ """ + if len(value) == 3: + r, g, b = (parse_color_value(v) for v in value) + return RGBA(r, g, b, None) + elif len(value) == 4: + r, g, b = (parse_color_value(v) for v in value[:3]) + return RGBA(r, g, b, parse_float_alpha(value[3])) + else: + raise ColorError(reason='tuples must have length 3 or 4') + + +def parse_str(value: str) -> RGBA: + """ + Parse a string to an RGBA tuple, trying the following formats (in this order): + * named color, see COLORS_BY_NAME below + * hex short eg. `fff` (prefix can be `#`, `0x` or nothing) + * hex long eg. `ffffff` (prefix can be `#`, `0x` or nothing) + * `rgb(, , ) ` + * `rgba(, , , )` + """ + value_lower = value.lower() + try: + r, g, b = COLORS_BY_NAME[value_lower] + except KeyError: + pass + else: + return ints_to_rgba(r, g, b, None) + + m = re.fullmatch(r_hex_short, value_lower) + if m: + *rgb, a = m.groups() + r, g, b = (int(v * 2, 16) for v in rgb) + if a: + alpha: Optional[float] = int(a * 2, 16) / 255 + else: + alpha = None + return ints_to_rgba(r, g, b, alpha) + + m = re.fullmatch(r_hex_long, value_lower) + if m: + *rgb, a = m.groups() + r, g, b = (int(v, 16) for v in rgb) + if a: + alpha = int(a, 16) / 255 + else: + alpha = None + return ints_to_rgba(r, g, b, alpha) + + m = re.fullmatch(r_rgb, value_lower) + if m: + return ints_to_rgba(*m.groups(), None) # type: ignore + + m = re.fullmatch(r_rgba, value_lower) + if m: + return ints_to_rgba(*m.groups()) # type: ignore + + m = re.fullmatch(r_hsl, value_lower) + if m: + h, h_units, s, l_ = m.groups() + return parse_hsl(h, h_units, s, l_) + + m = re.fullmatch(r_hsla, value_lower) + if m: + h, h_units, s, l_, a = m.groups() + return parse_hsl(h, h_units, s, l_, parse_float_alpha(a)) + + raise ColorError(reason='string not recognised as a valid color') + + +def ints_to_rgba(r: Union[int, str], g: Union[int, str], b: Union[int, str], alpha: Optional[float]) -> RGBA: + return RGBA(parse_color_value(r), parse_color_value(g), parse_color_value(b), parse_float_alpha(alpha)) + + +def parse_color_value(value: Union[int, str], max_val: int = 255) -> float: + """ + Parse a value checking it's a valid int in the range 0 to max_val and divide by max_val to give a number + in the range 0 to 1 + """ + try: + color = float(value) + except ValueError: + raise ColorError(reason='color values must be a valid number') + if 0 <= color <= max_val: + return color / max_val + else: + raise ColorError(reason=f'color values must be in the range 0 to {max_val}') + + +def parse_float_alpha(value: Union[None, str, float, int]) -> Optional[float]: + """ + Parse a value checking it's a valid float in the range 0 to 1 + """ + if value is None: + return None + try: + if isinstance(value, str) and value.endswith('%'): + alpha = float(value[:-1]) / 100 + else: + alpha = float(value) + except ValueError: + raise ColorError(reason='alpha values must be a valid float') + + if almost_equal_floats(alpha, 1): + return None + elif 0 <= alpha <= 1: + return alpha + else: + raise ColorError(reason='alpha values must be in the range 0 to 1') + + +def parse_hsl(h: str, h_units: str, sat: str, light: str, alpha: Optional[float] = None) -> RGBA: + """ + Parse raw hue, saturation, lightness and alpha values and convert to RGBA. 
+ """ + s_value, l_value = parse_color_value(sat, 100), parse_color_value(light, 100) + + h_value = float(h) + if h_units in {None, 'deg'}: + h_value = h_value % 360 / 360 + elif h_units == 'rad': + h_value = h_value % rads / rads + else: + # turns + h_value = h_value % 1 + + r, g, b = hls_to_rgb(h_value, l_value, s_value) + return RGBA(r, g, b, alpha) + + +def float_to_255(c: float) -> int: + return int(round(c * 255)) + + +COLORS_BY_NAME = { + 'aliceblue': (240, 248, 255), + 'antiquewhite': (250, 235, 215), + 'aqua': (0, 255, 255), + 'aquamarine': (127, 255, 212), + 'azure': (240, 255, 255), + 'beige': (245, 245, 220), + 'bisque': (255, 228, 196), + 'black': (0, 0, 0), + 'blanchedalmond': (255, 235, 205), + 'blue': (0, 0, 255), + 'blueviolet': (138, 43, 226), + 'brown': (165, 42, 42), + 'burlywood': (222, 184, 135), + 'cadetblue': (95, 158, 160), + 'chartreuse': (127, 255, 0), + 'chocolate': (210, 105, 30), + 'coral': (255, 127, 80), + 'cornflowerblue': (100, 149, 237), + 'cornsilk': (255, 248, 220), + 'crimson': (220, 20, 60), + 'cyan': (0, 255, 255), + 'darkblue': (0, 0, 139), + 'darkcyan': (0, 139, 139), + 'darkgoldenrod': (184, 134, 11), + 'darkgray': (169, 169, 169), + 'darkgreen': (0, 100, 0), + 'darkgrey': (169, 169, 169), + 'darkkhaki': (189, 183, 107), + 'darkmagenta': (139, 0, 139), + 'darkolivegreen': (85, 107, 47), + 'darkorange': (255, 140, 0), + 'darkorchid': (153, 50, 204), + 'darkred': (139, 0, 0), + 'darksalmon': (233, 150, 122), + 'darkseagreen': (143, 188, 143), + 'darkslateblue': (72, 61, 139), + 'darkslategray': (47, 79, 79), + 'darkslategrey': (47, 79, 79), + 'darkturquoise': (0, 206, 209), + 'darkviolet': (148, 0, 211), + 'deeppink': (255, 20, 147), + 'deepskyblue': (0, 191, 255), + 'dimgray': (105, 105, 105), + 'dimgrey': (105, 105, 105), + 'dodgerblue': (30, 144, 255), + 'firebrick': (178, 34, 34), + 'floralwhite': (255, 250, 240), + 'forestgreen': (34, 139, 34), + 'fuchsia': (255, 0, 255), + 'gainsboro': (220, 220, 220), + 'ghostwhite': (248, 248, 255), + 'gold': (255, 215, 0), + 'goldenrod': (218, 165, 32), + 'gray': (128, 128, 128), + 'green': (0, 128, 0), + 'greenyellow': (173, 255, 47), + 'grey': (128, 128, 128), + 'honeydew': (240, 255, 240), + 'hotpink': (255, 105, 180), + 'indianred': (205, 92, 92), + 'indigo': (75, 0, 130), + 'ivory': (255, 255, 240), + 'khaki': (240, 230, 140), + 'lavender': (230, 230, 250), + 'lavenderblush': (255, 240, 245), + 'lawngreen': (124, 252, 0), + 'lemonchiffon': (255, 250, 205), + 'lightblue': (173, 216, 230), + 'lightcoral': (240, 128, 128), + 'lightcyan': (224, 255, 255), + 'lightgoldenrodyellow': (250, 250, 210), + 'lightgray': (211, 211, 211), + 'lightgreen': (144, 238, 144), + 'lightgrey': (211, 211, 211), + 'lightpink': (255, 182, 193), + 'lightsalmon': (255, 160, 122), + 'lightseagreen': (32, 178, 170), + 'lightskyblue': (135, 206, 250), + 'lightslategray': (119, 136, 153), + 'lightslategrey': (119, 136, 153), + 'lightsteelblue': (176, 196, 222), + 'lightyellow': (255, 255, 224), + 'lime': (0, 255, 0), + 'limegreen': (50, 205, 50), + 'linen': (250, 240, 230), + 'magenta': (255, 0, 255), + 'maroon': (128, 0, 0), + 'mediumaquamarine': (102, 205, 170), + 'mediumblue': (0, 0, 205), + 'mediumorchid': (186, 85, 211), + 'mediumpurple': (147, 112, 219), + 'mediumseagreen': (60, 179, 113), + 'mediumslateblue': (123, 104, 238), + 'mediumspringgreen': (0, 250, 154), + 'mediumturquoise': (72, 209, 204), + 'mediumvioletred': (199, 21, 133), + 'midnightblue': (25, 25, 112), + 'mintcream': (245, 255, 250), + 'mistyrose': (255, 228, 
225), + 'moccasin': (255, 228, 181), + 'navajowhite': (255, 222, 173), + 'navy': (0, 0, 128), + 'oldlace': (253, 245, 230), + 'olive': (128, 128, 0), + 'olivedrab': (107, 142, 35), + 'orange': (255, 165, 0), + 'orangered': (255, 69, 0), + 'orchid': (218, 112, 214), + 'palegoldenrod': (238, 232, 170), + 'palegreen': (152, 251, 152), + 'paleturquoise': (175, 238, 238), + 'palevioletred': (219, 112, 147), + 'papayawhip': (255, 239, 213), + 'peachpuff': (255, 218, 185), + 'peru': (205, 133, 63), + 'pink': (255, 192, 203), + 'plum': (221, 160, 221), + 'powderblue': (176, 224, 230), + 'purple': (128, 0, 128), + 'red': (255, 0, 0), + 'rosybrown': (188, 143, 143), + 'royalblue': (65, 105, 225), + 'saddlebrown': (139, 69, 19), + 'salmon': (250, 128, 114), + 'sandybrown': (244, 164, 96), + 'seagreen': (46, 139, 87), + 'seashell': (255, 245, 238), + 'sienna': (160, 82, 45), + 'silver': (192, 192, 192), + 'skyblue': (135, 206, 235), + 'slateblue': (106, 90, 205), + 'slategray': (112, 128, 144), + 'slategrey': (112, 128, 144), + 'snow': (255, 250, 250), + 'springgreen': (0, 255, 127), + 'steelblue': (70, 130, 180), + 'tan': (210, 180, 140), + 'teal': (0, 128, 128), + 'thistle': (216, 191, 216), + 'tomato': (255, 99, 71), + 'turquoise': (64, 224, 208), + 'violet': (238, 130, 238), + 'wheat': (245, 222, 179), + 'white': (255, 255, 255), + 'whitesmoke': (245, 245, 245), + 'yellow': (255, 255, 0), + 'yellowgreen': (154, 205, 50), +} + +COLORS_BY_VALUE = {v: k for k, v in COLORS_BY_NAME.items()} diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/config.py b/venv/lib/python3.12/site-packages/pydantic/v1/config.py new file mode 100644 index 0000000000000000000000000000000000000000..18f7c999bee0fab95293b2434047fd20532a6446 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/config.py @@ -0,0 +1,191 @@ +import json +from enum import Enum +from typing import TYPE_CHECKING, Any, Callable, Dict, ForwardRef, Optional, Tuple, Type, Union + +from typing_extensions import Literal, Protocol + +from pydantic.v1.typing import AnyArgTCallable, AnyCallable +from pydantic.v1.utils import GetterDict +from pydantic.v1.version import compiled + +if TYPE_CHECKING: + from typing import overload + + from pydantic.v1.fields import ModelField + from pydantic.v1.main import BaseModel + + ConfigType = Type['BaseConfig'] + + class SchemaExtraCallable(Protocol): + @overload + def __call__(self, schema: Dict[str, Any]) -> None: + pass + + @overload + def __call__(self, schema: Dict[str, Any], model_class: Type[BaseModel]) -> None: + pass + +else: + SchemaExtraCallable = Callable[..., None] + +__all__ = 'BaseConfig', 'ConfigDict', 'get_config', 'Extra', 'inherit_config', 'prepare_config' + + +class Extra(str, Enum): + allow = 'allow' + ignore = 'ignore' + forbid = 'forbid' + + +# https://github.com/cython/cython/issues/4003 +# Fixed in Cython 3 and Pydantic v1 won't support Cython 3. +# Pydantic v2 doesn't depend on Cython at all. 
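+#
+# Illustrative aside (our example, comment only): when `ConfigDict` is
+# available it lets config be passed as a plain dict, e.g.
+#     pydantic.dataclasses.dataclass(config=ConfigDict(validate_assignment=True))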
+if not compiled: + from typing_extensions import TypedDict + + class ConfigDict(TypedDict, total=False): + title: Optional[str] + anystr_lower: bool + anystr_strip_whitespace: bool + min_anystr_length: int + max_anystr_length: Optional[int] + validate_all: bool + extra: Extra + allow_mutation: bool + frozen: bool + allow_population_by_field_name: bool + use_enum_values: bool + fields: Dict[str, Union[str, Dict[str, str]]] + validate_assignment: bool + error_msg_templates: Dict[str, str] + arbitrary_types_allowed: bool + orm_mode: bool + getter_dict: Type[GetterDict] + alias_generator: Optional[Callable[[str], str]] + keep_untouched: Tuple[type, ...] + schema_extra: Union[Dict[str, object], 'SchemaExtraCallable'] + json_loads: Callable[[str], object] + json_dumps: AnyArgTCallable[str] + json_encoders: Dict[Type[object], AnyCallable] + underscore_attrs_are_private: bool + allow_inf_nan: bool + copy_on_model_validation: Literal['none', 'deep', 'shallow'] + # whether dataclass `__post_init__` should be run after validation + post_init_call: Literal['before_validation', 'after_validation'] + +else: + ConfigDict = dict # type: ignore + + +class BaseConfig: + title: Optional[str] = None + anystr_lower: bool = False + anystr_upper: bool = False + anystr_strip_whitespace: bool = False + min_anystr_length: int = 0 + max_anystr_length: Optional[int] = None + validate_all: bool = False + extra: Extra = Extra.ignore + allow_mutation: bool = True + frozen: bool = False + allow_population_by_field_name: bool = False + use_enum_values: bool = False + fields: Dict[str, Union[str, Dict[str, str]]] = {} + validate_assignment: bool = False + error_msg_templates: Dict[str, str] = {} + arbitrary_types_allowed: bool = False + orm_mode: bool = False + getter_dict: Type[GetterDict] = GetterDict + alias_generator: Optional[Callable[[str], str]] = None + keep_untouched: Tuple[type, ...] = () + schema_extra: Union[Dict[str, Any], 'SchemaExtraCallable'] = {} + json_loads: Callable[[str], Any] = json.loads + json_dumps: Callable[..., str] = json.dumps + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable] = {} + underscore_attrs_are_private: bool = False + allow_inf_nan: bool = True + + # whether inherited models as fields should be reconstructed as base model, + # and whether such a copy should be shallow or deep + copy_on_model_validation: Literal['none', 'deep', 'shallow'] = 'shallow' + + # whether `Union` should check all allowed types before even trying to coerce + smart_union: bool = False + # whether dataclass `__post_init__` should be run before or after validation + post_init_call: Literal['before_validation', 'after_validation'] = 'before_validation' + + @classmethod + def get_field_info(cls, name: str) -> Dict[str, Any]: + """ + Get properties of FieldInfo from the `fields` property of the config class. 
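+
+        e.g. (our illustration) with `fields = {'id': 'id_'}`,
+        `get_field_info('id')` returns {'alias': 'id_', 'alias_priority': 2}.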
+ """ + + fields_value = cls.fields.get(name) + + if isinstance(fields_value, str): + field_info: Dict[str, Any] = {'alias': fields_value} + elif isinstance(fields_value, dict): + field_info = fields_value + else: + field_info = {} + + if 'alias' in field_info: + field_info.setdefault('alias_priority', 2) + + if field_info.get('alias_priority', 0) <= 1 and cls.alias_generator: + alias = cls.alias_generator(name) + if not isinstance(alias, str): + raise TypeError(f'Config.alias_generator must return str, not {alias.__class__}') + field_info.update(alias=alias, alias_priority=1) + return field_info + + @classmethod + def prepare_field(cls, field: 'ModelField') -> None: + """ + Optional hook to check or modify fields during model creation. + """ + pass + + +def get_config(config: Union[ConfigDict, Type[object], None]) -> Type[BaseConfig]: + if config is None: + return BaseConfig + + else: + config_dict = ( + config + if isinstance(config, dict) + else {k: getattr(config, k) for k in dir(config) if not k.startswith('__')} + ) + + class Config(BaseConfig): + ... + + for k, v in config_dict.items(): + setattr(Config, k, v) + return Config + + +def inherit_config(self_config: 'ConfigType', parent_config: 'ConfigType', **namespace: Any) -> 'ConfigType': + if not self_config: + base_classes: Tuple['ConfigType', ...] = (parent_config,) + elif self_config == parent_config: + base_classes = (self_config,) + else: + base_classes = self_config, parent_config + + namespace['json_encoders'] = { + **getattr(parent_config, 'json_encoders', {}), + **getattr(self_config, 'json_encoders', {}), + **namespace.get('json_encoders', {}), + } + + return type('Config', base_classes, namespace) + + +def prepare_config(config: Type[BaseConfig], cls_name: str) -> None: + if not isinstance(config.extra, Extra): + try: + config.extra = Extra(config.extra) + except ValueError: + raise ValueError(f'"{cls_name}": {config.extra} is not a valid value for "extra"') diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py b/venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py new file mode 100644 index 0000000000000000000000000000000000000000..bd1670291d6622a28ecb5b8e9ad68f4bac9305d7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/dataclasses.py @@ -0,0 +1,500 @@ +""" +The main purpose is to enhance stdlib dataclasses by adding validation +A pydantic dataclass can be generated from scratch or from a stdlib one. + +Behind the scene, a pydantic dataclass is just like a regular one on which we attach +a `BaseModel` and magic methods to trigger the validation of the data. +`__init__` and `__post_init__` are hence overridden and have extra logic to be +able to validate input data. + +When a pydantic dataclass is generated from scratch, it's just a plain dataclass +with validation triggered at initialization + +The tricky part if for stdlib dataclasses that are converted after into pydantic ones e.g. + +```py +@dataclasses.dataclass +class M: + x: int + +ValidatedM = pydantic.dataclasses.dataclass(M) +``` + +We indeed still want to support equality, hashing, repr, ... as if it was the stdlib one! + +```py +assert isinstance(ValidatedM(x=1), M) +assert ValidatedM(x=1) == M(x=1) +``` + +This means we **don't want to create a new dataclass that inherits from it** +The trick is to create a wrapper around `M` that will act as a proxy to trigger +validation without altering default `M` behaviour. 
+""" +import copy +import dataclasses +import sys +from contextlib import contextmanager +from functools import wraps + +try: + from functools import cached_property +except ImportError: + # cached_property available only for python3.8+ + pass + +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Generator, Optional, Type, TypeVar, Union, overload + +from typing_extensions import dataclass_transform + +from pydantic.v1.class_validators import gather_all_validators +from pydantic.v1.config import BaseConfig, ConfigDict, Extra, get_config +from pydantic.v1.error_wrappers import ValidationError +from pydantic.v1.errors import DataclassTypeError +from pydantic.v1.fields import Field, FieldInfo, Required, Undefined +from pydantic.v1.main import create_model, validate_model +from pydantic.v1.utils import ClassAttribute + +if TYPE_CHECKING: + from pydantic.v1.main import BaseModel + from pydantic.v1.typing import CallableGenerator, NoArgAnyCallable + + DataclassT = TypeVar('DataclassT', bound='Dataclass') + + DataclassClassOrWrapper = Union[Type['Dataclass'], 'DataclassProxy'] + + class Dataclass: + # stdlib attributes + __dataclass_fields__: ClassVar[Dict[str, Any]] + __dataclass_params__: ClassVar[Any] # in reality `dataclasses._DataclassParams` + __post_init__: ClassVar[Callable[..., None]] + + # Added by pydantic + __pydantic_run_validation__: ClassVar[bool] + __post_init_post_parse__: ClassVar[Callable[..., None]] + __pydantic_initialised__: ClassVar[bool] + __pydantic_model__: ClassVar[Type[BaseModel]] + __pydantic_validate_values__: ClassVar[Callable[['Dataclass'], None]] + __pydantic_has_field_info_default__: ClassVar[bool] # whether a `pydantic.Field` is used as default value + + def __init__(self, *args: object, **kwargs: object) -> None: + pass + + @classmethod + def __get_validators__(cls: Type['Dataclass']) -> 'CallableGenerator': + pass + + @classmethod + def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT': + pass + + +__all__ = [ + 'dataclass', + 'set_validation', + 'create_pydantic_model_from_dataclass', + 'is_builtin_dataclass', + 'make_dataclass_validator', +] + +_T = TypeVar('_T') + +if sys.version_info >= (3, 10): + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + kw_only: bool = ..., + ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: + ... + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + _cls: Type[_T], + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + kw_only: bool = ..., + ) -> 'DataclassClassOrWrapper': + ... 
+ +else: + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + ) -> Callable[[Type[_T]], 'DataclassClassOrWrapper']: + ... + + @dataclass_transform(field_specifiers=(dataclasses.field, Field)) + @overload + def dataclass( + _cls: Type[_T], + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + ) -> 'DataclassClassOrWrapper': + ... + + +@dataclass_transform(field_specifiers=(dataclasses.field, Field)) +def dataclass( + _cls: Optional[Type[_T]] = None, + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + config: Union[ConfigDict, Type[object], None] = None, + validate_on_init: Optional[bool] = None, + use_proxy: Optional[bool] = None, + kw_only: bool = False, +) -> Union[Callable[[Type[_T]], 'DataclassClassOrWrapper'], 'DataclassClassOrWrapper']: + """ + Like the python standard lib dataclasses but with type validation. + The result is either a pydantic dataclass that will validate input data + or a wrapper that will trigger validation around a stdlib dataclass + to avoid modifying it directly + """ + the_config = get_config(config) + + def wrap(cls: Type[Any]) -> 'DataclassClassOrWrapper': + should_use_proxy = ( + use_proxy + if use_proxy is not None + else ( + is_builtin_dataclass(cls) + and (cls.__bases__[0] is object or set(dir(cls)) == set(dir(cls.__bases__[0]))) + ) + ) + if should_use_proxy: + dc_cls_doc = '' + dc_cls = DataclassProxy(cls) + default_validate_on_init = False + else: + dc_cls_doc = cls.__doc__ or '' # needs to be done before generating dataclass + if sys.version_info >= (3, 10): + dc_cls = dataclasses.dataclass( + cls, + init=init, + repr=repr, + eq=eq, + order=order, + unsafe_hash=unsafe_hash, + frozen=frozen, + kw_only=kw_only, + ) + else: + dc_cls = dataclasses.dataclass( # type: ignore + cls, init=init, repr=repr, eq=eq, order=order, unsafe_hash=unsafe_hash, frozen=frozen + ) + default_validate_on_init = True + + should_validate_on_init = default_validate_on_init if validate_on_init is None else validate_on_init + _add_pydantic_validation_attributes(cls, the_config, should_validate_on_init, dc_cls_doc) + dc_cls.__pydantic_model__.__try_update_forward_refs__(**{cls.__name__: cls}) + return dc_cls + + if _cls is None: + return wrap + + return wrap(_cls) + + +@contextmanager +def set_validation(cls: Type['DataclassT'], value: bool) -> Generator[Type['DataclassT'], None, None]: + original_run_validation = cls.__pydantic_run_validation__ + try: + cls.__pydantic_run_validation__ = value + yield cls + finally: + cls.__pydantic_run_validation__ = original_run_validation + + +class DataclassProxy: + __slots__ = '__dataclass__' + + def __init__(self, dc_cls: Type['Dataclass']) -> None: + object.__setattr__(self, '__dataclass__', dc_cls) + + def __call__(self, *args: Any, **kwargs: Any) -> Any: + with set_validation(self.__dataclass__, True): + return self.__dataclass__(*args, **kwargs) + + def __getattr__(self, name: str) -> Any: + return 
getattr(self.__dataclass__, name) + + def __setattr__(self, __name: str, __value: Any) -> None: + return setattr(self.__dataclass__, __name, __value) + + def __instancecheck__(self, instance: Any) -> bool: + return isinstance(instance, self.__dataclass__) + + def __copy__(self) -> 'DataclassProxy': + return DataclassProxy(copy.copy(self.__dataclass__)) + + def __deepcopy__(self, memo: Any) -> 'DataclassProxy': + return DataclassProxy(copy.deepcopy(self.__dataclass__, memo)) + + +def _add_pydantic_validation_attributes( # noqa: C901 (ignore complexity) + dc_cls: Type['Dataclass'], + config: Type[BaseConfig], + validate_on_init: bool, + dc_cls_doc: str, +) -> None: + """ + We need to replace the right method. If no `__post_init__` has been set in the stdlib dataclass + it won't even exist (code is generated on the fly by `dataclasses`) + By default, we run validation after `__init__` or `__post_init__` if defined + """ + init = dc_cls.__init__ + + @wraps(init) + def handle_extra_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + if config.extra == Extra.ignore: + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + elif config.extra == Extra.allow: + for k, v in kwargs.items(): + self.__dict__.setdefault(k, v) + init(self, *args, **{k: v for k, v in kwargs.items() if k in self.__dataclass_fields__}) + + else: + init(self, *args, **kwargs) + + if hasattr(dc_cls, '__post_init__'): + try: + post_init = dc_cls.__post_init__.__wrapped__ # type: ignore[attr-defined] + except AttributeError: + post_init = dc_cls.__post_init__ + + @wraps(post_init) + def new_post_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + if config.post_init_call == 'before_validation': + post_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + if hasattr(self, '__post_init_post_parse__'): + self.__post_init_post_parse__(*args, **kwargs) + + if config.post_init_call == 'after_validation': + post_init(self, *args, **kwargs) + + setattr(dc_cls, '__init__', handle_extra_init) + setattr(dc_cls, '__post_init__', new_post_init) + + else: + + @wraps(init) + def new_init(self: 'Dataclass', *args: Any, **kwargs: Any) -> None: + handle_extra_init(self, *args, **kwargs) + + if self.__class__.__pydantic_run_validation__: + self.__pydantic_validate_values__() + + if hasattr(self, '__post_init_post_parse__'): + # We need to find again the initvars. 
To do that we use `__dataclass_fields__` instead of + # public method `dataclasses.fields` + + # get all initvars and their default values + initvars_and_values: Dict[str, Any] = {} + for i, f in enumerate(self.__class__.__dataclass_fields__.values()): + if f._field_type is dataclasses._FIELD_INITVAR: # type: ignore[attr-defined] + try: + # set arg value by default + initvars_and_values[f.name] = args[i] + except IndexError: + initvars_and_values[f.name] = kwargs.get(f.name, f.default) + + self.__post_init_post_parse__(**initvars_and_values) + + setattr(dc_cls, '__init__', new_init) + + setattr(dc_cls, '__pydantic_run_validation__', ClassAttribute('__pydantic_run_validation__', validate_on_init)) + setattr(dc_cls, '__pydantic_initialised__', False) + setattr(dc_cls, '__pydantic_model__', create_pydantic_model_from_dataclass(dc_cls, config, dc_cls_doc)) + setattr(dc_cls, '__pydantic_validate_values__', _dataclass_validate_values) + setattr(dc_cls, '__validate__', classmethod(_validate_dataclass)) + setattr(dc_cls, '__get_validators__', classmethod(_get_validators)) + + if dc_cls.__pydantic_model__.__config__.validate_assignment and not dc_cls.__dataclass_params__.frozen: + setattr(dc_cls, '__setattr__', _dataclass_validate_assignment_setattr) + + +def _get_validators(cls: 'DataclassClassOrWrapper') -> 'CallableGenerator': + yield cls.__validate__ + + +def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT': + with set_validation(cls, True): + if isinstance(v, cls): + v.__pydantic_validate_values__() + return v + elif isinstance(v, (list, tuple)): + return cls(*v) + elif isinstance(v, dict): + return cls(**v) + else: + raise DataclassTypeError(class_name=cls.__name__) + + +def create_pydantic_model_from_dataclass( + dc_cls: Type['Dataclass'], + config: Type[Any] = BaseConfig, + dc_cls_doc: Optional[str] = None, +) -> Type['BaseModel']: + field_definitions: Dict[str, Any] = {} + for field in dataclasses.fields(dc_cls): + default: Any = Undefined + default_factory: Optional['NoArgAnyCallable'] = None + field_info: FieldInfo + + if field.default is not dataclasses.MISSING: + default = field.default + elif field.default_factory is not dataclasses.MISSING: + default_factory = field.default_factory + else: + default = Required + + if isinstance(default, FieldInfo): + field_info = default + dc_cls.__pydantic_has_field_info_default__ = True + else: + field_info = Field(default=default, default_factory=default_factory, **field.metadata) + + field_definitions[field.name] = (field.type, field_info) + + validators = gather_all_validators(dc_cls) + model: Type['BaseModel'] = create_model( + dc_cls.__name__, + __config__=config, + __module__=dc_cls.__module__, + __validators__=validators, + __cls_kwargs__={'__resolve_forward_refs__': False}, + **field_definitions, + ) + model.__doc__ = dc_cls_doc if dc_cls_doc is not None else dc_cls.__doc__ or '' + return model + + +if sys.version_info >= (3, 8): + + def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool: + return isinstance(getattr(type(obj), k, None), cached_property) + +else: + + def _is_field_cached_property(obj: 'Dataclass', k: str) -> bool: + return False + + +def _dataclass_validate_values(self: 'Dataclass') -> None: + # validation errors can occur if this function is called twice on an already initialised dataclass. 
+    # for example if Extra.forbid is enabled, it would consider __pydantic_initialised__ an invalid extra property
+    if getattr(self, '__pydantic_initialised__'):
+        return
+    if getattr(self, '__pydantic_has_field_info_default__', False):
+        # We need to remove `FieldInfo` values since they are not valid as input
+        # It's ok to do that because they are obviously the default values!
+        input_data = {
+            k: v
+            for k, v in self.__dict__.items()
+            if not (isinstance(v, FieldInfo) or _is_field_cached_property(self, k))
+        }
+    else:
+        input_data = {k: v for k, v in self.__dict__.items() if not _is_field_cached_property(self, k)}
+    d, _, validation_error = validate_model(self.__pydantic_model__, input_data, cls=self.__class__)
+    if validation_error:
+        raise validation_error
+    self.__dict__.update(d)
+    object.__setattr__(self, '__pydantic_initialised__', True)
+
+
+def _dataclass_validate_assignment_setattr(self: 'Dataclass', name: str, value: Any) -> None:
+    if self.__pydantic_initialised__:
+        d = dict(self.__dict__)
+        d.pop(name, None)
+        known_field = self.__pydantic_model__.__fields__.get(name, None)
+        if known_field:
+            value, error_ = known_field.validate(value, d, loc=name, cls=self.__class__)
+            if error_:
+                raise ValidationError([error_], self.__class__)
+
+    object.__setattr__(self, name, value)
+
+
+def is_builtin_dataclass(_cls: Type[Any]) -> bool:
+    """
+    Whether a class is a stdlib dataclass
+    (useful to discriminate a pydantic dataclass that is actually a wrapper around a stdlib dataclass)
+
+    we check that
+    - `_cls` is a dataclass
+    - `_cls` is not a processed pydantic dataclass (with a basemodel attached)
+    - `_cls` is not a pydantic dataclass inheriting directly from a stdlib dataclass
+    e.g.
+    ```
+    @dataclasses.dataclass
+    class A:
+        x: int
+
+    @pydantic.dataclasses.dataclass
+    class B(A):
+        y: int
+    ```
+    In this case, when we first check `B`, we make an extra check and look at the annotations ('y'),
+    which won't be a superset of all the dataclass fields (only the stdlib fields i.e. 'x')
+    """
+    return (
+        dataclasses.is_dataclass(_cls)
+        and not hasattr(_cls, '__pydantic_model__')
+        and set(_cls.__dataclass_fields__).issuperset(set(getattr(_cls, '__annotations__', {})))
+    )
+
+
+def make_dataclass_validator(dc_cls: Type['Dataclass'], config: Type[BaseConfig]) -> 'CallableGenerator':
+    """
+    Create a pydantic.dataclass from a builtin dataclass to add type validation
+    and yield the validators.
+    It retrieves the parameters of the dataclass and forwards them to the newly created dataclass
+    """
+    yield from _get_validators(dataclass(dc_cls, config=config, use_proxy=True))
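Before the next file, a short usage sketch of the machinery above (a hedged example, assuming pydantic v2 is installed, which vendors this code as `pydantic.v1`; the `User` class is illustrative):

from pydantic.v1 import ValidationError
from pydantic.v1.dataclasses import dataclass

@dataclass
class User:
    id: int
    name: str = 'Jane'

print(User(id='123'))  # v1 coerces the string: User(id=123, name='Jane')
try:
    User(id='not a number')
except ValidationError as exc:
    print(exc)  # one-error report for field `id`: value is not a valid integer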
diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py b/venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py
new file mode 100644
index 0000000000000000000000000000000000000000..a7598fc6c56688c4dd6526fc6e8b8e03682f62e8
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/v1/datetime_parse.py
@@ -0,0 +1,248 @@
+"""
+Functions to parse datetime objects.
+
+We're using regular expressions rather than time.strptime because:
+- They provide both validation and parsing.
+- They're more flexible for datetimes.
+- The date/datetime/time constructors produce friendlier error messages.
+
+Stolen from https://raw.githubusercontent.com/django/django/main/django/utils/dateparse.py at
+9718fa2e8abe430c3526a9278dd976443d4ae3c6
+
+Changed to:
+* use standard python datetime types not django.utils.timezone
+* raise ValueError when regex doesn't match rather than returning None
+* support parsing unix timestamps for dates and datetimes
+"""
+import re
+from datetime import date, datetime, time, timedelta, timezone
+from typing import Dict, Optional, Type, Union
+
+from pydantic.v1 import errors
+
+date_expr = r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})'
+time_expr = (
+    r'(?P<hour>\d{1,2}):(?P<minute>\d{1,2})'
+    r'(?::(?P<second>\d{1,2})(?:\.(?P<microsecond>\d{1,6})\d{0,6})?)?'
+    r'(?P<tzinfo>Z|[+-]\d{2}(?::?\d{2})?)?$'
+)
+
+date_re = re.compile(f'{date_expr}$')
+time_re = re.compile(time_expr)
+datetime_re = re.compile(f'{date_expr}[T ]{time_expr}')
+
+standard_duration_re = re.compile(
+    r'^'
+    r'(?:(?P<days>-?\d+) (days?, )?)?'
+    r'((?:(?P<hours>-?\d+):)(?=\d+:\d+))?'
+    r'(?:(?P<minutes>-?\d+):)?'
+    r'(?P<seconds>-?\d+)'
+    r'(?:\.(?P<microseconds>\d{1,6})\d{0,6})?'
+    r'$'
+)
+
+# Support the sections of ISO 8601 date representation that are accepted by timedelta
+iso8601_duration_re = re.compile(
+    r'^(?P<sign>[-+]?)'
+    r'P'
+    r'(?:(?P<days>\d+(.\d+)?)D)?'
+    r'(?:T'
+    r'(?:(?P<hours>\d+(.\d+)?)H)?'
+    r'(?:(?P<minutes>\d+(.\d+)?)M)?'
+    r'(?:(?P<seconds>\d+(.\d+)?)S)?'
+    r')?'
+    r'$'
+)
+
+EPOCH = datetime(1970, 1, 1)
+# if greater than this, the number is in ms, if less than or equal it's in seconds
+# (in seconds this is 11th October 2603, in ms it's 20th August 1970)
+MS_WATERSHED = int(2e10)
+# slightly more than datetime.max in ns - (datetime.max - EPOCH).total_seconds() * 1e9
+MAX_NUMBER = int(3e20)
+StrBytesIntFloat = Union[str, bytes, int, float]
+
+
+def get_numeric(value: StrBytesIntFloat, native_expected_type: str) -> Union[None, int, float]:
+    if isinstance(value, (int, float)):
+        return value
+    try:
+        return float(value)
+    except ValueError:
+        return None
+    except TypeError:
+        raise TypeError(f'invalid type; expected {native_expected_type}, string, bytes, int or float')
+
+
+def from_unix_seconds(seconds: Union[int, float]) -> datetime:
+    if seconds > MAX_NUMBER:
+        return datetime.max
+    elif seconds < -MAX_NUMBER:
+        return datetime.min
+
+    while abs(seconds) > MS_WATERSHED:
+        seconds /= 1000
+    dt = EPOCH + timedelta(seconds=seconds)
+    return dt.replace(tzinfo=timezone.utc)
+
+
+def _parse_timezone(value: Optional[str], error: Type[Exception]) -> Union[None, int, timezone]:
+    if value == 'Z':
+        return timezone.utc
+    elif value is not None:
+        offset_mins = int(value[-2:]) if len(value) > 3 else 0
+        offset = 60 * int(value[1:3]) + offset_mins
+        if value[0] == '-':
+            offset = -offset
+        try:
+            return timezone(timedelta(minutes=offset))
+        except ValueError:
+            raise error()
+    else:
+        return None
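The `MS_WATERSHED` loop in `from_unix_seconds` makes numeric input self-describing: values above the watershed are repeatedly divided by 1000 (milliseconds, then microseconds, and so on) until they land in the seconds range. A quick illustration with arbitrary values, assuming the module is importable as `pydantic.v1.datetime_parse`:

from pydantic.v1.datetime_parse import from_unix_seconds

print(from_unix_seconds(1_654_646_400))      # taken as seconds: 2022-06-08 00:00:00+00:00
print(from_unix_seconds(1_654_646_400_000))  # above the watershed, taken as ms: same instant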
+
+
+def parse_date(value: Union[date, StrBytesIntFloat]) -> date:
+    """
+    Parse a date/int/float/string and return a datetime.date.
+
+    Raise ValueError if the input is well formatted but not a valid date.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, date):
+        if isinstance(value, datetime):
+            return value.date()
+        else:
+            return value
+
+    number = get_numeric(value, 'date')
+    if number is not None:
+        return from_unix_seconds(number).date()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = date_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateError()
+
+    kw = {k: int(v) for k, v in match.groupdict().items()}
+
+    try:
+        return date(**kw)
+    except ValueError:
+        raise errors.DateError()
+
+
+def parse_time(value: Union[time, StrBytesIntFloat]) -> time:
+    """
+    Parse a time/string and return a datetime.time.
+
+    Raise ValueError if the input is well formatted but not a valid time.
+    Raise ValueError if the input isn't well formatted, in particular if it contains an offset.
+    """
+    if isinstance(value, time):
+        return value
+
+    number = get_numeric(value, 'time')
+    if number is not None:
+        if number >= 86400:
+            # doesn't make sense since times loop back around to 0
+            raise errors.TimeError()
+        return (datetime.min + timedelta(seconds=number)).time()
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = time_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.TimeError()
+
+    kw = match.groupdict()
+    if kw['microsecond']:
+        kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+
+    tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.TimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_['tzinfo'] = tzinfo
+
+    try:
+        return time(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.TimeError()
+
+
+def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime:
+    """
+    Parse a datetime/int/float/string and return a datetime.datetime.
+
+    This function supports time zone offsets. When the input contains one,
+    the output uses a timezone with a fixed offset from UTC.
+
+    Raise ValueError if the input is well formatted but not a valid datetime.
+    Raise ValueError if the input isn't well formatted.
+    """
+    if isinstance(value, datetime):
+        return value
+
+    number = get_numeric(value, 'datetime')
+    if number is not None:
+        return from_unix_seconds(number)
+
+    if isinstance(value, bytes):
+        value = value.decode()
+
+    match = datetime_re.match(value)  # type: ignore
+    if match is None:
+        raise errors.DateTimeError()
+
+    kw = match.groupdict()
+    if kw['microsecond']:
+        kw['microsecond'] = kw['microsecond'].ljust(6, '0')
+
+    tzinfo = _parse_timezone(kw.pop('tzinfo'), errors.DateTimeError)
+    kw_: Dict[str, Union[None, int, timezone]] = {k: int(v) for k, v in kw.items() if v is not None}
+    kw_['tzinfo'] = tzinfo
+
+    try:
+        return datetime(**kw_)  # type: ignore
+    except ValueError:
+        raise errors.DateTimeError()
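The three parsers above accept the same shapes pydantic v1 date/time fields do: native objects pass through, numbers are treated as unix timestamps, and strings (or bytes, decoded first) must match the regexes at the top of the module. A hedged usage sketch:

from pydantic.v1.datetime_parse import parse_date, parse_datetime, parse_time

print(parse_date('2023-01-15'))                     # 2023-01-15
print(parse_time('10:20:30.4'))                     # 10:20:30.400000
print(parse_datetime('2023-01-15T10:20:30+02:00'))  # 2023-01-15 10:20:30+02:00
print(parse_datetime(b'2023-01-15 10:20:30'))       # 2023-01-15 10:20:30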
+
+
+def parse_duration(value: StrBytesIntFloat) -> timedelta:
+    """
+    Parse a duration int/float/string and return a datetime.timedelta.
+
+    The preferred format for durations in Django is '%d %H:%M:%S.%f'.
+
+    Also supports ISO 8601 representation.
+    """
+    if isinstance(value, timedelta):
+        return value
+
+    if isinstance(value, (int, float)):
+        # below code requires a string
+        value = f'{value:f}'
+    elif isinstance(value, bytes):
+        value = value.decode()
+
+    try:
+        match = standard_duration_re.match(value) or iso8601_duration_re.match(value)
+    except TypeError:
+        raise TypeError('invalid type; expected timedelta, string, bytes, int or float')
+
+    if not match:
+        raise errors.DurationError()
+
+    kw = match.groupdict()
+    sign = -1 if kw.pop('sign', '+') == '-' else 1
+    if kw.get('microseconds'):
+        kw['microseconds'] = kw['microseconds'].ljust(6, '0')
+
+    if kw.get('seconds') and kw.get('microseconds') and kw['seconds'].startswith('-'):
+        kw['microseconds'] = '-' + kw['microseconds']
+
+    kw_ = {k: float(v) for k, v in kw.items() if v is not None}
+
+    return sign * timedelta(**kw_)
diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/decorator.py b/venv/lib/python3.12/site-packages/pydantic/v1/decorator.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c7c2c2ffdb45ca50cd2b7e57bde1fc711adb851
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/v1/decorator.py
@@ -0,0 +1,264 @@
+from functools import wraps
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, Type, TypeVar, Union, overload
+
+from pydantic.v1 import validator
+from pydantic.v1.config import Extra
+from pydantic.v1.errors import ConfigError
+from pydantic.v1.main import BaseModel, create_model
+from pydantic.v1.typing import get_all_type_hints
+from pydantic.v1.utils import to_camel
+
+__all__ = ('validate_arguments',)
+
+if TYPE_CHECKING:
+    from pydantic.v1.typing import AnyCallable
+
+    AnyCallableT = TypeVar('AnyCallableT', bound=AnyCallable)
+    ConfigType = Union[None, Type[Any], Dict[str, Any]]
+
+
+@overload
+def validate_arguments(func: None = None, *, config: 'ConfigType' = None) -> Callable[['AnyCallableT'], 'AnyCallableT']:
+    ...
+
+
+@overload
+def validate_arguments(func: 'AnyCallableT') -> 'AnyCallableT':
+    ...
+
+
+def validate_arguments(func: Optional['AnyCallableT'] = None, *, config: 'ConfigType' = None) -> Any:
+    """
+    Decorator to validate the arguments passed to a function.
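+
+    e.g. (an illustrative sketch; `repeat` is a made-up function):
+    ```
+    @validate_arguments
+    def repeat(text: str, count: int) -> str:
+        return text * count
+
+    repeat('x', '4')     # count is coerced to int -> 'xxxx'
+    repeat('x', 'four')  # raises ValidationError
+    ```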
+ """ + + def validate(_func: 'AnyCallable') -> 'AnyCallable': + vd = ValidatedFunction(_func, config) + + @wraps(_func) + def wrapper_function(*args: Any, **kwargs: Any) -> Any: + return vd.call(*args, **kwargs) + + wrapper_function.vd = vd # type: ignore + wrapper_function.validate = vd.init_model_instance # type: ignore + wrapper_function.raw_function = vd.raw_function # type: ignore + wrapper_function.model = vd.model # type: ignore + return wrapper_function + + if func: + return validate(func) + else: + return validate + + +ALT_V_ARGS = 'v__args' +ALT_V_KWARGS = 'v__kwargs' +V_POSITIONAL_ONLY_NAME = 'v__positional_only' +V_DUPLICATE_KWARGS = 'v__duplicate_kwargs' + + +class ValidatedFunction: + def __init__(self, function: 'AnyCallableT', config: 'ConfigType'): # noqa C901 + from inspect import Parameter, signature + + parameters: Mapping[str, Parameter] = signature(function).parameters + + if parameters.keys() & {ALT_V_ARGS, ALT_V_KWARGS, V_POSITIONAL_ONLY_NAME, V_DUPLICATE_KWARGS}: + raise ConfigError( + f'"{ALT_V_ARGS}", "{ALT_V_KWARGS}", "{V_POSITIONAL_ONLY_NAME}" and "{V_DUPLICATE_KWARGS}" ' + f'are not permitted as argument names when using the "{validate_arguments.__name__}" decorator' + ) + + self.raw_function = function + self.arg_mapping: Dict[int, str] = {} + self.positional_only_args = set() + self.v_args_name = 'args' + self.v_kwargs_name = 'kwargs' + + type_hints = get_all_type_hints(function) + takes_args = False + takes_kwargs = False + fields: Dict[str, Tuple[Any, Any]] = {} + for i, (name, p) in enumerate(parameters.items()): + if p.annotation is p.empty: + annotation = Any + else: + annotation = type_hints[name] + + default = ... if p.default is p.empty else p.default + if p.kind == Parameter.POSITIONAL_ONLY: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_POSITIONAL_ONLY_NAME] = List[str], None + self.positional_only_args.add(name) + elif p.kind == Parameter.POSITIONAL_OR_KEYWORD: + self.arg_mapping[i] = name + fields[name] = annotation, default + fields[V_DUPLICATE_KWARGS] = List[str], None + elif p.kind == Parameter.KEYWORD_ONLY: + fields[name] = annotation, default + elif p.kind == Parameter.VAR_POSITIONAL: + self.v_args_name = name + fields[name] = Tuple[annotation, ...], None + takes_args = True + else: + assert p.kind == Parameter.VAR_KEYWORD, p.kind + self.v_kwargs_name = name + fields[name] = Dict[str, annotation], None # type: ignore + takes_kwargs = True + + # these checks avoid a clash between "args" and a field with that name + if not takes_args and self.v_args_name in fields: + self.v_args_name = ALT_V_ARGS + + # same with "kwargs" + if not takes_kwargs and self.v_kwargs_name in fields: + self.v_kwargs_name = ALT_V_KWARGS + + if not takes_args: + # we add the field so validation below can raise the correct exception + fields[self.v_args_name] = List[Any], None + + if not takes_kwargs: + # same with kwargs + fields[self.v_kwargs_name] = Dict[Any, Any], None + + self.create_model(fields, takes_args, takes_kwargs, config) + + def init_model_instance(self, *args: Any, **kwargs: Any) -> BaseModel: + values = self.build_values(args, kwargs) + return self.model(**values) + + def call(self, *args: Any, **kwargs: Any) -> Any: + m = self.init_model_instance(*args, **kwargs) + return self.execute(m) + + def build_values(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Dict[str, Any]: + values: Dict[str, Any] = {} + if args: + arg_iter = enumerate(args) + while True: + try: + i, a = next(arg_iter) + except StopIteration: + break 
+ arg_name = self.arg_mapping.get(i) + if arg_name is not None: + values[arg_name] = a + else: + values[self.v_args_name] = [a] + [a for _, a in arg_iter] + break + + var_kwargs: Dict[str, Any] = {} + wrong_positional_args = [] + duplicate_kwargs = [] + fields_alias = [ + field.alias + for name, field in self.model.__fields__.items() + if name not in (self.v_args_name, self.v_kwargs_name) + ] + non_var_fields = set(self.model.__fields__) - {self.v_args_name, self.v_kwargs_name} + for k, v in kwargs.items(): + if k in non_var_fields or k in fields_alias: + if k in self.positional_only_args: + wrong_positional_args.append(k) + if k in values: + duplicate_kwargs.append(k) + values[k] = v + else: + var_kwargs[k] = v + + if var_kwargs: + values[self.v_kwargs_name] = var_kwargs + if wrong_positional_args: + values[V_POSITIONAL_ONLY_NAME] = wrong_positional_args + if duplicate_kwargs: + values[V_DUPLICATE_KWARGS] = duplicate_kwargs + return values + + def execute(self, m: BaseModel) -> Any: + d = {k: v for k, v in m._iter() if k in m.__fields_set__ or m.__fields__[k].default_factory} + var_kwargs = d.pop(self.v_kwargs_name, {}) + + if self.v_args_name in d: + args_: List[Any] = [] + in_kwargs = False + kwargs = {} + for name, value in d.items(): + if in_kwargs: + kwargs[name] = value + elif name == self.v_args_name: + args_ += value + in_kwargs = True + else: + args_.append(value) + return self.raw_function(*args_, **kwargs, **var_kwargs) + elif self.positional_only_args: + args_ = [] + kwargs = {} + for name, value in d.items(): + if name in self.positional_only_args: + args_.append(value) + else: + kwargs[name] = value + return self.raw_function(*args_, **kwargs, **var_kwargs) + else: + return self.raw_function(**d, **var_kwargs) + + def create_model(self, fields: Dict[str, Any], takes_args: bool, takes_kwargs: bool, config: 'ConfigType') -> None: + pos_args = len(self.arg_mapping) + + class CustomConfig: + pass + + if not TYPE_CHECKING: # pragma: no branch + if isinstance(config, dict): + CustomConfig = type('Config', (), config) # noqa: F811 + elif config is not None: + CustomConfig = config # noqa: F811 + + if hasattr(CustomConfig, 'fields') or hasattr(CustomConfig, 'alias_generator'): + raise ConfigError( + 'Setting the "fields" and "alias_generator" property on custom Config for ' + '@validate_arguments is not yet supported, please remove.' 
+ ) + + class DecoratorBaseModel(BaseModel): + @validator(self.v_args_name, check_fields=False, allow_reuse=True) + def check_args(cls, v: Optional[List[Any]]) -> Optional[List[Any]]: + if takes_args or v is None: + return v + + raise TypeError(f'{pos_args} positional arguments expected but {pos_args + len(v)} given') + + @validator(self.v_kwargs_name, check_fields=False, allow_reuse=True) + def check_kwargs(cls, v: Optional[Dict[str, Any]]) -> Optional[Dict[str, Any]]: + if takes_kwargs or v is None: + return v + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v.keys())) + raise TypeError(f'unexpected keyword argument{plural}: {keys}') + + @validator(V_POSITIONAL_ONLY_NAME, check_fields=False, allow_reuse=True) + def check_positional_only(cls, v: Optional[List[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'positional-only argument{plural} passed as keyword argument{plural}: {keys}') + + @validator(V_DUPLICATE_KWARGS, check_fields=False, allow_reuse=True) + def check_duplicate_kwargs(cls, v: Optional[List[str]]) -> None: + if v is None: + return + + plural = '' if len(v) == 1 else 's' + keys = ', '.join(map(repr, v)) + raise TypeError(f'multiple values for argument{plural}: {keys}') + + class Config(CustomConfig): + extra = getattr(CustomConfig, 'extra', Extra.forbid) + + self.model = create_model(to_camel(self.raw_function.__name__), __base__=DecoratorBaseModel, **fields) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py b/venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..5f6f217500d8ca1e7717f59ae372ee9a27660fe3 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/env_settings.py @@ -0,0 +1,350 @@ +import os +import warnings +from pathlib import Path +from typing import AbstractSet, Any, Callable, ClassVar, Dict, List, Mapping, Optional, Tuple, Type, Union + +from pydantic.v1.config import BaseConfig, Extra +from pydantic.v1.fields import ModelField +from pydantic.v1.main import BaseModel +from pydantic.v1.types import JsonWrapper +from pydantic.v1.typing import StrPath, display_as_type, get_origin, is_union +from pydantic.v1.utils import deep_update, lenient_issubclass, path_type, sequence_like + +env_file_sentinel = str(object()) + +SettingsSourceCallable = Callable[['BaseSettings'], Dict[str, Any]] +DotenvType = Union[StrPath, List[StrPath], Tuple[StrPath, ...]] + + +class SettingsError(ValueError): + pass + + +class BaseSettings(BaseModel): + """ + Base class for settings, allowing values to be overridden by environment variables. + + This is useful in production for secrets you do not wish to save in code, it plays nicely with docker(-compose), + Heroku and any 12 factor app design. 
+ """ + + def __init__( + __pydantic_self__, + _env_file: Optional[DotenvType] = env_file_sentinel, + _env_file_encoding: Optional[str] = None, + _env_nested_delimiter: Optional[str] = None, + _secrets_dir: Optional[StrPath] = None, + **values: Any, + ) -> None: + # Uses something other than `self` the first arg to allow "self" as a settable attribute + super().__init__( + **__pydantic_self__._build_values( + values, + _env_file=_env_file, + _env_file_encoding=_env_file_encoding, + _env_nested_delimiter=_env_nested_delimiter, + _secrets_dir=_secrets_dir, + ) + ) + + def _build_values( + self, + init_kwargs: Dict[str, Any], + _env_file: Optional[DotenvType] = None, + _env_file_encoding: Optional[str] = None, + _env_nested_delimiter: Optional[str] = None, + _secrets_dir: Optional[StrPath] = None, + ) -> Dict[str, Any]: + # Configure built-in sources + init_settings = InitSettingsSource(init_kwargs=init_kwargs) + env_settings = EnvSettingsSource( + env_file=(_env_file if _env_file != env_file_sentinel else self.__config__.env_file), + env_file_encoding=( + _env_file_encoding if _env_file_encoding is not None else self.__config__.env_file_encoding + ), + env_nested_delimiter=( + _env_nested_delimiter if _env_nested_delimiter is not None else self.__config__.env_nested_delimiter + ), + env_prefix_len=len(self.__config__.env_prefix), + ) + file_secret_settings = SecretsSettingsSource(secrets_dir=_secrets_dir or self.__config__.secrets_dir) + # Provide a hook to set built-in sources priority and add / remove sources + sources = self.__config__.customise_sources( + init_settings=init_settings, env_settings=env_settings, file_secret_settings=file_secret_settings + ) + if sources: + return deep_update(*reversed([source(self) for source in sources])) + else: + # no one should mean to do this, but I think returning an empty dict is marginally preferable + # to an informative error and much better than a confusing error + return {} + + class Config(BaseConfig): + env_prefix: str = '' + env_file: Optional[DotenvType] = None + env_file_encoding: Optional[str] = None + env_nested_delimiter: Optional[str] = None + secrets_dir: Optional[StrPath] = None + validate_all: bool = True + extra: Extra = Extra.forbid + arbitrary_types_allowed: bool = True + case_sensitive: bool = False + + @classmethod + def prepare_field(cls, field: ModelField) -> None: + env_names: Union[List[str], AbstractSet[str]] + field_info_from_config = cls.get_field_info(field.name) + + env = field_info_from_config.get('env') or field.field_info.extra.get('env') + if env is None: + if field.has_alias: + warnings.warn( + 'aliases are no longer used by BaseSettings to define which environment variables to read. ' + 'Instead use the "env" field setting. 
' + 'See https://pydantic-docs.helpmanual.io/usage/settings/#environment-variable-names', + FutureWarning, + ) + env_names = {cls.env_prefix + field.name} + elif isinstance(env, str): + env_names = {env} + elif isinstance(env, (set, frozenset)): + env_names = env + elif sequence_like(env): + env_names = list(env) + else: + raise TypeError(f'invalid field env: {env!r} ({display_as_type(env)}); should be string, list or set') + + if not cls.case_sensitive: + env_names = env_names.__class__(n.lower() for n in env_names) + field.field_info.extra['env_names'] = env_names + + @classmethod + def customise_sources( + cls, + init_settings: SettingsSourceCallable, + env_settings: SettingsSourceCallable, + file_secret_settings: SettingsSourceCallable, + ) -> Tuple[SettingsSourceCallable, ...]: + return init_settings, env_settings, file_secret_settings + + @classmethod + def parse_env_var(cls, field_name: str, raw_val: str) -> Any: + return cls.json_loads(raw_val) + + # populated by the metaclass using the Config class defined above, annotated here to help IDEs only + __config__: ClassVar[Type[Config]] + + +class InitSettingsSource: + __slots__ = ('init_kwargs',) + + def __init__(self, init_kwargs: Dict[str, Any]): + self.init_kwargs = init_kwargs + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + return self.init_kwargs + + def __repr__(self) -> str: + return f'InitSettingsSource(init_kwargs={self.init_kwargs!r})' + + +class EnvSettingsSource: + __slots__ = ('env_file', 'env_file_encoding', 'env_nested_delimiter', 'env_prefix_len') + + def __init__( + self, + env_file: Optional[DotenvType], + env_file_encoding: Optional[str], + env_nested_delimiter: Optional[str] = None, + env_prefix_len: int = 0, + ): + self.env_file: Optional[DotenvType] = env_file + self.env_file_encoding: Optional[str] = env_file_encoding + self.env_nested_delimiter: Optional[str] = env_nested_delimiter + self.env_prefix_len: int = env_prefix_len + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: # noqa C901 + """ + Build environment variables suitable for passing to the Model. 
+ """ + d: Dict[str, Any] = {} + + if settings.__config__.case_sensitive: + env_vars: Mapping[str, Optional[str]] = os.environ + else: + env_vars = {k.lower(): v for k, v in os.environ.items()} + + dotenv_vars = self._read_env_files(settings.__config__.case_sensitive) + if dotenv_vars: + env_vars = {**dotenv_vars, **env_vars} + + for field in settings.__fields__.values(): + env_val: Optional[str] = None + for env_name in field.field_info.extra['env_names']: + env_val = env_vars.get(env_name) + if env_val is not None: + break + + is_complex, allow_parse_failure = self.field_is_complex(field) + if is_complex: + if env_val is None: + # field is complex but no value found so far, try explode_env_vars + env_val_built = self.explode_env_vars(field, env_vars) + if env_val_built: + d[field.alias] = env_val_built + else: + # field is complex and there's a value, decode that as JSON, then add explode_env_vars + try: + env_val = settings.__config__.parse_env_var(field.name, env_val) + except ValueError as e: + if not allow_parse_failure: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + if isinstance(env_val, dict): + d[field.alias] = deep_update(env_val, self.explode_env_vars(field, env_vars)) + else: + d[field.alias] = env_val + elif env_val is not None: + # simplest case, field is not complex, we only need to add the value if it was found + d[field.alias] = env_val + + return d + + def _read_env_files(self, case_sensitive: bool) -> Dict[str, Optional[str]]: + env_files = self.env_file + if env_files is None: + return {} + + if isinstance(env_files, (str, os.PathLike)): + env_files = [env_files] + + dotenv_vars = {} + for env_file in env_files: + env_path = Path(env_file).expanduser() + if env_path.is_file(): + dotenv_vars.update( + read_env_file(env_path, encoding=self.env_file_encoding, case_sensitive=case_sensitive) + ) + + return dotenv_vars + + def field_is_complex(self, field: ModelField) -> Tuple[bool, bool]: + """ + Find out if a field is complex, and if so whether JSON errors should be ignored + """ + if lenient_issubclass(field.annotation, JsonWrapper): + return False, False + + if field.is_complex(): + allow_parse_failure = False + elif is_union(get_origin(field.type_)) and field.sub_fields and any(f.is_complex() for f in field.sub_fields): + allow_parse_failure = True + else: + return False, False + + return True, allow_parse_failure + + def explode_env_vars(self, field: ModelField, env_vars: Mapping[str, Optional[str]]) -> Dict[str, Any]: + """ + Process env_vars and extract the values of keys containing env_nested_delimiter into nested dictionaries. + + This is applied to a single field, hence filtering by env_var prefix. 
+ """ + prefixes = [f'{env_name}{self.env_nested_delimiter}' for env_name in field.field_info.extra['env_names']] + result: Dict[str, Any] = {} + for env_name, env_val in env_vars.items(): + if not any(env_name.startswith(prefix) for prefix in prefixes): + continue + # we remove the prefix before splitting in case the prefix has characters in common with the delimiter + env_name_without_prefix = env_name[self.env_prefix_len :] + _, *keys, last_key = env_name_without_prefix.split(self.env_nested_delimiter) + env_var = result + for key in keys: + env_var = env_var.setdefault(key, {}) + env_var[last_key] = env_val + + return result + + def __repr__(self) -> str: + return ( + f'EnvSettingsSource(env_file={self.env_file!r}, env_file_encoding={self.env_file_encoding!r}, ' + f'env_nested_delimiter={self.env_nested_delimiter!r})' + ) + + +class SecretsSettingsSource: + __slots__ = ('secrets_dir',) + + def __init__(self, secrets_dir: Optional[StrPath]): + self.secrets_dir: Optional[StrPath] = secrets_dir + + def __call__(self, settings: BaseSettings) -> Dict[str, Any]: + """ + Build fields from "secrets" files. + """ + secrets: Dict[str, Optional[str]] = {} + + if self.secrets_dir is None: + return secrets + + secrets_path = Path(self.secrets_dir).expanduser() + + if not secrets_path.exists(): + warnings.warn(f'directory "{secrets_path}" does not exist') + return secrets + + if not secrets_path.is_dir(): + raise SettingsError(f'secrets_dir must reference a directory, not a {path_type(secrets_path)}') + + for field in settings.__fields__.values(): + for env_name in field.field_info.extra['env_names']: + path = find_case_path(secrets_path, env_name, settings.__config__.case_sensitive) + if not path: + # path does not exist, we currently don't return a warning for this + continue + + if path.is_file(): + secret_value = path.read_text().strip() + if field.is_complex(): + try: + secret_value = settings.__config__.parse_env_var(field.name, secret_value) + except ValueError as e: + raise SettingsError(f'error parsing env var "{env_name}"') from e + + secrets[field.alias] = secret_value + else: + warnings.warn( + f'attempted to load secret file "{path}" but found a {path_type(path)} instead.', + stacklevel=4, + ) + return secrets + + def __repr__(self) -> str: + return f'SecretsSettingsSource(secrets_dir={self.secrets_dir!r})' + + +def read_env_file( + file_path: StrPath, *, encoding: str = None, case_sensitive: bool = False +) -> Dict[str, Optional[str]]: + try: + from dotenv import dotenv_values + except ImportError as e: + raise ImportError('python-dotenv is not installed, run `pip install pydantic[dotenv]`') from e + + file_vars: Dict[str, Optional[str]] = dotenv_values(file_path, encoding=encoding or 'utf8') + if not case_sensitive: + return {k.lower(): v for k, v in file_vars.items()} + else: + return file_vars + + +def find_case_path(dir_path: Path, file_name: str, case_sensitive: bool) -> Optional[Path]: + """ + Find a file within path's directory matching filename, optionally ignoring case. 
+ """ + for f in dir_path.iterdir(): + if f.name == file_name: + return f + elif not case_sensitive and f.name.lower() == file_name.lower(): + return f + return None diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py b/venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..bc7f263146ee4f1701391a2436ab38e5e16f0e1a --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/error_wrappers.py @@ -0,0 +1,161 @@ +import json +from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Sequence, Tuple, Type, Union + +from pydantic.v1.json import pydantic_encoder +from pydantic.v1.utils import Representation + +if TYPE_CHECKING: + from typing_extensions import TypedDict + + from pydantic.v1.config import BaseConfig + from pydantic.v1.types import ModelOrDc + from pydantic.v1.typing import ReprArgs + + Loc = Tuple[Union[int, str], ...] + + class _ErrorDictRequired(TypedDict): + loc: Loc + msg: str + type: str + + class ErrorDict(_ErrorDictRequired, total=False): + ctx: Dict[str, Any] + + +__all__ = 'ErrorWrapper', 'ValidationError' + + +class ErrorWrapper(Representation): + __slots__ = 'exc', '_loc' + + def __init__(self, exc: Exception, loc: Union[str, 'Loc']) -> None: + self.exc = exc + self._loc = loc + + def loc_tuple(self) -> 'Loc': + if isinstance(self._loc, tuple): + return self._loc + else: + return (self._loc,) + + def __repr_args__(self) -> 'ReprArgs': + return [('exc', self.exc), ('loc', self.loc_tuple())] + + +# ErrorList is something like Union[List[Union[List[ErrorWrapper], ErrorWrapper]], ErrorWrapper] +# but recursive, therefore just use: +ErrorList = Union[Sequence[Any], ErrorWrapper] + + +class ValidationError(Representation, ValueError): + __slots__ = 'raw_errors', 'model', '_error_cache' + + def __init__(self, errors: Sequence[ErrorList], model: 'ModelOrDc') -> None: + self.raw_errors = errors + self.model = model + self._error_cache: Optional[List['ErrorDict']] = None + + def errors(self) -> List['ErrorDict']: + if self._error_cache is None: + try: + config = self.model.__config__ # type: ignore + except AttributeError: + config = self.model.__pydantic_model__.__config__ # type: ignore + self._error_cache = list(flatten_errors(self.raw_errors, config)) + return self._error_cache + + def json(self, *, indent: Union[None, int, str] = 2) -> str: + return json.dumps(self.errors(), indent=indent, default=pydantic_encoder) + + def __str__(self) -> str: + errors = self.errors() + no_errors = len(errors) + return ( + f'{no_errors} validation error{"" if no_errors == 1 else "s"} for {self.model.__name__}\n' + f'{display_errors(errors)}' + ) + + def __repr_args__(self) -> 'ReprArgs': + return [('model', self.model.__name__), ('errors', self.errors())] + + +def display_errors(errors: List['ErrorDict']) -> str: + return '\n'.join(f'{_display_error_loc(e)}\n {e["msg"]} ({_display_error_type_and_ctx(e)})' for e in errors) + + +def _display_error_loc(error: 'ErrorDict') -> str: + return ' -> '.join(str(e) for e in error['loc']) + + +def _display_error_type_and_ctx(error: 'ErrorDict') -> str: + t = 'type=' + error['type'] + ctx = error.get('ctx') + if ctx: + return t + ''.join(f'; {k}={v}' for k, v in ctx.items()) + else: + return t + + +def flatten_errors( + errors: Sequence[Any], config: Type['BaseConfig'], loc: Optional['Loc'] = None +) -> Generator['ErrorDict', None, None]: + for error in errors: + if isinstance(error, ErrorWrapper): + if loc: + error_loc = loc + 
error.loc_tuple() + else: + error_loc = error.loc_tuple() + + if isinstance(error.exc, ValidationError): + yield from flatten_errors(error.exc.raw_errors, config, error_loc) + else: + yield error_dict(error.exc, config, error_loc) + elif isinstance(error, list): + yield from flatten_errors(error, config, loc=loc) + else: + raise RuntimeError(f'Unknown error object: {error}') + + +def error_dict(exc: Exception, config: Type['BaseConfig'], loc: 'Loc') -> 'ErrorDict': + type_ = get_exc_type(exc.__class__) + msg_template = config.error_msg_templates.get(type_) or getattr(exc, 'msg_template', None) + ctx = exc.__dict__ + if msg_template: + msg = msg_template.format(**ctx) + else: + msg = str(exc) + + d: 'ErrorDict' = {'loc': loc, 'msg': msg, 'type': type_} + + if ctx: + d['ctx'] = ctx + + return d + + +_EXC_TYPE_CACHE: Dict[Type[Exception], str] = {} + + +def get_exc_type(cls: Type[Exception]) -> str: + # slightly more efficient than using lru_cache since we don't need to worry about the cache filling up + try: + return _EXC_TYPE_CACHE[cls] + except KeyError: + r = _get_exc_type(cls) + _EXC_TYPE_CACHE[cls] = r + return r + + +def _get_exc_type(cls: Type[Exception]) -> str: + if issubclass(cls, AssertionError): + return 'assertion_error' + + base_name = 'type_error' if issubclass(cls, TypeError) else 'value_error' + if cls in (TypeError, ValueError): + # just TypeError or ValueError, no extra code + return base_name + + # if it's not a TypeError or ValueError, we just take the lowercase of the exception name + # no chaining or snake case logic, use "code" for more complex error types. + code = getattr(cls, 'code', None) or cls.__name__.replace('Error', '').lower() + return base_name + '.' + code diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/errors.py b/venv/lib/python3.12/site-packages/pydantic/v1/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..6e8644258a6ccf999a19f69c359452a109e54ffe --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/errors.py @@ -0,0 +1,646 @@ +from decimal import Decimal +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Sequence, Set, Tuple, Type, Union + +from pydantic.v1.typing import display_as_type + +if TYPE_CHECKING: + from pydantic.v1.typing import DictStrAny + +# explicitly state exports to avoid "from pydantic.v1.errors import *" also importing Decimal, Path etc. 
+__all__ = ( + 'PydanticTypeError', + 'PydanticValueError', + 'ConfigError', + 'MissingError', + 'ExtraError', + 'NoneIsNotAllowedError', + 'NoneIsAllowedError', + 'WrongConstantError', + 'NotNoneError', + 'BoolError', + 'BytesError', + 'DictError', + 'EmailError', + 'UrlError', + 'UrlSchemeError', + 'UrlSchemePermittedError', + 'UrlUserInfoError', + 'UrlHostError', + 'UrlHostTldError', + 'UrlPortError', + 'UrlExtraError', + 'EnumError', + 'IntEnumError', + 'EnumMemberError', + 'IntegerError', + 'FloatError', + 'PathError', + 'PathNotExistsError', + 'PathNotAFileError', + 'PathNotADirectoryError', + 'PyObjectError', + 'SequenceError', + 'ListError', + 'SetError', + 'FrozenSetError', + 'TupleError', + 'TupleLengthError', + 'ListMinLengthError', + 'ListMaxLengthError', + 'ListUniqueItemsError', + 'SetMinLengthError', + 'SetMaxLengthError', + 'FrozenSetMinLengthError', + 'FrozenSetMaxLengthError', + 'AnyStrMinLengthError', + 'AnyStrMaxLengthError', + 'StrError', + 'StrRegexError', + 'NumberNotGtError', + 'NumberNotGeError', + 'NumberNotLtError', + 'NumberNotLeError', + 'NumberNotMultipleError', + 'DecimalError', + 'DecimalIsNotFiniteError', + 'DecimalMaxDigitsError', + 'DecimalMaxPlacesError', + 'DecimalWholeDigitsError', + 'DateTimeError', + 'DateError', + 'DateNotInThePastError', + 'DateNotInTheFutureError', + 'TimeError', + 'DurationError', + 'HashableError', + 'UUIDError', + 'UUIDVersionError', + 'ArbitraryTypeError', + 'ClassError', + 'SubclassError', + 'JsonError', + 'JsonTypeError', + 'PatternError', + 'DataclassTypeError', + 'CallableError', + 'IPvAnyAddressError', + 'IPvAnyInterfaceError', + 'IPvAnyNetworkError', + 'IPv4AddressError', + 'IPv6AddressError', + 'IPv4NetworkError', + 'IPv6NetworkError', + 'IPv4InterfaceError', + 'IPv6InterfaceError', + 'ColorError', + 'StrictBoolError', + 'NotDigitError', + 'LuhnValidationError', + 'InvalidLengthForBrand', + 'InvalidByteSize', + 'InvalidByteSizeUnit', + 'MissingDiscriminator', + 'InvalidDiscriminator', +) + + +def cls_kwargs(cls: Type['PydanticErrorMixin'], ctx: 'DictStrAny') -> 'PydanticErrorMixin': + """ + For built-in exceptions like ValueError or TypeError, we need to implement + __reduce__ to override the default behaviour (instead of __getstate__/__setstate__) + By default pickle protocol 2 calls `cls.__new__(cls, *args)`. + Since we only use kwargs, we need a little constructor to change that. 
+ Note: the callable can't be a lambda as pickle looks in the namespace to find it + """ + return cls(**ctx) + + +class PydanticErrorMixin: + code: str + msg_template: str + + def __init__(self, **ctx: Any) -> None: + self.__dict__ = ctx + + def __str__(self) -> str: + return self.msg_template.format(**self.__dict__) + + def __reduce__(self) -> Tuple[Callable[..., 'PydanticErrorMixin'], Tuple[Type['PydanticErrorMixin'], 'DictStrAny']]: + return cls_kwargs, (self.__class__, self.__dict__) + + +class PydanticTypeError(PydanticErrorMixin, TypeError): + pass + + +class PydanticValueError(PydanticErrorMixin, ValueError): + pass + + +class ConfigError(RuntimeError): + pass + + +class MissingError(PydanticValueError): + msg_template = 'field required' + + +class ExtraError(PydanticValueError): + msg_template = 'extra fields not permitted' + + +class NoneIsNotAllowedError(PydanticTypeError): + code = 'none.not_allowed' + msg_template = 'none is not an allowed value' + + +class NoneIsAllowedError(PydanticTypeError): + code = 'none.allowed' + msg_template = 'value is not none' + + +class WrongConstantError(PydanticValueError): + code = 'const' + + def __str__(self) -> str: + permitted = ', '.join(repr(v) for v in self.permitted) # type: ignore + return f'unexpected value; permitted: {permitted}' + + +class NotNoneError(PydanticTypeError): + code = 'not_none' + msg_template = 'value is not None' + + +class BoolError(PydanticTypeError): + msg_template = 'value could not be parsed to a boolean' + + +class BytesError(PydanticTypeError): + msg_template = 'byte type expected' + + +class DictError(PydanticTypeError): + msg_template = 'value is not a valid dict' + + +class EmailError(PydanticValueError): + msg_template = 'value is not a valid email address' + + +class UrlError(PydanticValueError): + code = 'url' + + +class UrlSchemeError(UrlError): + code = 'url.scheme' + msg_template = 'invalid or missing URL scheme' + + +class UrlSchemePermittedError(UrlError): + code = 'url.scheme' + msg_template = 'URL scheme not permitted' + + def __init__(self, allowed_schemes: Set[str]): + super().__init__(allowed_schemes=allowed_schemes) + + +class UrlUserInfoError(UrlError): + code = 'url.userinfo' + msg_template = 'userinfo required in URL but missing' + + +class UrlHostError(UrlError): + code = 'url.host' + msg_template = 'URL host invalid' + + +class UrlHostTldError(UrlError): + code = 'url.host' + msg_template = 'URL host invalid, top level domain required' + + +class UrlPortError(UrlError): + code = 'url.port' + msg_template = 'URL port invalid, port cannot exceed 65535' + + +class UrlExtraError(UrlError): + code = 'url.extra' + msg_template = 'URL invalid, extra characters found after valid URL: {extra!r}' + + +class EnumMemberError(PydanticTypeError): + code = 'enum' + + def __str__(self) -> str: + permitted = ', '.join(repr(v.value) for v in self.enum_values) # type: ignore + return f'value is not a valid enumeration member; permitted: {permitted}' + + +class IntegerError(PydanticTypeError): + msg_template = 'value is not a valid integer' + + +class FloatError(PydanticTypeError): + msg_template = 'value is not a valid float' + + +class PathError(PydanticTypeError): + msg_template = 'value is not a valid path' + + +class _PathValueError(PydanticValueError): + def __init__(self, *, path: Path) -> None: + super().__init__(path=str(path)) + + +class PathNotExistsError(_PathValueError): + code = 'path.not_exists' + msg_template = 'file or directory at path "{path}" does not exist' + + +class 
PathNotAFileError(_PathValueError): + code = 'path.not_a_file' + msg_template = 'path "{path}" does not point to a file' + + +class PathNotADirectoryError(_PathValueError): + code = 'path.not_a_directory' + msg_template = 'path "{path}" does not point to a directory' + + +class PyObjectError(PydanticTypeError): + msg_template = 'ensure this value contains valid import path or valid callable: {error_message}' + + +class SequenceError(PydanticTypeError): + msg_template = 'value is not a valid sequence' + + +class IterableError(PydanticTypeError): + msg_template = 'value is not a valid iterable' + + +class ListError(PydanticTypeError): + msg_template = 'value is not a valid list' + + +class SetError(PydanticTypeError): + msg_template = 'value is not a valid set' + + +class FrozenSetError(PydanticTypeError): + msg_template = 'value is not a valid frozenset' + + +class DequeError(PydanticTypeError): + msg_template = 'value is not a valid deque' + + +class TupleError(PydanticTypeError): + msg_template = 'value is not a valid tuple' + + +class TupleLengthError(PydanticValueError): + code = 'tuple.length' + msg_template = 'wrong tuple length {actual_length}, expected {expected_length}' + + def __init__(self, *, actual_length: int, expected_length: int) -> None: + super().__init__(actual_length=actual_length, expected_length=expected_length) + + +class ListMinLengthError(PydanticValueError): + code = 'list.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListMaxLengthError(PydanticValueError): + code = 'list.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class ListUniqueItemsError(PydanticValueError): + code = 'list.unique_items' + msg_template = 'the list has duplicated items' + + +class SetMinLengthError(PydanticValueError): + code = 'set.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class SetMaxLengthError(PydanticValueError): + code = 'set.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMinLengthError(PydanticValueError): + code = 'frozenset.min_items' + msg_template = 'ensure this value has at least {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class FrozenSetMaxLengthError(PydanticValueError): + code = 'frozenset.max_items' + msg_template = 'ensure this value has at most {limit_value} items' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMinLengthError(PydanticValueError): + code = 'any_str.min_length' + msg_template = 'ensure this value has at least {limit_value} characters' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class AnyStrMaxLengthError(PydanticValueError): + code = 'any_str.max_length' + msg_template = 'ensure this value has at most {limit_value} characters' + + def __init__(self, *, limit_value: int) -> None: + super().__init__(limit_value=limit_value) + + +class StrError(PydanticTypeError): + msg_template = 'str type expected' + + 
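Every class in this module follows the same recipe: subclass `PydanticTypeError` or `PydanticValueError`, set `code` and `msg_template`, and pass template context as keyword arguments. The same pattern works for user-defined errors raised from validators; a hedged sketch (the class and field names are illustrative):

from pydantic.v1 import BaseModel, validator
from pydantic.v1.errors import PydanticValueError

class NotAPalindromeError(PydanticValueError):
    code = 'not_a_palindrome'  # reported as "value_error.not_a_palindrome"
    msg_template = '"{wrong_value}" is not a palindrome'

class Word(BaseModel):
    text: str

    @validator('text')
    def check_palindrome(cls, v: str) -> str:
        if v != v[::-1]:
            raise NotAPalindromeError(wrong_value=v)
        return v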
+class StrRegexError(PydanticValueError): + code = 'str.regex' + msg_template = 'string does not match regex "{pattern}"' + + def __init__(self, *, pattern: str) -> None: + super().__init__(pattern=pattern) + + +class _NumberBoundError(PydanticValueError): + def __init__(self, *, limit_value: Union[int, float, Decimal]) -> None: + super().__init__(limit_value=limit_value) + + +class NumberNotGtError(_NumberBoundError): + code = 'number.not_gt' + msg_template = 'ensure this value is greater than {limit_value}' + + +class NumberNotGeError(_NumberBoundError): + code = 'number.not_ge' + msg_template = 'ensure this value is greater than or equal to {limit_value}' + + +class NumberNotLtError(_NumberBoundError): + code = 'number.not_lt' + msg_template = 'ensure this value is less than {limit_value}' + + +class NumberNotLeError(_NumberBoundError): + code = 'number.not_le' + msg_template = 'ensure this value is less than or equal to {limit_value}' + + +class NumberNotFiniteError(PydanticValueError): + code = 'number.not_finite_number' + msg_template = 'ensure this value is a finite number' + + +class NumberNotMultipleError(PydanticValueError): + code = 'number.not_multiple' + msg_template = 'ensure this value is a multiple of {multiple_of}' + + def __init__(self, *, multiple_of: Union[int, float, Decimal]) -> None: + super().__init__(multiple_of=multiple_of) + + +class DecimalError(PydanticTypeError): + msg_template = 'value is not a valid decimal' + + +class DecimalIsNotFiniteError(PydanticValueError): + code = 'decimal.not_finite' + msg_template = 'value is not a valid decimal' + + +class DecimalMaxDigitsError(PydanticValueError): + code = 'decimal.max_digits' + msg_template = 'ensure that there are no more than {max_digits} digits in total' + + def __init__(self, *, max_digits: int) -> None: + super().__init__(max_digits=max_digits) + + +class DecimalMaxPlacesError(PydanticValueError): + code = 'decimal.max_places' + msg_template = 'ensure that there are no more than {decimal_places} decimal places' + + def __init__(self, *, decimal_places: int) -> None: + super().__init__(decimal_places=decimal_places) + + +class DecimalWholeDigitsError(PydanticValueError): + code = 'decimal.whole_digits' + msg_template = 'ensure that there are no more than {whole_digits} digits before the decimal point' + + def __init__(self, *, whole_digits: int) -> None: + super().__init__(whole_digits=whole_digits) + + +class DateTimeError(PydanticValueError): + msg_template = 'invalid datetime format' + + +class DateError(PydanticValueError): + msg_template = 'invalid date format' + + +class DateNotInThePastError(PydanticValueError): + code = 'date.not_in_the_past' + msg_template = 'date is not in the past' + + +class DateNotInTheFutureError(PydanticValueError): + code = 'date.not_in_the_future' + msg_template = 'date is not in the future' + + +class TimeError(PydanticValueError): + msg_template = 'invalid time format' + + +class DurationError(PydanticValueError): + msg_template = 'invalid duration format' + + +class HashableError(PydanticTypeError): + msg_template = 'value is not a valid hashable' + + +class UUIDError(PydanticTypeError): + msg_template = 'value is not a valid uuid' + + +class UUIDVersionError(PydanticValueError): + code = 'uuid.version' + msg_template = 'uuid version {required_version} expected' + + def __init__(self, *, required_version: int) -> None: + super().__init__(required_version=required_version) + + +class ArbitraryTypeError(PydanticTypeError): + code = 'arbitrary_type' + msg_template = 
'instance of {expected_arbitrary_type} expected' + + def __init__(self, *, expected_arbitrary_type: Type[Any]) -> None: + super().__init__(expected_arbitrary_type=display_as_type(expected_arbitrary_type)) + + +class ClassError(PydanticTypeError): + code = 'class' + msg_template = 'a class is expected' + + +class SubclassError(PydanticTypeError): + code = 'subclass' + msg_template = 'subclass of {expected_class} expected' + + def __init__(self, *, expected_class: Type[Any]) -> None: + super().__init__(expected_class=display_as_type(expected_class)) + + +class JsonError(PydanticValueError): + msg_template = 'Invalid JSON' + + +class JsonTypeError(PydanticTypeError): + code = 'json' + msg_template = 'JSON object must be str, bytes or bytearray' + + +class PatternError(PydanticValueError): + code = 'regex_pattern' + msg_template = 'Invalid regular expression' + + +class DataclassTypeError(PydanticTypeError): + code = 'dataclass' + msg_template = 'instance of {class_name}, tuple or dict expected' + + +class CallableError(PydanticTypeError): + msg_template = '{value} is not callable' + + +class EnumError(PydanticTypeError): + code = 'enum_instance' + msg_template = '{value} is not a valid Enum instance' + + +class IntEnumError(PydanticTypeError): + code = 'int_enum_instance' + msg_template = '{value} is not a valid IntEnum instance' + + +class IPvAnyAddressError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 address' + + +class IPvAnyInterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 interface' + + +class IPvAnyNetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv4 or IPv6 network' + + +class IPv4AddressError(PydanticValueError): + msg_template = 'value is not a valid IPv4 address' + + +class IPv6AddressError(PydanticValueError): + msg_template = 'value is not a valid IPv6 address' + + +class IPv4NetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv4 network' + + +class IPv6NetworkError(PydanticValueError): + msg_template = 'value is not a valid IPv6 network' + + +class IPv4InterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv4 interface' + + +class IPv6InterfaceError(PydanticValueError): + msg_template = 'value is not a valid IPv6 interface' + + +class ColorError(PydanticValueError): + msg_template = 'value is not a valid color: {reason}' + + +class StrictBoolError(PydanticValueError): + msg_template = 'value is not a valid boolean' + + +class NotDigitError(PydanticValueError): + code = 'payment_card_number.digits' + msg_template = 'card number is not all digits' + + +class LuhnValidationError(PydanticValueError): + code = 'payment_card_number.luhn_check' + msg_template = 'card number is not luhn valid' + + +class InvalidLengthForBrand(PydanticValueError): + code = 'payment_card_number.invalid_length_for_brand' + msg_template = 'Length for a {brand} card must be {required_length}' + + +class InvalidByteSize(PydanticValueError): + msg_template = 'could not parse value and unit from byte string' + + +class InvalidByteSizeUnit(PydanticValueError): + msg_template = 'could not interpret byte unit: {unit}' + + +class MissingDiscriminator(PydanticValueError): + code = 'discriminated_union.missing_discriminator' + msg_template = 'Discriminator {discriminator_key!r} is missing in value' + + +class InvalidDiscriminator(PydanticValueError): + code = 'discriminated_union.invalid_discriminator' + msg_template = ( + 'No match for discriminator {discriminator_key!r} and value 
{discriminator_value!r} ' + '(allowed values: {allowed_values})' + ) + + def __init__(self, *, discriminator_key: str, discriminator_value: Any, allowed_values: Sequence[Any]) -> None: + super().__init__( + discriminator_key=discriminator_key, + discriminator_value=discriminator_value, + allowed_values=', '.join(map(repr, allowed_values)), + ) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/fields.py b/venv/lib/python3.12/site-packages/pydantic/v1/fields.py new file mode 100644 index 0000000000000000000000000000000000000000..002b60cde1bf28bbbf20af824638503d598fbc60 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/fields.py @@ -0,0 +1,1253 @@ +import copy +import re +from collections import Counter as CollectionCounter, defaultdict, deque +from collections.abc import Callable, Hashable as CollectionsHashable, Iterable as CollectionsIterable +from typing import ( + TYPE_CHECKING, + Any, + Counter, + DefaultDict, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Iterable, + Iterator, + List, + Mapping, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import Annotated, Final + +from pydantic.v1 import errors as errors_ +from pydantic.v1.class_validators import Validator, make_generic_validator, prep_validators +from pydantic.v1.error_wrappers import ErrorWrapper +from pydantic.v1.errors import ConfigError, InvalidDiscriminator, MissingDiscriminator, NoneIsNotAllowedError +from pydantic.v1.types import Json, JsonWrapper +from pydantic.v1.typing import ( + NoArgAnyCallable, + convert_generics, + display_as_type, + get_args, + get_origin, + is_finalvar, + is_literal_type, + is_new_type, + is_none_type, + is_typeddict, + is_typeddict_special, + is_union, + new_type_supertype, +) +from pydantic.v1.utils import ( + PyObjectStr, + Representation, + ValueItems, + get_discriminator_alias_and_values, + get_unique_discriminator_alias, + lenient_isinstance, + lenient_issubclass, + sequence_like, + smart_deepcopy, +) +from pydantic.v1.validators import constant_validator, dict_validator, find_validators, validate_json + +Required: Any = Ellipsis + +T = TypeVar('T') + + +class UndefinedType: + def __repr__(self) -> str: + return 'PydanticUndefined' + + def __copy__(self: T) -> T: + return self + + def __reduce__(self) -> str: + return 'Undefined' + + def __deepcopy__(self: T, _: Any) -> T: + return self + + +Undefined = UndefinedType() + +if TYPE_CHECKING: + from pydantic.v1.class_validators import ValidatorsList + from pydantic.v1.config import BaseConfig + from pydantic.v1.error_wrappers import ErrorList + from pydantic.v1.types import ModelOrDc + from pydantic.v1.typing import AbstractSetIntStr, MappingIntStrAny, ReprArgs + + ValidateReturn = Tuple[Optional[Any], Optional[ErrorList]] + LocStr = Union[Tuple[Union[int, str], ...], str] + BoolUndefined = Union[bool, UndefinedType] + + +class FieldInfo(Representation): + """ + Captures extra information about a field. 
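+
+    Normally created via the ``Field()`` function below rather than instantiated
+    directly. A minimal usage sketch (illustrative only; the model and field
+    names are hypothetical, not taken from the library source):
+
+        class User(BaseModel):
+            name: str = Field(..., min_length=1, description='display name')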
+ """ + + __slots__ = ( + 'default', + 'default_factory', + 'alias', + 'alias_priority', + 'title', + 'description', + 'exclude', + 'include', + 'const', + 'gt', + 'ge', + 'lt', + 'le', + 'multiple_of', + 'allow_inf_nan', + 'max_digits', + 'decimal_places', + 'min_items', + 'max_items', + 'unique_items', + 'min_length', + 'max_length', + 'allow_mutation', + 'repr', + 'regex', + 'discriminator', + 'extra', + ) + + # field constraints with the default value, it's also used in update_from_config below + __field_constraints__ = { + 'min_length': None, + 'max_length': None, + 'regex': None, + 'gt': None, + 'lt': None, + 'ge': None, + 'le': None, + 'multiple_of': None, + 'allow_inf_nan': None, + 'max_digits': None, + 'decimal_places': None, + 'min_items': None, + 'max_items': None, + 'unique_items': None, + 'allow_mutation': True, + } + + def __init__(self, default: Any = Undefined, **kwargs: Any) -> None: + self.default = default + self.default_factory = kwargs.pop('default_factory', None) + self.alias = kwargs.pop('alias', None) + self.alias_priority = kwargs.pop('alias_priority', 2 if self.alias is not None else None) + self.title = kwargs.pop('title', None) + self.description = kwargs.pop('description', None) + self.exclude = kwargs.pop('exclude', None) + self.include = kwargs.pop('include', None) + self.const = kwargs.pop('const', None) + self.gt = kwargs.pop('gt', None) + self.ge = kwargs.pop('ge', None) + self.lt = kwargs.pop('lt', None) + self.le = kwargs.pop('le', None) + self.multiple_of = kwargs.pop('multiple_of', None) + self.allow_inf_nan = kwargs.pop('allow_inf_nan', None) + self.max_digits = kwargs.pop('max_digits', None) + self.decimal_places = kwargs.pop('decimal_places', None) + self.min_items = kwargs.pop('min_items', None) + self.max_items = kwargs.pop('max_items', None) + self.unique_items = kwargs.pop('unique_items', None) + self.min_length = kwargs.pop('min_length', None) + self.max_length = kwargs.pop('max_length', None) + self.allow_mutation = kwargs.pop('allow_mutation', True) + self.regex = kwargs.pop('regex', None) + self.discriminator = kwargs.pop('discriminator', None) + self.repr = kwargs.pop('repr', True) + self.extra = kwargs + + def __repr_args__(self) -> 'ReprArgs': + field_defaults_to_hide: Dict[str, Any] = { + 'repr': True, + **self.__field_constraints__, + } + + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v != field_defaults_to_hide.get(a, None)] + + def get_constraints(self) -> Set[str]: + """ + Gets the constraints set on the field by comparing the constraint value with its default value + + :return: the constraints set on field_info + """ + return {attr for attr, default in self.__field_constraints__.items() if getattr(self, attr) != default} + + def update_from_config(self, from_config: Dict[str, Any]) -> None: + """ + Update this FieldInfo based on a dict from get_field_info, only fields which have not been set are dated. + """ + for attr_name, value in from_config.items(): + try: + current_value = getattr(self, attr_name) + except AttributeError: + # attr_name is not an attribute of FieldInfo, it should therefore be added to extra + # (except if extra already has this value!) 
+                self.extra.setdefault(attr_name, value)
+            else:
+                if current_value is self.__field_constraints__.get(attr_name, None):
+                    setattr(self, attr_name, value)
+                elif attr_name == 'exclude':
+                    self.exclude = ValueItems.merge(value, current_value)
+                elif attr_name == 'include':
+                    self.include = ValueItems.merge(value, current_value, intersect=True)
+
+    def _validate(self) -> None:
+        if self.default is not Undefined and self.default_factory is not None:
+            raise ValueError('cannot specify both default and default_factory')
+
+
+def Field(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+    alias: Optional[str] = None,
+    title: Optional[str] = None,
+    description: Optional[str] = None,
+    exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+    include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny', Any]] = None,
+    const: Optional[bool] = None,
+    gt: Optional[float] = None,
+    ge: Optional[float] = None,
+    lt: Optional[float] = None,
+    le: Optional[float] = None,
+    multiple_of: Optional[float] = None,
+    allow_inf_nan: Optional[bool] = None,
+    max_digits: Optional[int] = None,
+    decimal_places: Optional[int] = None,
+    min_items: Optional[int] = None,
+    max_items: Optional[int] = None,
+    unique_items: Optional[bool] = None,
+    min_length: Optional[int] = None,
+    max_length: Optional[int] = None,
+    allow_mutation: bool = True,
+    regex: Optional[str] = None,
+    discriminator: Optional[str] = None,
+    repr: bool = True,
+    **extra: Any,
+) -> Any:
+    """
+    Used to provide extra information about a field, either for the model schema or complex validation. Some arguments
+    apply only to number fields (``int``, ``float``, ``Decimal``) and some apply only to ``str``.
+
+    :param default: since this is replacing the field’s default, its first argument is used
+      to set the default; use ellipsis (``...``) to indicate the field is required
+    :param default_factory: callable that will be called when a default value is needed for this field
+      If both `default` and `default_factory` are set, an error is raised.
+    :param alias: the public name of the field
+    :param title: can be any string, used in the schema
+    :param description: can be any string, used in the schema
+    :param exclude: exclude this field while dumping.
+      Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
+    :param include: include this field while dumping.
+      Takes same values as the ``include`` and ``exclude`` arguments on the ``.dict`` method.
+    :param const: this field is required and *must* take its default value
+    :param gt: only applies to numbers, requires the field to be "greater than". The schema
+      will have an ``exclusiveMinimum`` validation keyword
+    :param ge: only applies to numbers, requires the field to be "greater than or equal to". The
+      schema will have a ``minimum`` validation keyword
+    :param lt: only applies to numbers, requires the field to be "less than". The schema
+      will have an ``exclusiveMaximum`` validation keyword
+    :param le: only applies to numbers, requires the field to be "less than or equal to". The
+      schema will have a ``maximum`` validation keyword
+    :param multiple_of: only applies to numbers, requires the field to be "a multiple of". The
+      schema will have a ``multipleOf`` validation keyword
+    :param allow_inf_nan: only applies to numbers, allows the field to be NaN or infinity (+inf or -inf),
+      which is a valid Python float. Default True, set to False for compatibility with JSON.
+    :param max_digits: only applies to Decimals, requires the field to have a maximum number
+      of digits within the decimal. It does not include a zero before the decimal point or trailing decimal zeroes.
+    :param decimal_places: only applies to Decimals, requires the field to have at most the given number of
+      decimal places. It does not include trailing decimal zeroes.
+    :param min_items: only applies to lists, requires the field to have a minimum number of
+      elements. The schema will have a ``minItems`` validation keyword
+    :param max_items: only applies to lists, requires the field to have a maximum number of
+      elements. The schema will have a ``maxItems`` validation keyword
+    :param unique_items: only applies to lists, requires the field not to have duplicated
+      elements. The schema will have a ``uniqueItems`` validation keyword
+    :param min_length: only applies to strings, requires the field to have a minimum length. The
+      schema will have a ``minLength`` validation keyword
+    :param max_length: only applies to strings, requires the field to have a maximum length. The
+      schema will have a ``maxLength`` validation keyword
+    :param allow_mutation: a boolean which defaults to True. When False, a TypeError is raised if the field is
+      assigned on an instance. The BaseModel Config must set validate_assignment to True
+    :param regex: only applies to strings, requires the field to match against a regular expression
+      pattern string. The schema will have a ``pattern`` validation keyword
+    :param discriminator: only useful with a (discriminated a.k.a. tagged) `Union` of sub models with a common field.
+      The `discriminator` is the name of this common field to shorten validation and improve generated schema
+    :param repr: show this field in the representation
+    :param **extra: any additional keyword arguments will be added as is to the schema
+    """
+    field_info = FieldInfo(
+        default,
+        default_factory=default_factory,
+        alias=alias,
+        title=title,
+        description=description,
+        exclude=exclude,
+        include=include,
+        const=const,
+        gt=gt,
+        ge=ge,
+        lt=lt,
+        le=le,
+        multiple_of=multiple_of,
+        allow_inf_nan=allow_inf_nan,
+        max_digits=max_digits,
+        decimal_places=decimal_places,
+        min_items=min_items,
+        max_items=max_items,
+        unique_items=unique_items,
+        min_length=min_length,
+        max_length=max_length,
+        allow_mutation=allow_mutation,
+        regex=regex,
+        discriminator=discriminator,
+        repr=repr,
+        **extra,
+    )
+    field_info._validate()
+    return field_info
+
+
+# used to be an enum but changed to ints for a small performance improvement (less access overhead)
+SHAPE_SINGLETON = 1
+SHAPE_LIST = 2
+SHAPE_SET = 3
+SHAPE_MAPPING = 4
+SHAPE_TUPLE = 5
+SHAPE_TUPLE_ELLIPSIS = 6
+SHAPE_SEQUENCE = 7
+SHAPE_FROZENSET = 8
+SHAPE_ITERABLE = 9
+SHAPE_GENERIC = 10
+SHAPE_DEQUE = 11
+SHAPE_DICT = 12
+SHAPE_DEFAULTDICT = 13
+SHAPE_COUNTER = 14
+SHAPE_NAME_LOOKUP = {
+    SHAPE_LIST: 'List[{}]',
+    SHAPE_SET: 'Set[{}]',
+    SHAPE_TUPLE_ELLIPSIS: 'Tuple[{}, ...]',
+    SHAPE_SEQUENCE: 'Sequence[{}]',
+    SHAPE_FROZENSET: 'FrozenSet[{}]',
+    SHAPE_ITERABLE: 'Iterable[{}]',
+    SHAPE_DEQUE: 'Deque[{}]',
+    SHAPE_DICT: 'Dict[{}]',
+    SHAPE_DEFAULTDICT: 'DefaultDict[{}]',
+    SHAPE_COUNTER: 'Counter[{}]',
+}
+
+MAPPING_LIKE_SHAPES: Set[int] = {SHAPE_DEFAULTDICT, SHAPE_DICT, SHAPE_MAPPING, SHAPE_COUNTER}
+
+
+class ModelField(Representation):
+    __slots__ = (
+        'type_',
+        'outer_type_',
+        'annotation',
+        'sub_fields',
+        'sub_fields_mapping',
+        'key_field',
+        'validators',
+        'pre_validators',
+        'post_validators',
+        'default',
+
'default_factory', + 'required', + 'final', + 'model_config', + 'name', + 'alias', + 'has_alias', + 'field_info', + 'discriminator_key', + 'discriminator_alias', + 'validate_always', + 'allow_none', + 'shape', + 'class_validators', + 'parse_json', + ) + + def __init__( + self, + *, + name: str, + type_: Type[Any], + class_validators: Optional[Dict[str, Validator]], + model_config: Type['BaseConfig'], + default: Any = None, + default_factory: Optional[NoArgAnyCallable] = None, + required: 'BoolUndefined' = Undefined, + final: bool = False, + alias: Optional[str] = None, + field_info: Optional[FieldInfo] = None, + ) -> None: + self.name: str = name + self.has_alias: bool = alias is not None + self.alias: str = alias if alias is not None else name + self.annotation = type_ + self.type_: Any = convert_generics(type_) + self.outer_type_: Any = type_ + self.class_validators = class_validators or {} + self.default: Any = default + self.default_factory: Optional[NoArgAnyCallable] = default_factory + self.required: 'BoolUndefined' = required + self.final: bool = final + self.model_config = model_config + self.field_info: FieldInfo = field_info or FieldInfo(default) + self.discriminator_key: Optional[str] = self.field_info.discriminator + self.discriminator_alias: Optional[str] = self.discriminator_key + + self.allow_none: bool = False + self.validate_always: bool = False + self.sub_fields: Optional[List[ModelField]] = None + self.sub_fields_mapping: Optional[Dict[str, 'ModelField']] = None # used for discriminated union + self.key_field: Optional[ModelField] = None + self.validators: 'ValidatorsList' = [] + self.pre_validators: Optional['ValidatorsList'] = None + self.post_validators: Optional['ValidatorsList'] = None + self.parse_json: bool = False + self.shape: int = SHAPE_SINGLETON + self.model_config.prepare_field(self) + self.prepare() + + def get_default(self) -> Any: + return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory() + + @staticmethod + def _get_field_info( + field_name: str, annotation: Any, value: Any, config: Type['BaseConfig'] + ) -> Tuple[FieldInfo, Any]: + """ + Get a FieldInfo from a root typing.Annotated annotation, value, or config default. + + The FieldInfo may be set in typing.Annotated or the value, but not both. If neither contain + a FieldInfo, a new one will be created using the config. + + :param field_name: name of the field for use in error messages + :param annotation: a type hint such as `str` or `Annotated[str, Field(..., min_length=5)]` + :param value: the field's assigned value + :param config: the model's config object + :return: the FieldInfo contained in the `annotation`, the value, or a new one from the config. 
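+
+        A sketch of the three cases handled here (the annotations are illustrative):
+
+            a: Annotated[str, Field(min_length=5)]  # FieldInfo taken from Annotated metadata
+            b: str = Field(min_length=5)            # FieldInfo taken from the assigned value
+            c: str = 'x'                            # new FieldInfo built from the config defaults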
+ """ + field_info_from_config = config.get_field_info(field_name) + + field_info = None + if get_origin(annotation) is Annotated: + field_infos = [arg for arg in get_args(annotation)[1:] if isinstance(arg, FieldInfo)] + if len(field_infos) > 1: + raise ValueError(f'cannot specify multiple `Annotated` `Field`s for {field_name!r}') + field_info = next(iter(field_infos), None) + if field_info is not None: + field_info = copy.copy(field_info) + field_info.update_from_config(field_info_from_config) + if field_info.default not in (Undefined, Required): + raise ValueError(f'`Field` default cannot be set in `Annotated` for {field_name!r}') + if value is not Undefined and value is not Required: + # check also `Required` because of `validate_arguments` that sets `...` as default value + field_info.default = value + + if isinstance(value, FieldInfo): + if field_info is not None: + raise ValueError(f'cannot specify `Annotated` and value `Field`s together for {field_name!r}') + field_info = value + field_info.update_from_config(field_info_from_config) + elif field_info is None: + field_info = FieldInfo(value, **field_info_from_config) + value = None if field_info.default_factory is not None else field_info.default + field_info._validate() + return field_info, value + + @classmethod + def infer( + cls, + *, + name: str, + value: Any, + annotation: Any, + class_validators: Optional[Dict[str, Validator]], + config: Type['BaseConfig'], + ) -> 'ModelField': + from pydantic.v1.schema import get_annotation_from_field_info + + field_info, value = cls._get_field_info(name, annotation, value, config) + required: 'BoolUndefined' = Undefined + if value is Required: + required = True + value = None + elif value is not Undefined: + required = False + annotation = get_annotation_from_field_info(annotation, field_info, name, config.validate_assignment) + + return cls( + name=name, + type_=annotation, + alias=field_info.alias, + class_validators=class_validators, + default=value, + default_factory=field_info.default_factory, + required=required, + model_config=config, + field_info=field_info, + ) + + def set_config(self, config: Type['BaseConfig']) -> None: + self.model_config = config + info_from_config = config.get_field_info(self.name) + config.prepare_field(self) + new_alias = info_from_config.get('alias') + new_alias_priority = info_from_config.get('alias_priority') or 0 + if new_alias and new_alias_priority >= (self.field_info.alias_priority or 0): + self.field_info.alias = new_alias + self.field_info.alias_priority = new_alias_priority + self.alias = new_alias + new_exclude = info_from_config.get('exclude') + if new_exclude is not None: + self.field_info.exclude = ValueItems.merge(self.field_info.exclude, new_exclude) + new_include = info_from_config.get('include') + if new_include is not None: + self.field_info.include = ValueItems.merge(self.field_info.include, new_include, intersect=True) + + @property + def alt_alias(self) -> bool: + return self.name != self.alias + + def prepare(self) -> None: + """ + Prepare the field but inspecting self.default, self.type_ etc. + + Note: this method is **not** idempotent (because _type_analysis is not idempotent), + e.g. calling it it multiple times may modify the field and configure it incorrectly. 
+ """ + self._set_default_and_type() + if self.type_.__class__ is ForwardRef or self.type_.__class__ is DeferredType: + # self.type_ is currently a ForwardRef and there's nothing we can do now, + # user will need to call model.update_forward_refs() + return + + self._type_analysis() + if self.required is Undefined: + self.required = True + if self.default is Undefined and self.default_factory is None: + self.default = None + self.populate_validators() + + def _set_default_and_type(self) -> None: + """ + Set the default value, infer the type if needed and check if `None` value is valid. + """ + if self.default_factory is not None: + if self.type_ is Undefined: + raise errors_.ConfigError( + f'you need to set the type of field {self.name!r} when using `default_factory`' + ) + return + + default_value = self.get_default() + + if default_value is not None and self.type_ is Undefined: + self.type_ = default_value.__class__ + self.outer_type_ = self.type_ + self.annotation = self.type_ + + if self.type_ is Undefined: + raise errors_.ConfigError(f'unable to infer type for attribute "{self.name}"') + + if self.required is False and default_value is None: + self.allow_none = True + + def _type_analysis(self) -> None: # noqa: C901 (ignore complexity) + # typing interface is horrible, we have to do some ugly checks + if lenient_issubclass(self.type_, JsonWrapper): + self.type_ = self.type_.inner_type + self.parse_json = True + elif lenient_issubclass(self.type_, Json): + self.type_ = Any + self.parse_json = True + elif isinstance(self.type_, TypeVar): + if self.type_.__bound__: + self.type_ = self.type_.__bound__ + elif self.type_.__constraints__: + self.type_ = Union[self.type_.__constraints__] + else: + self.type_ = Any + elif is_new_type(self.type_): + self.type_ = new_type_supertype(self.type_) + + if self.type_ is Any or self.type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + return + elif self.type_ is Pattern or self.type_ is re.Pattern: + # python 3.7 only, Pattern is a typing object but without sub fields + return + elif is_literal_type(self.type_): + return + elif is_typeddict(self.type_): + return + + if is_finalvar(self.type_): + self.final = True + + if self.type_ is Final: + self.type_ = Any + else: + self.type_ = get_args(self.type_)[0] + + self._type_analysis() + return + + origin = get_origin(self.type_) + + if origin is Annotated or is_typeddict_special(origin): + self.type_ = get_args(self.type_)[0] + self._type_analysis() + return + + if self.discriminator_key is not None and not is_union(origin): + raise TypeError('`discriminator` can only be used with `Union` type with more than one variant') + + # add extra check for `collections.abc.Hashable` for python 3.10+ where origin is not `None` + if origin is None or origin is CollectionsHashable: + # field is not "typing" object eg. Union, Dict, List etc. + # allow None for virtual superclasses of NoneType, e.g. 
Hashable + if isinstance(self.type_, type) and isinstance(None, self.type_): + self.allow_none = True + return + elif origin is Callable: + return + elif is_union(origin): + types_ = [] + for type_ in get_args(self.type_): + if is_none_type(type_) or type_ is Any or type_ is object: + if self.required is Undefined: + self.required = False + self.allow_none = True + if is_none_type(type_): + continue + types_.append(type_) + + if len(types_) == 1: + # Optional[] + self.type_ = types_[0] + # this is the one case where the "outer type" isn't just the original type + self.outer_type_ = self.type_ + # re-run to correctly interpret the new self.type_ + self._type_analysis() + else: + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{display_as_type(t)}') for t in types_] + + if self.discriminator_key is not None: + self.prepare_discriminated_union_sub_fields() + return + elif issubclass(origin, Tuple): # type: ignore + # origin == Tuple without item type + args = get_args(self.type_) + if not args: # plain tuple + self.type_ = Any + self.shape = SHAPE_TUPLE_ELLIPSIS + elif len(args) == 2 and args[1] is Ellipsis: # e.g. Tuple[int, ...] + self.type_ = args[0] + self.shape = SHAPE_TUPLE_ELLIPSIS + self.sub_fields = [self._create_sub_type(args[0], f'{self.name}_0')] + elif args == ((),): # Tuple[()] means empty tuple + self.shape = SHAPE_TUPLE + self.type_ = Any + self.sub_fields = [] + else: + self.shape = SHAPE_TUPLE + self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(args)] + return + elif issubclass(origin, List): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'list_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_LIST + elif issubclass(origin, Set): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'set_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SET + elif issubclass(origin, FrozenSet): + # Create self validators + get_validators = getattr(self.type_, '__get_validators__', None) + if get_validators: + self.class_validators.update( + {f'frozenset_{i}': Validator(validator, pre=True) for i, validator in enumerate(get_validators())} + ) + + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_FROZENSET + elif issubclass(origin, Deque): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_DEQUE + elif issubclass(origin, Sequence): + self.type_ = get_args(self.type_)[0] + self.shape = SHAPE_SEQUENCE + # priority to most common mapping: dict + elif origin is dict or origin is Dict: + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DICT + elif issubclass(origin, DefaultDict): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = get_args(self.type_)[1] + self.shape = SHAPE_DEFAULTDICT + elif issubclass(origin, Counter): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 'key_' + self.name, for_keys=True) + self.type_ = int + self.shape = SHAPE_COUNTER + elif issubclass(origin, Mapping): + self.key_field = self._create_sub_type(get_args(self.type_)[0], 
'key_' + self.name, for_keys=True)
+            self.type_ = get_args(self.type_)[1]
+            self.shape = SHAPE_MAPPING
+        # Equality check as almost everything inherits from Iterable, including str
+        # check for Iterable and CollectionsIterable, as it could receive one even when declared with the other
+        elif origin in {Iterable, CollectionsIterable}:
+            self.type_ = get_args(self.type_)[0]
+            self.shape = SHAPE_ITERABLE
+            self.sub_fields = [self._create_sub_type(self.type_, f'{self.name}_type')]
+        elif issubclass(origin, Type):  # type: ignore
+            return
+        elif hasattr(origin, '__get_validators__') or self.model_config.arbitrary_types_allowed:
+            # Is a Pydantic-compatible generic that handles itself
+            # or we have arbitrary_types_allowed = True
+            self.shape = SHAPE_GENERIC
+            self.sub_fields = [self._create_sub_type(t, f'{self.name}_{i}') for i, t in enumerate(get_args(self.type_))]
+            self.type_ = origin
+            return
+        else:
+            raise TypeError(f'Fields of type "{origin}" are not supported.')
+
+        # type_ has been refined e.g. as the type of a List and sub_fields needs to be populated
+        self.sub_fields = [self._create_sub_type(self.type_, '_' + self.name)]
+
+    def prepare_discriminated_union_sub_fields(self) -> None:
+        """
+        Prepare the mapping <discriminator key> -> <ModelField> and update `sub_fields`
+        Note that this process can be aborted if a `ForwardRef` is encountered
+        """
+        assert self.discriminator_key is not None
+
+        if self.type_.__class__ is DeferredType:
+            return
+
+        assert self.sub_fields is not None
+        sub_fields_mapping: Dict[str, 'ModelField'] = {}
+        all_aliases: Set[str] = set()
+
+        for sub_field in self.sub_fields:
+            t = sub_field.type_
+            if t.__class__ is ForwardRef:
+                # Stopping everything...will need to call `update_forward_refs`
+                return
+
+            alias, discriminator_values = get_discriminator_alias_and_values(t, self.discriminator_key)
+            all_aliases.add(alias)
+            for discriminator_value in discriminator_values:
+                sub_fields_mapping[discriminator_value] = sub_field
+
+        self.sub_fields_mapping = sub_fields_mapping
+        self.discriminator_alias = get_unique_discriminator_alias(all_aliases, self.discriminator_key)
+
+    def _create_sub_type(self, type_: Type[Any], name: str, *, for_keys: bool = False) -> 'ModelField':
+        if for_keys:
+            class_validators = None
+        else:
+            # validators for sub items should not have `each_item` as we want to check only the first sublevel
+            class_validators = {
+                k: Validator(
+                    func=v.func,
+                    pre=v.pre,
+                    each_item=False,
+                    always=v.always,
+                    check_fields=v.check_fields,
+                    skip_on_failure=v.skip_on_failure,
+                )
+                for k, v in self.class_validators.items()
+                if v.each_item
+            }
+
+        field_info, _ = self._get_field_info(name, type_, None, self.model_config)
+
+        return self.__class__(
+            type_=type_,
+            name=name,
+            class_validators=class_validators,
+            model_config=self.model_config,
+            field_info=field_info,
+        )
+
+    def populate_validators(self) -> None:
+        """
+        Prepare self.pre_validators, self.validators, and self.post_validators based on self.type_'s __get_validators__
+        and class validators. This method should be idempotent, e.g. it should be safe to call multiple times
+        without mis-configuring the field.
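+
+        Roughly: self.pre_validators runs first (non-each_item pre class validators, plus JSON
+        parsing when parse_json is set), then self.validators (type validators wrapped with
+        each_item class validators), and finally self.post_validators (the const check and
+        non-each_item post class validators).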
+ """ + self.validate_always = getattr(self.type_, 'validate_always', False) or any( + v.always for v in self.class_validators.values() + ) + + class_validators_ = self.class_validators.values() + if not self.sub_fields or self.shape == SHAPE_GENERIC: + get_validators = getattr(self.type_, '__get_validators__', None) + v_funcs = ( + *[v.func for v in class_validators_ if v.each_item and v.pre], + *(get_validators() if get_validators else list(find_validators(self.type_, self.model_config))), + *[v.func for v in class_validators_ if v.each_item and not v.pre], + ) + self.validators = prep_validators(v_funcs) + + self.pre_validators = [] + self.post_validators = [] + + if self.field_info and self.field_info.const: + self.post_validators.append(make_generic_validator(constant_validator)) + + if class_validators_: + self.pre_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and v.pre) + self.post_validators += prep_validators(v.func for v in class_validators_ if not v.each_item and not v.pre) + + if self.parse_json: + self.pre_validators.append(make_generic_validator(validate_json)) + + self.pre_validators = self.pre_validators or None + self.post_validators = self.post_validators or None + + def validate( + self, v: Any, values: Dict[str, Any], *, loc: 'LocStr', cls: Optional['ModelOrDc'] = None + ) -> 'ValidateReturn': + assert self.type_.__class__ is not DeferredType + + if self.type_.__class__ is ForwardRef: + assert cls is not None + raise ConfigError( + f'field "{self.name}" not yet prepared so type is still a ForwardRef, ' + f'you might need to call {cls.__name__}.update_forward_refs().' + ) + + errors: Optional['ErrorList'] + if self.pre_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.pre_validators) + if errors: + return v, errors + + if v is None: + if is_none_type(self.type_): + # keep validating + pass + elif self.allow_none: + if self.post_validators: + return self._apply_validators(v, values, loc, cls, self.post_validators) + else: + return None, None + else: + return v, ErrorWrapper(NoneIsNotAllowedError(), loc) + + if self.shape == SHAPE_SINGLETON: + v, errors = self._validate_singleton(v, values, loc, cls) + elif self.shape in MAPPING_LIKE_SHAPES: + v, errors = self._validate_mapping_like(v, values, loc, cls) + elif self.shape == SHAPE_TUPLE: + v, errors = self._validate_tuple(v, values, loc, cls) + elif self.shape == SHAPE_ITERABLE: + v, errors = self._validate_iterable(v, values, loc, cls) + elif self.shape == SHAPE_GENERIC: + v, errors = self._apply_validators(v, values, loc, cls, self.validators) + else: + # sequence, list, set, generator, tuple with ellipsis, frozen set + v, errors = self._validate_sequence_like(v, values, loc, cls) + + if not errors and self.post_validators: + v, errors = self._apply_validators(v, values, loc, cls, self.post_validators) + return v, errors + + def _validate_sequence_like( # noqa: C901 (ignore complexity) + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + """ + Validate sequence-like containers: lists, tuples, sets and generators + Note that large if-else blocks are necessary to enable Cython + optimization, which is why we disable the complexity check above. 
+ """ + if not sequence_like(v): + e: errors_.PydanticTypeError + if self.shape == SHAPE_LIST: + e = errors_.ListError() + elif self.shape in (SHAPE_TUPLE, SHAPE_TUPLE_ELLIPSIS): + e = errors_.TupleError() + elif self.shape == SHAPE_SET: + e = errors_.SetError() + elif self.shape == SHAPE_FROZENSET: + e = errors_.FrozenSetError() + else: + e = errors_.SequenceError() + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, v_ in enumerate(v): + v_loc = *loc, i + r, ee = self._validate_singleton(v_, values, v_loc, cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + + converted: Union[List[Any], Set[Any], FrozenSet[Any], Tuple[Any, ...], Iterator[Any], Deque[Any]] = result + + if self.shape == SHAPE_SET: + converted = set(result) + elif self.shape == SHAPE_FROZENSET: + converted = frozenset(result) + elif self.shape == SHAPE_TUPLE_ELLIPSIS: + converted = tuple(result) + elif self.shape == SHAPE_DEQUE: + converted = deque(result, maxlen=getattr(v, 'maxlen', None)) + elif self.shape == SHAPE_SEQUENCE: + if isinstance(v, tuple): + converted = tuple(result) + elif isinstance(v, set): + converted = set(result) + elif isinstance(v, Generator): + converted = iter(result) + elif isinstance(v, deque): + converted = deque(result, maxlen=getattr(v, 'maxlen', None)) + return converted, None + + def _validate_iterable( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + """ + Validate Iterables. + + This intentionally doesn't validate values to allow infinite generators. + """ + + try: + iterable = iter(v) + except TypeError: + return v, ErrorWrapper(errors_.IterableError(), loc) + return iterable, None + + def _validate_tuple( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + e: Optional[Exception] = None + if not sequence_like(v): + e = errors_.TupleError() + else: + actual_length, expected_length = len(v), len(self.sub_fields) # type: ignore + if actual_length != expected_length: + e = errors_.TupleLengthError(actual_length=actual_length, expected_length=expected_length) + + if e: + return v, ErrorWrapper(e, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result = [] + errors: List[ErrorList] = [] + for i, (v_, field) in enumerate(zip(v, self.sub_fields)): # type: ignore + v_loc = *loc, i + r, ee = field.validate(v_, values, loc=v_loc, cls=cls) + if ee: + errors.append(ee) + else: + result.append(r) + + if errors: + return v, errors + else: + return tuple(result), None + + def _validate_mapping_like( + self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'] + ) -> 'ValidateReturn': + try: + v_iter = dict_validator(v) + except TypeError as exc: + return v, ErrorWrapper(exc, loc) + + loc = loc if isinstance(loc, tuple) else (loc,) + result, errors = {}, [] + for k, v_ in v_iter.items(): + v_loc = *loc, '__key__' + key_result, key_errors = self.key_field.validate(k, values, loc=v_loc, cls=cls) # type: ignore + if key_errors: + errors.append(key_errors) + continue + + v_loc = *loc, k + value_result, value_errors = self._validate_singleton(v_, values, v_loc, cls) + if value_errors: + errors.append(value_errors) + continue + + result[key_result] = value_result + if errors: + return v, errors + elif self.shape == SHAPE_DICT: + return result, None + elif self.shape == SHAPE_DEFAULTDICT: + return defaultdict(self.type_, result), None + 
elif self.shape == SHAPE_COUNTER:
+            return CollectionCounter(result), None
+        else:
+            return self._get_mapping_value(v, result), None
+
+    def _get_mapping_value(self, original: T, converted: Dict[Any, Any]) -> Union[T, Dict[Any, Any]]:
+        """
+        When type is `Mapping[KT, KV]` (or another unsupported mapping), we try to avoid
+        coercing to `dict` unintentionally.
+        """
+        original_cls = original.__class__
+
+        if original_cls == dict or original_cls == Dict:
+            return converted
+        elif original_cls in {defaultdict, DefaultDict}:
+            return defaultdict(self.type_, converted)
+        else:
+            try:
+                # Counter, OrderedDict, UserDict, ...
+                return original_cls(converted)  # type: ignore
+            except TypeError:
+                raise RuntimeError(f'Could not convert dictionary to {original_cls.__name__!r}') from None
+
+    def _validate_singleton(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        if self.sub_fields:
+            if self.discriminator_key is not None:
+                return self._validate_discriminated_union(v, values, loc, cls)
+
+            errors = []
+
+            if self.model_config.smart_union and is_union(get_origin(self.type_)):
+                # 1st pass: check if the value is an exact instance of one of the Union types
+                # (e.g. to avoid coercing a bool into an int)
+                for field in self.sub_fields:
+                    if v.__class__ is field.outer_type_:
+                        return v, None
+
+                # 2nd pass: check if the value is an instance of any subclass of the Union types
+                for field in self.sub_fields:
+                    # This whole logic will be improved later on to support more complex `isinstance` checks
+                    # It will probably be done once a strict mode is added and be something like:
+                    # ```
+                    #     value, error = field.validate(v, values, strict=True)
+                    #     if error is None:
+                    #         return value, None
+                    # ```
+                    try:
+                        if isinstance(v, field.outer_type_):
+                            return v, None
+                    except TypeError:
+                        # compound type
+                        if lenient_isinstance(v, get_origin(field.outer_type_)):
+                            value, error = field.validate(v, values, loc=loc, cls=cls)
+                            if not error:
+                                return value, None
+
+            # 1st pass by default or 3rd pass with `smart_union` enabled:
+            # check if the value can be coerced into one of the Union types
+            for field in self.sub_fields:
+                value, error = field.validate(v, values, loc=loc, cls=cls)
+                if error:
+                    errors.append(error)
+                else:
+                    return value, None
+            return v, errors
+        else:
+            return self._apply_validators(v, values, loc, cls, self.validators)
+
+    def _validate_discriminated_union(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc']
+    ) -> 'ValidateReturn':
+        assert self.discriminator_key is not None
+        assert self.discriminator_alias is not None
+
+        try:
+            try:
+                discriminator_value = v[self.discriminator_alias]
+            except KeyError:
+                if self.model_config.allow_population_by_field_name:
+                    discriminator_value = v[self.discriminator_key]
+                else:
+                    raise
+        except KeyError:
+            return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
+        except TypeError:
+            try:
+                # BaseModel or dataclass
+                discriminator_value = getattr(v, self.discriminator_key)
+            except (AttributeError, TypeError):
+                return v, ErrorWrapper(MissingDiscriminator(discriminator_key=self.discriminator_key), loc)
+
+        if self.sub_fields_mapping is None:
+            assert cls is not None
+            raise ConfigError(
+                f'field "{self.name}" not yet prepared so type is still a ForwardRef, '
+                f'you might need to call {cls.__name__}.update_forward_refs().'
+            )
+
+        try:
+            sub_field = self.sub_fields_mapping[discriminator_value]
+        except (KeyError, TypeError):
+            # KeyError: `discriminator_value` is not in the dictionary.
+            # TypeError: `discriminator_value` is unhashable.
+            assert self.sub_fields_mapping is not None
+            return v, ErrorWrapper(
+                InvalidDiscriminator(
+                    discriminator_key=self.discriminator_key,
+                    discriminator_value=discriminator_value,
+                    allowed_values=list(self.sub_fields_mapping),
+                ),
+                loc,
+            )
+        else:
+            if not isinstance(loc, tuple):
+                loc = (loc,)
+            return sub_field.validate(v, values, loc=(*loc, display_as_type(sub_field.type_)), cls=cls)
+
+    def _apply_validators(
+        self, v: Any, values: Dict[str, Any], loc: 'LocStr', cls: Optional['ModelOrDc'], validators: 'ValidatorsList'
+    ) -> 'ValidateReturn':
+        for validator in validators:
+            try:
+                v = validator(cls, v, values, self, self.model_config)
+            except (ValueError, TypeError, AssertionError) as exc:
+                return v, ErrorWrapper(exc, loc)
+        return v, None
+
+    def is_complex(self) -> bool:
+        """
+        Whether the field is "complex", e.g. env variables should be parsed as JSON.
+        """
+        from pydantic.v1.main import BaseModel
+
+        return (
+            self.shape != SHAPE_SINGLETON
+            or hasattr(self.type_, '__pydantic_model__')
+            or lenient_issubclass(self.type_, (BaseModel, list, set, frozenset, dict))
+        )
+
+    def _type_display(self) -> PyObjectStr:
+        t = display_as_type(self.type_)
+
+        if self.shape in MAPPING_LIKE_SHAPES:
+            t = f'Mapping[{display_as_type(self.key_field.type_)}, {t}]'  # type: ignore
+        elif self.shape == SHAPE_TUPLE:
+            t = 'Tuple[{}]'.format(', '.join(display_as_type(f.type_) for f in self.sub_fields))  # type: ignore
+        elif self.shape == SHAPE_GENERIC:
+            assert self.sub_fields
+            t = '{}[{}]'.format(
+                display_as_type(self.type_), ', '.join(display_as_type(f.type_) for f in self.sub_fields)
+            )
+        elif self.shape != SHAPE_SINGLETON:
+            t = SHAPE_NAME_LOOKUP[self.shape].format(t)
+
+        if self.allow_none and (self.shape != SHAPE_SINGLETON or not self.sub_fields):
+            t = f'Optional[{t}]'
+        return PyObjectStr(t)
+
+    def __repr_args__(self) -> 'ReprArgs':
+        args = [('name', self.name), ('type', self._type_display()), ('required', self.required)]
+
+        if not self.required:
+            if self.default_factory is not None:
+                args.append(('default_factory', f'<function {self.default_factory.__name__}>'))
+            else:
+                args.append(('default', self.default))
+
+        if self.alt_alias:
+            args.append(('alias', self.alias))
+        return args
+
+
+class ModelPrivateAttr(Representation):
+    __slots__ = ('default', 'default_factory')
+
+    def __init__(self, default: Any = Undefined, *, default_factory: Optional[NoArgAnyCallable] = None) -> None:
+        self.default = default
+        self.default_factory = default_factory
+
+    def get_default(self) -> Any:
+        return smart_deepcopy(self.default) if self.default_factory is None else self.default_factory()
+
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and (self.default, self.default_factory) == (
+            other.default,
+            other.default_factory,
+        )
+
+
+def PrivateAttr(
+    default: Any = Undefined,
+    *,
+    default_factory: Optional[NoArgAnyCallable] = None,
+) -> Any:
+    """
+    Indicates that the attribute is only used internally and never mixed with regular fields.
+
+    Types or values of private attrs are not checked by pydantic and it's up to you to keep them relevant.
+
+    Private attrs are stored in model __slots__.
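+
+    A minimal usage sketch (illustrative; the attribute name is hypothetical):
+
+        class Model(BaseModel):
+            _secret: str = PrivateAttr(default='')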
+
+    :param default: the attribute’s default value
+    :param default_factory: callable that will be called when a default value is needed for this attribute
+      If both `default` and `default_factory` are set, an error is raised.
+    """
+    if default is not Undefined and default_factory is not None:
+        raise ValueError('cannot specify both default and default_factory')
+
+    return ModelPrivateAttr(
+        default,
+        default_factory=default_factory,
+    )
+
+
+class DeferredType:
+    """
+    Used to postpone field preparation, while creating recursive generic models.
+    """
+
+
+def is_finalvar_with_default_val(type_: Type[Any], val: Any) -> bool:
+    return is_finalvar(type_) and val is not Undefined and not isinstance(val, FieldInfo)
diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/generics.py b/venv/lib/python3.12/site-packages/pydantic/v1/generics.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa1dcec42d7fc6310f0f243dd1a50f296d0146f4
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/v1/generics.py
@@ -0,0 +1,400 @@
+import functools
+import operator
+import sys
+import types
+import typing
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    Dict,
+    ForwardRef,
+    Generic,
+    Iterator,
+    List,
+    Mapping,
+    Optional,
+    Tuple,
+    Type,
+    TypeVar,
+    Union,
+    cast,
+)
+from weakref import WeakKeyDictionary, WeakValueDictionary
+
+from typing_extensions import Annotated, Literal as ExtLiteral
+
+from pydantic.v1.class_validators import gather_all_validators
+from pydantic.v1.fields import DeferredType
+from pydantic.v1.main import BaseModel, create_model
+from pydantic.v1.types import JsonWrapper
+from pydantic.v1.typing import display_as_type, get_all_type_hints, get_args, get_origin, typing_base
+from pydantic.v1.utils import all_identical, lenient_issubclass
+
+if sys.version_info >= (3, 8):
+    from typing import Literal
+
+GenericModelT = TypeVar('GenericModelT', bound='GenericModel')
+TypeVarType = Any  # since mypy doesn't allow the use of TypeVar as a type
+
+CacheKey = Tuple[Type[Any], Any, Tuple[Any, ...]]
+Parametrization = Mapping[TypeVarType, Type[Any]]
+
+# weak dictionaries allow the dynamically created parametrized versions of generic models to get collected
+# once they are no longer referenced by the caller.
+if sys.version_info >= (3, 9):  # Typing for weak dictionaries available at 3.9
+    GenericTypesCache = WeakValueDictionary[CacheKey, Type[BaseModel]]
+    AssignedParameters = WeakKeyDictionary[Type[BaseModel], Parametrization]
+else:
+    GenericTypesCache = WeakValueDictionary
+    AssignedParameters = WeakKeyDictionary
+
+# _generic_types_cache is a Mapping from __class_getitem__ arguments to the parametrized version of generic models.
+# This ensures multiple calls of e.g. A[B] always return the same class.
+_generic_types_cache = GenericTypesCache()
+
+# _assigned_parameters is a Mapping from parametrized version of generic models to assigned types of parametrizations
+# as captured during construction of the class (not instances).
+# E.g., for generic model `Model[A, B]`, when parametrized model `Model[int, str]` is created,
+# `Model[int, str]`: {A: int, B: str}` will be stored in `_assigned_parameters`.
+# (This information is only otherwise available after creation from the class name string).
+_assigned_parameters = AssignedParameters()
+
+
+class GenericModel(BaseModel):
+    __slots__ = ()
+    __concrete__: ClassVar[bool] = False
+
+    if TYPE_CHECKING:
+        # Putting this in a TYPE_CHECKING block allows us to replace `if Generic not in cls.__bases__` with
+        # `not hasattr(cls, "__parameters__")`. This means we don't need to force non-concrete subclasses of
+        # `GenericModel` to also inherit from `Generic`, which would require changes to the use of `create_model` below.
+        __parameters__: ClassVar[Tuple[TypeVarType, ...]]
+
+    # Setting the return type as Type[Any] instead of Type[BaseModel] prevents PyCharm warnings
+    def __class_getitem__(cls: Type[GenericModelT], params: Union[Type[Any], Tuple[Type[Any], ...]]) -> Type[Any]:
+        """Instantiates a new class from a generic class `cls` and type variables `params`.
+
+        :param params: Tuple of types the class is parameterized with. Given a generic class
+            `Model` with 2 type variables and a concrete model `Model[str, int]`,
+            the value `(str, int)` would be passed to `params`.
+        :return: New model class inheriting from `cls` with instantiated
+            types described by `params`. If no parameters are given, `cls` is
+            returned as is.
+
+        """
+
+        def _cache_key(_params: Any) -> CacheKey:
+            args = get_args(_params)
+            # python returns a list for Callables, which is not hashable
+            if len(args) == 2 and isinstance(args[0], list):
+                args = (tuple(args[0]), args[1])
+            return cls, _params, args
+
+        cached = _generic_types_cache.get(_cache_key(params))
+        if cached is not None:
+            return cached
+        if cls.__concrete__ and Generic not in cls.__bases__:
+            raise TypeError('Cannot parameterize a concrete instantiation of a generic model')
+        if not isinstance(params, tuple):
+            params = (params,)
+        if cls is GenericModel and any(isinstance(param, TypeVar) for param in params):
+            raise TypeError('Type parameters should be placed on typing.Generic, not GenericModel')
+        if not hasattr(cls, '__parameters__'):
+            raise TypeError(f'Type {cls.__name__} must inherit from typing.Generic before being parameterized')
+
+        check_parameters_count(cls, params)
+        # Build map from generic typevars to passed params
+        typevars_map: Dict[TypeVarType, Type[Any]] = dict(zip(cls.__parameters__, params))
+        if all_identical(typevars_map.keys(), typevars_map.values()) and typevars_map:
+            return cls  # if arguments are equal to parameters it's the same object
+
+        # Create new model with original model as parent, inserting fields with DeferredType.
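+        # e.g. for `class M(GenericModel, Generic[T])`, building `M[int]` first creates the field
+        # with a DeferredType placeholder; _prepare_model_fields below then resolves it to `int`
+        # (the class name here is an illustrative example).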
+        model_name = cls.__concrete_name__(params)
+        validators = gather_all_validators(cls)
+
+        type_hints = get_all_type_hints(cls).items()
+        instance_type_hints = {k: v for k, v in type_hints if get_origin(v) is not ClassVar}
+
+        fields = {k: (DeferredType(), cls.__fields__[k].field_info) for k in instance_type_hints if k in cls.__fields__}
+
+        model_module, called_globally = get_caller_frame_info()
+        created_model = cast(
+            Type[GenericModel],  # casting ensures mypy is aware of the __concrete__ and __parameters__ attributes
+            create_model(
+                model_name,
+                __module__=model_module or cls.__module__,
+                __base__=(cls,) + tuple(cls.__parameterized_bases__(typevars_map)),
+                __config__=None,
+                __validators__=validators,
+                __cls_kwargs__=None,
+                **fields,
+            ),
+        )
+
+        _assigned_parameters[created_model] = typevars_map
+
+        if called_globally:  # create global reference and therefore allow pickling
+            object_by_reference = None
+            reference_name = model_name
+            reference_module_globals = sys.modules[created_model.__module__].__dict__
+            while object_by_reference is not created_model:
+                object_by_reference = reference_module_globals.setdefault(reference_name, created_model)
+                reference_name += '_'
+
+        created_model.Config = cls.Config
+
+        # Find any typevars that are still present in the model.
+        # If none are left, the model is fully "concrete", otherwise the new
+        # class is a generic class as well taking the found typevars as
+        # parameters.
+        new_params = tuple(
+            {param: None for param in iter_contained_typevars(typevars_map.values())}
+        )  # use dict as ordered set
+        created_model.__concrete__ = not new_params
+        if new_params:
+            created_model.__parameters__ = new_params
+
+        # Save created model in cache so we don't end up creating duplicate
+        # models that should be identical.
+        _generic_types_cache[_cache_key(params)] = created_model
+        if len(params) == 1:
+            _generic_types_cache[_cache_key(params[0])] = created_model
+
+        # Recursively walk class type hints and replace generic typevars
+        # with concrete types that were passed.
+        _prepare_model_fields(created_model, fields, instance_type_hints, typevars_map)
+
+        return created_model
+
+    @classmethod
+    def __concrete_name__(cls: Type[Any], params: Tuple[Type[Any], ...]) -> str:
+        """Compute class name for child classes.
+
+        :param params: Tuple of types the class is parameterized with. Given a generic class
+            `Model` with 2 type variables and a concrete model `Model[str, int]`,
+            the value `(str, int)` would be passed to `params`.
+        :return: String representing the new class where `params` are
+            passed to `cls` as type variables.
+
+        This method can be overridden to achieve a custom naming scheme for GenericModels.
+        """
+        param_names = [display_as_type(param) for param in params]
+        params_component = ', '.join(param_names)
+        return f'{cls.__name__}[{params_component}]'
+
+    @classmethod
+    def __parameterized_bases__(cls, typevars_map: Parametrization) -> Iterator[Type[Any]]:
+        """
+        Returns unbound bases of cls parameterised to given type variables
+
+        :param typevars_map: Dictionary of type applications for binding subclasses.
+            Given a generic class `Model` with 2 type variables [S, T]
+            and a concrete model `Model[str, int]`,
+            the value `{S: str, T: int}` would be passed to `typevars_map`.
+        :return: an iterator of generic subclasses, parameterised by `typevars_map`
+            and other assigned parameters of `cls`
+
+        e.g.:
+        ```
+        class A(GenericModel, Generic[T]):
+            ...
+
+        class B(A[V], Generic[V]):
+            ...
+ + assert A[int] in B.__parameterized_bases__({V: int}) + ``` + """ + + def build_base_model( + base_model: Type[GenericModel], mapped_types: Parametrization + ) -> Iterator[Type[GenericModel]]: + base_parameters = tuple(mapped_types[param] for param in base_model.__parameters__) + parameterized_base = base_model.__class_getitem__(base_parameters) + if parameterized_base is base_model or parameterized_base is cls: + # Avoid duplication in MRO + return + yield parameterized_base + + for base_model in cls.__bases__: + if not issubclass(base_model, GenericModel): + # not a class that can be meaningfully parameterized + continue + elif not getattr(base_model, '__parameters__', None): + # base_model is "GenericModel" (and has no __parameters__) + # or + # base_model is already concrete, and will be included transitively via cls. + continue + elif cls in _assigned_parameters: + if base_model in _assigned_parameters: + # cls is partially parameterised but not from base_model + # e.g. cls = B[S], base_model = A[S] + # B[S][int] should subclass A[int], (and will be transitively via B[int]) + # but it's not viable to consistently subclass types with arbitrary construction + # So don't attempt to include A[S][int] + continue + else: # base_model not in _assigned_parameters: + # cls is partially parameterized, base_model is original generic + # e.g. cls = B[str, T], base_model = B[S, T] + # Need to determine the mapping for the base_model parameters + mapped_types: Parametrization = { + key: typevars_map.get(value, value) for key, value in _assigned_parameters[cls].items() + } + yield from build_base_model(base_model, mapped_types) + else: + # cls is base generic, so base_class has a distinct base + # can construct the Parameterised base model using typevars_map directly + yield from build_base_model(base_model, typevars_map) + + +def replace_types(type_: Any, type_map: Mapping[Any, Any]) -> Any: + """Return type with all occurrences of `type_map` keys recursively replaced with their values. + + :param type_: Any type, class or generic alias + :param type_map: Mapping from `TypeVar` instance to concrete types. + :return: New type representing the basic structure of `type_` with all + `typevar_map` keys recursively replaced. + + >>> replace_types(Tuple[str, Union[List[str], float]], {str: int}) + Tuple[int, Union[List[int], float]] + + """ + if not type_map: + return type_ + + type_args = get_args(type_) + origin_type = get_origin(type_) + + if origin_type is Annotated: + annotated_type, *annotations = type_args + return Annotated[replace_types(annotated_type, type_map), tuple(annotations)] + + if (origin_type is ExtLiteral) or (sys.version_info >= (3, 8) and origin_type is Literal): + return type_map.get(type_, type_) + # Having type args is a good indicator that this is a typing module + # class instantiation or a generic alias of some sort. + if type_args: + resolved_type_args = tuple(replace_types(arg, type_map) for arg in type_args) + if all_identical(type_args, resolved_type_args): + # If all arguments are the same, there is no need to modify the + # type or create a new object at all + return type_ + if ( + origin_type is not None + and isinstance(type_, typing_base) + and not isinstance(origin_type, typing_base) + and getattr(type_, '_name', None) is not None + ): + # In python < 3.9 generic aliases don't exist so any of these like `list`, + # `type` or `collections.abc.Callable` need to be translated. 
+ # See: https://www.python.org/dev/peps/pep-0585 + origin_type = getattr(typing, type_._name) + assert origin_type is not None + # PEP-604 syntax (Ex.: list | str) is represented with a types.UnionType object that does not have __getitem__. + # We also cannot use isinstance() since we have to compare types. + if sys.version_info >= (3, 10) and origin_type is types.UnionType: # noqa: E721 + return functools.reduce(operator.or_, resolved_type_args) + return origin_type[resolved_type_args] + + # We handle pydantic generic models separately as they don't have the same + # semantics as "typing" classes or generic aliases + if not origin_type and lenient_issubclass(type_, GenericModel) and not type_.__concrete__: + type_args = type_.__parameters__ + resolved_type_args = tuple(replace_types(t, type_map) for t in type_args) + if all_identical(type_args, resolved_type_args): + return type_ + return type_[resolved_type_args] + + # Handle special case for typehints that can have lists as arguments. + # `typing.Callable[[int, str], int]` is an example for this. + if isinstance(type_, (List, list)): + resolved_list = list(replace_types(element, type_map) for element in type_) + if all_identical(type_, resolved_list): + return type_ + return resolved_list + + # For JsonWrapperValue, need to handle its inner type to allow correct parsing + # of generic Json arguments like Json[T] + if not origin_type and lenient_issubclass(type_, JsonWrapper): + type_.inner_type = replace_types(type_.inner_type, type_map) + return type_ + + # If all else fails, we try to resolve the type directly and otherwise just + # return the input with no modifications. + new_type = type_map.get(type_, type_) + # Convert string to ForwardRef + if isinstance(new_type, str): + return ForwardRef(new_type) + else: + return new_type + + +def check_parameters_count(cls: Type[GenericModel], parameters: Tuple[Any, ...]) -> None: + actual = len(parameters) + expected = len(cls.__parameters__) + if actual != expected: + description = 'many' if actual > expected else 'few' + raise TypeError(f'Too {description} parameters for {cls.__name__}; actual {actual}, expected {expected}') + + +DictValues: Type[Any] = {}.values().__class__ + + +def iter_contained_typevars(v: Any) -> Iterator[TypeVarType]: + """Recursively iterate through all subtypes and type args of `v` and yield any typevars that are found.""" + if isinstance(v, TypeVar): + yield v + elif hasattr(v, '__parameters__') and not get_origin(v) and lenient_issubclass(v, GenericModel): + yield from v.__parameters__ + elif isinstance(v, (DictValues, list)): + for var in v: + yield from iter_contained_typevars(var) + else: + args = get_args(v) + for arg in args: + yield from iter_contained_typevars(arg) + + +def get_caller_frame_info() -> Tuple[Optional[str], bool]: + """ + Used inside a function to check whether it was called globally + + Will only work against non-compiled code, therefore used only in pydantic.generics + + :returns Tuple[module_name, called_globally] + """ + try: + previous_caller_frame = sys._getframe(2) + except ValueError as e: + raise RuntimeError('This function must be used inside another function') from e + except AttributeError: # sys module does not have _getframe function, so there's nothing we can do about it + return None, False + frame_globals = previous_caller_frame.f_globals + return frame_globals.get('__name__'), previous_caller_frame.f_locals is frame_globals + + +def _prepare_model_fields( + created_model: Type[GenericModel], + fields: Mapping[str, Any], + 
    instance_type_hints: Mapping[str, type],
+    typevars_map: Mapping[Any, type],
+) -> None:
+    """
+    Replace DeferredType fields with concrete type hints and prepare them.
+    """
+
+    for key, field in created_model.__fields__.items():
+        if key not in fields:
+            assert field.type_.__class__ is not DeferredType
+            # https://github.com/nedbat/coveragepy/issues/198
+            continue  # pragma: no cover
+
+        assert field.type_.__class__ is DeferredType, field.type_.__class__
+
+        field_type_hint = instance_type_hints[key]
+        concrete_type = replace_types(field_type_hint, typevars_map)
+        field.type_ = concrete_type
+        field.outer_type_ = concrete_type
+        field.prepare()
+        created_model.__annotations__[key] = concrete_type
diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/json.py b/venv/lib/python3.12/site-packages/pydantic/v1/json.py
new file mode 100644
index 0000000000000000000000000000000000000000..41d0d5fcae0d946401c2d089b69143f540618b90
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/v1/json.py
@@ -0,0 +1,112 @@
+import datetime
+from collections import deque
+from decimal import Decimal
+from enum import Enum
+from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network
+from pathlib import Path
+from re import Pattern
+from types import GeneratorType
+from typing import Any, Callable, Dict, Type, Union
+from uuid import UUID
+
+from pydantic.v1.color import Color
+from pydantic.v1.networks import NameEmail
+from pydantic.v1.types import SecretBytes, SecretStr
+
+__all__ = 'pydantic_encoder', 'custom_pydantic_encoder', 'timedelta_isoformat'
+
+
+def isoformat(o: Union[datetime.date, datetime.time]) -> str:
+    return o.isoformat()
+
+
+def decimal_encoder(dec_value: Decimal) -> Union[int, float]:
+    """
+    Encodes a Decimal as int if there's no exponent, otherwise float
+
+    This is useful when we use ConstrainedDecimal to represent Numeric(x,0)
+    where an integer (but not int typed) is used. Encoding this as a float
+    results in failed round-tripping between encode and parse.
+    Our Id type is a prime example of this.
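+
+    (Added editorial note, not upstream text: the deciding quantity is the tuple
+    exponent, e.g. Decimal("1").as_tuple().exponent == 0 while
+    Decimal("1.0").as_tuple().exponent == -1, which is what routes the doctests
+    below to `int` and `float` respectively.)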
+ + >>> decimal_encoder(Decimal("1.0")) + 1.0 + + >>> decimal_encoder(Decimal("1")) + 1 + """ + if dec_value.as_tuple().exponent >= 0: + return int(dec_value) + else: + return float(dec_value) + + +ENCODERS_BY_TYPE: Dict[Type[Any], Callable[[Any], Any]] = { + bytes: lambda o: o.decode(), + Color: str, + datetime.date: isoformat, + datetime.datetime: isoformat, + datetime.time: isoformat, + datetime.timedelta: lambda td: td.total_seconds(), + Decimal: decimal_encoder, + Enum: lambda o: o.value, + frozenset: list, + deque: list, + GeneratorType: list, + IPv4Address: str, + IPv4Interface: str, + IPv4Network: str, + IPv6Address: str, + IPv6Interface: str, + IPv6Network: str, + NameEmail: str, + Path: str, + Pattern: lambda o: o.pattern, + SecretBytes: str, + SecretStr: str, + set: list, + UUID: str, +} + + +def pydantic_encoder(obj: Any) -> Any: + from dataclasses import asdict, is_dataclass + + from pydantic.v1.main import BaseModel + + if isinstance(obj, BaseModel): + return obj.dict() + elif is_dataclass(obj): + return asdict(obj) + + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = ENCODERS_BY_TYPE[base] + except KeyError: + continue + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + raise TypeError(f"Object of type '{obj.__class__.__name__}' is not JSON serializable") + + +def custom_pydantic_encoder(type_encoders: Dict[Any, Callable[[Type[Any]], Any]], obj: Any) -> Any: + # Check the class type and its superclasses for a matching encoder + for base in obj.__class__.__mro__[:-1]: + try: + encoder = type_encoders[base] + except KeyError: + continue + + return encoder(obj) + else: # We have exited the for loop without finding a suitable encoder + return pydantic_encoder(obj) + + +def timedelta_isoformat(td: datetime.timedelta) -> str: + """ + ISO 8601 encoding for Python timedelta object. 
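+
+    A quick sanity check (an added illustrative doctest, not from upstream):
+
+    >>> timedelta_isoformat(datetime.timedelta(minutes=5))
+    'P0DT0H5M0.000000S'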
+ """ + minutes, seconds = divmod(td.seconds, 60) + hours, minutes = divmod(minutes, 60) + return f'{"-" if td.days < 0 else ""}P{abs(td.days)}DT{hours:d}H{minutes:d}M{seconds:d}.{td.microseconds:06d}S' diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/main.py b/venv/lib/python3.12/site-packages/pydantic/v1/main.py new file mode 100644 index 0000000000000000000000000000000000000000..36e9c44c98735ece2d784f5ad1d60b783e39ce59 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/main.py @@ -0,0 +1,1130 @@ +import sys +import warnings +from abc import ABCMeta +from copy import deepcopy +from enum import Enum +from functools import partial +from pathlib import Path +from types import FunctionType, prepare_class, resolve_bases +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + ClassVar, + Dict, + List, + Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, + cast, + no_type_check, + overload, +) + +from typing_extensions import dataclass_transform + +from pydantic.v1.class_validators import ValidatorGroup, extract_root_validators, extract_validators, inherit_validators +from pydantic.v1.config import BaseConfig, Extra, inherit_config, prepare_config +from pydantic.v1.error_wrappers import ErrorWrapper, ValidationError +from pydantic.v1.errors import ConfigError, DictError, ExtraError, MissingError +from pydantic.v1.fields import ( + MAPPING_LIKE_SHAPES, + Field, + ModelField, + ModelPrivateAttr, + PrivateAttr, + Undefined, + is_finalvar_with_default_val, +) +from pydantic.v1.json import custom_pydantic_encoder, pydantic_encoder +from pydantic.v1.parse import Protocol, load_file, load_str_bytes +from pydantic.v1.schema import default_ref_template, model_schema +from pydantic.v1.types import PyObject, StrBytes +from pydantic.v1.typing import ( + AnyCallable, + get_args, + get_origin, + is_classvar, + is_namedtuple, + is_union, + resolve_annotations, + update_model_forward_refs, +) +from pydantic.v1.utils import ( + DUNDER_ATTRIBUTES, + ROOT_KEY, + ClassAttribute, + GetterDict, + Representation, + ValueItems, + generate_model_signature, + is_valid_field, + is_valid_private_name, + lenient_issubclass, + sequence_like, + smart_deepcopy, + unique_list, + validate_field_name, +) + +if TYPE_CHECKING: + from inspect import Signature + + from pydantic.v1.class_validators import ValidatorListDict + from pydantic.v1.types import ModelOrDc + from pydantic.v1.typing import ( + AbstractSetIntStr, + AnyClassMethod, + CallableGenerator, + DictAny, + DictStrAny, + MappingIntStrAny, + ReprArgs, + SetStr, + TupleGenerator, + ) + + Model = TypeVar('Model', bound='BaseModel') + +__all__ = 'BaseModel', 'create_model', 'validate_model' + +_T = TypeVar('_T') + + +def validate_custom_root_type(fields: Dict[str, ModelField]) -> None: + if len(fields) > 1: + raise ValueError(f'{ROOT_KEY} cannot be mixed with other fields') + + +def generate_hash_function(frozen: bool) -> Optional[Callable[[Any], int]]: + def hash_function(self_: Any) -> int: + return hash(self_.__class__) + hash(tuple(self_.__dict__.values())) + + return hash_function if frozen else None + + +# If a field is of type `Callable`, its default value should be a function and cannot to ignored. +ANNOTATED_FIELD_UNTOUCHED_TYPES: Tuple[Any, ...] = (property, type, classmethod, staticmethod) +# When creating a `BaseModel` instance, we bypass all the methods, properties... added to the model +UNTOUCHED_TYPES: Tuple[Any, ...] 
= (FunctionType,) + ANNOTATED_FIELD_UNTOUCHED_TYPES +# Note `ModelMetaclass` refers to `BaseModel`, but is also used to *create* `BaseModel`, so we need to add this extra +# (somewhat hacky) boolean to keep track of whether we've created the `BaseModel` class yet, and therefore whether it's +# safe to refer to it. If it *hasn't* been created, we assume that the `__new__` call we're in the middle of is for +# the `BaseModel` class, since that's defined immediately after the metaclass. +_is_base_model_class_defined = False + + +@dataclass_transform(kw_only_default=True, field_specifiers=(Field,)) +class ModelMetaclass(ABCMeta): + @no_type_check # noqa C901 + def __new__(mcs, name, bases, namespace, **kwargs): # noqa C901 + fields: Dict[str, ModelField] = {} + config = BaseConfig + validators: 'ValidatorListDict' = {} + + pre_root_validators, post_root_validators = [], [] + private_attributes: Dict[str, ModelPrivateAttr] = {} + base_private_attributes: Dict[str, ModelPrivateAttr] = {} + slots: SetStr = namespace.get('__slots__', ()) + slots = {slots} if isinstance(slots, str) else set(slots) + class_vars: SetStr = set() + hash_func: Optional[Callable[[Any], int]] = None + + for base in reversed(bases): + if _is_base_model_class_defined and issubclass(base, BaseModel) and base != BaseModel: + fields.update(smart_deepcopy(base.__fields__)) + config = inherit_config(base.__config__, config) + validators = inherit_validators(base.__validators__, validators) + pre_root_validators += base.__pre_root_validators__ + post_root_validators += base.__post_root_validators__ + base_private_attributes.update(base.__private_attributes__) + class_vars.update(base.__class_vars__) + hash_func = base.__hash__ + + resolve_forward_refs = kwargs.pop('__resolve_forward_refs__', True) + allowed_config_kwargs: SetStr = { + key + for key in dir(config) + if not (key.startswith('__') and key.endswith('__')) # skip dunder methods and attributes + } + config_kwargs = {key: kwargs.pop(key) for key in kwargs.keys() & allowed_config_kwargs} + config_from_namespace = namespace.get('Config') + if config_kwargs and config_from_namespace: + raise TypeError('Specifying config in two places is ambiguous, use either Config attribute or class kwargs') + config = inherit_config(config_from_namespace, config, **config_kwargs) + + validators = inherit_validators(extract_validators(namespace), validators) + vg = ValidatorGroup(validators) + + for f in fields.values(): + f.set_config(config) + extra_validators = vg.get_validators(f.name) + if extra_validators: + f.class_validators.update(extra_validators) + # re-run prepare to add extra validators + f.populate_validators() + + prepare_config(config, name) + + untouched_types = ANNOTATED_FIELD_UNTOUCHED_TYPES + + def is_untouched(v: Any) -> bool: + return isinstance(v, untouched_types) or v.__class__.__name__ == 'cython_function_or_method' + + if (namespace.get('__module__'), namespace.get('__qualname__')) != ('pydantic.main', 'BaseModel'): + if sys.version_info >= (3, 14): + if '__annotations__' in namespace: + # `from __future__ import annotations` was used in the model's module + raw_annotations = namespace['__annotations__'] + else: + # See https://docs.python.org/3/library/annotationlib.html#using-annotations-in-a-metaclass: + from annotationlib import Format, call_annotate_function, get_annotate_from_class_namespace + + annotate = get_annotate_from_class_namespace(namespace) + if annotate is not None: + raw_annotations = call_annotate_function(annotate, format=Format.FORWARDREF) + 
else: + raw_annotations = {} + else: + raw_annotations = namespace.get('__annotations__', {}) + + annotations = resolve_annotations(raw_annotations, namespace.get('__module__', None)) + # annotation only fields need to come first in fields + for ann_name, ann_type in annotations.items(): + if is_classvar(ann_type): + class_vars.add(ann_name) + elif is_finalvar_with_default_val(ann_type, namespace.get(ann_name, Undefined)): + class_vars.add(ann_name) + elif is_valid_field(ann_name): + validate_field_name(bases, ann_name) + value = namespace.get(ann_name, Undefined) + allowed_types = get_args(ann_type) if is_union(get_origin(ann_type)) else (ann_type,) + if ( + is_untouched(value) + and ann_type != PyObject + and not any( + lenient_issubclass(get_origin(allowed_type), Type) for allowed_type in allowed_types + ) + ): + continue + fields[ann_name] = ModelField.infer( + name=ann_name, + value=value, + annotation=ann_type, + class_validators=vg.get_validators(ann_name), + config=config, + ) + elif ann_name not in namespace and config.underscore_attrs_are_private: + private_attributes[ann_name] = PrivateAttr() + + untouched_types = UNTOUCHED_TYPES + config.keep_untouched + for var_name, value in namespace.items(): + can_be_changed = var_name not in class_vars and not is_untouched(value) + if isinstance(value, ModelPrivateAttr): + if not is_valid_private_name(var_name): + raise NameError( + f'Private attributes "{var_name}" must not be a valid field name; ' + f'Use sunder or dunder names, e. g. "_{var_name}" or "__{var_name}__"' + ) + private_attributes[var_name] = value + elif config.underscore_attrs_are_private and is_valid_private_name(var_name) and can_be_changed: + private_attributes[var_name] = PrivateAttr(default=value) + elif is_valid_field(var_name) and var_name not in annotations and can_be_changed: + validate_field_name(bases, var_name) + inferred = ModelField.infer( + name=var_name, + value=value, + annotation=annotations.get(var_name, Undefined), + class_validators=vg.get_validators(var_name), + config=config, + ) + if var_name in fields: + if lenient_issubclass(inferred.type_, fields[var_name].type_): + inferred.type_ = fields[var_name].type_ + else: + raise TypeError( + f'The type of {name}.{var_name} differs from the new default value; ' + f'if you wish to change the type of this field, please use a type annotation' + ) + fields[var_name] = inferred + + _custom_root_type = ROOT_KEY in fields + if _custom_root_type: + validate_custom_root_type(fields) + vg.check_for_unused() + if config.json_encoders: + json_encoder = partial(custom_pydantic_encoder, config.json_encoders) + else: + json_encoder = pydantic_encoder + pre_rv_new, post_rv_new = extract_root_validators(namespace) + + if hash_func is None: + hash_func = generate_hash_function(config.frozen) + + exclude_from_namespace = fields | private_attributes.keys() | {'__slots__'} + new_namespace = { + '__config__': config, + '__fields__': fields, + '__exclude_fields__': { + name: field.field_info.exclude for name, field in fields.items() if field.field_info.exclude is not None + } + or None, + '__include_fields__': { + name: field.field_info.include for name, field in fields.items() if field.field_info.include is not None + } + or None, + '__validators__': vg.validators, + '__pre_root_validators__': unique_list( + pre_root_validators + pre_rv_new, + name_factory=lambda v: v.__name__, + ), + '__post_root_validators__': unique_list( + post_root_validators + post_rv_new, + name_factory=lambda skip_on_failure_and_v: 
skip_on_failure_and_v[1].__name__, + ), + '__schema_cache__': {}, + '__json_encoder__': staticmethod(json_encoder), + '__custom_root_type__': _custom_root_type, + '__private_attributes__': {**base_private_attributes, **private_attributes}, + '__slots__': slots | private_attributes.keys(), + '__hash__': hash_func, + '__class_vars__': class_vars, + **{n: v for n, v in namespace.items() if n not in exclude_from_namespace}, + } + + cls = super().__new__(mcs, name, bases, new_namespace, **kwargs) + # set __signature__ attr only for model class, but not for its instances + cls.__signature__ = ClassAttribute('__signature__', generate_model_signature(cls.__init__, fields, config)) + + if not _is_base_model_class_defined: + # Cython does not understand the `if TYPE_CHECKING:` condition in the + # BaseModel's body (where annotations are set), so clear them manually: + getattr(cls, '__annotations__', {}).clear() + + if resolve_forward_refs: + cls.__try_update_forward_refs__() + + # preserve `__set_name__` protocol defined in https://peps.python.org/pep-0487 + # for attributes not in `new_namespace` (e.g. private attributes) + for name, obj in namespace.items(): + if name not in new_namespace: + set_name = getattr(obj, '__set_name__', None) + if callable(set_name): + set_name(cls, name) + + return cls + + def __instancecheck__(self, instance: Any) -> bool: + """ + Avoid calling ABC _abc_subclasscheck unless we're pretty sure. + + See #3829 and python/cpython#92810 + """ + return hasattr(instance, '__post_root_validators__') and super().__instancecheck__(instance) + + +object_setattr = object.__setattr__ + + +class BaseModel(Representation, metaclass=ModelMetaclass): + if TYPE_CHECKING: + # populated by the metaclass, defined here to help IDEs only + __fields__: ClassVar[Dict[str, ModelField]] = {} + __include_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __exclude_fields__: ClassVar[Optional[Mapping[str, Any]]] = None + __validators__: ClassVar[Dict[str, AnyCallable]] = {} + __pre_root_validators__: ClassVar[List[AnyCallable]] + __post_root_validators__: ClassVar[List[Tuple[bool, AnyCallable]]] + __config__: ClassVar[Type[BaseConfig]] = BaseConfig + __json_encoder__: ClassVar[Callable[[Any], Any]] = lambda x: x + __schema_cache__: ClassVar['DictAny'] = {} + __custom_root_type__: ClassVar[bool] = False + __signature__: ClassVar['Signature'] + __private_attributes__: ClassVar[Dict[str, ModelPrivateAttr]] + __class_vars__: ClassVar[SetStr] + __fields_set__: ClassVar[SetStr] = set() + + Config = BaseConfig + __slots__ = ('__dict__', '__fields_set__') + __doc__ = '' # Null out the Representation docstring + + def __init__(__pydantic_self__, **data: Any) -> None: + """ + Create a new model by parsing and validating input data from keyword arguments. + + Raises ValidationError if the input data cannot be parsed to form a valid model. 
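+
+        (Added illustrative example, not upstream text: given
+        `class User(BaseModel): id: int`, `User(id='123')` coerces and stores
+        `id == 123`, while `User()` raises ValidationError for the missing field.)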
+ """ + # Uses something other than `self` the first arg to allow "self" as a settable attribute + values, fields_set, validation_error = validate_model(__pydantic_self__.__class__, data) + if validation_error: + raise validation_error + try: + object_setattr(__pydantic_self__, '__dict__', values) + except TypeError as e: + raise TypeError( + 'Model values must be a dict; you may not have returned a dictionary from a root validator' + ) from e + object_setattr(__pydantic_self__, '__fields_set__', fields_set) + __pydantic_self__._init_private_attributes() + + @no_type_check + def __setattr__(self, name, value): # noqa: C901 (ignore complexity) + if name in self.__private_attributes__ or name in DUNDER_ATTRIBUTES: + return object_setattr(self, name, value) + + if self.__config__.extra is not Extra.allow and name not in self.__fields__: + raise ValueError(f'"{self.__class__.__name__}" object has no field "{name}"') + elif not self.__config__.allow_mutation or self.__config__.frozen: + raise TypeError(f'"{self.__class__.__name__}" is immutable and does not support item assignment') + elif name in self.__fields__ and self.__fields__[name].final: + raise TypeError( + f'"{self.__class__.__name__}" object "{name}" field is final and does not support reassignment' + ) + elif self.__config__.validate_assignment: + new_values = {**self.__dict__, name: value} + + for validator in self.__pre_root_validators__: + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], self.__class__) + + known_field = self.__fields__.get(name, None) + if known_field: + # We want to + # - make sure validators are called without the current value for this field inside `values` + # - keep other values (e.g. submodels) untouched (using `BaseModel.dict()` will change them into dicts) + # - keep the order of the fields + if not known_field.field_info.allow_mutation: + raise TypeError(f'"{known_field.name}" has allow_mutation set to False and cannot be assigned') + dict_without_original_value = {k: v for k, v in self.__dict__.items() if k != name} + value, error_ = known_field.validate(value, dict_without_original_value, loc=name, cls=self.__class__) + if error_: + raise ValidationError([error_], self.__class__) + else: + new_values[name] = value + + errors = [] + for skip_on_failure, validator in self.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + new_values = validator(self.__class__, new_values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + if errors: + raise ValidationError(errors, self.__class__) + + # update the whole __dict__ as other values than just `value` + # may be changed (e.g. 
with `root_validator`) + object_setattr(self, '__dict__', new_values) + else: + self.__dict__[name] = value + + self.__fields_set__.add(name) + + def __getstate__(self) -> 'DictAny': + private_attrs = ((k, getattr(self, k, Undefined)) for k in self.__private_attributes__) + return { + '__dict__': self.__dict__, + '__fields_set__': self.__fields_set__, + '__private_attribute_values__': {k: v for k, v in private_attrs if v is not Undefined}, + } + + def __setstate__(self, state: 'DictAny') -> None: + object_setattr(self, '__dict__', state['__dict__']) + object_setattr(self, '__fields_set__', state['__fields_set__']) + for name, value in state.get('__private_attribute_values__', {}).items(): + object_setattr(self, name, value) + + def _init_private_attributes(self) -> None: + for name, private_attr in self.__private_attributes__.items(): + default = private_attr.get_default() + if default is not Undefined: + object_setattr(self, name, default) + + def dict( + self, + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> 'DictStrAny': + """ + Generate a dictionary representation of the model, optionally specifying which fields to include or exclude. + + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.dict(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + + return dict( + self._iter( + to_dict=True, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + ) + + def json( + self, + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + by_alias: bool = False, + skip_defaults: Optional[bool] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + encoder: Optional[Callable[[Any], Any]] = None, + models_as_dict: bool = True, + **dumps_kwargs: Any, + ) -> str: + """ + Generate a JSON representation of the model, `include` and `exclude` arguments as per `dict()`. + + `encoder` is an optional function to supply as `default` to json.dumps(), other arguments as per `json.dumps()`. + """ + if skip_defaults is not None: + warnings.warn( + f'{self.__class__.__name__}.json(): "skip_defaults" is deprecated and replaced by "exclude_unset"', + DeprecationWarning, + ) + exclude_unset = skip_defaults + encoder = cast(Callable[[Any], Any], encoder or self.__json_encoder__) + + # We don't directly call `self.dict()`, which does exactly this with `to_dict=True` + # because we want to be able to keep raw `BaseModel` instances and not as `dict`. + # This allows users to write custom JSON encoders for given `BaseModel` classes. 
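+        # Added illustrative note (not upstream code): with models_as_dict=False a
+        # nested BaseModel reaches the encoder as-is, so a hypothetical
+        #   encoder=lambda m: {'kind': type(m).__name__, **m.dict()}
+        # could tag every submodel in the output.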
+ data = dict( + self._iter( + to_dict=models_as_dict, + by_alias=by_alias, + include=include, + exclude=exclude, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + ) + if self.__custom_root_type__: + data = data[ROOT_KEY] + return self.__config__.json_dumps(data, default=encoder, **dumps_kwargs) + + @classmethod + def _enforce_dict_if_root(cls, obj: Any) -> Any: + if cls.__custom_root_type__ and ( + not (isinstance(obj, dict) and obj.keys() == {ROOT_KEY}) + and not (isinstance(obj, BaseModel) and obj.__fields__.keys() == {ROOT_KEY}) + or cls.__fields__[ROOT_KEY].shape in MAPPING_LIKE_SHAPES + ): + return {ROOT_KEY: obj} + else: + return obj + + @classmethod + def parse_obj(cls: Type['Model'], obj: Any) -> 'Model': + obj = cls._enforce_dict_if_root(obj) + if not isinstance(obj, dict): + try: + obj = dict(obj) + except (TypeError, ValueError) as e: + exc = TypeError(f'{cls.__name__} expected dict not {obj.__class__.__name__}') + raise ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls) from e + return cls(**obj) + + @classmethod + def parse_raw( + cls: Type['Model'], + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + ) -> 'Model': + try: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, + ) + except (ValueError, TypeError, UnicodeDecodeError) as e: + raise ValidationError([ErrorWrapper(e, loc=ROOT_KEY)], cls) + return cls.parse_obj(obj) + + @classmethod + def parse_file( + cls: Type['Model'], + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + ) -> 'Model': + obj = load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=cls.__config__.json_loads, + ) + return cls.parse_obj(obj) + + @classmethod + def from_orm(cls: Type['Model'], obj: Any) -> 'Model': + if not cls.__config__.orm_mode: + raise ConfigError('You must have the config attribute orm_mode=True to use from_orm') + obj = {ROOT_KEY: obj} if cls.__custom_root_type__ else cls._decompose_class(obj) + m = cls.__new__(cls) + values, fields_set, validation_error = validate_model(cls, obj) + if validation_error: + raise validation_error + object_setattr(m, '__dict__', values) + object_setattr(m, '__fields_set__', fields_set) + m._init_private_attributes() + return m + + @classmethod + def construct(cls: Type['Model'], _fields_set: Optional['SetStr'] = None, **values: Any) -> 'Model': + """ + Creates a new model setting __dict__ and __fields_set__ from trusted or pre-validated data. + Default values are respected, but no other validation is performed. 
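+
+        (Added illustrative example, not upstream text: `User.construct(id='x')`
+        stores the raw string without complaint, so only pass data that has
+        already been validated.)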
+ Behaves as if `Config.extra = 'allow'` was set since it adds all passed values + """ + m = cls.__new__(cls) + fields_values: Dict[str, Any] = {} + for name, field in cls.__fields__.items(): + if field.alt_alias and field.alias in values: + fields_values[name] = values[field.alias] + elif name in values: + fields_values[name] = values[name] + elif not field.required: + fields_values[name] = field.get_default() + fields_values.update(values) + object_setattr(m, '__dict__', fields_values) + if _fields_set is None: + _fields_set = set(values.keys()) + object_setattr(m, '__fields_set__', _fields_set) + m._init_private_attributes() + return m + + def _copy_and_set_values(self: 'Model', values: 'DictStrAny', fields_set: 'SetStr', *, deep: bool) -> 'Model': + if deep: + # chances of having empty dict here are quite low for using smart_deepcopy + values = deepcopy(values) + + cls = self.__class__ + m = cls.__new__(cls) + object_setattr(m, '__dict__', values) + object_setattr(m, '__fields_set__', fields_set) + for name in self.__private_attributes__: + value = getattr(self, name, Undefined) + if value is not Undefined: + if deep: + value = deepcopy(value) + object_setattr(m, name, value) + + return m + + def copy( + self: 'Model', + *, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + update: Optional['DictStrAny'] = None, + deep: bool = False, + ) -> 'Model': + """ + Duplicate a model, optionally choose which fields to include, exclude and change. + + :param include: fields to include in new model + :param exclude: fields to exclude from new model, as with values this takes precedence over include + :param update: values to change/add in the new model. Note: the data is not validated before creating + the new model: you should trust this data + :param deep: set to `True` to make a deep copy of the model + :return: new model instance + """ + + values = dict( + self._iter(to_dict=False, by_alias=False, include=include, exclude=exclude, exclude_unset=False), + **(update or {}), + ) + + # new `__fields_set__` can have unset optional fields with a set value in `update` kwarg + if update: + fields_set = self.__fields_set__ | update.keys() + else: + fields_set = set(self.__fields_set__) + + return self._copy_and_set_values(values, fields_set, deep=deep) + + @classmethod + def schema(cls, by_alias: bool = True, ref_template: str = default_ref_template) -> 'DictStrAny': + cached = cls.__schema_cache__.get((by_alias, ref_template)) + if cached is not None: + return cached + s = model_schema(cls, by_alias=by_alias, ref_template=ref_template) + cls.__schema_cache__[(by_alias, ref_template)] = s + return s + + @classmethod + def schema_json( + cls, *, by_alias: bool = True, ref_template: str = default_ref_template, **dumps_kwargs: Any + ) -> str: + from pydantic.v1.json import pydantic_encoder + + return cls.__config__.json_dumps( + cls.schema(by_alias=by_alias, ref_template=ref_template), default=pydantic_encoder, **dumps_kwargs + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls: Type['Model'], value: Any) -> 'Model': + if isinstance(value, cls): + copy_on_model_validation = cls.__config__.copy_on_model_validation + # whether to deep or shallow copy the model on validation, None means do not copy + deep_copy: Optional[bool] = None + if copy_on_model_validation not in {'deep', 'shallow', 'none'}: + # Warn about deprecated 
behavior + warnings.warn( + "`copy_on_model_validation` should be a string: 'deep', 'shallow' or 'none'", DeprecationWarning + ) + if copy_on_model_validation: + deep_copy = False + + if copy_on_model_validation == 'shallow': + # shallow copy + deep_copy = False + elif copy_on_model_validation == 'deep': + # deep copy + deep_copy = True + + if deep_copy is None: + return value + else: + return value._copy_and_set_values(value.__dict__, value.__fields_set__, deep=deep_copy) + + value = cls._enforce_dict_if_root(value) + + if isinstance(value, dict): + return cls(**value) + elif cls.__config__.orm_mode: + return cls.from_orm(value) + else: + try: + value_as_dict = dict(value) + except (TypeError, ValueError) as e: + raise DictError() from e + return cls(**value_as_dict) + + @classmethod + def _decompose_class(cls: Type['Model'], obj: Any) -> GetterDict: + if isinstance(obj, GetterDict): + return obj + return cls.__config__.getter_dict(obj) + + @classmethod + @no_type_check + def _get_value( + cls, + v: Any, + to_dict: bool, + by_alias: bool, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']], + exclude_unset: bool, + exclude_defaults: bool, + exclude_none: bool, + ) -> Any: + if isinstance(v, BaseModel): + if to_dict: + v_dict = v.dict( + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=include, + exclude=exclude, + exclude_none=exclude_none, + ) + if ROOT_KEY in v_dict: + return v_dict[ROOT_KEY] + return v_dict + else: + return v.copy(include=include, exclude=exclude) + + value_exclude = ValueItems(v, exclude) if exclude else None + value_include = ValueItems(v, include) if include else None + + if isinstance(v, dict): + return { + k_: cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(k_), + exclude=value_exclude and value_exclude.for_element(k_), + exclude_none=exclude_none, + ) + for k_, v_ in v.items() + if (not value_exclude or not value_exclude.is_excluded(k_)) + and (not value_include or value_include.is_included(k_)) + } + + elif sequence_like(v): + seq_args = ( + cls._get_value( + v_, + to_dict=to_dict, + by_alias=by_alias, + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + include=value_include and value_include.for_element(i), + exclude=value_exclude and value_exclude.for_element(i), + exclude_none=exclude_none, + ) + for i, v_ in enumerate(v) + if (not value_exclude or not value_exclude.is_excluded(i)) + and (not value_include or value_include.is_included(i)) + ) + + return v.__class__(*seq_args) if is_namedtuple(v.__class__) else v.__class__(seq_args) + + elif isinstance(v, Enum) and getattr(cls.Config, 'use_enum_values', False): + return v.value + + else: + return v + + @classmethod + def __try_update_forward_refs__(cls, **localns: Any) -> None: + """ + Same as update_forward_refs but will not raise exception + when forward references are not defined. + """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns, (NameError,)) + + @classmethod + def update_forward_refs(cls, **localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this Model, globalns and localns. 
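+
+        (Added illustrative example, not upstream text: with
+        `class Node(BaseModel): next: Optional['Node'] = None`, calling
+        `Node.update_forward_refs()` after the class body resolves the 'Node'
+        string annotation.)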
+ """ + update_model_forward_refs(cls, cls.__fields__.values(), cls.__config__.json_encoders, localns) + + def __iter__(self) -> 'TupleGenerator': + """ + so `dict(model)` works + """ + yield from self.__dict__.items() + + def _iter( + self, + to_dict: bool = False, + by_alias: bool = False, + include: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude: Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']] = None, + exclude_unset: bool = False, + exclude_defaults: bool = False, + exclude_none: bool = False, + ) -> 'TupleGenerator': + # Merge field set excludes with explicit exclude parameter with explicit overriding field set options. + # The extra "is not None" guards are not logically necessary but optimizes performance for the simple case. + if exclude is not None or self.__exclude_fields__ is not None: + exclude = ValueItems.merge(self.__exclude_fields__, exclude) + + if include is not None or self.__include_fields__ is not None: + include = ValueItems.merge(self.__include_fields__, include, intersect=True) + + allowed_keys = self._calculate_keys( + include=include, exclude=exclude, exclude_unset=exclude_unset # type: ignore + ) + if allowed_keys is None and not (to_dict or by_alias or exclude_unset or exclude_defaults or exclude_none): + # huge boost for plain _iter() + yield from self.__dict__.items() + return + + value_exclude = ValueItems(self, exclude) if exclude is not None else None + value_include = ValueItems(self, include) if include is not None else None + + for field_key, v in self.__dict__.items(): + if (allowed_keys is not None and field_key not in allowed_keys) or (exclude_none and v is None): + continue + + if exclude_defaults: + model_field = self.__fields__.get(field_key) + if not getattr(model_field, 'required', True) and getattr(model_field, 'default', _missing) == v: + continue + + if by_alias and field_key in self.__fields__: + dict_key = self.__fields__[field_key].alias + else: + dict_key = field_key + + if to_dict or value_include or value_exclude: + v = self._get_value( + v, + to_dict=to_dict, + by_alias=by_alias, + include=value_include and value_include.for_element(field_key), + exclude=value_exclude and value_exclude.for_element(field_key), + exclude_unset=exclude_unset, + exclude_defaults=exclude_defaults, + exclude_none=exclude_none, + ) + yield dict_key, v + + def _calculate_keys( + self, + include: Optional['MappingIntStrAny'], + exclude: Optional['MappingIntStrAny'], + exclude_unset: bool, + update: Optional['DictStrAny'] = None, + ) -> Optional[AbstractSet[str]]: + if include is None and exclude is None and exclude_unset is False: + return None + + keys: AbstractSet[str] + if exclude_unset: + keys = self.__fields_set__.copy() + else: + keys = self.__dict__.keys() + + if include is not None: + keys &= include.keys() + + if update: + keys -= update.keys() + + if exclude: + keys -= {k for k, v in exclude.items() if ValueItems.is_true(v)} + + return keys + + def __eq__(self, other: Any) -> bool: + if isinstance(other, BaseModel): + return self.dict() == other.dict() + else: + return self.dict() == other + + def __repr_args__(self) -> 'ReprArgs': + return [ + (k, v) + for k, v in self.__dict__.items() + if k not in DUNDER_ATTRIBUTES and (k not in self.__fields__ or self.__fields__[k].field_info.repr) + ] + + +_is_base_model_class_defined = True + + +@overload +def create_model( + __model_name: str, + *, + __config__: Optional[Type[BaseConfig]] = None, + __base__: None = None, + __module__: str = __name__, + __validators__: 
Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type['BaseModel']:
+    ...
+
+
+@overload
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[Type['Model'], Tuple[Type['Model'], ...]],
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    **field_definitions: Any,
+) -> Type['Model']:
+    ...
+
+
+def create_model(
+    __model_name: str,
+    *,
+    __config__: Optional[Type[BaseConfig]] = None,
+    __base__: Union[None, Type['Model'], Tuple[Type['Model'], ...]] = None,
+    __module__: str = __name__,
+    __validators__: Dict[str, 'AnyClassMethod'] = None,
+    __cls_kwargs__: Dict[str, Any] = None,
+    __slots__: Optional[Tuple[str, ...]] = None,
+    **field_definitions: Any,
+) -> Type['Model']:
+    """
+    Dynamically create a model.
+    :param __model_name: name of the created model
+    :param __config__: config class to use for the new model
+    :param __base__: base class for the new model to inherit from
+    :param __module__: module of the created model
+    :param __validators__: a dict of method names and @validator class methods
+    :param __cls_kwargs__: a dict for class creation
+    :param __slots__: Deprecated, `__slots__` should not be passed to `create_model`
+    :param field_definitions: fields of the model (or extra fields if a base is supplied)
+        in the format `<name>=(<type>, <default value>)` or `<name>=<default value>`, e.g.
+        `foobar=(str, ...)` or `foobar=123`, or, for complex use-cases, in the format
+        `<name>=<Field>` or `<name>=(<type>, <FieldInfo>)`, e.g.
+        `foo=Field(datetime, default_factory=datetime.utcnow, alias='bar')` or
+        `foo=(str, FieldInfo(title='Foo'))`
+    """
+    if __slots__ is not None:
+        # __slots__ will be ignored from here on
+        warnings.warn('__slots__ should not be passed to create_model', RuntimeWarning)
+
+    if __base__ is not None:
+        if __config__ is not None:
+            raise ConfigError('to avoid confusion __config__ and __base__ cannot be used together')
+        if not isinstance(__base__, tuple):
+            __base__ = (__base__,)
+    else:
+        __base__ = (cast(Type['Model'], BaseModel),)
+
+    __cls_kwargs__ = __cls_kwargs__ or {}
+
+    fields = {}
+    annotations = {}
+
+    for f_name, f_def in field_definitions.items():
+        if not is_valid_field(f_name):
+            warnings.warn(f'fields may not start with an underscore, ignoring "{f_name}"', RuntimeWarning)
+        if isinstance(f_def, tuple):
+            try:
+                f_annotation, f_value = f_def
+            except ValueError as e:
+                raise ConfigError(
+                    'field definitions should either be a tuple of (<type>, <default>) or just a '
+                    'default value, unfortunately this means tuples as '
+                    'default values are not allowed'
+                ) from e
+        else:
+            f_annotation, f_value = None, f_def
+
+        if f_annotation:
+            annotations[f_name] = f_annotation
+        fields[f_name] = f_value
+
+    namespace: 'DictStrAny' = {'__annotations__': annotations, '__module__': __module__}
+    if __validators__:
+        namespace.update(__validators__)
+    namespace.update(fields)
+    if __config__:
+        namespace['Config'] = inherit_config(__config__, BaseConfig)
+    resolved_bases = resolve_bases(__base__)
+    meta, ns, kwds = prepare_class(__model_name, resolved_bases, kwds=__cls_kwargs__)
+    if resolved_bases is not __base__:
+        ns['__orig_bases__'] = __base__
+    namespace.update(ns)
+    return meta(__model_name, resolved_bases, namespace, **kwds)
+
+
+_missing = object()
+
+
+def validate_model(  # noqa: C901 (ignore complexity)
+    model: Type[BaseModel], input_data: 'DictStrAny', cls: 'ModelOrDc' = None
+) -> Tuple['DictStrAny', 'SetStr', Optional[ValidationError]]:
+    """
validate data against a model. + """ + values = {} + errors = [] + # input_data names, possibly alias + names_used = set() + # field names, never aliases + fields_set = set() + config = model.__config__ + check_extra = config.extra is not Extra.ignore + cls_ = cls or model + + for validator in model.__pre_root_validators__: + try: + input_data = validator(cls_, input_data) + except (ValueError, TypeError, AssertionError) as exc: + return {}, set(), ValidationError([ErrorWrapper(exc, loc=ROOT_KEY)], cls_) + + for name, field in model.__fields__.items(): + value = input_data.get(field.alias, _missing) + using_name = False + if value is _missing and config.allow_population_by_field_name and field.alt_alias: + value = input_data.get(field.name, _missing) + using_name = True + + if value is _missing: + if field.required: + errors.append(ErrorWrapper(MissingError(), loc=field.alias)) + continue + + value = field.get_default() + + if not config.validate_all and not field.validate_always: + values[name] = value + continue + else: + fields_set.add(name) + if check_extra: + names_used.add(field.name if using_name else field.alias) + + v_, errors_ = field.validate(value, values, loc=field.alias, cls=cls_) + if isinstance(errors_, ErrorWrapper): + errors.append(errors_) + elif isinstance(errors_, list): + errors.extend(errors_) + else: + values[name] = v_ + + if check_extra: + if isinstance(input_data, GetterDict): + extra = input_data.extra_keys() - names_used + else: + extra = input_data.keys() - names_used + if extra: + fields_set |= extra + if config.extra is Extra.allow: + for f in extra: + values[f] = input_data[f] + else: + for f in sorted(extra): + errors.append(ErrorWrapper(ExtraError(), loc=f)) + + for skip_on_failure, validator in model.__post_root_validators__: + if skip_on_failure and errors: + continue + try: + values = validator(cls_, values) + except (ValueError, TypeError, AssertionError) as exc: + errors.append(ErrorWrapper(exc, loc=ROOT_KEY)) + + if errors: + return values, fields_set, ValidationError(errors, cls_) + else: + return values, fields_set, None diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/mypy.py b/venv/lib/python3.12/site-packages/pydantic/v1/mypy.py new file mode 100644 index 0000000000000000000000000000000000000000..0a77569218ce6f832e7221b1f557d811318402e7 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/mypy.py @@ -0,0 +1,949 @@ +import sys +from configparser import ConfigParser +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type as TypingType, Union + +from mypy.errorcodes import ErrorCode +from mypy.nodes import ( + ARG_NAMED, + ARG_NAMED_OPT, + ARG_OPT, + ARG_POS, + ARG_STAR2, + MDEF, + Argument, + AssignmentStmt, + Block, + CallExpr, + ClassDef, + Context, + Decorator, + EllipsisExpr, + FuncBase, + FuncDef, + JsonDict, + MemberExpr, + NameExpr, + PassStmt, + PlaceholderNode, + RefExpr, + StrExpr, + SymbolNode, + SymbolTableNode, + TempNode, + TypeInfo, + TypeVarExpr, + Var, +) +from mypy.options import Options +from mypy.plugin import ( + CheckerPluginInterface, + ClassDefContext, + FunctionContext, + MethodContext, + Plugin, + ReportConfigContext, + SemanticAnalyzerPluginInterface, +) +from mypy.plugins import dataclasses +from mypy.semanal import set_callable_name # type: ignore +from mypy.server.trigger import make_wildcard_trigger +from mypy.types import ( + AnyType, + CallableType, + Instance, + NoneType, + Overloaded, + ProperType, + Type, + TypeOfAny, + TypeType, + TypeVarId, + TypeVarType, + UnionType, 
+ get_proper_type, +) +from mypy.typevars import fill_typevars +from mypy.util import get_unique_redefinition_name +from mypy.version import __version__ as mypy_version + +from pydantic.v1.utils import is_valid_field + +try: + from mypy.types import TypeVarDef # type: ignore[attr-defined] +except ImportError: # pragma: no cover + # Backward-compatible with TypeVarDef from Mypy 0.910. + from mypy.types import TypeVarType as TypeVarDef + +CONFIGFILE_KEY = 'pydantic-mypy' +METADATA_KEY = 'pydantic-mypy-metadata' +_NAMESPACE = __name__[:-5] # 'pydantic' in 1.10.X, 'pydantic.v1' in v2.X +BASEMODEL_FULLNAME = f'{_NAMESPACE}.main.BaseModel' +BASESETTINGS_FULLNAME = f'{_NAMESPACE}.env_settings.BaseSettings' +MODEL_METACLASS_FULLNAME = f'{_NAMESPACE}.main.ModelMetaclass' +FIELD_FULLNAME = f'{_NAMESPACE}.fields.Field' +DATACLASS_FULLNAME = f'{_NAMESPACE}.dataclasses.dataclass' + + +def parse_mypy_version(version: str) -> Tuple[int, ...]: + return tuple(map(int, version.partition('+')[0].split('.'))) + + +MYPY_VERSION_TUPLE = parse_mypy_version(mypy_version) +BUILTINS_NAME = 'builtins' if MYPY_VERSION_TUPLE >= (0, 930) else '__builtins__' + +# Increment version if plugin changes and mypy caches should be invalidated +__version__ = 2 + + +def plugin(version: str) -> 'TypingType[Plugin]': + """ + `version` is the mypy version string + + We might want to use this to print a warning if the mypy version being used is + newer, or especially older, than we expect (or need). + """ + return PydanticPlugin + + +class PydanticPlugin(Plugin): + def __init__(self, options: Options) -> None: + self.plugin_config = PydanticPluginConfig(options) + self._plugin_data = self.plugin_config.to_data() + super().__init__(options) + + def get_base_class_hook(self, fullname: str) -> 'Optional[Callable[[ClassDefContext], None]]': + sym = self.lookup_fully_qualified(fullname) + if sym and isinstance(sym.node, TypeInfo): # pragma: no branch + # No branching may occur if the mypy cache has not been cleared + if any(get_fullname(base) == BASEMODEL_FULLNAME for base in sym.node.mro): + return self._pydantic_model_class_maker_callback + return None + + def get_metaclass_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: + if fullname == MODEL_METACLASS_FULLNAME: + return self._pydantic_model_metaclass_marker_callback + return None + + def get_function_hook(self, fullname: str) -> 'Optional[Callable[[FunctionContext], Type]]': + sym = self.lookup_fully_qualified(fullname) + if sym and sym.fullname == FIELD_FULLNAME: + return self._pydantic_field_callback + return None + + def get_method_hook(self, fullname: str) -> Optional[Callable[[MethodContext], Type]]: + if fullname.endswith('.from_orm'): + return from_orm_callback + return None + + def get_class_decorator_hook(self, fullname: str) -> Optional[Callable[[ClassDefContext], None]]: + """Mark pydantic.dataclasses as dataclass. + + Mypy version 1.1.1 added support for `@dataclass_transform` decorator. + """ + if fullname == DATACLASS_FULLNAME and MYPY_VERSION_TUPLE < (1, 1): + return dataclasses.dataclass_class_maker_callback # type: ignore[return-value] + return None + + def report_config_data(self, ctx: ReportConfigContext) -> Dict[str, Any]: + """Return all plugin config data. + + Used by mypy to determine if cache needs to be discarded. 
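+
+        (Added editorial note, not upstream text: mypy compares this dict, together
+        with the plugin `__version__` defined above, against its incremental cache,
+        so changing any [tool.pydantic-mypy] setting forces re-analysis.)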
+ """ + return self._plugin_data + + def _pydantic_model_class_maker_callback(self, ctx: ClassDefContext) -> None: + transformer = PydanticModelTransformer(ctx, self.plugin_config) + transformer.transform() + + def _pydantic_model_metaclass_marker_callback(self, ctx: ClassDefContext) -> None: + """Reset dataclass_transform_spec attribute of ModelMetaclass. + + Let the plugin handle it. This behavior can be disabled + if 'debug_dataclass_transform' is set to True', for testing purposes. + """ + if self.plugin_config.debug_dataclass_transform: + return + info_metaclass = ctx.cls.info.declared_metaclass + assert info_metaclass, "callback not passed from 'get_metaclass_hook'" + if getattr(info_metaclass.type, 'dataclass_transform_spec', None): + info_metaclass.type.dataclass_transform_spec = None # type: ignore[attr-defined] + + def _pydantic_field_callback(self, ctx: FunctionContext) -> 'Type': + """ + Extract the type of the `default` argument from the Field function, and use it as the return type. + + In particular: + * Check whether the default and default_factory argument is specified. + * Output an error if both are specified. + * Retrieve the type of the argument which is specified, and use it as return type for the function. + """ + default_any_type = ctx.default_return_type + + assert ctx.callee_arg_names[0] == 'default', '"default" is no longer first argument in Field()' + assert ctx.callee_arg_names[1] == 'default_factory', '"default_factory" is no longer second argument in Field()' + default_args = ctx.args[0] + default_factory_args = ctx.args[1] + + if default_args and default_factory_args: + error_default_and_default_factory_specified(ctx.api, ctx.context) + return default_any_type + + if default_args: + default_type = ctx.arg_types[0][0] + default_arg = default_args[0] + + # Fallback to default Any type if the field is required + if not isinstance(default_arg, EllipsisExpr): + return default_type + + elif default_factory_args: + default_factory_type = ctx.arg_types[1][0] + + # Functions which use `ParamSpec` can be overloaded, exposing the callable's types as a parameter + # Pydantic calls the default factory without any argument, so we retrieve the first item + if isinstance(default_factory_type, Overloaded): + if MYPY_VERSION_TUPLE > (0, 910): + default_factory_type = default_factory_type.items[0] + else: + # Mypy0.910 exposes the items of overloaded types in a function + default_factory_type = default_factory_type.items()[0] # type: ignore[operator] + + if isinstance(default_factory_type, CallableType): + ret_type = get_proper_type(default_factory_type.ret_type) + if ( + isinstance(ret_type, Instance) + and ret_type.args + and all(isinstance(arg, TypeVarType) for arg in ret_type.args) + ): + # Looks like the default factory is a type like `list` or `dict`, replace all args with `Any` + ret_type = ret_type.copy_modified(args=[default_any_type] * len(ret_type.args)) + return ret_type + + return default_any_type + + +class PydanticPluginConfig: + __slots__ = ( + 'init_forbid_extra', + 'init_typed', + 'warn_required_dynamic_aliases', + 'warn_untyped_fields', + 'debug_dataclass_transform', + ) + init_forbid_extra: bool + init_typed: bool + warn_required_dynamic_aliases: bool + warn_untyped_fields: bool + debug_dataclass_transform: bool # undocumented + + def __init__(self, options: Options) -> None: + if options.config_file is None: # pragma: no cover + return + + toml_config = parse_toml(options.config_file) + if toml_config is not None: + config = toml_config.get('tool', 
+            for key in self.__slots__:
+                setting = config.get(key, False)
+                if not isinstance(setting, bool):
+                    raise ValueError(f'Configuration value must be a boolean for key: {key}')
+                setattr(self, key, setting)
+        else:
+            plugin_config = ConfigParser()
+            plugin_config.read(options.config_file)
+            for key in self.__slots__:
+                setting = plugin_config.getboolean(CONFIGFILE_KEY, key, fallback=False)
+                setattr(self, key, setting)
+
+    def to_data(self) -> Dict[str, Any]:
+        return {key: getattr(self, key) for key in self.__slots__}
+
+
+def from_orm_callback(ctx: MethodContext) -> Type:
+    """
+    Raise an error if orm_mode is not enabled
+    """
+    model_type: Instance
+    ctx_type = ctx.type
+    if isinstance(ctx_type, TypeType):
+        ctx_type = ctx_type.item
+    if isinstance(ctx_type, CallableType) and isinstance(ctx_type.ret_type, Instance):
+        model_type = ctx_type.ret_type  # called on the class
+    elif isinstance(ctx_type, Instance):
+        model_type = ctx_type  # called on an instance (unusual, but still valid)
+    else:  # pragma: no cover
+        detail = f'ctx.type: {ctx_type} (of type {ctx_type.__class__.__name__})'
+        error_unexpected_behavior(detail, ctx.api, ctx.context)
+        return ctx.default_return_type
+    pydantic_metadata = model_type.type.metadata.get(METADATA_KEY)
+    if pydantic_metadata is None:
+        return ctx.default_return_type
+    orm_mode = pydantic_metadata.get('config', {}).get('orm_mode')
+    if orm_mode is not True:
+        error_from_orm(get_name(model_type.type), ctx.api, ctx.context)
+    return ctx.default_return_type
+
+
+class PydanticModelTransformer:
+    tracked_config_fields: Set[str] = {
+        'extra',
+        'allow_mutation',
+        'frozen',
+        'orm_mode',
+        'allow_population_by_field_name',
+        'alias_generator',
+    }
+
+    def __init__(self, ctx: ClassDefContext, plugin_config: PydanticPluginConfig) -> None:
+        self._ctx = ctx
+        self.plugin_config = plugin_config
+
+    def transform(self) -> None:
+        """
+        Configures the BaseModel subclass according to the plugin settings.
+
+        In particular:
+        * determines the model config and fields,
+        * adds a fields-aware signature for the initializer and construct methods
+        * freezes the class if allow_mutation = False or frozen = True
+        * stores the fields, config, and if the class is settings in the mypy metadata for access by subclasses
+        """
+        ctx = self._ctx
+        info = ctx.cls.info
+
+        self.adjust_validator_signatures()
+        config = self.collect_config()
+        fields = self.collect_fields(config)
+        is_settings = any(get_fullname(base) == BASESETTINGS_FULLNAME for base in info.mro[:-1])
+        self.add_initializer(fields, config, is_settings)
+        self.add_construct_method(fields)
+        self.set_frozen(fields, frozen=config.allow_mutation is False or config.frozen is True)
+        info.metadata[METADATA_KEY] = {
+            'fields': {field.name: field.serialize() for field in fields},
+            'config': config.set_values_dict(),
+        }
+
+    def adjust_validator_signatures(self) -> None:
+        """When we decorate a function `f` with `pydantic.validator(...)`, mypy sees
+        `f` as a regular method taking a `self` instance, even though pydantic
+        internally wraps `f` with `classmethod` if necessary.
+
+        Teach mypy this by marking any function whose outermost decorator is a
+        `validator()` call as a classmethod.
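+
+        An added illustrative example (not upstream text) of the pattern handled
+        here, assuming `from pydantic import BaseModel, validator`:
+
+            class Model(BaseModel):
+                x: int
+
+                @validator('x')
+                def check_x(cls, v):  # accepted as a classmethod after this hook
+                    return v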
+ """ + for name, sym in self._ctx.cls.info.names.items(): + if isinstance(sym.node, Decorator): + first_dec = sym.node.original_decorators[0] + if ( + isinstance(first_dec, CallExpr) + and isinstance(first_dec.callee, NameExpr) + and first_dec.callee.fullname == f'{_NAMESPACE}.class_validators.validator' + ): + sym.node.func.is_class = True + + def collect_config(self) -> 'ModelConfigData': + """ + Collects the values of the config attributes that are used by the plugin, accounting for parent classes. + """ + ctx = self._ctx + cls = ctx.cls + config = ModelConfigData() + for stmt in cls.defs.body: + if not isinstance(stmt, ClassDef): + continue + if stmt.name == 'Config': + for substmt in stmt.defs.body: + if not isinstance(substmt, AssignmentStmt): + continue + config.update(self.get_config_update(substmt)) + if ( + config.has_alias_generator + and not config.allow_population_by_field_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(ctx.api, stmt) + for info in cls.info.mro[1:]: # 0 is the current class + if METADATA_KEY not in info.metadata: + continue + + # Each class depends on the set of fields in its ancestors + ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) + for name, value in info.metadata[METADATA_KEY]['config'].items(): + config.setdefault(name, value) + return config + + def collect_fields(self, model_config: 'ModelConfigData') -> List['PydanticModelField']: + """ + Collects the fields for the model, accounting for parent classes + """ + # First, collect fields belonging to the current class. + ctx = self._ctx + cls = self._ctx.cls + fields = [] # type: List[PydanticModelField] + known_fields = set() # type: Set[str] + for stmt in cls.defs.body: + if not isinstance(stmt, AssignmentStmt): # `and stmt.new_syntax` to require annotation + continue + + lhs = stmt.lvalues[0] + if not isinstance(lhs, NameExpr) or not is_valid_field(lhs.name): + continue + + if not stmt.new_syntax and self.plugin_config.warn_untyped_fields: + error_untyped_fields(ctx.api, stmt) + + # if lhs.name == '__config__': # BaseConfig not well handled; I'm not sure why yet + # continue + + sym = cls.info.names.get(lhs.name) + if sym is None: # pragma: no cover + # This is likely due to a star import (see the dataclasses plugin for a more detailed explanation) + # This is the same logic used in the dataclasses plugin + continue + + node = sym.node + if isinstance(node, PlaceholderNode): # pragma: no cover + # See the PlaceholderNode docstring for more detail about how this can occur + # Basically, it is an edge case when dealing with complex import logic + # This is the same logic used in the dataclasses plugin + continue + if not isinstance(node, Var): # pragma: no cover + # Don't know if this edge case still happens with the `is_valid_field` check above + # but better safe than sorry + continue + + # x: ClassVar[int] is ignored by dataclasses. 
+ if node.is_classvar: + continue + + is_required = self.get_is_required(cls, stmt, lhs) + alias, has_dynamic_alias = self.get_alias_info(stmt) + if ( + has_dynamic_alias + and not model_config.allow_population_by_field_name + and self.plugin_config.warn_required_dynamic_aliases + ): + error_required_dynamic_aliases(ctx.api, stmt) + fields.append( + PydanticModelField( + name=lhs.name, + is_required=is_required, + alias=alias, + has_dynamic_alias=has_dynamic_alias, + line=stmt.line, + column=stmt.column, + ) + ) + known_fields.add(lhs.name) + all_fields = fields.copy() + for info in cls.info.mro[1:]: # 0 is the current class, -2 is BaseModel, -1 is object + if METADATA_KEY not in info.metadata: + continue + + superclass_fields = [] + # Each class depends on the set of fields in its ancestors + ctx.api.add_plugin_dependency(make_wildcard_trigger(get_fullname(info))) + + for name, data in info.metadata[METADATA_KEY]['fields'].items(): + if name not in known_fields: + field = PydanticModelField.deserialize(info, data) + known_fields.add(name) + superclass_fields.append(field) + else: + (field,) = (a for a in all_fields if a.name == name) + all_fields.remove(field) + superclass_fields.append(field) + all_fields = superclass_fields + all_fields + return all_fields + + def add_initializer(self, fields: List['PydanticModelField'], config: 'ModelConfigData', is_settings: bool) -> None: + """ + Adds a fields-aware `__init__` method to the class. + + The added `__init__` will be annotated with types vs. all `Any` depending on the plugin settings. + """ + ctx = self._ctx + typed = self.plugin_config.init_typed + use_alias = config.allow_population_by_field_name is not True + force_all_optional = is_settings or bool( + config.has_alias_generator and not config.allow_population_by_field_name + ) + init_arguments = self.get_field_arguments( + fields, typed=typed, force_all_optional=force_all_optional, use_alias=use_alias + ) + if not self.should_init_forbid_extra(fields, config): + var = Var('kwargs') + init_arguments.append(Argument(var, AnyType(TypeOfAny.explicit), None, ARG_STAR2)) + + if '__init__' not in ctx.cls.info.names: + add_method(ctx, '__init__', init_arguments, NoneType()) + + def add_construct_method(self, fields: List['PydanticModelField']) -> None: + """ + Adds a fully typed `construct` classmethod to the class. + + Similar to the fields-aware __init__ method, but always uses the field names (not aliases), + and does not treat settings fields as optional. + """ + ctx = self._ctx + set_str = ctx.api.named_type(f'{BUILTINS_NAME}.set', [ctx.api.named_type(f'{BUILTINS_NAME}.str')]) + optional_set_str = UnionType([set_str, NoneType()]) + fields_set_argument = Argument(Var('_fields_set', optional_set_str), optional_set_str, None, ARG_OPT) + construct_arguments = self.get_field_arguments(fields, typed=True, force_all_optional=False, use_alias=False) + construct_arguments = [fields_set_argument] + construct_arguments + + obj_type = ctx.api.named_type(f'{BUILTINS_NAME}.object') + self_tvar_name = '_PydanticBaseModel' # Make sure it does not conflict with other names in the class + tvar_fullname = ctx.cls.fullname + '.' 
+ self_tvar_name + if MYPY_VERSION_TUPLE >= (1, 4): + tvd = TypeVarType( + self_tvar_name, + tvar_fullname, + ( + TypeVarId(-1, namespace=ctx.cls.fullname + '.construct') + if MYPY_VERSION_TUPLE >= (1, 11) + else TypeVarId(-1) + ), + [], + obj_type, + AnyType(TypeOfAny.from_omitted_generics), # type: ignore[arg-type] + ) + self_tvar_expr = TypeVarExpr( + self_tvar_name, + tvar_fullname, + [], + obj_type, + AnyType(TypeOfAny.from_omitted_generics), # type: ignore[arg-type] + ) + else: + tvd = TypeVarDef(self_tvar_name, tvar_fullname, -1, [], obj_type) + self_tvar_expr = TypeVarExpr(self_tvar_name, tvar_fullname, [], obj_type) + ctx.cls.info.names[self_tvar_name] = SymbolTableNode(MDEF, self_tvar_expr) + + # Backward-compatible with TypeVarDef from Mypy 0.910. + if isinstance(tvd, TypeVarType): + self_type = tvd + else: + self_type = TypeVarType(tvd) + + add_method( + ctx, + 'construct', + construct_arguments, + return_type=self_type, + self_type=self_type, + tvar_def=tvd, + is_classmethod=True, + ) + + def set_frozen(self, fields: List['PydanticModelField'], frozen: bool) -> None: + """ + Marks all fields as properties so that attempts to set them trigger mypy errors. + + This is the same approach used by the attrs and dataclasses plugins. + """ + ctx = self._ctx + info = ctx.cls.info + for field in fields: + sym_node = info.names.get(field.name) + if sym_node is not None: + var = sym_node.node + if isinstance(var, Var): + var.is_property = frozen + elif isinstance(var, PlaceholderNode) and not ctx.api.final_iteration: + # See https://github.com/pydantic/pydantic/issues/5191 to hit this branch for test coverage + ctx.api.defer() + else: # pragma: no cover + # I don't know whether it's possible to hit this branch, but I've added it for safety + try: + var_str = str(var) + except TypeError: + # This happens for PlaceholderNode; perhaps it will happen for other types in the future.. + var_str = repr(var) + detail = f'sym_node.node: {var_str} (of type {var.__class__})' + error_unexpected_behavior(detail, ctx.api, ctx.cls) + else: + var = field.to_var(info, use_alias=False) + var.info = info + var.is_property = frozen + var._fullname = get_fullname(info) + '.' + get_name(var) + info.names[get_name(var)] = SymbolTableNode(MDEF, var) + + def get_config_update(self, substmt: AssignmentStmt) -> Optional['ModelConfigData']: + """ + Determines the config update due to a single statement in the Config class definition. 
+ + Warns if a tracked config attribute is set to a value the plugin doesn't know how to interpret (e.g., an int) + """ + lhs = substmt.lvalues[0] + if not (isinstance(lhs, NameExpr) and lhs.name in self.tracked_config_fields): + return None + if lhs.name == 'extra': + if isinstance(substmt.rvalue, StrExpr): + forbid_extra = substmt.rvalue.value == 'forbid' + elif isinstance(substmt.rvalue, MemberExpr): + forbid_extra = substmt.rvalue.name == 'forbid' + else: + error_invalid_config_value(lhs.name, self._ctx.api, substmt) + return None + return ModelConfigData(forbid_extra=forbid_extra) + if lhs.name == 'alias_generator': + has_alias_generator = True + if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname == 'builtins.None': + has_alias_generator = False + return ModelConfigData(has_alias_generator=has_alias_generator) + if isinstance(substmt.rvalue, NameExpr) and substmt.rvalue.fullname in ('builtins.True', 'builtins.False'): + return ModelConfigData(**{lhs.name: substmt.rvalue.fullname == 'builtins.True'}) + error_invalid_config_value(lhs.name, self._ctx.api, substmt) + return None + + @staticmethod + def get_is_required(cls: ClassDef, stmt: AssignmentStmt, lhs: NameExpr) -> bool: + """ + Returns a boolean indicating whether the field defined in `stmt` is a required field. + """ + expr = stmt.rvalue + if isinstance(expr, TempNode): + # TempNode means annotation-only, so only non-required if Optional + value_type = get_proper_type(cls.info[lhs.name].type) + return not PydanticModelTransformer.type_has_implicit_default(value_type) + if isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME: + # The "default value" is a call to `Field`; at this point, the field is + # only required if default is Ellipsis (i.e., `field_name: Annotation = Field(...)`) or if default_factory + # is specified. + for arg, name in zip(expr.args, expr.arg_names): + # If name is None, then this arg is the default because it is the only positional argument. + if name is None or name == 'default': + return arg.__class__ is EllipsisExpr + if name == 'default_factory': + return False + # In this case, default and default_factory are not specified, so we need to look at the annotation + value_type = get_proper_type(cls.info[lhs.name].type) + return not PydanticModelTransformer.type_has_implicit_default(value_type) + # Only required if the "default value" is Ellipsis (i.e., `field_name: Annotation = ...`) + return isinstance(expr, EllipsisExpr) + + @staticmethod + def type_has_implicit_default(type_: Optional[ProperType]) -> bool: + """ + Returns True if the passed type will be given an implicit default value. + + In pydantic v1, this is the case for Optional types and Any (with default value None). + """ + if isinstance(type_, AnyType): + # Annotated as Any + return True + if isinstance(type_, UnionType) and any( + isinstance(item, NoneType) or isinstance(item, AnyType) for item in type_.items + ): + # Annotated as Optional, or otherwise having NoneType or AnyType in the union + return True + return False + + @staticmethod + def get_alias_info(stmt: AssignmentStmt) -> Tuple[Optional[str], bool]: + """ + Returns a pair (alias, has_dynamic_alias), extracted from the declaration of the field defined in `stmt`. + + `has_dynamic_alias` is True if and only if an alias is provided, but not as a string literal. + If `has_dynamic_alias` is True, `alias` will be None. 
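+
+        For example (illustrative; `make_alias` is a hypothetical helper):
+            `name: str = Field(..., alias='Name')`          -> ('Name', False)
+            `name: str = Field(..., alias=make_alias('n'))` -> (None, True)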
+ """ + expr = stmt.rvalue + if isinstance(expr, TempNode): + # TempNode means annotation-only + return None, False + + if not ( + isinstance(expr, CallExpr) and isinstance(expr.callee, RefExpr) and expr.callee.fullname == FIELD_FULLNAME + ): + # Assigned value is not a call to pydantic.fields.Field + return None, False + + for i, arg_name in enumerate(expr.arg_names): + if arg_name != 'alias': + continue + arg = expr.args[i] + if isinstance(arg, StrExpr): + return arg.value, False + else: + return None, True + return None, False + + def get_field_arguments( + self, fields: List['PydanticModelField'], typed: bool, force_all_optional: bool, use_alias: bool + ) -> List[Argument]: + """ + Helper function used during the construction of the `__init__` and `construct` method signatures. + + Returns a list of mypy Argument instances for use in the generated signatures. + """ + info = self._ctx.cls.info + arguments = [ + field.to_argument(info, typed=typed, force_optional=force_all_optional, use_alias=use_alias) + for field in fields + if not (use_alias and field.has_dynamic_alias) + ] + return arguments + + def should_init_forbid_extra(self, fields: List['PydanticModelField'], config: 'ModelConfigData') -> bool: + """ + Indicates whether the generated `__init__` should get a `**kwargs` at the end of its signature + + We disallow arbitrary kwargs if the extra config setting is "forbid", or if the plugin config says to, + *unless* a required dynamic alias is present (since then we can't determine a valid signature). + """ + if not config.allow_population_by_field_name: + if self.is_dynamic_alias_present(fields, bool(config.has_alias_generator)): + return False + if config.forbid_extra: + return True + return self.plugin_config.init_forbid_extra + + @staticmethod + def is_dynamic_alias_present(fields: List['PydanticModelField'], has_alias_generator: bool) -> bool: + """ + Returns whether any fields on the model have a "dynamic alias", i.e., an alias that cannot be + determined during static analysis. 
+ """ + for field in fields: + if field.has_dynamic_alias: + return True + if has_alias_generator: + for field in fields: + if field.alias is None: + return True + return False + + +class PydanticModelField: + def __init__( + self, name: str, is_required: bool, alias: Optional[str], has_dynamic_alias: bool, line: int, column: int + ): + self.name = name + self.is_required = is_required + self.alias = alias + self.has_dynamic_alias = has_dynamic_alias + self.line = line + self.column = column + + def to_var(self, info: TypeInfo, use_alias: bool) -> Var: + name = self.name + if use_alias and self.alias is not None: + name = self.alias + return Var(name, info[self.name].type) + + def to_argument(self, info: TypeInfo, typed: bool, force_optional: bool, use_alias: bool) -> Argument: + if typed and info[self.name].type is not None: + type_annotation = info[self.name].type + else: + type_annotation = AnyType(TypeOfAny.explicit) + return Argument( + variable=self.to_var(info, use_alias), + type_annotation=type_annotation, + initializer=None, + kind=ARG_NAMED_OPT if force_optional or not self.is_required else ARG_NAMED, + ) + + def serialize(self) -> JsonDict: + return self.__dict__ + + @classmethod + def deserialize(cls, info: TypeInfo, data: JsonDict) -> 'PydanticModelField': + return cls(**data) + + +class ModelConfigData: + def __init__( + self, + forbid_extra: Optional[bool] = None, + allow_mutation: Optional[bool] = None, + frozen: Optional[bool] = None, + orm_mode: Optional[bool] = None, + allow_population_by_field_name: Optional[bool] = None, + has_alias_generator: Optional[bool] = None, + ): + self.forbid_extra = forbid_extra + self.allow_mutation = allow_mutation + self.frozen = frozen + self.orm_mode = orm_mode + self.allow_population_by_field_name = allow_population_by_field_name + self.has_alias_generator = has_alias_generator + + def set_values_dict(self) -> Dict[str, Any]: + return {k: v for k, v in self.__dict__.items() if v is not None} + + def update(self, config: Optional['ModelConfigData']) -> None: + if config is None: + return + for k, v in config.set_values_dict().items(): + setattr(self, k, v) + + def setdefault(self, key: str, value: Any) -> None: + if getattr(self, key) is None: + setattr(self, key, value) + + +ERROR_ORM = ErrorCode('pydantic-orm', 'Invalid from_orm call', 'Pydantic') +ERROR_CONFIG = ErrorCode('pydantic-config', 'Invalid config value', 'Pydantic') +ERROR_ALIAS = ErrorCode('pydantic-alias', 'Dynamic alias disallowed', 'Pydantic') +ERROR_UNEXPECTED = ErrorCode('pydantic-unexpected', 'Unexpected behavior', 'Pydantic') +ERROR_UNTYPED = ErrorCode('pydantic-field', 'Untyped field disallowed', 'Pydantic') +ERROR_FIELD_DEFAULTS = ErrorCode('pydantic-field', 'Invalid Field defaults', 'Pydantic') + + +def error_from_orm(model_name: str, api: CheckerPluginInterface, context: Context) -> None: + api.fail(f'"{model_name}" does not have orm_mode=True', context, code=ERROR_ORM) + + +def error_invalid_config_value(name: str, api: SemanticAnalyzerPluginInterface, context: Context) -> None: + api.fail(f'Invalid value for "Config.{name}"', context, code=ERROR_CONFIG) + + +def error_required_dynamic_aliases(api: SemanticAnalyzerPluginInterface, context: Context) -> None: + api.fail('Required dynamic aliases disallowed', context, code=ERROR_ALIAS) + + +def error_unexpected_behavior( + detail: str, api: Union[CheckerPluginInterface, SemanticAnalyzerPluginInterface], context: Context +) -> None: # pragma: no cover + # Can't think of a good way to test this, but I confirmed it 
renders as desired by adding to a non-error path + link = 'https://github.com/pydantic/pydantic/issues/new/choose' + full_message = f'The pydantic mypy plugin ran into unexpected behavior: {detail}\n' + full_message += f'Please consider reporting this bug at {link} so we can try to fix it!' + api.fail(full_message, context, code=ERROR_UNEXPECTED) + + +def error_untyped_fields(api: SemanticAnalyzerPluginInterface, context: Context) -> None: + api.fail('Untyped fields disallowed', context, code=ERROR_UNTYPED) + + +def error_default_and_default_factory_specified(api: CheckerPluginInterface, context: Context) -> None: + api.fail('Field default and default_factory cannot be specified together', context, code=ERROR_FIELD_DEFAULTS) + + +def add_method( + ctx: ClassDefContext, + name: str, + args: List[Argument], + return_type: Type, + self_type: Optional[Type] = None, + tvar_def: Optional[TypeVarDef] = None, + is_classmethod: bool = False, + is_new: bool = False, + # is_staticmethod: bool = False, +) -> None: + """ + Adds a new method to a class. + + This can be dropped if/when https://github.com/python/mypy/issues/7301 is merged + """ + info = ctx.cls.info + + # First remove any previously generated methods with the same name + # to avoid clashes and problems in the semantic analyzer. + if name in info.names: + sym = info.names[name] + if sym.plugin_generated and isinstance(sym.node, FuncDef): + ctx.cls.defs.body.remove(sym.node) # pragma: no cover + + self_type = self_type or fill_typevars(info) + if is_classmethod or is_new: + first = [Argument(Var('_cls'), TypeType.make_normalized(self_type), None, ARG_POS)] + # elif is_staticmethod: + # first = [] + else: + self_type = self_type or fill_typevars(info) + first = [Argument(Var('__pydantic_self__'), self_type, None, ARG_POS)] + args = first + args + arg_types, arg_names, arg_kinds = [], [], [] + for arg in args: + assert arg.type_annotation, 'All arguments must be fully typed.' + arg_types.append(arg.type_annotation) + arg_names.append(get_name(arg.variable)) + arg_kinds.append(arg.kind) + + function_type = ctx.api.named_type(f'{BUILTINS_NAME}.function') + signature = CallableType( + arg_types, arg_kinds, arg_names, return_type, function_type, variables=[tvar_def] if tvar_def else None + ) + + func = FuncDef(name, args, Block([PassStmt()])) + func.info = info + func.type = set_callable_name(signature, func) + func.is_class = is_classmethod + # func.is_static = is_staticmethod + func._fullname = get_fullname(info) + '.' + name + func.line = info.line + + # NOTE: we would like the plugin generated node to dominate, but we still + # need to keep any existing definitions so they get semantically analyzed. + if name in info.names: + # Get a nice unique name instead. + r_name = get_unique_redefinition_name(name, info.names) + info.names[r_name] = info.names[name] + + if is_classmethod: # or is_staticmethod: + func.is_decorated = True + v = Var(name, func.type) + v.info = info + v._fullname = func._fullname + # if is_classmethod: + v.is_classmethod = True + dec = Decorator(func, [NameExpr('classmethod')], v) + # else: + # v.is_staticmethod = True + # dec = Decorator(func, [NameExpr('staticmethod')], v) + + dec.line = info.line + sym = SymbolTableNode(MDEF, dec) + else: + sym = SymbolTableNode(MDEF, func) + sym.plugin_generated = True + + info.names[name] = sym + info.defn.defs.body.append(func) + + +def get_fullname(x: Union[FuncBase, SymbolNode]) -> str: + """ + Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. 
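+    (In older mypy versions `fullname` was a method rather than a plain attribute,
+    hence the `callable()` check below.)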
+ """ + fn = x.fullname + if callable(fn): # pragma: no cover + return fn() + return fn + + +def get_name(x: Union[FuncBase, SymbolNode]) -> str: + """ + Used for compatibility with mypy 0.740; can be dropped once support for 0.740 is dropped. + """ + fn = x.name + if callable(fn): # pragma: no cover + return fn() + return fn + + +def parse_toml(config_file: str) -> Optional[Dict[str, Any]]: + if not config_file.endswith('.toml'): + return None + + read_mode = 'rb' + if sys.version_info >= (3, 11): + import tomllib as toml_ + else: + try: + import tomli as toml_ + except ImportError: + # older versions of mypy have toml as a dependency, not tomli + read_mode = 'r' + try: + import toml as toml_ # type: ignore[no-redef] + except ImportError: # pragma: no cover + import warnings + + warnings.warn('No TOML parser installed, cannot read configuration from `pyproject.toml`.') + return None + + with open(config_file, read_mode) as rf: + return toml_.load(rf) # type: ignore[arg-type] diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/networks.py b/venv/lib/python3.12/site-packages/pydantic/v1/networks.py new file mode 100644 index 0000000000000000000000000000000000000000..ba07b74867b83beaaabc3afe2e19d77d71544338 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/networks.py @@ -0,0 +1,747 @@ +import re +from ipaddress import ( + IPv4Address, + IPv4Interface, + IPv4Network, + IPv6Address, + IPv6Interface, + IPv6Network, + _BaseAddress, + _BaseNetwork, +) +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + Generator, + List, + Match, + Optional, + Pattern, + Set, + Tuple, + Type, + Union, + cast, + no_type_check, +) + +from pydantic.v1 import errors +from pydantic.v1.utils import Representation, update_not_none +from pydantic.v1.validators import constr_length_validator, str_validator + +if TYPE_CHECKING: + import email_validator + from typing_extensions import TypedDict + + from pydantic.v1.config import BaseConfig + from pydantic.v1.fields import ModelField + from pydantic.v1.typing import AnyCallable + + CallableGenerator = Generator[AnyCallable, None, None] + + class Parts(TypedDict, total=False): + scheme: str + user: Optional[str] + password: Optional[str] + ipv4: Optional[str] + ipv6: Optional[str] + domain: Optional[str] + port: Optional[str] + path: Optional[str] + query: Optional[str] + fragment: Optional[str] + + class HostParts(TypedDict, total=False): + host: str + tld: Optional[str] + host_type: Optional[str] + port: Optional[str] + rebuild: bool + +else: + email_validator = None + + class Parts(dict): + pass + + +NetworkType = Union[str, bytes, int, Tuple[Union[str, bytes, int], Union[str, int]]] + +__all__ = [ + 'AnyUrl', + 'AnyHttpUrl', + 'FileUrl', + 'HttpUrl', + 'stricturl', + 'EmailStr', + 'NameEmail', + 'IPvAnyAddress', + 'IPvAnyInterface', + 'IPvAnyNetwork', + 'PostgresDsn', + 'CockroachDsn', + 'AmqpDsn', + 'RedisDsn', + 'MongoDsn', + 'KafkaDsn', + 'validate_email', +] + +_url_regex_cache = None +_multi_host_url_regex_cache = None +_ascii_domain_regex_cache = None +_int_domain_regex_cache = None +_host_regex_cache = None + +_host_regex = ( + r'(?:' + r'(?P(?:\d{1,3}\.){3}\d{1,3})(?=$|[/:#?])|' # ipv4 + r'(?P\[[A-F0-9]*:[A-F0-9:]+\])(?=$|[/:#?])|' # ipv6 + r'(?P[^\s/:?#]+)' # domain, validation occurs later + r')?' + r'(?::(?P\d+))?' # port +) +_scheme_regex = r'(?:(?P[a-z][a-z0-9+\-.]+)://)?' # scheme https://tools.ietf.org/html/rfc3986#appendix-A +_user_info_regex = r'(?:(?P[^\s:/]*)(?::(?P[^\s/]*))?@)?' +_path_regex = r'(?P/[^\s?#]*)?' 
+_query_regex = r'(?:\?(?P<query>[^\s#]*))?'
+_fragment_regex = r'(?:#(?P<fragment>[^\s#]*))?'
+
+
+def url_regex() -> Pattern[str]:
+    global _url_regex_cache
+    if _url_regex_cache is None:
+        _url_regex_cache = re.compile(
+            rf'{_scheme_regex}{_user_info_regex}{_host_regex}{_path_regex}{_query_regex}{_fragment_regex}',
+            re.IGNORECASE,
+        )
+    return _url_regex_cache
+
+
+def multi_host_url_regex() -> Pattern[str]:
+    """
+    Compiled multi host url regex.
+
+    In addition to `url_regex`, it allows matching multiple hosts,
+    e.g. host1.db.net,host2.db.net
+    """
+    global _multi_host_url_regex_cache
+    if _multi_host_url_regex_cache is None:
+        _multi_host_url_regex_cache = re.compile(
+            rf'{_scheme_regex}{_user_info_regex}'
+            r'(?P<hosts>([^/]*))'  # validation occurs later
+            rf'{_path_regex}{_query_regex}{_fragment_regex}',
+            re.IGNORECASE,
+        )
+    return _multi_host_url_regex_cache
+
+
+def ascii_domain_regex() -> Pattern[str]:
+    global _ascii_domain_regex_cache
+    if _ascii_domain_regex_cache is None:
+        ascii_chunk = r'[_0-9a-z](?:[-_0-9a-z]{0,61}[_0-9a-z])?'
+        ascii_domain_ending = r'(?P<tld>\.[a-z]{2,63})?\.?'
+        _ascii_domain_regex_cache = re.compile(
+            fr'(?:{ascii_chunk}\.)*?{ascii_chunk}{ascii_domain_ending}', re.IGNORECASE
+        )
+    return _ascii_domain_regex_cache
+
+
+def int_domain_regex() -> Pattern[str]:
+    global _int_domain_regex_cache
+    if _int_domain_regex_cache is None:
+        int_chunk = r'[_0-9a-\U00040000](?:[-_0-9a-\U00040000]{0,61}[_0-9a-\U00040000])?'
+        int_domain_ending = r'(?P<tld>(\.[^\W\d_]{2,63})|(\.(?:xn--)[_0-9a-z-]{2,63}))?\.?'
+        _int_domain_regex_cache = re.compile(fr'(?:{int_chunk}\.)*?{int_chunk}{int_domain_ending}', re.IGNORECASE)
+    return _int_domain_regex_cache
+
+
+def host_regex() -> Pattern[str]:
+    global _host_regex_cache
+    if _host_regex_cache is None:
+        _host_regex_cache = re.compile(
+            _host_regex,
+            re.IGNORECASE,
+        )
+    return _host_regex_cache
+
+
+class AnyUrl(str):
+    strip_whitespace = True
+    min_length = 1
+    max_length = 2**16
+    allowed_schemes: Optional[Collection[str]] = None
+    tld_required: bool = False
+    user_required: bool = False
+    host_required: bool = True
+    hidden_parts: Set[str] = set()
+
+    __slots__ = ('scheme', 'user', 'password', 'host', 'tld', 'host_type', 'port', 'path', 'query', 'fragment')
+
+    @no_type_check
+    def __new__(cls, url: Optional[str], **kwargs) -> object:
+        return str.__new__(cls, cls.build(**kwargs) if url is None else url)
+
+    def __init__(
+        self,
+        url: str,
+        *,
+        scheme: str,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        host: Optional[str] = None,
+        tld: Optional[str] = None,
+        host_type: str = 'domain',
+        port: Optional[str] = None,
+        path: Optional[str] = None,
+        query: Optional[str] = None,
+        fragment: Optional[str] = None,
+    ) -> None:
+        str.__init__(url)
+        self.scheme = scheme
+        self.user = user
+        self.password = password
+        self.host = host
+        self.tld = tld
+        self.host_type = host_type
+        self.port = port
+        self.path = path
+        self.query = query
+        self.fragment = fragment
+
+    @classmethod
+    def build(
+        cls,
+        *,
+        scheme: str,
+        user: Optional[str] = None,
+        password: Optional[str] = None,
+        host: str,
+        port: Optional[str] = None,
+        path: Optional[str] = None,
+        query: Optional[str] = None,
+        fragment: Optional[str] = None,
+        **_kwargs: str,
+    ) -> str:
+        parts = Parts(
+            scheme=scheme,
+            user=user,
+            password=password,
+            host=host,
+            port=port,
+            path=path,
+            query=query,
+            fragment=fragment,
+            **_kwargs,  # type: ignore[misc]
+        )
+
+        url = scheme + '://'
+        if user:
+            url += user
+        if password:
+            url += ':' + password
+        if user or
password: + url += '@' + url += host + if port and ('port' not in cls.hidden_parts or cls.get_default_parts(parts).get('port') != port): + url += ':' + port + if path: + url += path + if query: + url += '?' + query + if fragment: + url += '#' + fragment + return url + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length, format='uri') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Any, field: 'ModelField', config: 'BaseConfig') -> 'AnyUrl': + if value.__class__ == cls: + return value + value = str_validator(value) + if cls.strip_whitespace: + value = value.strip() + url: str = cast(str, constr_length_validator(value, field, config)) + + m = cls._match_url(url) + # the regex should always match, if it doesn't please report with details of the URL tried + assert m, 'URL regex failed unexpectedly' + + original_parts = cast('Parts', m.groupdict()) + parts = cls.apply_default_parts(original_parts) + parts = cls.validate_parts(parts) + + if m.end() != len(url): + raise errors.UrlExtraError(extra=url[m.end() :]) + + return cls._build_url(m, url, parts) + + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'AnyUrl': + """ + Validate hosts and build the AnyUrl object. Split from `validate` so this method + can be altered in `MultiHostDsn`. + """ + host, tld, host_type, rebuild = cls.validate_host(parts) + + return cls( + None if rebuild else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + host=host, + tld=tld, + host_type=host_type, + port=parts['port'], + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + ) + + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return url_regex().match(url) + + @staticmethod + def _validate_port(port: Optional[str]) -> None: + if port is not None and int(port) > 65_535: + raise errors.UrlPortError() + + @classmethod + def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': + """ + A method used to validate parts of a URL. 
+ Could be overridden to set default values for parts if missing + """ + scheme = parts['scheme'] + if scheme is None: + raise errors.UrlSchemeError() + + if cls.allowed_schemes and scheme.lower() not in cls.allowed_schemes: + raise errors.UrlSchemePermittedError(set(cls.allowed_schemes)) + + if validate_port: + cls._validate_port(parts['port']) + + user = parts['user'] + if cls.user_required and user is None: + raise errors.UrlUserInfoError() + + return parts + + @classmethod + def validate_host(cls, parts: 'Parts') -> Tuple[str, Optional[str], str, bool]: + tld, host_type, rebuild = None, None, False + for f in ('domain', 'ipv4', 'ipv6'): + host = parts[f] # type: ignore[literal-required] + if host: + host_type = f + break + + if host is None: + if cls.host_required: + raise errors.UrlHostError() + elif host_type == 'domain': + is_international = False + d = ascii_domain_regex().fullmatch(host) + if d is None: + d = int_domain_regex().fullmatch(host) + if d is None: + raise errors.UrlHostError() + is_international = True + + tld = d.group('tld') + if tld is None and not is_international: + d = int_domain_regex().fullmatch(host) + assert d is not None + tld = d.group('tld') + is_international = True + + if tld is not None: + tld = tld[1:] + elif cls.tld_required: + raise errors.UrlHostTldError() + + if is_international: + host_type = 'int_domain' + rebuild = True + host = host.encode('idna').decode('ascii') + if tld is not None: + tld = tld.encode('idna').decode('ascii') + + return host, tld, host_type, rebuild # type: ignore + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return {} + + @classmethod + def apply_default_parts(cls, parts: 'Parts') -> 'Parts': + for key, value in cls.get_default_parts(parts).items(): + if not parts[key]: # type: ignore[literal-required] + parts[key] = value # type: ignore[literal-required] + return parts + + def __repr__(self) -> str: + extra = ', '.join(f'{n}={getattr(self, n)!r}' for n in self.__slots__ if getattr(self, n) is not None) + return f'{self.__class__.__name__}({super().__repr__()}, {extra})' + + +class AnyHttpUrl(AnyUrl): + allowed_schemes = {'http', 'https'} + + __slots__ = () + + +class HttpUrl(AnyHttpUrl): + tld_required = True + # https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers + max_length = 2083 + hidden_parts = {'port'} + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return {'port': '80' if parts['scheme'] == 'http' else '443'} + + +class FileUrl(AnyUrl): + allowed_schemes = {'file'} + host_required = False + + __slots__ = () + + +class MultiHostDsn(AnyUrl): + __slots__ = AnyUrl.__slots__ + ('hosts',) + + def __init__(self, *args: Any, hosts: Optional[List['HostParts']] = None, **kwargs: Any): + super().__init__(*args, **kwargs) + self.hosts = hosts + + @staticmethod + def _match_url(url: str) -> Optional[Match[str]]: + return multi_host_url_regex().match(url) + + @classmethod + def validate_parts(cls, parts: 'Parts', validate_port: bool = True) -> 'Parts': + return super().validate_parts(parts, validate_port=False) + + @classmethod + def _build_url(cls, m: Match[str], url: str, parts: 'Parts') -> 'MultiHostDsn': + hosts_parts: List['HostParts'] = [] + host_re = host_regex() + for host in m.groupdict()['hosts'].split(','): + d: Parts = host_re.match(host).groupdict() # type: ignore + host, tld, host_type, rebuild = cls.validate_host(d) + port = d.get('port') + cls._validate_port(port) + hosts_parts.append( + { + 'host': host, + 
'host_type': host_type, + 'tld': tld, + 'rebuild': rebuild, + 'port': port, + } + ) + + if len(hosts_parts) > 1: + return cls( + None if any([hp['rebuild'] for hp in hosts_parts]) else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + host_type=None, + hosts=hosts_parts, + ) + else: + # backwards compatibility with single host + host_part = hosts_parts[0] + return cls( + None if host_part['rebuild'] else url, + scheme=parts['scheme'], + user=parts['user'], + password=parts['password'], + host=host_part['host'], + tld=host_part['tld'], + host_type=host_part['host_type'], + port=host_part.get('port'), + path=parts['path'], + query=parts['query'], + fragment=parts['fragment'], + ) + + +class PostgresDsn(MultiHostDsn): + allowed_schemes = { + 'postgres', + 'postgresql', + 'postgresql+asyncpg', + 'postgresql+pg8000', + 'postgresql+psycopg', + 'postgresql+psycopg2', + 'postgresql+psycopg2cffi', + 'postgresql+py-postgresql', + 'postgresql+pygresql', + } + user_required = True + + __slots__ = () + + +class CockroachDsn(AnyUrl): + allowed_schemes = { + 'cockroachdb', + 'cockroachdb+psycopg2', + 'cockroachdb+asyncpg', + } + user_required = True + + +class AmqpDsn(AnyUrl): + allowed_schemes = {'amqp', 'amqps'} + host_required = False + + +class RedisDsn(AnyUrl): + __slots__ = () + allowed_schemes = {'redis', 'rediss'} + host_required = False + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'domain': 'localhost' if not (parts['ipv4'] or parts['ipv6']) else '', + 'port': '6379', + 'path': '/0', + } + + +class MongoDsn(AnyUrl): + allowed_schemes = {'mongodb'} + + # TODO: Needed to generic "Parts" for "Replica Set", "Sharded Cluster", and other mongodb deployment modes + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'port': '27017', + } + + +class KafkaDsn(AnyUrl): + allowed_schemes = {'kafka'} + + @staticmethod + def get_default_parts(parts: 'Parts') -> 'Parts': + return { + 'domain': 'localhost', + 'port': '9092', + } + + +def stricturl( + *, + strip_whitespace: bool = True, + min_length: int = 1, + max_length: int = 2**16, + tld_required: bool = True, + host_required: bool = True, + allowed_schemes: Optional[Collection[str]] = None, +) -> Type[AnyUrl]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + min_length=min_length, + max_length=max_length, + tld_required=tld_required, + host_required=host_required, + allowed_schemes=allowed_schemes, + ) + return type('UrlValue', (AnyUrl,), namespace) + + +def import_email_validator() -> None: + global email_validator + try: + import email_validator + except ImportError as e: + raise ImportError('email-validator is not installed, run `pip install pydantic[email]`') from e + + +class EmailStr(str): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='email') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + # included here and below so the error happens straight away + import_email_validator() + + yield str_validator + yield cls.validate + + @classmethod + def validate(cls, value: Union[str]) -> str: + return validate_email(value)[1] + + +class NameEmail(Representation): + __slots__ = 'name', 'email' + + def __init__(self, name: str, email: str): + self.name = name + self.email = email + + def __eq__(self, 
other: Any) -> bool: + return isinstance(other, NameEmail) and (self.name, self.email) == (other.name, other.email) + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='name-email') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + import_email_validator() + + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> 'NameEmail': + if value.__class__ == cls: + return value + value = str_validator(value) + return cls(*validate_email(value)) + + def __str__(self) -> str: + return f'{self.name} <{self.email}>' + + +class IPvAnyAddress(_BaseAddress): + __slots__ = () + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanyaddress') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Union[str, bytes, int]) -> Union[IPv4Address, IPv6Address]: + try: + return IPv4Address(value) + except ValueError: + pass + + try: + return IPv6Address(value) + except ValueError: + raise errors.IPvAnyAddressError() + + +class IPvAnyInterface(_BaseAddress): + __slots__ = () + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanyinterface') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Interface, IPv6Interface]: + try: + return IPv4Interface(value) + except ValueError: + pass + + try: + return IPv6Interface(value) + except ValueError: + raise errors.IPvAnyInterfaceError() + + +class IPvAnyNetwork(_BaseNetwork): # type: ignore + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='ipvanynetwork') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: NetworkType) -> Union[IPv4Network, IPv6Network]: + # Assume IP Network is defined with a default value for ``strict`` argument. + # Define your own class if you want to specify network address check strictness. + try: + return IPv4Network(value) + except ValueError: + pass + + try: + return IPv6Network(value) + except ValueError: + raise errors.IPvAnyNetworkError() + + +pretty_email_regex = re.compile(r'([\w ]*?) *<(.*)> *') +MAX_EMAIL_LENGTH = 2048 +"""Maximum length for an email. +A somewhat arbitrary but very generous number compared to what is allowed by most implementations. +""" + + +def validate_email(value: Union[str]) -> Tuple[str, str]: + """ + Email address validation using https://pypi.org/project/email-validator/ + Notes: + * raw ip address (literal) domain parts are not allowed. 
+ * "John Doe " style "pretty" email addresses are processed + * spaces are striped from the beginning and end of addresses but no error is raised + """ + if email_validator is None: + import_email_validator() + + if len(value) > MAX_EMAIL_LENGTH: + raise errors.EmailError() + + m = pretty_email_regex.fullmatch(value) + name: Union[str, None] = None + if m: + name, value = m.groups() + email = value.strip() + try: + parts = email_validator.validate_email(email, check_deliverability=False) + except email_validator.EmailNotValidError as e: + raise errors.EmailError from e + + if hasattr(parts, 'normalized'): + # email-validator >= 2 + email = parts.normalized + assert email is not None + name = name or parts.local_part + return name, email + else: + # email-validator >1, <2 + at_index = email.index('@') + local_part = email[:at_index] # RFC 5321, local part must be case-sensitive. + global_part = email[at_index:].lower() + + return name or local_part, local_part + global_part diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/parse.py b/venv/lib/python3.12/site-packages/pydantic/v1/parse.py new file mode 100644 index 0000000000000000000000000000000000000000..431d75a6406f3cf79ae4fcd26a2ad35fb5030526 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/parse.py @@ -0,0 +1,66 @@ +import json +import pickle +from enum import Enum +from pathlib import Path +from typing import Any, Callable, Union + +from pydantic.v1.types import StrBytes + + +class Protocol(str, Enum): + json = 'json' + pickle = 'pickle' + + +def load_str_bytes( + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + if proto is None and content_type: + if content_type.endswith(('json', 'javascript')): + pass + elif allow_pickle and content_type.endswith('pickle'): + proto = Protocol.pickle + else: + raise TypeError(f'Unknown content-type: {content_type}') + + proto = proto or Protocol.json + + if proto == Protocol.json: + if isinstance(b, bytes): + b = b.decode(encoding) + return json_loads(b) + elif proto == Protocol.pickle: + if not allow_pickle: + raise RuntimeError('Trying to decode with pickle with allow_pickle=False') + bb = b if isinstance(b, bytes) else b.encode() + return pickle.loads(bb) + else: + raise TypeError(f'Unknown protocol: {proto}') + + +def load_file( + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, +) -> Any: + path = Path(path) + b = path.read_bytes() + if content_type is None: + if path.suffix in ('.js', '.json'): + proto = Protocol.json + elif path.suffix == '.pkl': + proto = Protocol.pickle + + return load_str_bytes( + b, proto=proto, content_type=content_type, encoding=encoding, allow_pickle=allow_pickle, json_loads=json_loads + ) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/py.typed b/venv/lib/python3.12/site-packages/pydantic/v1/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/schema.py b/venv/lib/python3.12/site-packages/pydantic/v1/schema.py new file mode 100644 index 0000000000000000000000000000000000000000..a91fe2cd4ae12e619a0ca34baf14400ded5f14cf --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/schema.py @@ -0,0 +1,1163 @@ +import re +import 
warnings +from collections import defaultdict +from dataclasses import is_dataclass +from datetime import date, datetime, time, timedelta +from decimal import Decimal +from enum import Enum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + ForwardRef, + FrozenSet, + Generic, + Iterable, + List, + Optional, + Pattern, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, +) +from uuid import UUID + +from typing_extensions import Annotated, Literal + +from pydantic.v1.fields import ( + MAPPING_LIKE_SHAPES, + SHAPE_DEQUE, + SHAPE_FROZENSET, + SHAPE_GENERIC, + SHAPE_ITERABLE, + SHAPE_LIST, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_SINGLETON, + SHAPE_TUPLE, + SHAPE_TUPLE_ELLIPSIS, + FieldInfo, + ModelField, +) +from pydantic.v1.json import pydantic_encoder +from pydantic.v1.networks import AnyUrl, EmailStr +from pydantic.v1.types import ( + ConstrainedDecimal, + ConstrainedFloat, + ConstrainedFrozenSet, + ConstrainedInt, + ConstrainedList, + ConstrainedSet, + ConstrainedStr, + SecretBytes, + SecretStr, + StrictBytes, + StrictStr, + conbytes, + condecimal, + confloat, + confrozenset, + conint, + conlist, + conset, + constr, +) +from pydantic.v1.typing import ( + all_literal_values, + get_args, + get_origin, + get_sub_types, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_union, +) +from pydantic.v1.utils import ROOT_KEY, get_model, lenient_issubclass + +if TYPE_CHECKING: + from pydantic.v1.dataclasses import Dataclass + from pydantic.v1.main import BaseModel + +default_prefix = '#/definitions/' +default_ref_template = '#/definitions/{model}' + +TypeModelOrEnum = Union[Type['BaseModel'], Type[Enum]] +TypeModelSet = Set[TypeModelOrEnum] + + +def _apply_modify_schema( + modify_schema: Callable[..., None], field: Optional[ModelField], field_schema: Dict[str, Any] +) -> None: + from inspect import signature + + sig = signature(modify_schema) + args = set(sig.parameters.keys()) + if 'field' in args or 'kwargs' in args: + modify_schema(field_schema, field=field) + else: + modify_schema(field_schema) + + +def schema( + models: Sequence[Union[Type['BaseModel'], Type['Dataclass']]], + *, + by_alias: bool = True, + title: Optional[str] = None, + description: Optional[str] = None, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Process a list of models and generate a single JSON Schema with all of them defined in the ``definitions`` + top-level JSON key, including their sub-models. + + :param models: a list of models to include in the generated JSON Schema + :param by_alias: generate the schemas using the aliases defined, if any + :param title: title for the generated schema that includes the definitions + :param description: description for the generated schema + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. 
This can be useful + for references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For + a sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema with a ``definitions`` top-level key including the schema definitions for + the models and sub-models passed in ``models``. + """ + clean_models = [get_model(model) for model in models] + flat_models = get_flat_models_from_models(clean_models) + model_name_map = get_model_name_map(flat_models) + definitions = {} + output_schema: Dict[str, Any] = {} + if title: + output_schema['title'] = title + if description: + output_schema['description'] = description + for model in clean_models: + m_schema, m_definitions, m_nested_models = model_process_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + ) + definitions.update(m_definitions) + model_name = model_name_map[model] + definitions[model_name] = m_schema + if definitions: + output_schema['definitions'] = definitions + return output_schema + + +def model_schema( + model: Union[Type['BaseModel'], Type['Dataclass']], + by_alias: bool = True, + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, +) -> Dict[str, Any]: + """ + Generate a JSON Schema for one model. With all the sub-models defined in the ``definitions`` top-level + JSON key. + + :param model: a Pydantic model (a class that inherits from BaseModel) + :param by_alias: generate the schemas using the aliases defined, if any + :param ref_prefix: the JSON Pointer prefix for schema references with ``$ref``, if None, will be set to the + default of ``#/definitions/``. Update it if you want the schemas to reference the definitions somewhere + else, e.g. for OpenAPI use ``#/components/schemas/``. The resulting generated schemas will still be at the + top-level key ``definitions``, so you can extract them from there. But all the references will have the set + prefix. + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :return: dict with the JSON Schema for the passed ``model`` + """ + model = get_model(model) + flat_models = get_flat_models_from_model(model) + model_name_map = get_model_name_map(flat_models) + model_name = model_name_map[model] + m_schema, m_definitions, nested_models = model_process_schema( + model, by_alias=by_alias, model_name_map=model_name_map, ref_prefix=ref_prefix, ref_template=ref_template + ) + if model_name in nested_models: + # model_name is in Nested models, it has circular references + m_definitions[model_name] = m_schema + m_schema = get_schema_ref(model_name, ref_prefix, ref_template, False) + if m_definitions: + m_schema.update({'definitions': m_definitions}) + return m_schema + + +def get_field_info_schema(field: ModelField, schema_overrides: bool = False) -> Tuple[Dict[str, Any], bool]: + # If no title is explicitly set, we don't set title in the schema for enums. + # The behaviour is the same as `BaseModel` reference, where the default title + # is in the definitions part of the schema. 
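+    # e.g. (illustrative) a field aliased 'full_name' with no explicit title gets the
+    # derived title 'Full Name' via `alias.title().replace('_', ' ')` below.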
+ schema_: Dict[str, Any] = {} + if field.field_info.title or not lenient_issubclass(field.type_, Enum): + schema_['title'] = field.field_info.title or field.alias.title().replace('_', ' ') + + if field.field_info.title: + schema_overrides = True + + if field.field_info.description: + schema_['description'] = field.field_info.description + schema_overrides = True + + if not field.required and field.default is not None and not is_callable_type(field.outer_type_): + schema_['default'] = encode_default(field.default) + schema_overrides = True + + return schema_, schema_overrides + + +def field_schema( + field: ModelField, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: Optional[TypeModelSet] = None, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Process a Pydantic field and return a tuple with a JSON Schema for it as the first item. + Also return a dictionary of definitions with models as keys and their schemas as values. If the passed field + is a model and has sub-models, and those sub-models don't have overrides (as ``title``, ``default``, etc), they + will be included in the definitions and referenced in the schema instead of included recursively. + + :param field: a Pydantic ``ModelField`` + :param by_alias: use the defined alias (if any) in the returned schema + :param model_name_map: used to generate the JSON Schema references to other models included in the definitions + :param ref_prefix: the JSON Pointer prefix to use for references to other schemas, if None, the default of + #/definitions/ will be used + :param ref_template: Use a ``string.format()`` template for ``$ref`` instead of a prefix. This can be useful for + references that cannot be represented by ``ref_prefix`` such as a definition stored in another file. For a + sibling json file in a ``/schemas`` directory use ``"/schemas/${model}.json#"``. + :param known_models: used to solve circular references + :return: tuple of the schema for this field and additional definitions + """ + s, schema_overrides = get_field_info_schema(field) + + validation_schema = get_field_schema_validations(field) + if validation_schema: + s.update(validation_schema) + schema_overrides = True + + f_schema, f_definitions, f_nested_models = field_type_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models or set(), + ) + + # $ref will only be returned when there are no schema_overrides + if '$ref' in f_schema: + return f_schema, f_definitions, f_nested_models + else: + s.update(f_schema) + return s, f_definitions, f_nested_models + + +numeric_types = (int, float, Decimal) +_str_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ('max_length', numeric_types, 'maxLength'), + ('min_length', numeric_types, 'minLength'), + ('regex', str, 'pattern'), +) + +_numeric_types_attrs: Tuple[Tuple[str, Union[type, Tuple[type, ...]], str], ...] = ( + ('gt', numeric_types, 'exclusiveMinimum'), + ('lt', numeric_types, 'exclusiveMaximum'), + ('ge', numeric_types, 'minimum'), + ('le', numeric_types, 'maximum'), + ('multiple_of', numeric_types, 'multipleOf'), +) + + +def get_field_schema_validations(field: ModelField) -> Dict[str, Any]: + """ + Get the JSON Schema validation keywords for a ``field`` with an annotation of + a Pydantic ``FieldInfo`` with validation arguments. 
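+
+    For example (illustrative), ``x: int = Field(gt=0, le=10)`` contributes
+    ``{'exclusiveMinimum': 0, 'maximum': 10}`` to the field schema.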
+ """ + f_schema: Dict[str, Any] = {} + + if lenient_issubclass(field.type_, Enum): + # schema is already updated by `enum_process_schema`; just update with field extra + if field.field_info.extra: + f_schema.update(field.field_info.extra) + return f_schema + + if lenient_issubclass(field.type_, (str, bytes)): + for attr_name, t, keyword in _str_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if lenient_issubclass(field.type_, numeric_types) and not issubclass(field.type_, bool): + for attr_name, t, keyword in _numeric_types_attrs: + attr = getattr(field.field_info, attr_name, None) + if isinstance(attr, t): + f_schema[keyword] = attr + if field.field_info is not None and field.field_info.const: + f_schema['const'] = field.default + if field.field_info.extra: + f_schema.update(field.field_info.extra) + modify_schema = getattr(field.outer_type_, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema + + +def get_model_name_map(unique_models: TypeModelSet) -> Dict[TypeModelOrEnum, str]: + """ + Process a set of models and generate unique names for them to be used as keys in the JSON Schema + definitions. By default the names are the same as the class name. But if two models in different Python + modules have the same name (e.g. "users.Model" and "items.Model"), the generated names will be + based on the Python module path for those conflicting models to prevent name collisions. + + :param unique_models: a Python set of models + :return: dict mapping models to names + """ + name_model_map = {} + conflicting_names: Set[str] = set() + for model in unique_models: + model_name = normalize_name(model.__name__) + if model_name in conflicting_names: + model_name = get_long_model_name(model) + name_model_map[model_name] = model + elif model_name in name_model_map: + conflicting_names.add(model_name) + conflicting_model = name_model_map.pop(model_name) + name_model_map[get_long_model_name(conflicting_model)] = conflicting_model + name_model_map[get_long_model_name(model)] = model + else: + name_model_map[model_name] = model + return {v: k for k, v in name_model_map.items()} + + +def get_flat_models_from_model(model: Type['BaseModel'], known_models: Optional[TypeModelSet] = None) -> TypeModelSet: + """ + Take a single ``model`` and generate a set with itself and all the sub-models in the tree. I.e. if you pass + model ``Foo`` (subclass of Pydantic ``BaseModel``) as ``model``, and it has a field of type ``Bar`` (also + subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also subclass of ``BaseModel``), + the return value will be ``set([Foo, Bar, Baz])``. + + :param model: a Pydantic ``BaseModel`` subclass + :param known_models: used to solve circular references + :return: a set with the initial model and all its sub-models + """ + known_models = known_models or set() + flat_models: TypeModelSet = set() + flat_models.add(model) + known_models |= flat_models + fields = cast(Sequence[ModelField], model.__fields__.values()) + flat_models |= get_flat_models_from_fields(fields, known_models=known_models) + return flat_models + + +def get_flat_models_from_field(field: ModelField, known_models: TypeModelSet) -> TypeModelSet: + """ + Take a single Pydantic ``ModelField`` (from a model) that could have been declared as a subclass of BaseModel + (so, it could be a submodel), and generate a set with its model and all the sub-models in the tree. + I.e. 
if you pass a field that was declared to be of type ``Foo`` (subclass of BaseModel) as ``field``, and that
+    model ``Foo`` has a field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of
+    type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param field: a Pydantic ``ModelField``
+    :param known_models: used to solve circular references
+    :return: a set with the model used in the declaration for this field, if any, and all its sub-models
+    """
+    from pydantic.v1.main import BaseModel
+
+    flat_models: TypeModelSet = set()
+
+    field_type = field.type_
+    if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel):
+        field_type = field_type.__pydantic_model__
+
+    if field.sub_fields and not lenient_issubclass(field_type, BaseModel):
+        flat_models |= get_flat_models_from_fields(field.sub_fields, known_models=known_models)
+    elif lenient_issubclass(field_type, BaseModel) and field_type not in known_models:
+        flat_models |= get_flat_models_from_model(field_type, known_models=known_models)
+    elif lenient_issubclass(field_type, Enum):
+        flat_models.add(field_type)
+    return flat_models
+
+
+def get_flat_models_from_fields(fields: Sequence[ModelField], known_models: TypeModelSet) -> TypeModelSet:
+    """
+    Take a list of Pydantic ``ModelField``s (from a model) that could have been declared as subclasses of ``BaseModel``
+    (so, any of them could be a submodel), and generate a set with their models and all the sub-models in the tree.
+    I.e. if you pass the fields of a model ``Foo`` (subclass of ``BaseModel``) as ``fields``, and one of them has a
+    field of type ``Bar`` (also subclass of ``BaseModel``) and that model ``Bar`` has a field of type ``Baz`` (also
+    subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+
+    :param fields: a list of Pydantic ``ModelField``s
+    :param known_models: used to solve circular references
+    :return: a set with any model declared in the fields, and all their sub-models
+    """
+    flat_models: TypeModelSet = set()
+    for field in fields:
+        flat_models |= get_flat_models_from_field(field, known_models=known_models)
+    return flat_models
+
+
+def get_flat_models_from_models(models: Sequence[Type['BaseModel']]) -> TypeModelSet:
+    """
+    Take a list of ``models`` and generate a set with them and all their sub-models in their trees. I.e. if you pass
+    a list of two models, ``Foo`` and ``Bar``, both subclasses of Pydantic ``BaseModel`` as models, and ``Bar`` has
+    a field of type ``Baz`` (also subclass of ``BaseModel``), the return value will be ``set([Foo, Bar, Baz])``.
+    """
+    flat_models: TypeModelSet = set()
+    for model in models:
+        flat_models |= get_flat_models_from_model(model)
+    return flat_models
+
+
+def get_long_model_name(model: TypeModelOrEnum) -> str:
+    return f'{model.__module__}__{model.__qualname__}'.replace('.', '__')
+
+
+def field_type_schema(
+    field: ModelField,
+    *,
+    by_alias: bool,
+    model_name_map: Dict[TypeModelOrEnum, str],
+    ref_template: str,
+    schema_overrides: bool = False,
+    ref_prefix: Optional[str] = None,
+    known_models: TypeModelSet,
+) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]:
+    """
+    Used by ``field_schema()``, you probably should be using that function.
+
+    Take a single ``field`` and generate the schema for its type only, not including additional
+    information as title, etc. Also return additional schema definitions, from sub-models.
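+
+    For example (illustrative), a field annotated ``List[int]`` yields
+    ``{'type': 'array', 'items': {'type': 'integer'}}``.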
+ """ + from pydantic.v1.main import BaseModel # noqa: F811 + + definitions = {} + nested_models: Set[str] = set() + f_schema: Dict[str, Any] + if field.shape in { + SHAPE_LIST, + SHAPE_TUPLE_ELLIPSIS, + SHAPE_SEQUENCE, + SHAPE_SET, + SHAPE_FROZENSET, + SHAPE_ITERABLE, + SHAPE_DEQUE, + }: + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + f_schema = {'type': 'array', 'items': items_schema} + if field.shape in {SHAPE_SET, SHAPE_FROZENSET}: + f_schema['uniqueItems'] = True + + elif field.shape in MAPPING_LIKE_SHAPES: + f_schema = {'type': 'object'} + key_field = cast(ModelField, field.key_field) + regex = getattr(key_field.type_, 'regex', None) + items_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if regex: + # Dict keys have a regex pattern + # items_schema might be a schema or empty dict, add it either way + f_schema['patternProperties'] = {ConstrainedStr._get_pattern(regex): items_schema} + if items_schema: + # The dict values are not simply Any, so they need a schema + f_schema['additionalProperties'] = items_schema + elif field.shape == SHAPE_TUPLE or (field.shape == SHAPE_GENERIC and not issubclass(field.type_, BaseModel)): + sub_schema = [] + sub_fields = cast(List[ModelField], field.sub_fields) + for sf in sub_fields: + sf_schema, sf_definitions, sf_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sf_definitions) + nested_models.update(sf_nested_models) + sub_schema.append(sf_schema) + + sub_fields_len = len(sub_fields) + if field.shape == SHAPE_GENERIC: + all_of_schemas = sub_schema[0] if sub_fields_len == 1 else {'type': 'array', 'items': sub_schema} + f_schema = {'allOf': [all_of_schemas]} + else: + f_schema = { + 'type': 'array', + 'minItems': sub_fields_len, + 'maxItems': sub_fields_len, + } + if sub_fields_len >= 1: + f_schema['items'] = sub_schema + else: + assert field.shape in {SHAPE_SINGLETON, SHAPE_GENERIC}, field.shape + f_schema, f_definitions, f_nested_models = field_singleton_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(f_definitions) + nested_models.update(f_nested_models) + + # check field type to avoid repeated calls to the same __modify_schema__ method + if field.type_ != field.outer_type_: + if field.shape == SHAPE_GENERIC: + field_type = field.type_ + else: + field_type = field.outer_type_ + modify_schema = getattr(field_type, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + return f_schema, definitions, nested_models + + +def model_process_schema( + model: TypeModelOrEnum, + *, + by_alias: bool = True, + model_name_map: Dict[TypeModelOrEnum, str], + ref_prefix: Optional[str] = None, + ref_template: str = default_ref_template, + known_models: Optional[TypeModelSet] = None, + field: Optional[ModelField] 
= None, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + Used by ``model_schema()``, you probably should be using that function. + + Take a single ``model`` and generate its schema. Also return additional schema definitions, from sub-models. The + sub-models of the returned schema will be referenced, but their definitions will not be included in the schema. All + the definitions are returned as the second value. + """ + from inspect import getdoc, signature + + known_models = known_models or set() + if lenient_issubclass(model, Enum): + model = cast(Type[Enum], model) + s = enum_process_schema(model, field=field) + return s, {}, set() + model = cast(Type['BaseModel'], model) + s = {'title': model.__config__.title or model.__name__} + doc = getdoc(model) + if doc: + s['description'] = doc + known_models.add(model) + m_schema, m_definitions, nested_models = model_type_schema( + model, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + s.update(m_schema) + schema_extra = model.__config__.schema_extra + if callable(schema_extra): + if len(signature(schema_extra).parameters) == 1: + schema_extra(s) + else: + schema_extra(s, model) + else: + s.update(schema_extra) + return s, m_definitions, nested_models + + +def model_type_schema( + model: Type['BaseModel'], + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + You probably should be using ``model_schema()``, this function is indirectly used by that function. + + Take a single ``model`` and generate the schema for its type only, not including additional + information as title, etc. Also return additional schema definitions, from sub-models. + """ + properties = {} + required = [] + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + for k, f in model.__fields__.items(): + try: + f_schema, f_definitions, f_nested_models = field_schema( + f, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + except SkipField as skip: + warnings.warn(skip.message, UserWarning) + continue + definitions.update(f_definitions) + nested_models.update(f_nested_models) + if by_alias: + properties[f.alias] = f_schema + if f.required: + required.append(f.alias) + else: + properties[k] = f_schema + if f.required: + required.append(k) + if ROOT_KEY in properties: + out_schema = properties[ROOT_KEY] + out_schema['title'] = model.__config__.title or model.__name__ + else: + out_schema = {'type': 'object', 'properties': properties} + if required: + out_schema['required'] = required + if model.__config__.extra == 'forbid': + out_schema['additionalProperties'] = False + return out_schema, definitions, nested_models + + +def enum_process_schema(enum: Type[Enum], *, field: Optional[ModelField] = None) -> Dict[str, Any]: + """ + Take a single `enum` and generate its schema. + + This is similar to the `model_process_schema` function, but applies to ``Enum`` objects. + """ + import inspect + + schema_: Dict[str, Any] = { + 'title': enum.__name__, + # Python assigns all enums a default docstring value of 'An enumeration', so + # all enums will have a description field even if not explicitly provided. 
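+        # For example, ``class Color(Enum): red = 1`` (with no docstring) comes out as
+        # {'title': 'Color', 'description': 'An enumeration.', 'enum': [1]}.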
+ 'description': inspect.cleandoc(enum.__doc__ or 'An enumeration.'), + # Add enum values and the enum field type to the schema. + 'enum': [item.value for item in cast(Iterable[Enum], enum)], + } + + add_field_type_to_schema(enum, schema_) + + modify_schema = getattr(enum, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, schema_) + + return schema_ + + +def field_singleton_sub_fields_schema( + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you probably should be using that function. + + Take a list of Pydantic ``ModelField`` from the declaration of a type with parameters, and generate their + schema. I.e., fields used as "type parameters", like ``str`` and ``int`` in ``Tuple[str, int]``. + """ + sub_fields = cast(List[ModelField], field.sub_fields) + definitions = {} + nested_models: Set[str] = set() + if len(sub_fields) == 1: + return field_type_schema( + sub_fields[0], + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + else: + s: Dict[str, Any] = {} + # https://github.com/OAI/OpenAPI-Specification/blob/master/versions/3.0.2.md#discriminator-object + field_has_discriminator: bool = field.discriminator_key is not None + if field_has_discriminator: + assert field.sub_fields_mapping is not None + + discriminator_models_refs: Dict[str, Union[str, Dict[str, Any]]] = {} + + for discriminator_value, sub_field in field.sub_fields_mapping.items(): + if isinstance(discriminator_value, Enum): + discriminator_value = str(discriminator_value.value) + # sub_field is either a `BaseModel` or directly an `Annotated` `Union` of many + if is_union(get_origin(sub_field.type_)): + sub_models = get_sub_types(sub_field.type_) + discriminator_models_refs[discriminator_value] = { + model_name_map[sub_model]: get_schema_ref( + model_name_map[sub_model], ref_prefix, ref_template, False + ) + for sub_model in sub_models + } + else: + sub_field_type = sub_field.type_ + if hasattr(sub_field_type, '__pydantic_model__'): + sub_field_type = sub_field_type.__pydantic_model__ + + discriminator_model_name = model_name_map[sub_field_type] + discriminator_model_ref = get_schema_ref(discriminator_model_name, ref_prefix, ref_template, False) + discriminator_models_refs[discriminator_value] = discriminator_model_ref['$ref'] + + s['discriminator'] = { + 'propertyName': field.discriminator_alias if by_alias else field.discriminator_key, + 'mapping': discriminator_models_refs, + } + + sub_field_schemas = [] + for sf in sub_fields: + sub_schema, sub_definitions, sub_nested_models = field_type_schema( + sf, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + definitions.update(sub_definitions) + if schema_overrides and 'allOf' in sub_schema: + # if the sub_field is a referenced schema we only need the referenced + # object. Otherwise we will end up with several allOf inside anyOf/oneOf. 
+ # See https://github.com/pydantic/pydantic/issues/1209 + sub_schema = sub_schema['allOf'][0] + + if sub_schema.keys() == {'discriminator', 'oneOf'}: + # we don't want discriminator information inside oneOf choices, this is dealt with elsewhere + sub_schema.pop('discriminator') + sub_field_schemas.append(sub_schema) + nested_models.update(sub_nested_models) + s['oneOf' if field_has_discriminator else 'anyOf'] = sub_field_schemas + return s, definitions, nested_models + + +# Order is important, e.g. subclasses of str must go before str +# this is used only for standard library types, custom types should use __modify_schema__ instead +field_class_to_schema: Tuple[Tuple[Any, Dict[str, Any]], ...] = ( + (Path, {'type': 'string', 'format': 'path'}), + (datetime, {'type': 'string', 'format': 'date-time'}), + (date, {'type': 'string', 'format': 'date'}), + (time, {'type': 'string', 'format': 'time'}), + (timedelta, {'type': 'number', 'format': 'time-delta'}), + (IPv4Network, {'type': 'string', 'format': 'ipv4network'}), + (IPv6Network, {'type': 'string', 'format': 'ipv6network'}), + (IPv4Interface, {'type': 'string', 'format': 'ipv4interface'}), + (IPv6Interface, {'type': 'string', 'format': 'ipv6interface'}), + (IPv4Address, {'type': 'string', 'format': 'ipv4'}), + (IPv6Address, {'type': 'string', 'format': 'ipv6'}), + (Pattern, {'type': 'string', 'format': 'regex'}), + (str, {'type': 'string'}), + (bytes, {'type': 'string', 'format': 'binary'}), + (bool, {'type': 'boolean'}), + (int, {'type': 'integer'}), + (float, {'type': 'number'}), + (Decimal, {'type': 'number'}), + (UUID, {'type': 'string', 'format': 'uuid'}), + (dict, {'type': 'object'}), + (list, {'type': 'array', 'items': {}}), + (tuple, {'type': 'array', 'items': {}}), + (set, {'type': 'array', 'items': {}, 'uniqueItems': True}), + (frozenset, {'type': 'array', 'items': {}, 'uniqueItems': True}), +) + +json_scheme = {'type': 'string', 'format': 'json-string'} + + +def add_field_type_to_schema(field_type: Any, schema_: Dict[str, Any]) -> None: + """ + Update the given `schema` with the type-specific metadata for the given `field_type`. + + This function looks through `field_class_to_schema` for a class that matches the given `field_type`, + and then modifies the given `schema` with the information from that type. + """ + for type_, t_schema in field_class_to_schema: + # Fallback for `typing.Pattern` and `re.Pattern` as they are not a valid class + if lenient_issubclass(field_type, type_) or field_type is type_ is Pattern: + schema_.update(t_schema) + break + + +def get_schema_ref(name: str, ref_prefix: Optional[str], ref_template: str, schema_overrides: bool) -> Dict[str, Any]: + if ref_prefix: + schema_ref = {'$ref': ref_prefix + name} + else: + schema_ref = {'$ref': ref_template.format(model=name)} + return {'allOf': [schema_ref]} if schema_overrides else schema_ref + + +def field_singleton_schema( # noqa: C901 (ignore complexity) + field: ModelField, + *, + by_alias: bool, + model_name_map: Dict[TypeModelOrEnum, str], + ref_template: str, + schema_overrides: bool = False, + ref_prefix: Optional[str] = None, + known_models: TypeModelSet, +) -> Tuple[Dict[str, Any], Dict[str, Any], Set[str]]: + """ + This function is indirectly used by ``field_schema()``, you should probably be using that function. + + Take a single Pydantic ``ModelField``, and return its schema and any additional definitions from sub-models. 
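+
+    For example, a plain ``int`` field yields ``({'type': 'integer'}, {}, set())``,
+    while a nested ``BaseModel`` field yields a ``$ref`` schema plus that model's
+    definition in the returned definitions mapping.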
+ """ + from pydantic.v1.main import BaseModel + + definitions: Dict[str, Any] = {} + nested_models: Set[str] = set() + field_type = field.type_ + + # Recurse into this field if it contains sub_fields and is NOT a + # BaseModel OR that BaseModel is a const + if field.sub_fields and ( + (field.field_info and field.field_info.const) or not lenient_issubclass(field_type, BaseModel) + ): + return field_singleton_sub_fields_schema( + field, + by_alias=by_alias, + model_name_map=model_name_map, + schema_overrides=schema_overrides, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + if field_type is Any or field_type is object or field_type.__class__ == TypeVar or get_origin(field_type) is type: + return {}, definitions, nested_models # no restrictions + if is_none_type(field_type): + return {'type': 'null'}, definitions, nested_models + if is_callable_type(field_type): + raise SkipField(f'Callable {field.name} was excluded from schema since JSON schema has no equivalent type.') + f_schema: Dict[str, Any] = {} + if field.field_info is not None and field.field_info.const: + f_schema['const'] = field.default + + if is_literal_type(field_type): + values = tuple(x.value if isinstance(x, Enum) else x for x in all_literal_values(field_type)) + + if len({v.__class__ for v in values}) > 1: + return field_schema( + multitypes_literal_field_for_schema(values, field), + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + ) + + # All values have the same type + field_type = values[0].__class__ + f_schema['enum'] = list(values) + add_field_type_to_schema(field_type, f_schema) + elif lenient_issubclass(field_type, Enum): + enum_name = model_name_map[field_type] + f_schema, schema_overrides = get_field_info_schema(field, schema_overrides) + f_schema.update(get_schema_ref(enum_name, ref_prefix, ref_template, schema_overrides)) + definitions[enum_name] = enum_process_schema(field_type, field=field) + elif is_namedtuple(field_type): + sub_schema, *_ = model_process_schema( + field_type.__pydantic_model__, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + items_schemas = list(sub_schema['properties'].values()) + f_schema.update( + { + 'type': 'array', + 'items': items_schemas, + 'minItems': len(items_schemas), + 'maxItems': len(items_schemas), + } + ) + elif not hasattr(field_type, '__pydantic_model__'): + add_field_type_to_schema(field_type, f_schema) + + modify_schema = getattr(field_type, '__modify_schema__', None) + if modify_schema: + _apply_modify_schema(modify_schema, field, f_schema) + + if f_schema: + return f_schema, definitions, nested_models + + # Handle dataclass-based models + if lenient_issubclass(getattr(field_type, '__pydantic_model__', None), BaseModel): + field_type = field_type.__pydantic_model__ + + if issubclass(field_type, BaseModel): + model_name = model_name_map[field_type] + if field_type not in known_models: + sub_schema, sub_definitions, sub_nested_models = model_process_schema( + field_type, + by_alias=by_alias, + model_name_map=model_name_map, + ref_prefix=ref_prefix, + ref_template=ref_template, + known_models=known_models, + field=field, + ) + definitions.update(sub_definitions) + definitions[model_name] = sub_schema + nested_models.update(sub_nested_models) + else: + nested_models.add(model_name) + schema_ref = get_schema_ref(model_name, ref_prefix, 
ref_template, schema_overrides) + return schema_ref, definitions, nested_models + + # For generics with no args + args = get_args(field_type) + if args is not None and not args and Generic in field_type.__bases__: + return f_schema, definitions, nested_models + + raise ValueError(f'Value not declarable with JSON Schema, field: {field}') + + +def multitypes_literal_field_for_schema(values: Tuple[Any, ...], field: ModelField) -> ModelField: + """ + To support `Literal` with values of different types, we split it into multiple `Literal` with same type + e.g. `Literal['qwe', 'asd', 1, 2]` becomes `Union[Literal['qwe', 'asd'], Literal[1, 2]]` + """ + literal_distinct_types = defaultdict(list) + for v in values: + literal_distinct_types[v.__class__].append(v) + distinct_literals = (Literal[tuple(same_type_values)] for same_type_values in literal_distinct_types.values()) + + return ModelField( + name=field.name, + type_=Union[tuple(distinct_literals)], # type: ignore + class_validators=field.class_validators, + model_config=field.model_config, + default=field.default, + required=field.required, + alias=field.alias, + field_info=field.field_info, + ) + + +def encode_default(dft: Any) -> Any: + from pydantic.v1.main import BaseModel + + if isinstance(dft, BaseModel) or is_dataclass(dft): + dft = cast('dict[str, Any]', pydantic_encoder(dft)) + + if isinstance(dft, dict): + return {encode_default(k): encode_default(v) for k, v in dft.items()} + elif isinstance(dft, Enum): + return dft.value + elif isinstance(dft, (int, float, str)): + return dft + elif isinstance(dft, (list, tuple)): + t = dft.__class__ + seq_args = (encode_default(v) for v in dft) + return t(*seq_args) if is_namedtuple(t) else t(seq_args) + elif dft is None: + return None + else: + return pydantic_encoder(dft) + + +_map_types_constraint: Dict[Any, Callable[..., type]] = {int: conint, float: confloat, Decimal: condecimal} + + +def get_annotation_from_field_info( + annotation: Any, field_info: FieldInfo, field_name: str, validate_assignment: bool = False +) -> Type[Any]: + """ + Get an annotation with validation implemented for numbers and strings based on the field_info. + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :param field_name: name of the field for use in error messages + :param validate_assignment: default False, flag for BaseModel Config value of validate_assignment + :return: the same ``annotation`` if unmodified or a new annotation with validation in place + """ + constraints = field_info.get_constraints() + used_constraints: Set[str] = set() + if constraints: + annotation, used_constraints = get_annotation_with_constraints(annotation, field_info) + if validate_assignment: + used_constraints.add('allow_mutation') + + unused_constraints = constraints - used_constraints + if unused_constraints: + raise ValueError( + f'On field "{field_name}" the following field constraints are set but not enforced: ' + f'{", ".join(unused_constraints)}. ' + f'\nFor more details see https://docs.pydantic.dev/usage/schema/#unenforced-field-constraints' + ) + + return annotation + + +def get_annotation_with_constraints(annotation: Any, field_info: FieldInfo) -> Tuple[Type[Any], Set[str]]: # noqa: C901 + """ + Get an annotation with used constraints implemented for numbers and strings based on the field_info. 
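+
+    For example, ``str`` combined with ``field_info.max_length = 5`` is replaced by
+    a ``constr``-style subtype enforcing that length, and ``max_length`` is reported
+    back as used.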
+ + :param annotation: an annotation from a field specification, as ``str``, ``ConstrainedStr`` + :param field_info: an instance of FieldInfo, possibly with declarations for validations and JSON Schema + :return: the same ``annotation`` if unmodified or a new annotation along with the used constraints. + """ + used_constraints: Set[str] = set() + + def go(type_: Any) -> Type[Any]: + if ( + is_literal_type(type_) + or isinstance(type_, ForwardRef) + or lenient_issubclass(type_, (ConstrainedList, ConstrainedSet, ConstrainedFrozenSet)) + ): + return type_ + origin = get_origin(type_) + if origin is not None: + args: Tuple[Any, ...] = get_args(type_) + if any(isinstance(a, ForwardRef) for a in args): + # forward refs cause infinite recursion below + return type_ + + if origin is Annotated: + return go(args[0]) + if is_union(origin): + return Union[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, List) and ( + field_info.min_items is not None + or field_info.max_items is not None + or field_info.unique_items is not None + ): + used_constraints.update({'min_items', 'max_items', 'unique_items'}) + return conlist( + go(args[0]), + min_items=field_info.min_items, + max_items=field_info.max_items, + unique_items=field_info.unique_items, + ) + + if issubclass(origin, Set) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({'min_items', 'max_items'}) + return conset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + if issubclass(origin, FrozenSet) and (field_info.min_items is not None or field_info.max_items is not None): + used_constraints.update({'min_items', 'max_items'}) + return confrozenset(go(args[0]), min_items=field_info.min_items, max_items=field_info.max_items) + + for t in (Tuple, List, Set, FrozenSet, Sequence): + if issubclass(origin, t): # type: ignore + return t[tuple(go(a) for a in args)] # type: ignore + + if issubclass(origin, Dict): + return Dict[args[0], go(args[1])] # type: ignore + + attrs: Optional[Tuple[str, ...]] = None + constraint_func: Optional[Callable[..., type]] = None + if isinstance(type_, type): + if issubclass(type_, (SecretStr, SecretBytes)): + attrs = ('max_length', 'min_length') + + def constraint_func(**kw: Any) -> Type[Any]: # noqa: F811 + return type(type_.__name__, (type_,), kw) + + elif issubclass(type_, str) and not issubclass(type_, (EmailStr, AnyUrl)): + attrs = ('max_length', 'min_length', 'regex') + if issubclass(type_, StrictStr): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = constr + elif issubclass(type_, bytes): + attrs = ('max_length', 'min_length', 'regex') + if issubclass(type_, StrictBytes): + + def constraint_func(**kw: Any) -> Type[Any]: + return type(type_.__name__, (type_,), kw) + + else: + constraint_func = conbytes + elif issubclass(type_, numeric_types) and not issubclass( + type_, + ( + ConstrainedInt, + ConstrainedFloat, + ConstrainedDecimal, + ConstrainedList, + ConstrainedSet, + ConstrainedFrozenSet, + bool, + ), + ): + # Is numeric type + attrs = ('gt', 'lt', 'ge', 'le', 'multiple_of') + if issubclass(type_, float): + attrs += ('allow_inf_nan',) + if issubclass(type_, Decimal): + attrs += ('max_digits', 'decimal_places') + numeric_type = next(t for t in numeric_types if issubclass(type_, t)) # pragma: no branch + constraint_func = _map_types_constraint[numeric_type] + + if attrs: + used_constraints.update(set(attrs)) + kwargs = { + attr_name: attr + for 
attr_name, attr in ((attr_name, getattr(field_info, attr_name)) for attr_name in attrs) + if attr is not None + } + if kwargs: + constraint_func = cast(Callable[..., type], constraint_func) + return constraint_func(**kwargs) + return type_ + + return go(annotation), used_constraints + + +def normalize_name(name: str) -> str: + """ + Normalizes the given name. This can be applied to either a model *or* enum. + """ + return re.sub(r'[^a-zA-Z0-9.\-_]', '_', name) + + +class SkipField(Exception): + """ + Utility exception used to exclude fields from schema. + """ + + def __init__(self, message: str) -> None: + self.message = message diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/tools.py b/venv/lib/python3.12/site-packages/pydantic/v1/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..6838a23ecc7419f0478449d9d680c230a99c20e0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/tools.py @@ -0,0 +1,92 @@ +import json +from functools import lru_cache +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, Optional, Type, TypeVar, Union + +from pydantic.v1.parse import Protocol, load_file, load_str_bytes +from pydantic.v1.types import StrBytes +from pydantic.v1.typing import display_as_type + +__all__ = ('parse_file_as', 'parse_obj_as', 'parse_raw_as', 'schema_of', 'schema_json_of') + +NameFactory = Union[str, Callable[[Type[Any]], str]] + +if TYPE_CHECKING: + from pydantic.v1.typing import DictStrAny + + +def _generate_parsing_type_name(type_: Any) -> str: + return f'ParsingModel[{display_as_type(type_)}]' + + +@lru_cache(maxsize=2048) +def _get_parsing_type(type_: Any, *, type_name: Optional[NameFactory] = None) -> Any: + from pydantic.v1.main import create_model + + if type_name is None: + type_name = _generate_parsing_type_name + if not isinstance(type_name, str): + type_name = type_name(type_) + return create_model(type_name, __root__=(type_, ...)) + + +T = TypeVar('T') + + +def parse_obj_as(type_: Type[T], obj: Any, *, type_name: Optional[NameFactory] = None) -> T: + model_type = _get_parsing_type(type_, type_name=type_name) # type: ignore[arg-type] + return model_type(__root__=obj).__root__ + + +def parse_file_as( + type_: Type[T], + path: Union[str, Path], + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_file( + path, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, + ) + return parse_obj_as(type_, obj, type_name=type_name) + + +def parse_raw_as( + type_: Type[T], + b: StrBytes, + *, + content_type: str = None, + encoding: str = 'utf8', + proto: Protocol = None, + allow_pickle: bool = False, + json_loads: Callable[[str], Any] = json.loads, + type_name: Optional[NameFactory] = None, +) -> T: + obj = load_str_bytes( + b, + proto=proto, + content_type=content_type, + encoding=encoding, + allow_pickle=allow_pickle, + json_loads=json_loads, + ) + return parse_obj_as(type_, obj, type_name=type_name) + + +def schema_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_kwargs: Any) -> 'DictStrAny': + """Generate a JSON schema (as dict) for the passed model or dynamically generated one""" + return _get_parsing_type(type_, type_name=title).schema(**schema_kwargs) + + +def schema_json_of(type_: Any, *, title: Optional[NameFactory] = None, **schema_json_kwargs: Any) -> str: 
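+    # Illustrative usage: ``schema_json_of(List[int], title='IntList', indent=2)``
+    # mirrors ``schema_of()`` above, returning JSON text instead of a dict.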
+ """Generate a JSON schema (as JSON) for the passed model or dynamically generated one""" + return _get_parsing_type(type_, type_name=title).schema_json(**schema_json_kwargs) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/types.py b/venv/lib/python3.12/site-packages/pydantic/v1/types.py new file mode 100644 index 0000000000000000000000000000000000000000..e1840d99f79683412aebfc23ced893d0716c1400 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/types.py @@ -0,0 +1,1205 @@ +import abc +import math +import re +import warnings +from datetime import date +from decimal import Decimal, InvalidOperation +from enum import Enum +from pathlib import Path +from types import new_class +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + FrozenSet, + List, + Optional, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) +from uuid import UUID +from weakref import WeakSet + +from pydantic.v1 import errors +from pydantic.v1.datetime_parse import parse_date +from pydantic.v1.utils import import_string, update_not_none +from pydantic.v1.validators import ( + bytes_validator, + constr_length_validator, + constr_lower, + constr_strip_whitespace, + constr_upper, + decimal_validator, + float_finite_validator, + float_validator, + frozenset_validator, + int_validator, + list_validator, + number_multiple_validator, + number_size_validator, + path_exists_validator, + path_validator, + set_validator, + str_validator, + strict_bytes_validator, + strict_float_validator, + strict_int_validator, + strict_str_validator, +) + +__all__ = [ + 'NoneStr', + 'NoneBytes', + 'StrBytes', + 'NoneStrBytes', + 'StrictStr', + 'ConstrainedBytes', + 'conbytes', + 'ConstrainedList', + 'conlist', + 'ConstrainedSet', + 'conset', + 'ConstrainedFrozenSet', + 'confrozenset', + 'ConstrainedStr', + 'constr', + 'PyObject', + 'ConstrainedInt', + 'conint', + 'PositiveInt', + 'NegativeInt', + 'NonNegativeInt', + 'NonPositiveInt', + 'ConstrainedFloat', + 'confloat', + 'PositiveFloat', + 'NegativeFloat', + 'NonNegativeFloat', + 'NonPositiveFloat', + 'FiniteFloat', + 'ConstrainedDecimal', + 'condecimal', + 'UUID1', + 'UUID3', + 'UUID4', + 'UUID5', + 'FilePath', + 'DirectoryPath', + 'Json', + 'JsonWrapper', + 'SecretField', + 'SecretStr', + 'SecretBytes', + 'StrictBool', + 'StrictBytes', + 'StrictInt', + 'StrictFloat', + 'PaymentCardNumber', + 'ByteSize', + 'PastDate', + 'FutureDate', + 'ConstrainedDate', + 'condate', +] + +NoneStr = Optional[str] +NoneBytes = Optional[bytes] +StrBytes = Union[str, bytes] +NoneStrBytes = Optional[StrBytes] +OptionalInt = Optional[int] +OptionalIntFloat = Union[OptionalInt, float] +OptionalIntFloatDecimal = Union[OptionalIntFloat, Decimal] +OptionalDate = Optional[date] +StrIntFloat = Union[str, int, float] + +if TYPE_CHECKING: + from typing_extensions import Annotated + + from pydantic.v1.dataclasses import Dataclass + from pydantic.v1.main import BaseModel + from pydantic.v1.typing import CallableGenerator + + ModelOrDc = Type[Union[BaseModel, Dataclass]] + +T = TypeVar('T') +_DEFINED_TYPES: 'WeakSet[type]' = WeakSet() + + +@overload +def _registered(typ: Type[T]) -> Type[T]: + pass + + +@overload +def _registered(typ: 'ConstrainedNumberMeta') -> 'ConstrainedNumberMeta': + pass + + +def _registered(typ: Union[Type[T], 'ConstrainedNumberMeta']) -> Union[Type[T], 'ConstrainedNumberMeta']: + # In order to generate valid examples of constrained types, Hypothesis needs + # to inspect the type object - so we keep a weakref to each contype 
object + # until it can be registered. When (or if) our Hypothesis plugin is loaded, + # it monkeypatches this function. + # If Hypothesis is never used, the total effect is to keep a weak reference + # which has minimal memory usage and doesn't even affect garbage collection. + _DEFINED_TYPES.add(typ) + return typ + + +class ConstrainedNumberMeta(type): + def __new__(cls, name: str, bases: Any, dct: Dict[str, Any]) -> 'ConstrainedInt': # type: ignore + new_cls = cast('ConstrainedInt', type.__new__(cls, name, bases, dct)) + + if new_cls.gt is not None and new_cls.ge is not None: + raise errors.ConfigError('bounds gt and ge cannot be specified at the same time') + if new_cls.lt is not None and new_cls.le is not None: + raise errors.ConfigError('bounds lt and le cannot be specified at the same time') + + return _registered(new_cls) # type: ignore + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BOOLEAN TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + StrictBool = bool +else: + + class StrictBool(int): + """ + StrictBool to allow for bools which are not type-coerced. + """ + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='boolean') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> bool: + """ + Ensure that we only allow bools. + """ + if isinstance(value, bool): + return value + + raise errors.StrictBoolError() + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ INTEGER TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedInt(int, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalInt = None + ge: OptionalInt = None + lt: OptionalInt = None + le: OptionalInt = None + multiple_of: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_int_validator if cls.strict else int_validator + yield number_size_validator + yield number_multiple_validator + + +def conint( + *, + strict: bool = False, + gt: Optional[int] = None, + ge: Optional[int] = None, + lt: Optional[int] = None, + le: Optional[int] = None, + multiple_of: Optional[int] = None, +) -> Type[int]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of) + return type('ConstrainedIntValue', (ConstrainedInt,), namespace) + + +if TYPE_CHECKING: + PositiveInt = int + NegativeInt = int + NonPositiveInt = int + NonNegativeInt = int + StrictInt = int +else: + + class PositiveInt(ConstrainedInt): + gt = 0 + + class NegativeInt(ConstrainedInt): + lt = 0 + + class NonPositiveInt(ConstrainedInt): + le = 0 + + class NonNegativeInt(ConstrainedInt): + ge = 0 + + class StrictInt(ConstrainedInt): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ FLOAT TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedFloat(float, metaclass=ConstrainedNumberMeta): + strict: bool = False + gt: OptionalIntFloat = None + ge: OptionalIntFloat = None + lt: OptionalIntFloat = None + le: OptionalIntFloat = None + multiple_of: OptionalIntFloat = None + allow_inf_nan: Optional[bool] = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, 
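+            # e.g. a ``confloat(gt=0)`` subtype publishes ``{'exclusiveMinimum': 0}``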
+ exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + # Modify constraints to account for differences between IEEE floats and JSON + if field_schema.get('exclusiveMinimum') == -math.inf: + del field_schema['exclusiveMinimum'] + if field_schema.get('minimum') == -math.inf: + del field_schema['minimum'] + if field_schema.get('exclusiveMaximum') == math.inf: + del field_schema['exclusiveMaximum'] + if field_schema.get('maximum') == math.inf: + del field_schema['maximum'] + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_float_validator if cls.strict else float_validator + yield number_size_validator + yield number_multiple_validator + yield float_finite_validator + + +def confloat( + *, + strict: bool = False, + gt: float = None, + ge: float = None, + lt: float = None, + le: float = None, + multiple_of: float = None, + allow_inf_nan: Optional[bool] = None, +) -> Type[float]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(strict=strict, gt=gt, ge=ge, lt=lt, le=le, multiple_of=multiple_of, allow_inf_nan=allow_inf_nan) + return type('ConstrainedFloatValue', (ConstrainedFloat,), namespace) + + +if TYPE_CHECKING: + PositiveFloat = float + NegativeFloat = float + NonPositiveFloat = float + NonNegativeFloat = float + StrictFloat = float + FiniteFloat = float +else: + + class PositiveFloat(ConstrainedFloat): + gt = 0 + + class NegativeFloat(ConstrainedFloat): + lt = 0 + + class NonPositiveFloat(ConstrainedFloat): + le = 0 + + class NonNegativeFloat(ConstrainedFloat): + ge = 0 + + class StrictFloat(ConstrainedFloat): + strict = True + + class FiniteFloat(ConstrainedFloat): + allow_inf_nan = False + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTES TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedBytes(bytes): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + strict: bool = False + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minLength=cls.min_length, maxLength=cls.max_length) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_bytes_validator if cls.strict else bytes_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + + +def conbytes( + *, + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + strict: bool = False, +) -> Type[bytes]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + min_length=min_length, + max_length=max_length, + strict=strict, + ) + return _registered(type('ConstrainedBytesValue', (ConstrainedBytes,), namespace)) + + +if TYPE_CHECKING: + StrictBytes = bytes +else: + + class StrictBytes(ConstrainedBytes): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ STRING TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedStr(str): + strip_whitespace = False + to_upper = False + to_lower = False + min_length: OptionalInt = None + max_length: OptionalInt = None + curtail_length: OptionalInt = None + regex: Optional[Union[str, Pattern[str]]] = None + strict = False + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + 
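+        # e.g. ``constr(min_length=1, regex=r'^\w+$')`` publishes
+        # ``{'minLength': 1, 'pattern': '^\w+$'}``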
update_not_none( + field_schema, + minLength=cls.min_length, + maxLength=cls.max_length, + pattern=cls.regex and cls._get_pattern(cls.regex), + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield strict_str_validator if cls.strict else str_validator + yield constr_strip_whitespace + yield constr_upper + yield constr_lower + yield constr_length_validator + yield cls.validate + + @classmethod + def validate(cls, value: Union[str]) -> Union[str]: + if cls.curtail_length and len(value) > cls.curtail_length: + value = value[: cls.curtail_length] + + if cls.regex: + if not re.match(cls.regex, value): + raise errors.StrRegexError(pattern=cls._get_pattern(cls.regex)) + + return value + + @staticmethod + def _get_pattern(regex: Union[str, Pattern[str]]) -> str: + return regex if isinstance(regex, str) else regex.pattern + + +def constr( + *, + strip_whitespace: bool = False, + to_upper: bool = False, + to_lower: bool = False, + strict: bool = False, + min_length: Optional[int] = None, + max_length: Optional[int] = None, + curtail_length: Optional[int] = None, + regex: Optional[str] = None, +) -> Type[str]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + strip_whitespace=strip_whitespace, + to_upper=to_upper, + to_lower=to_lower, + strict=strict, + min_length=min_length, + max_length=max_length, + curtail_length=curtail_length, + regex=regex and re.compile(regex), + ) + return _registered(type('ConstrainedStrValue', (ConstrainedStr,), namespace)) + + +if TYPE_CHECKING: + StrictStr = str +else: + + class StrictStr(ConstrainedStr): + strict = True + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +# This type's superclass should be Set[T], but cython chokes on that... +class ConstrainedSet(set): # type: ignore + # Needed for pydantic to detect that this is a set + __origin__ = set + __args__: Set[Type[T]] # type: ignore + + min_items: Optional[int] = None + max_items: Optional[int] = None + item_type: Type[T] # type: ignore + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.set_length_validator + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) + + @classmethod + def set_length_validator(cls, v: 'Optional[Set[T]]') -> 'Optional[Set[T]]': + if v is None: + return None + + v = set_validator(v) + v_len = len(v) + + if cls.min_items is not None and v_len < cls.min_items: + raise errors.SetMinLengthError(limit_value=cls.min_items) + + if cls.max_items is not None and v_len > cls.max_items: + raise errors.SetMaxLengthError(limit_value=cls.max_items) + + return v + + +def conset(item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None) -> Type[Set[T]]: + # __args__ is needed to conform to typing generics api + namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} + # We use new_class to be able to deal with Generic types + return new_class('ConstrainedSetValue', (ConstrainedSet,), {}, lambda ns: ns.update(namespace)) + + +# This type's superclass should be FrozenSet[T], but cython chokes on that...
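+# (Same pattern as ConstrainedSet above: e.g. ``confrozenset(int, min_items=1)``
+# builds a frozenset subtype whose validator enforces the size bounds.)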
+class ConstrainedFrozenSet(frozenset): # type: ignore + # Needed for pydantic to detect that this is a set + __origin__ = frozenset + __args__: FrozenSet[Type[T]] # type: ignore + + min_items: Optional[int] = None + max_items: Optional[int] = None + item_type: Type[T] # type: ignore + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.frozenset_length_validator + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items) + + @classmethod + def frozenset_length_validator(cls, v: 'Optional[FrozenSet[T]]') -> 'Optional[FrozenSet[T]]': + if v is None: + return None + + v = frozenset_validator(v) + v_len = len(v) + + if cls.min_items is not None and v_len < cls.min_items: + raise errors.FrozenSetMinLengthError(limit_value=cls.min_items) + + if cls.max_items is not None and v_len > cls.max_items: + raise errors.FrozenSetMaxLengthError(limit_value=cls.max_items) + + return v + + +def confrozenset( + item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None +) -> Type[FrozenSet[T]]: + # __args__ is needed to conform to typing generics api + namespace = {'min_items': min_items, 'max_items': max_items, 'item_type': item_type, '__args__': [item_type]} + # We use new_class to be able to deal with Generic types + return new_class('ConstrainedFrozenSetValue', (ConstrainedFrozenSet,), {}, lambda ns: ns.update(namespace)) + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ LIST TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +# This type's superclass should be List[T], but cython chokes on that... +class ConstrainedList(list): # type: ignore + # Needed for pydantic to detect that this is a list + __origin__ = list + __args__: Tuple[Type[T], ...]
# type: ignore + + min_items: Optional[int] = None + max_items: Optional[int] = None + unique_items: Optional[bool] = None + item_type: Type[T] # type: ignore + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.list_length_validator + if cls.unique_items: + yield cls.unique_items_validator + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, minItems=cls.min_items, maxItems=cls.max_items, uniqueItems=cls.unique_items) + + @classmethod + def list_length_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': + if v is None: + return None + + v = list_validator(v) + v_len = len(v) + + if cls.min_items is not None and v_len < cls.min_items: + raise errors.ListMinLengthError(limit_value=cls.min_items) + + if cls.max_items is not None and v_len > cls.max_items: + raise errors.ListMaxLengthError(limit_value=cls.max_items) + + return v + + @classmethod + def unique_items_validator(cls, v: 'Optional[List[T]]') -> 'Optional[List[T]]': + if v is None: + return None + + for i, value in enumerate(v, start=1): + if value in v[i:]: + raise errors.ListUniqueItemsError() + + return v + + +def conlist( + item_type: Type[T], *, min_items: Optional[int] = None, max_items: Optional[int] = None, unique_items: bool = None +) -> Type[List[T]]: + # __args__ is needed to conform to typing generics api + namespace = dict( + min_items=min_items, max_items=max_items, unique_items=unique_items, item_type=item_type, __args__=(item_type,) + ) + # We use new_class to be able to deal with Generic types + return new_class('ConstrainedListValue', (ConstrainedList,), {}, lambda ns: ns.update(namespace)) + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PYOBJECT TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +if TYPE_CHECKING: + PyObject = Callable[..., Any] +else: + + class PyObject: + validate_always = True + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, value: Any) -> Any: + if isinstance(value, Callable): + return value + + try: + value = str_validator(value) + except errors.StrError: + raise errors.PyObjectError(error_message='value is neither a valid import path nor a valid callable') + + try: + return import_string(value) + except ImportError as e: + raise errors.PyObjectError(error_message=str(e)) + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECIMAL TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class ConstrainedDecimal(Decimal, metaclass=ConstrainedNumberMeta): + gt: OptionalIntFloatDecimal = None + ge: OptionalIntFloatDecimal = None + lt: OptionalIntFloatDecimal = None + le: OptionalIntFloatDecimal = None + max_digits: OptionalInt = None + decimal_places: OptionalInt = None + multiple_of: OptionalIntFloatDecimal = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + exclusiveMinimum=cls.gt, + exclusiveMaximum=cls.lt, + minimum=cls.ge, + maximum=cls.le, + multipleOf=cls.multiple_of, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield decimal_validator + yield number_size_validator + yield number_multiple_validator + yield cls.validate + + @classmethod + def validate(cls, value: Decimal) -> Decimal: + try: + normalized_value = value.normalize() + except InvalidOperation: + normalized_value = value + digit_tuple, exponent = normalized_value.as_tuple()[1:] + if exponent in {'F', 'n', 'N'}: + raise errors.DecimalIsNotFiniteError() + + if exponent >= 0: + # A
positive exponent adds that many trailing zeros. + digits = len(digit_tuple) + exponent + decimals = 0 + else: + # If the absolute value of the negative exponent is larger than the + # number of digits, then it's the same as the number of digits, + # because it'll consume all of the digits in digit_tuple and then + # add abs(exponent) - len(digit_tuple) leading zeros after the + # decimal point. + if abs(exponent) > len(digit_tuple): + digits = decimals = abs(exponent) + else: + digits = len(digit_tuple) + decimals = abs(exponent) + whole_digits = digits - decimals + + if cls.max_digits is not None and digits > cls.max_digits: + raise errors.DecimalMaxDigitsError(max_digits=cls.max_digits) + + if cls.decimal_places is not None and decimals > cls.decimal_places: + raise errors.DecimalMaxPlacesError(decimal_places=cls.decimal_places) + + if cls.max_digits is not None and cls.decimal_places is not None: + expected = cls.max_digits - cls.decimal_places + if whole_digits > expected: + raise errors.DecimalWholeDigitsError(whole_digits=expected) + + return value + + +def condecimal( + *, + gt: Decimal = None, + ge: Decimal = None, + lt: Decimal = None, + le: Decimal = None, + max_digits: Optional[int] = None, + decimal_places: Optional[int] = None, + multiple_of: Decimal = None, +) -> Type[Decimal]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict( + gt=gt, ge=ge, lt=lt, le=le, max_digits=max_digits, decimal_places=decimal_places, multiple_of=multiple_of + ) + return type('ConstrainedDecimalValue', (ConstrainedDecimal,), namespace) + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ UUID TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + UUID1 = UUID + UUID3 = UUID + UUID4 = UUID + UUID5 = UUID +else: + + class UUID1(UUID): + _required_version = 1 + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format=f'uuid{cls._required_version}') + + class UUID3(UUID1): + _required_version = 3 + + class UUID4(UUID1): + _required_version = 4 + + class UUID5(UUID1): + _required_version = 5 + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PATH TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + FilePath = Path + DirectoryPath = Path +else: + + class FilePath(Path): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(format='file-path') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield path_validator + yield path_exists_validator + yield cls.validate + + @classmethod + def validate(cls, value: Path) -> Path: + if not value.is_file(): + raise errors.PathNotAFileError(path=value) + + return value + + class DirectoryPath(Path): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(format='directory-path') + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield path_validator + yield path_exists_validator + yield cls.validate + + @classmethod + def validate(cls, value: Path) -> Path: + if not value.is_dir(): + raise errors.PathNotADirectoryError(path=value) + + return value + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ JSON TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class JsonWrapper: + pass + + +class JsonMeta(type): + def __getitem__(self, t: Type[Any]) -> Type[JsonWrapper]: + if t is Any: + return Json # allow Json[Any] to replicate plain Json + return _registered(type('JsonWrapperValue', (JsonWrapper,), {'inner_type': t})) + + +if TYPE_CHECKING: + 
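+    # For type checkers only: ``Json[X]`` is treated as ``X`` itself; the runtime
+    # behaviour comes from the ``JsonMeta``/``JsonWrapper`` machinery above.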
Json = Annotated[T, ...] # Json[list[str]] will be recognized by type checkers as list[str] + +else: + + class Json(metaclass=JsonMeta): + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + field_schema.update(type='string', format='json-string') + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECRET TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class SecretField(abc.ABC): + """ + Note: this should be implemented as a generic like `SecretField(ABC, Generic[T])`, + the `__init__()` should be part of the abstract class and the + `get_secret_value()` method should use the generic `T` type. + + However Cython doesn't support very well generics at the moment and + the generated code fails to be imported (see + https://github.com/cython/cython/issues/2753). + """ + + def __eq__(self, other: Any) -> bool: + return isinstance(other, self.__class__) and self.get_secret_value() == other.get_secret_value() + + def __str__(self) -> str: + return '**********' if self.get_secret_value() else '' + + def __hash__(self) -> int: + return hash(self.get_secret_value()) + + @abc.abstractmethod + def get_secret_value(self) -> Any: # pragma: no cover + ... + + +class SecretStr(SecretField): + min_length: OptionalInt = None + max_length: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + type='string', + writeOnly=True, + format='password', + minLength=cls.min_length, + maxLength=cls.max_length, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + yield constr_length_validator + + @classmethod + def validate(cls, value: Any) -> 'SecretStr': + if isinstance(value, cls): + return value + value = str_validator(value) + return cls(value) + + def __init__(self, value: str): + self._secret_value = value + + def __repr__(self) -> str: + return f"SecretStr('{self}')" + + def __len__(self) -> int: + return len(self._secret_value) + + def display(self) -> str: + warnings.warn('`secret_str.display()` is deprecated, use `str(secret_str)` instead', DeprecationWarning) + return str(self) + + def get_secret_value(self) -> str: + return self._secret_value + + +class SecretBytes(SecretField): + min_length: OptionalInt = None + max_length: OptionalInt = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none( + field_schema, + type='string', + writeOnly=True, + format='password', + minLength=cls.min_length, + maxLength=cls.max_length, + ) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + yield constr_length_validator + + @classmethod + def validate(cls, value: Any) -> 'SecretBytes': + if isinstance(value, cls): + return value + value = bytes_validator(value) + return cls(value) + + def __init__(self, value: bytes): + self._secret_value = value + + def __repr__(self) -> str: + return f"SecretBytes(b'{self}')" + + def __len__(self) -> int: + return len(self._secret_value) + + def display(self) -> str: + warnings.warn('`secret_bytes.display()` is deprecated, use `str(secret_bytes)` instead', DeprecationWarning) + return str(self) + + def get_secret_value(self) -> bytes: + return self._secret_value + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ PAYMENT CARD TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +class PaymentCardBrand(str, Enum): + # If you add another card type, please also add it to the + # Hypothesis strategy in `pydantic._hypothesis_plugin`. 
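+    # Members of this str-based Enum compare equal to their string values, which is
+    # why comparisons like the ``in`` check in ``validate_length_for_brand`` below
+    # behave like plain string operations.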
+ amex = 'American Express' + mastercard = 'Mastercard' + visa = 'Visa' + other = 'other' + + def __str__(self) -> str: + return self.value + + +class PaymentCardNumber(str): + """ + Based on: https://en.wikipedia.org/wiki/Payment_card_number + """ + + strip_whitespace: ClassVar[bool] = True + min_length: ClassVar[int] = 12 + max_length: ClassVar[int] = 19 + bin: str + last4: str + brand: PaymentCardBrand + + def __init__(self, card_number: str): + self.bin = card_number[:6] + self.last4 = card_number[-4:] + self.brand = self._get_brand(card_number) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield str_validator + yield constr_strip_whitespace + yield constr_length_validator + yield cls.validate_digits + yield cls.validate_luhn_check_digit + yield cls + yield cls.validate_length_for_brand + + @property + def masked(self) -> str: + num_masked = len(self) - 10 # len(bin) + len(last4) == 10 + return f'{self.bin}{"*" * num_masked}{self.last4}' + + @classmethod + def validate_digits(cls, card_number: str) -> str: + if not card_number.isdigit(): + raise errors.NotDigitError + return card_number + + @classmethod + def validate_luhn_check_digit(cls, card_number: str) -> str: + """ + Based on: https://en.wikipedia.org/wiki/Luhn_algorithm + """ + sum_ = int(card_number[-1]) + length = len(card_number) + parity = length % 2 + for i in range(length - 1): + digit = int(card_number[i]) + if i % 2 == parity: + digit *= 2 + if digit > 9: + digit -= 9 + sum_ += digit + valid = sum_ % 10 == 0 + if not valid: + raise errors.LuhnValidationError + return card_number + + @classmethod + def validate_length_for_brand(cls, card_number: 'PaymentCardNumber') -> 'PaymentCardNumber': + """ + Validate length based on BIN for major brands: + https://en.wikipedia.org/wiki/Payment_card_number#Issuer_identification_number_(IIN) + """ + required_length: Union[None, int, str] = None + if card_number.brand in PaymentCardBrand.mastercard: + required_length = 16 + valid = len(card_number) == required_length + elif card_number.brand == PaymentCardBrand.visa: + required_length = '13, 16 or 19' + valid = len(card_number) in {13, 16, 19} + elif card_number.brand == PaymentCardBrand.amex: + required_length = 15 + valid = len(card_number) == required_length + else: + valid = True + if not valid: + raise errors.InvalidLengthForBrand(brand=card_number.brand, required_length=required_length) + return card_number + + @staticmethod + def _get_brand(card_number: str) -> PaymentCardBrand: + if card_number[0] == '4': + brand = PaymentCardBrand.visa + elif 51 <= int(card_number[:2]) <= 55: + brand = PaymentCardBrand.mastercard + elif card_number[:2] in {'34', '37'}: + brand = PaymentCardBrand.amex + else: + brand = PaymentCardBrand.other + return brand + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ BYTE SIZE TYPE ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +BYTE_SIZES = { + 'b': 1, + 'kb': 10**3, + 'mb': 10**6, + 'gb': 10**9, + 'tb': 10**12, + 'pb': 10**15, + 'eb': 10**18, + 'kib': 2**10, + 'mib': 2**20, + 'gib': 2**30, + 'tib': 2**40, + 'pib': 2**50, + 'eib': 2**60, +} +BYTE_SIZES.update({k.lower()[0]: v for k, v in BYTE_SIZES.items() if 'i' not in k}) +byte_string_re = re.compile(r'^\s*(\d*\.?\d+)\s*(\w+)?', re.IGNORECASE) + + +class ByteSize(int): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield cls.validate + + @classmethod + def validate(cls, v: StrIntFloat) -> 'ByteSize': + try: + return cls(int(v)) + except ValueError: + pass + + str_match = byte_string_re.match(str(v)) + if str_match is None: + 
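+            # no '<number> <unit>' form could be parsed (for comparison,
+            # '1.5 KiB' matches and is converted to 1536 further down)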
raise errors.InvalidByteSize() + + scalar, unit = str_match.groups() + if unit is None: + unit = 'b' + + try: + unit_mult = BYTE_SIZES[unit.lower()] + except KeyError: + raise errors.InvalidByteSizeUnit(unit=unit) + + return cls(int(float(scalar) * unit_mult)) + + def human_readable(self, decimal: bool = False) -> str: + if decimal: + divisor = 1000 + units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB'] + final_unit = 'EB' + else: + divisor = 1024 + units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB'] + final_unit = 'EiB' + + num = float(self) + for unit in units: + if abs(num) < divisor: + return f'{num:0.1f}{unit}' + num /= divisor + + return f'{num:0.1f}{final_unit}' + + def to(self, unit: str) -> float: + try: + unit_div = BYTE_SIZES[unit.lower()] + except KeyError: + raise errors.InvalidByteSizeUnit(unit=unit) + + return self / unit_div + + +# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DATE TYPES ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +if TYPE_CHECKING: + PastDate = date + FutureDate = date +else: + + class PastDate(date): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield cls.validate + + @classmethod + def validate(cls, value: date) -> date: + if value >= date.today(): + raise errors.DateNotInThePastError() + + return value + + class FutureDate(date): + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield cls.validate + + @classmethod + def validate(cls, value: date) -> date: + if value <= date.today(): + raise errors.DateNotInTheFutureError() + + return value + + +class ConstrainedDate(date, metaclass=ConstrainedNumberMeta): + gt: OptionalDate = None + ge: OptionalDate = None + lt: OptionalDate = None + le: OptionalDate = None + + @classmethod + def __modify_schema__(cls, field_schema: Dict[str, Any]) -> None: + update_not_none(field_schema, exclusiveMinimum=cls.gt, exclusiveMaximum=cls.lt, minimum=cls.ge, maximum=cls.le) + + @classmethod + def __get_validators__(cls) -> 'CallableGenerator': + yield parse_date + yield number_size_validator + + +def condate( + *, + gt: date = None, + ge: date = None, + lt: date = None, + le: date = None, +) -> Type[date]: + # use kwargs then define conf in a dict to aid with IDE type hinting + namespace = dict(gt=gt, ge=ge, lt=lt, le=le) + return type('ConstrainedDateValue', (ConstrainedDate,), namespace) diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/typing.py b/venv/lib/python3.12/site-packages/pydantic/v1/typing.py new file mode 100644 index 0000000000000000000000000000000000000000..c5c59794b9b3d29765e24bfb05880de78e6f5a25 --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/typing.py @@ -0,0 +1,627 @@ +import functools +import operator +import sys +import typing +from collections.abc import Callable +from os import PathLike +from typing import ( # type: ignore + TYPE_CHECKING, + AbstractSet, + Any, + Callable as TypingCallable, + ClassVar, + Dict, + ForwardRef, + Generator, + Iterable, + List, + Mapping, + NewType, + Optional, + Sequence, + Set, + Tuple, + Type, + TypeVar, + Union, + _eval_type, + cast, + get_type_hints, +) + +from typing_extensions import ( + Annotated, + Final, + Literal, + NotRequired as TypedDictNotRequired, + Required as TypedDictRequired, +) + +try: + from typing import _TypingBase as typing_base # type: ignore +except ImportError: + from typing import _Final as typing_base # type: ignore + +try: + from typing import GenericAlias as TypingGenericAlias # type: ignore +except ImportError: + # python < 3.9 does not have GenericAlias 
(list[int], tuple[str, ...] and so on) + TypingGenericAlias = () + +try: + from types import UnionType as TypesUnionType # type: ignore +except ImportError: + # python < 3.10 does not have UnionType (str | int, bytes | bool and so on) + TypesUnionType = () + + +if sys.version_info < (3, 9): + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + return type_._evaluate(globalns, localns) + +elif sys.version_info < (3, 12, 4): + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + # Even though it is the right signature for python 3.9, mypy complains with + # `error: Too many arguments for "_evaluate" of "ForwardRef"` hence the cast... + # Python 3.13/3.12.4+ made `recursive_guard` a kwarg, so name it explicitly to avoid: + # TypeError: ForwardRef._evaluate() missing 1 required keyword-only argument: 'recursive_guard' + return cast(Any, type_)._evaluate(globalns, localns, recursive_guard=set()) + +elif sys.version_info < (3, 14): + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + # Pydantic 1.x will not support PEP 695 syntax, but provide `type_params` to avoid + # warnings: + return cast(Any, type_)._evaluate(globalns, localns, type_params=(), recursive_guard=set()) + +else: + + def evaluate_forwardref(type_: ForwardRef, globalns: Any, localns: Any) -> Any: + # Pydantic 1.x will not support PEP 695 syntax, but provide `type_params` to avoid + # warnings: + return typing.evaluate_forward_ref( + type_, + globals=globalns, + locals=localns, + type_params=(), + _recursive_guard=set(), + ) + + +if sys.version_info < (3, 9): + # Ensure we always get the whole `Annotated` hint, not just the annotated type. + # For 3.7 to 3.8, `get_type_hints` doesn't recognize `typing_extensions.Annotated`, + # so it already returns the full annotation + get_all_type_hints = get_type_hints + +else: + + def get_all_type_hints(obj: Any, globalns: Any = None, localns: Any = None) -> Any: + return get_type_hints(obj, globalns, localns, include_extras=True) + + +_T = TypeVar('_T') + +AnyCallable = TypingCallable[..., Any] +NoArgAnyCallable = TypingCallable[[], Any] + +# workaround for https://github.com/python/mypy/issues/9496 +AnyArgTCallable = TypingCallable[..., _T] + + +# Annotated[...] is implemented by returning an instance of one of these classes, depending on +# python/typing_extensions version. +AnnotatedTypeNames = {'AnnotatedMeta', '_AnnotatedAlias'} + + +LITERAL_TYPES: Set[Any] = {Literal} +if hasattr(typing, 'Literal'): + LITERAL_TYPES.add(typing.Literal) + + +if sys.version_info < (3, 8): + + def get_origin(t: Type[Any]) -> Optional[Type[Any]]: + if type(t).__name__ in AnnotatedTypeNames: + # weirdly this is a runtime requirement, as well as for mypy + return cast(Type[Any], Annotated) + return getattr(t, '__origin__', None) + +else: + from typing import get_origin as _typing_get_origin + + def get_origin(tp: Type[Any]) -> Optional[Type[Any]]: + """ + We can't directly use `typing.get_origin` since we need a fallback to support + custom generic classes like `ConstrainedList` + It should become unnecessary once https://github.com/cython/cython/issues/3537 is + solved and https://github.com/pydantic/pydantic/pull/1753 is merged.
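        For example (editor's sketch of the expected results)::

            get_origin(List[int])               # -> list
            get_origin(Union[int, str])         # -> typing.Union
            get_origin(Annotated[int, 'meta'])  # -> Annotated
            get_origin(int)                     # -> None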
+ """ + if type(tp).__name__ in AnnotatedTypeNames: + return cast(Type[Any], Annotated) # mypy complains about _SpecialForm + return _typing_get_origin(tp) or getattr(tp, '__origin__', None) + + +if sys.version_info < (3, 8): + from typing import _GenericAlias + + def get_args(t: Type[Any]) -> Tuple[Any, ...]: + """Compatibility version of get_args for python 3.7. + + Mostly compatible with the python 3.8 `typing` module version + and able to handle almost all use cases. + """ + if type(t).__name__ in AnnotatedTypeNames: + return t.__args__ + t.__metadata__ + if isinstance(t, _GenericAlias): + res = t.__args__ + if t.__origin__ is Callable and res and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return getattr(t, '__args__', ()) + +else: + from typing import get_args as _typing_get_args + + def _generic_get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """ + In python 3.9, `typing.Dict`, `typing.List`, ... + do have an empty `__args__` by default (instead of the generic ~T for example). + In order to still support `Dict` for example and consider it as `Dict[Any, Any]`, + we retrieve the `_nparams` value that tells us how many parameters it needs. + """ + if hasattr(tp, '_nparams'): + return (Any,) * tp._nparams + # Special case for `tuple[()]`, which used to return ((),) with `typing.Tuple` + # in python 3.10- but now returns () for `tuple` and `Tuple`. + # This will probably be clarified in pydantic v2 + try: + if tp == Tuple[()] or sys.version_info >= (3, 9) and tp == tuple[()]: # type: ignore[misc] + return ((),) + # there is a TypeError when compiled with cython + except TypeError: # pragma: no cover + pass + return () + + def get_args(tp: Type[Any]) -> Tuple[Any, ...]: + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if type(tp).__name__ in AnnotatedTypeNames: + return tp.__args__ + tp.__metadata__ + # the fallback is needed for the same reasons as `get_origin` (see above) + return _typing_get_args(tp) or getattr(tp, '__args__', ()) or _generic_get_args(tp) + + +if sys.version_info < (3, 9): + + def convert_generics(tp: Type[Any]) -> Type[Any]: + """Python 3.9 and older only supports generics from `typing` module. + They convert strings to ForwardRef automatically. + + Examples:: + typing.List['Hero'] == typing.List[ForwardRef('Hero')] + """ + return tp + +else: + + def convert_generics(tp: Type[Any]) -> Type[Any]: + """ + Recursively searches for `str` type hints and replaces them with ForwardRef. 
+ + Examples:: + convert_generics(list['Hero']) == list[ForwardRef('Hero')] + convert_generics(dict['Hero', 'Team']) == dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(typing.Dict['Hero', 'Team']) == typing.Dict[ForwardRef('Hero'), ForwardRef('Team')] + convert_generics(list[str | 'Hero'] | int) == list[str | ForwardRef('Hero')] | int + """ + origin = get_origin(tp) + if not origin or not hasattr(tp, '__args__'): + return tp + + args = get_args(tp) + + # typing.Annotated needs special treatment + if origin is Annotated: + return Annotated[(convert_generics(args[0]), *args[1:])] # type: ignore + + # recursively replace `str` instances inside of `GenericAlias` with `ForwardRef(arg)` + converted = tuple( + ForwardRef(arg) if isinstance(arg, str) and isinstance(tp, TypingGenericAlias) else convert_generics(arg) + for arg in args + ) + + if converted == args: + return tp + elif isinstance(tp, TypingGenericAlias): + return TypingGenericAlias(origin, converted) + elif isinstance(tp, TypesUnionType): + # recreate types.UnionType (PEP604, Python >= 3.10) + return functools.reduce(operator.or_, converted) # type: ignore + else: + try: + setattr(tp, '__args__', converted) + except AttributeError: + pass + return tp + + +if sys.version_info < (3, 10): + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union + + WithArgsTypes = (TypingGenericAlias,) + +else: + import types + import typing + + def is_union(tp: Optional[Type[Any]]) -> bool: + return tp is Union or tp is types.UnionType # noqa: E721 + + WithArgsTypes = (typing._GenericAlias, types.GenericAlias, types.UnionType) + + +StrPath = Union[str, PathLike] + + +if TYPE_CHECKING: + from pydantic.v1.fields import ModelField + + TupleGenerator = Generator[Tuple[str, Any], None, None] + DictStrAny = Dict[str, Any] + DictAny = Dict[Any, Any] + SetStr = Set[str] + ListStr = List[str] + IntStr = Union[int, str] + AbstractSetIntStr = AbstractSet[IntStr] + DictIntStrAny = Dict[IntStr, Any] + MappingIntStrAny = Mapping[IntStr, Any] + CallableGenerator = Generator[AnyCallable, None, None] + ReprArgs = Sequence[Tuple[Optional[str], Any]] + + MYPY = False + if MYPY: + AnyClassMethod = classmethod[Any] + else: + # classmethod[TargetType, CallableParamSpecType, CallableReturnType] + AnyClassMethod = classmethod[Any, Any, Any] + +__all__ = ( + 'AnyCallable', + 'NoArgAnyCallable', + 'NoneType', + 'is_none_type', + 'display_as_type', + 'resolve_annotations', + 'is_callable_type', + 'is_literal_type', + 'all_literal_values', + 'is_namedtuple', + 'is_typeddict', + 'is_typeddict_special', + 'is_new_type', + 'new_type_supertype', + 'is_classvar', + 'is_finalvar', + 'update_field_forward_refs', + 'update_model_forward_refs', + 'TupleGenerator', + 'DictStrAny', + 'DictAny', + 'SetStr', + 'ListStr', + 'IntStr', + 'AbstractSetIntStr', + 'DictIntStrAny', + 'CallableGenerator', + 'ReprArgs', + 'AnyClassMethod', + 'CallableGenerator', + 'WithArgsTypes', + 'get_args', + 'get_origin', + 'get_sub_types', + 'typing_base', + 'get_all_type_hints', + 'is_union', + 'StrPath', + 'MappingIntStrAny', +) + + +NoneType = None.__class__ + + +NONE_TYPES: Tuple[Any, Any, Any] = (None, NoneType, Literal[None]) + + +if sys.version_info < (3, 8): + # Even though this implementation is slower, we need it for python 3.7: + # In python 3.7 "Literal" is not a builtin type and uses a different + # mechanism. + # for this reason `Literal[None] is Literal[None]` evaluates to `False`, + # breaking the faster implementation used for the other python versions. 
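    # Editor's sketch of the behaviour the three version-specific branches
    # below must all agree on:
    #
    #     is_none_type(None)           # -> True
    #     is_none_type(type(None))     # -> True
    #     is_none_type(Literal[None])  # -> True
    #     is_none_type(int)            # -> False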
+ + def is_none_type(type_: Any) -> bool: + return type_ in NONE_TYPES + +elif sys.version_info[:2] == (3, 8): + + def is_none_type(type_: Any) -> bool: + for none_type in NONE_TYPES: + if type_ is none_type: + return True + # With python 3.8, specifically 3.8.10, Literal "is" checks are very flaky and + # can change with very subtle changes, like the use of types in other modules; + # hopefully this check avoids that issue. + if is_literal_type(type_): # pragma: no cover + return all_literal_values(type_) == (None,) + return False + +else: + + def is_none_type(type_: Any) -> bool: + return type_ in NONE_TYPES + + +def display_as_type(v: Type[Any]) -> str: + if not isinstance(v, typing_base) and not isinstance(v, WithArgsTypes) and not isinstance(v, type): + v = v.__class__ + + if is_union(get_origin(v)): + return f'Union[{", ".join(map(display_as_type, get_args(v)))}]' + + if isinstance(v, WithArgsTypes): + # Generic aliases are constructs like `list[int]` + return str(v).replace('typing.', '') + + try: + return v.__name__ + except AttributeError: + # happens with typing objects + return str(v).replace('typing.', '') + + +def resolve_annotations(raw_annotations: Dict[str, Type[Any]], module_name: Optional[str]) -> Dict[str, Type[Any]]: + """ + Partially taken from typing.get_type_hints. + + Resolve string or ForwardRef annotations into type objects if possible. + """ + base_globals: Optional[Dict[str, Any]] = None + if module_name: + try: + module = sys.modules[module_name] + except KeyError: + # happens occasionally, see https://github.com/pydantic/pydantic/issues/2363 + pass + else: + base_globals = module.__dict__ + + annotations = {} + for name, value in raw_annotations.items(): + if isinstance(value, str): + if (3, 10) > sys.version_info >= (3, 9, 8) or sys.version_info >= (3, 10, 1): + value = ForwardRef(value, is_argument=False, is_class=True) + else: + value = ForwardRef(value, is_argument=False) + try: + if sys.version_info >= (3, 13): + value = _eval_type(value, base_globals, None, type_params=()) + else: + value = _eval_type(value, base_globals, None) + except NameError: + # this is ok, it can be fixed with update_forward_refs + pass + annotations[name] = value + return annotations + + +def is_callable_type(type_: Type[Any]) -> bool: + return type_ is Callable or get_origin(type_) is Callable + + +def is_literal_type(type_: Type[Any]) -> bool: + return Literal is not None and get_origin(type_) in LITERAL_TYPES + + +def literal_values(type_: Type[Any]) -> Tuple[Any, ...]: + return get_args(type_) + + +def all_literal_values(type_: Type[Any]) -> Tuple[Any, ...]: + """ + This method is used to retrieve all Literal values as + Literal can be used recursively (see https://www.python.org/dev/peps/pep-0586) + e.g. `Literal[Literal[Literal[1, 2, 3], "foo"], 5, None]` + """ + if not is_literal_type(type_): + return (type_,) + + values = literal_values(type_) + return tuple(x for value in values for x in all_literal_values(value)) + + +def is_namedtuple(type_: Type[Any]) -> bool: + """ + Check if a given class is a named tuple.
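    For example (editor's sketch)::

        Point = collections.namedtuple('Point', ['x', 'y'])
        is_namedtuple(Point)  # -> True
        is_namedtuple(tuple)  # -> False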
+ It can be either a `typing.NamedTuple` or `collections.namedtuple` + """ + from pydantic.v1.utils import lenient_issubclass + + return lenient_issubclass(type_, tuple) and hasattr(type_, '_fields') + + +def is_typeddict(type_: Type[Any]) -> bool: + """ + Check if a given class is a typed dict (from `typing` or `typing_extensions`) + In 3.10, there will be a public method (https://docs.python.org/3.10/library/typing.html#typing.is_typeddict) + """ + from pydantic.v1.utils import lenient_issubclass + + return lenient_issubclass(type_, dict) and hasattr(type_, '__total__') + + +def _check_typeddict_special(type_: Any) -> bool: + return type_ is TypedDictRequired or type_ is TypedDictNotRequired + + +def is_typeddict_special(type_: Any) -> bool: + """ + Check if type is a TypedDict special form (Required or NotRequired). + """ + return _check_typeddict_special(type_) or _check_typeddict_special(get_origin(type_)) + + +test_type = NewType('test_type', str) + + +def is_new_type(type_: Type[Any]) -> bool: + """ + Check whether type_ was created using typing.NewType + """ + return isinstance(type_, test_type.__class__) and hasattr(type_, '__supertype__') # type: ignore + + +def new_type_supertype(type_: Type[Any]) -> Type[Any]: + while hasattr(type_, '__supertype__'): + type_ = type_.__supertype__ + return type_ + + +def _check_classvar(v: Optional[Type[Any]]) -> bool: + if v is None: + return False + + return v.__class__ == ClassVar.__class__ and getattr(v, '_name', None) == 'ClassVar' + + +def _check_finalvar(v: Optional[Type[Any]]) -> bool: + """ + Check if a given type is a `typing.Final` type. + """ + if v is None: + return False + + return v.__class__ == Final.__class__ and (sys.version_info < (3, 8) or getattr(v, '_name', None) == 'Final') + + +def is_classvar(ann_type: Type[Any]) -> bool: + if _check_classvar(ann_type) or _check_classvar(get_origin(ann_type)): + return True + + # this is an ugly workaround for class vars that contain forward references and are therefore themselves + # forward references, see #3679 + if ann_type.__class__ == ForwardRef and ann_type.__forward_arg__.startswith('ClassVar['): + return True + + return False + + +def is_finalvar(ann_type: Type[Any]) -> bool: + return _check_finalvar(ann_type) or _check_finalvar(get_origin(ann_type)) + + +def update_field_forward_refs(field: 'ModelField', globalns: Any, localns: Any) -> None: + """ + Try to update ForwardRefs on fields based on this ModelField, globalns and localns. + """ + prepare = False + if field.type_.__class__ == ForwardRef: + prepare = True + field.type_ = evaluate_forwardref(field.type_, globalns, localns or None) + if field.outer_type_.__class__ == ForwardRef: + prepare = True + field.outer_type_ = evaluate_forwardref(field.outer_type_, globalns, localns or None) + if prepare: + field.prepare() + + if field.sub_fields: + for sub_f in field.sub_fields: + update_field_forward_refs(sub_f, globalns=globalns, localns=localns) + + if field.discriminator_key is not None: + field.prepare_discriminated_union_sub_fields() + + +def update_model_forward_refs( + model: Type[Any], + fields: Iterable['ModelField'], + json_encoders: Dict[Union[Type[Any], str, ForwardRef], AnyCallable], + localns: 'DictStrAny', + exc_to_suppress: Tuple[Type[BaseException], ...] = (), +) -> None: + """ + Try to update model fields ForwardRefs based on model and localns. 
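    For example (editor's sketch), this is the machinery behind ``Model.update_forward_refs()``::

        class Node(BaseModel):
            children: List['Node'] = []

        Node.update_forward_refs()  # resolves ForwardRef('Node') in-place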
+ """ + if model.__module__ in sys.modules: + globalns = sys.modules[model.__module__].__dict__.copy() + else: + globalns = {} + + globalns.setdefault(model.__name__, model) + + for f in fields: + try: + update_field_forward_refs(f, globalns=globalns, localns=localns) + except exc_to_suppress: + pass + + for key in set(json_encoders.keys()): + if isinstance(key, str): + fr: ForwardRef = ForwardRef(key) + elif isinstance(key, ForwardRef): + fr = key + else: + continue + + try: + new_key = evaluate_forwardref(fr, globalns, localns or None) + except exc_to_suppress: # pragma: no cover + continue + + json_encoders[new_key] = json_encoders.pop(key) + + +def get_class(type_: Type[Any]) -> Union[None, bool, Type[Any]]: + """ + Tries to get the class of a Type[T] annotation. Returns True if Type is used + without brackets. Otherwise returns None. + """ + if type_ is type: + return True + + if get_origin(type_) is None: + return None + + args = get_args(type_) + if not args or not isinstance(args[0], type): + return True + else: + return args[0] + + +def get_sub_types(tp: Any) -> List[Any]: + """ + Return all the types that are allowed by type `tp` + `tp` can be a `Union` of allowed types or an `Annotated` type + """ + origin = get_origin(tp) + if origin is Annotated: + return get_sub_types(get_args(tp)[0]) + elif is_union(origin): + return [x for t in get_args(tp) for x in get_sub_types(t)] + else: + return [tp] diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/utils.py b/venv/lib/python3.12/site-packages/pydantic/v1/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..2094e84fad951d6620635cf86f7f5fb647c5c20b --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/utils.py @@ -0,0 +1,807 @@ +import keyword +import warnings +import weakref +from collections import OrderedDict, defaultdict, deque +from copy import deepcopy +from itertools import islice, zip_longest +from types import BuiltinFunctionType, CodeType, FunctionType, GeneratorType, LambdaType, ModuleType +from typing import ( + TYPE_CHECKING, + AbstractSet, + Any, + Callable, + Collection, + Dict, + Generator, + Iterable, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Set, + Tuple, + Type, + TypeVar, + Union, +) + +from typing_extensions import Annotated + +from pydantic.v1.errors import ConfigError +from pydantic.v1.typing import ( + NoneType, + WithArgsTypes, + all_literal_values, + display_as_type, + get_args, + get_origin, + is_literal_type, + is_union, +) +from pydantic.v1.version import version_info + +if TYPE_CHECKING: + from inspect import Signature + from pathlib import Path + + from pydantic.v1.config import BaseConfig + from pydantic.v1.dataclasses import Dataclass + from pydantic.v1.fields import ModelField + from pydantic.v1.main import BaseModel + from pydantic.v1.typing import AbstractSetIntStr, DictIntStrAny, IntStr, MappingIntStrAny, ReprArgs + + RichReprResult = Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]] + +__all__ = ( + 'import_string', + 'sequence_like', + 'validate_field_name', + 'lenient_isinstance', + 'lenient_issubclass', + 'in_ipython', + 'is_valid_identifier', + 'deep_update', + 'update_not_none', + 'almost_equal_floats', + 'get_model', + 'to_camel', + 'to_lower_camel', + 'is_valid_field', + 'smart_deepcopy', + 'PyObjectStr', + 'Representation', + 'GetterDict', + 'ValueItems', + 'version_info', # required here to match behaviour in v1.3 + 'ClassAttribute', + 'path_type', + 'ROOT_KEY', + 'get_unique_discriminator_alias', + 
'get_discriminator_alias_and_values', + 'DUNDER_ATTRIBUTES', +) + +ROOT_KEY = '__root__' +# these are types that are returned unchanged by deepcopy +IMMUTABLE_NON_COLLECTIONS_TYPES: Set[Type[Any]] = { + int, + float, + complex, + str, + bool, + bytes, + type, + NoneType, + FunctionType, + BuiltinFunctionType, + LambdaType, + weakref.ref, + CodeType, + # note: including ModuleType will differ from behaviour of deepcopy by not producing error. + # It might be not a good idea in general, but considering that this function used only internally + # against default values of fields, this will allow to actually have a field with module as default value + ModuleType, + NotImplemented.__class__, + Ellipsis.__class__, +} + +# these are types that if empty, might be copied with simple copy() instead of deepcopy() +BUILTIN_COLLECTIONS: Set[Type[Any]] = { + list, + set, + tuple, + frozenset, + dict, + OrderedDict, + defaultdict, + deque, +} + + +def import_string(dotted_path: str) -> Any: + """ + Stolen approximately from django. Import a dotted module path and return the attribute/class designated by the + last name in the path. Raise ImportError if the import fails. + """ + from importlib import import_module + + try: + module_path, class_name = dotted_path.strip(' ').rsplit('.', 1) + except ValueError as e: + raise ImportError(f'"{dotted_path}" doesn\'t look like a module path') from e + + module = import_module(module_path) + try: + return getattr(module, class_name) + except AttributeError as e: + raise ImportError(f'Module "{module_path}" does not define a "{class_name}" attribute') from e + + +def truncate(v: Union[str], *, max_len: int = 80) -> str: + """ + Truncate a value and add a unicode ellipsis (three dots) to the end if it was too long + """ + warnings.warn('`truncate` is no-longer used by pydantic and is deprecated', DeprecationWarning) + if isinstance(v, str) and len(v) > (max_len - 2): + # -3 so quote + string + … + quote has correct length + return (v[: (max_len - 3)] + '…').__repr__() + try: + v = v.__repr__() + except TypeError: + v = v.__class__.__repr__(v) # in case v is a type + if len(v) > max_len: + v = v[: max_len - 1] + '…' + return v + + +def sequence_like(v: Any) -> bool: + return isinstance(v, (list, tuple, set, frozenset, GeneratorType, deque)) + + +def validate_field_name(bases: Iterable[Type[Any]], field_name: str) -> None: + """ + Ensure that the field's name does not shadow an existing attribute of the model. + """ + for base in bases: + if getattr(base, field_name, None): + raise NameError( + f'Field name "{field_name}" shadows a BaseModel attribute; ' + f'use a different field name with "alias=\'{field_name}\'".' + ) + + +def lenient_isinstance(o: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(o, class_or_tuple) # type: ignore[arg-type] + except TypeError: + return False + + +def lenient_issubclass(cls: Any, class_or_tuple: Union[Type[Any], Tuple[Type[Any], ...], None]) -> bool: + try: + return isinstance(cls, type) and issubclass(cls, class_or_tuple) # type: ignore[arg-type] + except TypeError: + if isinstance(cls, WithArgsTypes): + return False + raise # pragma: no cover + + +def in_ipython() -> bool: + """ + Check whether we're in an ipython environment, including jupyter notebooks. 
+ """ + try: + eval('__IPYTHON__') + except NameError: + return False + else: # pragma: no cover + return True + + +def is_valid_identifier(identifier: str) -> bool: + """ + Checks that a string is a valid identifier and not a Python keyword. + :param identifier: The identifier to test. + :return: True if the identifier is valid. + """ + return identifier.isidentifier() and not keyword.iskeyword(identifier) + + +KeyType = TypeVar('KeyType') + + +def deep_update(mapping: Dict[KeyType, Any], *updating_mappings: Dict[KeyType, Any]) -> Dict[KeyType, Any]: + updated_mapping = mapping.copy() + for updating_mapping in updating_mappings: + for k, v in updating_mapping.items(): + if k in updated_mapping and isinstance(updated_mapping[k], dict) and isinstance(v, dict): + updated_mapping[k] = deep_update(updated_mapping[k], v) + else: + updated_mapping[k] = v + return updated_mapping + + +def update_not_none(mapping: Dict[Any, Any], **update: Any) -> None: + mapping.update({k: v for k, v in update.items() if v is not None}) + + +def almost_equal_floats(value_1: float, value_2: float, *, delta: float = 1e-8) -> bool: + """ + Return True if two floats are almost equal + """ + return abs(value_1 - value_2) <= delta + + +def generate_model_signature( + init: Callable[..., None], fields: Dict[str, 'ModelField'], config: Type['BaseConfig'] +) -> 'Signature': + """ + Generate signature for model based on its fields + """ + from inspect import Parameter, Signature, signature + + from pydantic.v1.config import Extra + + present_params = signature(init).parameters.values() + merged_params: Dict[str, Parameter] = {} + var_kw = None + use_var_kw = False + + for param in islice(present_params, 1, None): # skip self arg + if param.kind is param.VAR_KEYWORD: + var_kw = param + continue + merged_params[param.name] = param + + if var_kw: # if custom init has no var_kw, fields which are not declared in it cannot be passed through + allow_names = config.allow_population_by_field_name + for field_name, field in fields.items(): + param_name = field.alias + if field_name in merged_params or param_name in merged_params: + continue + elif not is_valid_identifier(param_name): + if allow_names and is_valid_identifier(field_name): + param_name = field_name + else: + use_var_kw = True + continue + + # TODO: replace annotation with actual expected types once #1055 solved + kwargs = {'default': field.default} if not field.required else {} + merged_params[param_name] = Parameter( + param_name, Parameter.KEYWORD_ONLY, annotation=field.annotation, **kwargs + ) + + if config.extra is Extra.allow: + use_var_kw = True + + if var_kw and use_var_kw: + # Make sure the parameter for extra kwargs + # does not have the same name as a field + default_model_signature = [ + ('__pydantic_self__', Parameter.POSITIONAL_OR_KEYWORD), + ('data', Parameter.VAR_KEYWORD), + ] + if [(p.name, p.kind) for p in present_params] == default_model_signature: + # if this is the standard model signature, use extra_data as the extra args name + var_kw_name = 'extra_data' + else: + # else start from var_kw + var_kw_name = var_kw.name + + # generate a name that's definitely unique + while var_kw_name in fields: + var_kw_name += '_' + merged_params[var_kw_name] = var_kw.replace(name=var_kw_name) + + return Signature(parameters=list(merged_params.values()), return_annotation=None) + + +def get_model(obj: Union[Type['BaseModel'], Type['Dataclass']]) -> Type['BaseModel']: + from pydantic.v1.main import BaseModel + + try: + model_cls = obj.__pydantic_model__ # type: 
ignore + except AttributeError: + model_cls = obj + + if not issubclass(model_cls, BaseModel): + raise TypeError('Unsupported type, must be either BaseModel or dataclass') + return model_cls + + +def to_camel(string: str) -> str: + return ''.join(word.capitalize() for word in string.split('_')) + + +def to_lower_camel(string: str) -> str: + if len(string) >= 1: + pascal_string = to_camel(string) + return pascal_string[0].lower() + pascal_string[1:] + return string.lower() + + +T = TypeVar('T') + + +def unique_list( + input_list: Union[List[T], Tuple[T, ...]], + *, + name_factory: Callable[[T], str] = str, +) -> List[T]: + """ + Make a list unique while maintaining order. + We update the list if another one with the same name is set + (e.g. root validator overridden in subclass) + """ + result: List[T] = [] + result_names: List[str] = [] + for v in input_list: + v_name = name_factory(v) + if v_name not in result_names: + result_names.append(v_name) + result.append(v) + else: + result[result_names.index(v_name)] = v + + return result + + +class PyObjectStr(str): + """ + String class where repr doesn't include quotes. Useful with Representation when you want to return a string + representation of something that is valid (or pseudo-valid) python. + """ + + def __repr__(self) -> str: + return str(self) + + +class Representation: + """ + Mixin to provide __str__, __repr__, and __pretty__ methods. See #884 for more details. + + __pretty__ is used by [devtools](https://python-devtools.helpmanual.io/) to provide human readable representations + of objects. + """ + + __slots__: Tuple[str, ...] = tuple() + + def __repr_args__(self) -> 'ReprArgs': + """ + Returns the attributes to show in __str__, __repr__, and __pretty__; this is generally overridden. + + Can either return: + * name - value pairs, e.g.: `[('foo_name', 'foo'), ('bar_name', ['b', 'a', 'r'])]` + * or, just values, e.g.: `[(None, 'foo'), (None, ['b', 'a', 'r'])]` + """ + attrs = ((s, getattr(self, s)) for s in self.__slots__) + return [(a, v) for a, v in attrs if v is not None] + + def __repr_name__(self) -> str: + """ + Name of the instance's class, used in __repr__. + """ + return self.__class__.__name__ + + def __repr_str__(self, join_str: str) -> str: + return join_str.join(repr(v) if a is None else f'{a}={v!r}' for a, v in self.__repr_args__()) + + def __pretty__(self, fmt: Callable[[Any], Any], **kwargs: Any) -> Generator[Any, None, None]: + """ + Used by devtools (https://python-devtools.helpmanual.io/) to provide human readable representations of objects + """ + yield self.__repr_name__() + '(' + yield 1 + for name, value in self.__repr_args__(): + if name is not None: + yield name + '=' + yield fmt(value) + yield ',' + yield 0 + yield -1 + yield ')' + + def __str__(self) -> str: + return self.__repr_str__(' ') + + def __repr__(self) -> str: + return f'{self.__repr_name__()}({self.__repr_str__(", ")})' + + def __rich_repr__(self) -> 'RichReprResult': + """Get fields for Rich library""" + for name, field_repr in self.__repr_args__(): + if name is None: + yield field_repr + else: + yield name, field_repr + + +class GetterDict(Representation): + """ + Hack to make objects smell just enough like dicts for validate_model. + + We can't inherit from Mapping[str, Any] because it upsets cython so we have to implement all methods ourselves.
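    Example (editor's sketch); this is what ``Config.orm_mode`` uses to read
    arbitrary objects::

        class UserORM:
            name = 'anna'
            age = 30

        gd = GetterDict(UserORM())
        gd['name']            # -> 'anna'
        gd.get('missing', 0)  # -> 0
        gd.keys()             # -> ['age', 'name']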
+ """ + + __slots__ = ('_obj',) + + def __init__(self, obj: Any): + self._obj = obj + + def __getitem__(self, key: str) -> Any: + try: + return getattr(self._obj, key) + except AttributeError as e: + raise KeyError(key) from e + + def get(self, key: Any, default: Any = None) -> Any: + return getattr(self._obj, key, default) + + def extra_keys(self) -> Set[Any]: + """ + We don't want to get any other attributes of obj if the model didn't explicitly ask for them + """ + return set() + + def keys(self) -> List[Any]: + """ + Keys of the pseudo dictionary, uses a list not set so order information can be maintained like python + dictionaries. + """ + return list(self) + + def values(self) -> List[Any]: + return [self[k] for k in self] + + def items(self) -> Iterator[Tuple[str, Any]]: + for k in self: + yield k, self.get(k) + + def __iter__(self) -> Iterator[str]: + for name in dir(self._obj): + if not name.startswith('_'): + yield name + + def __len__(self) -> int: + return sum(1 for _ in self) + + def __contains__(self, item: Any) -> bool: + return item in self.keys() + + def __eq__(self, other: Any) -> bool: + return dict(self) == dict(other.items()) + + def __repr_args__(self) -> 'ReprArgs': + return [(None, dict(self))] + + def __repr_name__(self) -> str: + return f'GetterDict[{display_as_type(self._obj)}]' + + +class ValueItems(Representation): + """ + Class for more convenient calculation of excluded or included fields on values. + """ + + __slots__ = ('_items', '_type') + + def __init__(self, value: Any, items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> None: + items = self._coerce_items(items) + + if isinstance(value, (list, tuple)): + items = self._normalize_indexes(items, len(value)) + + self._items: 'MappingIntStrAny' = items + + def is_excluded(self, item: Any) -> bool: + """ + Check if item is fully excluded. 
+ + :param item: key or index of a value + """ + return self.is_true(self._items.get(item)) + + def is_included(self, item: Any) -> bool: + """ + Check if value is contained in self._items + + :param item: key or index of value + """ + return item in self._items + + def for_element(self, e: 'IntStr') -> Optional[Union['AbstractSetIntStr', 'MappingIntStrAny']]: + """ + :param e: key or index of element on value + :return: raw values for element if self._items is dict and contain needed element + """ + + item = self._items.get(e) + return item if not self.is_true(item) else None + + def _normalize_indexes(self, items: 'MappingIntStrAny', v_length: int) -> 'DictIntStrAny': + """ + :param items: dict or set of indexes which will be normalized + :param v_length: length of sequence indexes of which will be + + >>> self._normalize_indexes({0: True, -2: True, -1: True}, 4) + {0: True, 2: True, 3: True} + >>> self._normalize_indexes({'__all__': True}, 4) + {0: True, 1: True, 2: True, 3: True} + """ + + normalized_items: 'DictIntStrAny' = {} + all_items = None + for i, v in items.items(): + if not (isinstance(v, Mapping) or isinstance(v, AbstractSet) or self.is_true(v)): + raise TypeError(f'Unexpected type of exclude value for index "{i}" {v.__class__}') + if i == '__all__': + all_items = self._coerce_value(v) + continue + if not isinstance(i, int): + raise TypeError( + 'Excluding fields from a sequence of sub-models or dicts must be performed index-wise: ' + 'expected integer keys or keyword "__all__"' + ) + normalized_i = v_length + i if i < 0 else i + normalized_items[normalized_i] = self.merge(v, normalized_items.get(normalized_i)) + + if not all_items: + return normalized_items + if self.is_true(all_items): + for i in range(v_length): + normalized_items.setdefault(i, ...) + return normalized_items + for i in range(v_length): + normalized_item = normalized_items.setdefault(i, {}) + if not self.is_true(normalized_item): + normalized_items[i] = self.merge(all_items, normalized_item) + return normalized_items + + @classmethod + def merge(cls, base: Any, override: Any, intersect: bool = False) -> Any: + """ + Merge a ``base`` item with an ``override`` item. + + Both ``base`` and ``override`` are converted to dictionaries if possible. + Sets are converted to dictionaries with the sets entries as keys and + Ellipsis as values. + + Each key-value pair existing in ``base`` is merged with ``override``, + while the rest of the key-value pairs are updated recursively with this function. + + Merging takes place based on the "union" of keys if ``intersect`` is + set to ``False`` (default) and on the intersection of keys if + ``intersect`` is set to ``True``. 
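        For example (editor's sketch)::

            ValueItems.merge({'a': ..., 'b': {'c': ...}}, {'b': {'d': ...}})
            # -> {'a': Ellipsis, 'b': {'c': Ellipsis, 'd': Ellipsis}}
            ValueItems.merge({'a', 'b'}, {'b': {'c'}})
            # -> {'a': Ellipsis, 'b': {'c': Ellipsis}}  (sets become dicts with Ellipsis values)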
+ """ + override = cls._coerce_value(override) + base = cls._coerce_value(base) + if override is None: + return base + if cls.is_true(base) or base is None: + return override + if cls.is_true(override): + return base if intersect else override + + # intersection or union of keys while preserving ordering: + if intersect: + merge_keys = [k for k in base if k in override] + [k for k in override if k in base] + else: + merge_keys = list(base) + [k for k in override if k not in base] + + merged: 'DictIntStrAny' = {} + for k in merge_keys: + merged_item = cls.merge(base.get(k), override.get(k), intersect=intersect) + if merged_item is not None: + merged[k] = merged_item + + return merged + + @staticmethod + def _coerce_items(items: Union['AbstractSetIntStr', 'MappingIntStrAny']) -> 'MappingIntStrAny': + if isinstance(items, Mapping): + pass + elif isinstance(items, AbstractSet): + items = dict.fromkeys(items, ...) + else: + class_name = getattr(items, '__class__', '???') + assert_never( + items, + f'Unexpected type of exclude value {class_name}', + ) + return items + + @classmethod + def _coerce_value(cls, value: Any) -> Any: + if value is None or cls.is_true(value): + return value + return cls._coerce_items(value) + + @staticmethod + def is_true(v: Any) -> bool: + return v is True or v is ... + + def __repr_args__(self) -> 'ReprArgs': + return [(None, self._items)] + + +class ClassAttribute: + """ + Hide class attribute from its instances + """ + + __slots__ = ( + 'name', + 'value', + ) + + def __init__(self, name: str, value: Any) -> None: + self.name = name + self.value = value + + def __get__(self, instance: Any, owner: Type[Any]) -> None: + if instance is None: + return self.value + raise AttributeError(f'{self.name!r} attribute of {owner.__name__!r} is class-only') + + +path_types = { + 'is_dir': 'directory', + 'is_file': 'file', + 'is_mount': 'mount point', + 'is_symlink': 'symlink', + 'is_block_device': 'block device', + 'is_char_device': 'char device', + 'is_fifo': 'FIFO', + 'is_socket': 'socket', +} + + +def path_type(p: 'Path') -> str: + """ + Find out what sort of thing a path is. + """ + assert p.exists(), 'path does not exist' + for method, name in path_types.items(): + if getattr(p, method)(): + return name + + return 'unknown' + + +Obj = TypeVar('Obj') + + +def smart_deepcopy(obj: Obj) -> Obj: + """ + Return type as is for immutable built-in types + Use obj.copy() for built-in empty collections + Use copy.deepcopy() for non-empty collections and unknown objects + """ + + obj_type = obj.__class__ + if obj_type in IMMUTABLE_NON_COLLECTIONS_TYPES: + return obj # fastest case: obj is immutable and not collection therefore will not be copied anyway + try: + if not obj and obj_type in BUILTIN_COLLECTIONS: + # faster way for empty collections, no need to copy its members + return obj if obj_type is tuple else obj.copy() # type: ignore # tuple doesn't have copy method + except (TypeError, ValueError, RuntimeError): + # do we really dare to catch ALL errors? 
Seems a bit risky + pass + + return deepcopy(obj) # slowest way when we actually might need a deepcopy + + +def is_valid_field(name: str) -> bool: + if not name.startswith('_'): + return True + return ROOT_KEY == name + + +DUNDER_ATTRIBUTES = { + '__annotations__', + '__classcell__', + '__doc__', + '__module__', + '__orig_bases__', + '__orig_class__', + '__qualname__', + '__firstlineno__', + '__static_attributes__', + '__classdictcell__', +} + + +def is_valid_private_name(name: str) -> bool: + return not is_valid_field(name) and name not in DUNDER_ATTRIBUTES + + +_EMPTY = object() + + +def all_identical(left: Iterable[Any], right: Iterable[Any]) -> bool: + """ + Check that the items of `left` are the same objects as those in `right`. + + >>> a, b = object(), object() + >>> all_identical([a, b, a], [a, b, a]) + True + >>> all_identical([a, b, [a]], [a, b, [a]]) # new list object, while "equal" is not "identical" + False + """ + for left_item, right_item in zip_longest(left, right, fillvalue=_EMPTY): + if left_item is not right_item: + return False + return True + + +def assert_never(obj: NoReturn, msg: str) -> NoReturn: + """ + Helper to make sure that we have covered all possible types. + + This is mostly useful for ``mypy``, docs: + https://mypy.readthedocs.io/en/latest/literal_types.html#exhaustive-checks + """ + raise TypeError(msg) + + +def get_unique_discriminator_alias(all_aliases: Collection[str], discriminator_key: str) -> str: + """Validate that all aliases are the same and if that's the case return the alias""" + unique_aliases = set(all_aliases) + if len(unique_aliases) > 1: + raise ConfigError( + f'Aliases for discriminator {discriminator_key!r} must be the same (got {", ".join(sorted(all_aliases))})' + ) + return unique_aliases.pop() + + +def get_discriminator_alias_and_values(tp: Any, discriminator_key: str) -> Tuple[str, Tuple[str, ...]]: + """ + Get alias and all valid values in the `Literal` type of the discriminator field + `tp` can be a `BaseModel` class or directly an `Annotated` `Union` of many. 
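    For example (editor's sketch)::

        class Cat(BaseModel):
            pet_type: Literal['cat']

        class Dog(BaseModel):
            pet_type: Literal['dog']

        get_discriminator_alias_and_values(Union[Cat, Dog], 'pet_type')
        # -> ('pet_type', ('cat', 'dog'))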
+ """ + is_root_model = getattr(tp, '__custom_root_type__', False) + + if get_origin(tp) is Annotated: + tp = get_args(tp)[0] + + if hasattr(tp, '__pydantic_model__'): + tp = tp.__pydantic_model__ + + if is_union(get_origin(tp)): + alias, all_values = _get_union_alias_and_all_values(tp, discriminator_key) + return alias, tuple(v for values in all_values for v in values) + elif is_root_model: + union_type = tp.__fields__[ROOT_KEY].type_ + alias, all_values = _get_union_alias_and_all_values(union_type, discriminator_key) + + if len(set(all_values)) > 1: + raise ConfigError( + f'Field {discriminator_key!r} is not the same for all submodels of {display_as_type(tp)!r}' + ) + + return alias, all_values[0] + + else: + try: + t_discriminator_type = tp.__fields__[discriminator_key].type_ + except AttributeError as e: + raise TypeError(f'Type {tp.__name__!r} is not a valid `BaseModel` or `dataclass`') from e + except KeyError as e: + raise ConfigError(f'Model {tp.__name__!r} needs a discriminator field for key {discriminator_key!r}') from e + + if not is_literal_type(t_discriminator_type): + raise ConfigError(f'Field {discriminator_key!r} of model {tp.__name__!r} needs to be a `Literal`') + + return tp.__fields__[discriminator_key].alias, all_literal_values(t_discriminator_type) + + +def _get_union_alias_and_all_values( + union_type: Type[Any], discriminator_key: str +) -> Tuple[str, Tuple[Tuple[str, ...], ...]]: + zipped_aliases_values = [get_discriminator_alias_and_values(t, discriminator_key) for t in get_args(union_type)] + # unzip: [('alias_a',('v1', 'v2)), ('alias_b', ('v3',))] => [('alias_a', 'alias_b'), (('v1', 'v2'), ('v3',))] + all_aliases, all_values = zip(*zipped_aliases_values) + return get_unique_discriminator_alias(all_aliases, discriminator_key), all_values diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/validators.py b/venv/lib/python3.12/site-packages/pydantic/v1/validators.py new file mode 100644 index 0000000000000000000000000000000000000000..c0940e8114a47ceacf21f4df946a6401946b9b8f --- /dev/null +++ b/venv/lib/python3.12/site-packages/pydantic/v1/validators.py @@ -0,0 +1,768 @@ +import math +import re +from collections import OrderedDict, deque +from collections.abc import Hashable as CollectionsHashable +from datetime import date, datetime, time, timedelta +from decimal import Decimal, DecimalException +from enum import Enum, IntEnum +from ipaddress import IPv4Address, IPv4Interface, IPv4Network, IPv6Address, IPv6Interface, IPv6Network +from pathlib import Path +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Deque, + Dict, + ForwardRef, + FrozenSet, + Generator, + Hashable, + List, + NamedTuple, + Pattern, + Set, + Tuple, + Type, + TypeVar, + Union, +) +from uuid import UUID +from warnings import warn + +from pydantic.v1 import errors +from pydantic.v1.datetime_parse import parse_date, parse_datetime, parse_duration, parse_time +from pydantic.v1.typing import ( + AnyCallable, + all_literal_values, + display_as_type, + get_class, + is_callable_type, + is_literal_type, + is_namedtuple, + is_none_type, + is_typeddict, +) +from pydantic.v1.utils import almost_equal_floats, lenient_issubclass, sequence_like + +if TYPE_CHECKING: + from typing_extensions import Literal, TypedDict + + from pydantic.v1.config import BaseConfig + from pydantic.v1.fields import ModelField + from pydantic.v1.types import ConstrainedDecimal, ConstrainedFloat, ConstrainedInt + + ConstrainedNumber = Union[ConstrainedDecimal, ConstrainedFloat, ConstrainedInt] + AnyOrderedDict = 
OrderedDict[Any, Any] + Number = Union[int, float, Decimal] + StrBytes = Union[str, bytes] + + +def str_validator(v: Any) -> Union[str]: + if isinstance(v, str): + if isinstance(v, Enum): + return v.value + else: + return v + elif isinstance(v, (float, int, Decimal)): + # is there anything else we want to add here? If you think so, create an issue. + return str(v) + elif isinstance(v, (bytes, bytearray)): + return v.decode() + else: + raise errors.StrError() + + +def strict_str_validator(v: Any) -> Union[str]: + if isinstance(v, str) and not isinstance(v, Enum): + return v + raise errors.StrError() + + +def bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + elif isinstance(v, str): + return v.encode() + elif isinstance(v, (float, int, Decimal)): + return str(v).encode() + else: + raise errors.BytesError() + + +def strict_bytes_validator(v: Any) -> Union[bytes]: + if isinstance(v, bytes): + return v + elif isinstance(v, bytearray): + return bytes(v) + else: + raise errors.BytesError() + + +BOOL_FALSE = {0, '0', 'off', 'f', 'false', 'n', 'no'} +BOOL_TRUE = {1, '1', 'on', 't', 'true', 'y', 'yes'} + + +def bool_validator(v: Any) -> bool: + if v is True or v is False: + return v + if isinstance(v, bytes): + v = v.decode() + if isinstance(v, str): + v = v.lower() + try: + if v in BOOL_TRUE: + return True + if v in BOOL_FALSE: + return False + except TypeError: + raise errors.BoolError() + raise errors.BoolError() + + +# matches the default limit cpython, see https://github.com/python/cpython/pull/96500 +max_str_int = 4_300 + + +def int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + + # see https://github.com/pydantic/pydantic/issues/1477 and in turn, https://github.com/python/cpython/issues/95778 + # this check should be unnecessary once patch releases are out for 3.7, 3.8, 3.9 and 3.10 + # but better to check here until then. + # NOTICE: this does not fully protect user from the DOS risk since the standard library JSON implementation + # (and other std lib modules like xml) use `int()` and are likely called before this, the best workaround is to + # 1. update to the latest patch release of python once released, 2. 
use a different JSON library like ujson + if isinstance(v, (str, bytes, bytearray)) and len(v) > max_str_int: + raise errors.IntegerError() + + try: + return int(v) + except (TypeError, ValueError, OverflowError): + raise errors.IntegerError() + + +def strict_int_validator(v: Any) -> int: + if isinstance(v, int) and not (v is True or v is False): + return v + raise errors.IntegerError() + + +def float_validator(v: Any) -> float: + if isinstance(v, float): + return v + + try: + return float(v) + except (TypeError, ValueError): + raise errors.FloatError() + + +def strict_float_validator(v: Any) -> float: + if isinstance(v, float): + return v + raise errors.FloatError() + + +def float_finite_validator(v: 'Number', field: 'ModelField', config: 'BaseConfig') -> 'Number': + allow_inf_nan = getattr(field.type_, 'allow_inf_nan', None) + if allow_inf_nan is None: + allow_inf_nan = config.allow_inf_nan + + if allow_inf_nan is False and (math.isnan(v) or math.isinf(v)): + raise errors.NumberNotFiniteError() + return v + + +def number_multiple_validator(v: 'Number', field: 'ModelField') -> 'Number': + field_type: ConstrainedNumber = field.type_ + if field_type.multiple_of is not None: + mod = float(v) / float(field_type.multiple_of) % 1 + if not almost_equal_floats(mod, 0.0) and not almost_equal_floats(mod, 1.0): + raise errors.NumberNotMultipleError(multiple_of=field_type.multiple_of) + return v + + +def number_size_validator(v: 'Number', field: 'ModelField') -> 'Number': + field_type: ConstrainedNumber = field.type_ + if field_type.gt is not None and not v > field_type.gt: + raise errors.NumberNotGtError(limit_value=field_type.gt) + elif field_type.ge is not None and not v >= field_type.ge: + raise errors.NumberNotGeError(limit_value=field_type.ge) + + if field_type.lt is not None and not v < field_type.lt: + raise errors.NumberNotLtError(limit_value=field_type.lt) + if field_type.le is not None and not v <= field_type.le: + raise errors.NumberNotLeError(limit_value=field_type.le) + + return v + + +def constant_validator(v: 'Any', field: 'ModelField') -> 'Any': + """Validate ``const`` fields. + + The value provided for a ``const`` field must be equal to the default value + of the field. This is to support the keyword of the same name in JSON + Schema. 
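    For example (editor's sketch)::

        class Model(BaseModel):
            version: int = Field(1, const=True)

        Model(version=1)  # ok
        Model(version=2)  # ValidationError: unexpected value; permitted: 1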
+ """ + if v != field.default: + raise errors.WrongConstantError(given=v, permitted=[field.default]) + + return v + + +def anystr_length_validator(v: 'StrBytes', config: 'BaseConfig') -> 'StrBytes': + v_len = len(v) + + min_length = config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def anystr_strip_whitespace(v: 'StrBytes') -> 'StrBytes': + return v.strip() + + +def anystr_upper(v: 'StrBytes') -> 'StrBytes': + return v.upper() + + +def anystr_lower(v: 'StrBytes') -> 'StrBytes': + return v.lower() + + +def ordered_dict_validator(v: Any) -> 'AnyOrderedDict': + if isinstance(v, OrderedDict): + return v + + try: + return OrderedDict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def dict_validator(v: Any) -> Dict[Any, Any]: + if isinstance(v, dict): + return v + + try: + return dict(v) + except (TypeError, ValueError): + raise errors.DictError() + + +def list_validator(v: Any) -> List[Any]: + if isinstance(v, list): + return v + elif sequence_like(v): + return list(v) + else: + raise errors.ListError() + + +def tuple_validator(v: Any) -> Tuple[Any, ...]: + if isinstance(v, tuple): + return v + elif sequence_like(v): + return tuple(v) + else: + raise errors.TupleError() + + +def set_validator(v: Any) -> Set[Any]: + if isinstance(v, set): + return v + elif sequence_like(v): + return set(v) + else: + raise errors.SetError() + + +def frozenset_validator(v: Any) -> FrozenSet[Any]: + if isinstance(v, frozenset): + return v + elif sequence_like(v): + return frozenset(v) + else: + raise errors.FrozenSetError() + + +def deque_validator(v: Any) -> Deque[Any]: + if isinstance(v, deque): + return v + elif sequence_like(v): + return deque(v) + else: + raise errors.DequeError() + + +def enum_member_validator(v: Any, field: 'ModelField', config: 'BaseConfig') -> Enum: + try: + enum_v = field.type_(v) + except ValueError: + # field.type_ should be an enum, so will be iterable + raise errors.EnumMemberError(enum_values=list(field.type_)) + return enum_v.value if config.use_enum_values else enum_v + + +def uuid_validator(v: Any, field: 'ModelField') -> UUID: + try: + if isinstance(v, str): + v = UUID(v) + elif isinstance(v, (bytes, bytearray)): + try: + v = UUID(v.decode()) + except ValueError: + # 16 bytes in big-endian order as the bytes argument fail + # the above check + v = UUID(bytes=v) + except ValueError: + raise errors.UUIDError() + + if not isinstance(v, UUID): + raise errors.UUIDError() + + required_version = getattr(field.type_, '_required_version', None) + if required_version and v.version != required_version: + raise errors.UUIDVersionError(required_version=required_version) + + return v + + +def decimal_validator(v: Any) -> Decimal: + if isinstance(v, Decimal): + return v + elif isinstance(v, (bytes, bytearray)): + v = v.decode() + + v = str(v).strip() + + try: + v = Decimal(v) + except DecimalException: + raise errors.DecimalError() + + if not v.is_finite(): + raise errors.DecimalIsNotFiniteError() + + return v + + +def hashable_validator(v: Any) -> Hashable: + if isinstance(v, Hashable): + return v + + raise errors.HashableError() + + +def ip_v4_address_validator(v: Any) -> IPv4Address: + if isinstance(v, IPv4Address): + return v + + try: + return IPv4Address(v) + except ValueError: + raise errors.IPv4AddressError() + + +def 
ip_v6_address_validator(v: Any) -> IPv6Address: + if isinstance(v, IPv6Address): + return v + + try: + return IPv6Address(v) + except ValueError: + raise errors.IPv6AddressError() + + +def ip_v4_network_validator(v: Any) -> IPv4Network: + """ + Assume IPv4Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv4Network + """ + if isinstance(v, IPv4Network): + return v + + try: + return IPv4Network(v) + except ValueError: + raise errors.IPv4NetworkError() + + +def ip_v6_network_validator(v: Any) -> IPv6Network: + """ + Assume IPv6Network initialised with a default ``strict`` argument + + See more: + https://docs.python.org/library/ipaddress.html#ipaddress.IPv6Network + """ + if isinstance(v, IPv6Network): + return v + + try: + return IPv6Network(v) + except ValueError: + raise errors.IPv6NetworkError() + + +def ip_v4_interface_validator(v: Any) -> IPv4Interface: + if isinstance(v, IPv4Interface): + return v + + try: + return IPv4Interface(v) + except ValueError: + raise errors.IPv4InterfaceError() + + +def ip_v6_interface_validator(v: Any) -> IPv6Interface: + if isinstance(v, IPv6Interface): + return v + + try: + return IPv6Interface(v) + except ValueError: + raise errors.IPv6InterfaceError() + + +def path_validator(v: Any) -> Path: + if isinstance(v, Path): + return v + + try: + return Path(v) + except TypeError: + raise errors.PathError() + + +def path_exists_validator(v: Any) -> Path: + if not v.exists(): + raise errors.PathNotExistsError(path=v) + + return v + + +def callable_validator(v: Any) -> AnyCallable: + """ + Perform a simple check if the value is callable. + + Note: complete matching of argument type hints and return types is not performed + """ + if callable(v): + return v + + raise errors.CallableError(value=v) + + +def enum_validator(v: Any) -> Enum: + if isinstance(v, Enum): + return v + + raise errors.EnumError(value=v) + + +def int_enum_validator(v: Any) -> IntEnum: + if isinstance(v, IntEnum): + return v + + raise errors.IntEnumError(value=v) + + +def make_literal_validator(type_: Any) -> Callable[[Any], Any]: + permitted_choices = all_literal_values(type_) + + # To have a O(1) complexity and still return one of the values set inside the `Literal`, + # we create a dict with the set values (a set causes some problems with the way intersection works). 
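    # Editor's sketch: for `Literal['a', 'b']` the generated validator behaves like
    #
    #     validate = make_literal_validator(Literal['a', 'b'])
    #     validate('a')  # -> 'a'
    #     validate('c')  # raises WrongConstantError(given='c', permitted=('a', 'b'))
    #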
+ # In some cases the set value and checked value can indeed be different (see `test_literal_validator_str_enum`) + allowed_choices = {v: v for v in permitted_choices} + + def literal_validator(v: Any) -> Any: + try: + return allowed_choices[v] + except (KeyError, TypeError): + raise errors.WrongConstantError(given=v, permitted=permitted_choices) + + return literal_validator + + +def constr_length_validator(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + v_len = len(v) + + min_length = field.type_.min_length if field.type_.min_length is not None else config.min_anystr_length + if v_len < min_length: + raise errors.AnyStrMinLengthError(limit_value=min_length) + + max_length = field.type_.max_length if field.type_.max_length is not None else config.max_anystr_length + if max_length is not None and v_len > max_length: + raise errors.AnyStrMaxLengthError(limit_value=max_length) + + return v + + +def constr_strip_whitespace(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + strip_whitespace = field.type_.strip_whitespace or config.anystr_strip_whitespace + if strip_whitespace: + v = v.strip() + + return v + + +def constr_upper(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + upper = field.type_.to_upper or config.anystr_upper + if upper: + v = v.upper() + + return v + + +def constr_lower(v: 'StrBytes', field: 'ModelField', config: 'BaseConfig') -> 'StrBytes': + lower = field.type_.to_lower or config.anystr_lower + if lower: + v = v.lower() + return v + + +def validate_json(v: Any, config: 'BaseConfig') -> Any: + if v is None: + # pass None through to other validators + return v + try: + return config.json_loads(v) # type: ignore + except ValueError: + raise errors.JsonError() + except TypeError: + raise errors.JsonTypeError() + + +T = TypeVar('T') + + +def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]: + def arbitrary_type_validator(v: Any) -> T: + if isinstance(v, type_): + return v + raise errors.ArbitraryTypeError(expected_arbitrary_type=type_) + + return arbitrary_type_validator + + +def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]: + def class_validator(v: Any) -> Type[T]: + if lenient_issubclass(v, type_): + return v + raise errors.SubclassError(expected_class=type_) + + return class_validator + + +def any_class_validator(v: Any) -> Type[T]: + if isinstance(v, type): + return v + raise errors.ClassError() + + +def none_validator(v: Any) -> 'Literal[None]': + if v is None: + return v + raise errors.NotNoneError() + + +def pattern_validator(v: Any) -> Pattern[str]: + if isinstance(v, Pattern): + return v + + str_value = str_validator(v) + + try: + return re.compile(str_value) + except re.error: + raise errors.PatternError() + + +NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple) + + +def make_namedtuple_validator( + namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig'] +) -> Callable[[Tuple[Any, ...]], NamedTupleT]: + from pydantic.v1.annotated_types import create_model_from_namedtuple + + NamedTupleModel = create_model_from_namedtuple( + namedtuple_cls, + __config__=config, + __module__=namedtuple_cls.__module__, + ) + namedtuple_cls.__pydantic_model__ = NamedTupleModel # type: ignore[attr-defined] + + def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT: + annotations = NamedTupleModel.__annotations__ + + if len(values) > len(annotations): + raise errors.ListMaxLengthError(limit_value=len(annotations)) + + dict_values: Dict[str, Any] = 
+
+
+def validate_json(v: Any, config: 'BaseConfig') -> Any:
+    if v is None:
+        # pass None through to other validators
+        return v
+    try:
+        return config.json_loads(v)  # type: ignore
+    except ValueError:
+        raise errors.JsonError()
+    except TypeError:
+        raise errors.JsonTypeError()
+
+
+T = TypeVar('T')
+
+
+def make_arbitrary_type_validator(type_: Type[T]) -> Callable[[T], T]:
+    def arbitrary_type_validator(v: Any) -> T:
+        if isinstance(v, type_):
+            return v
+        raise errors.ArbitraryTypeError(expected_arbitrary_type=type_)
+
+    return arbitrary_type_validator
+
+
+def make_class_validator(type_: Type[T]) -> Callable[[Any], Type[T]]:
+    def class_validator(v: Any) -> Type[T]:
+        if lenient_issubclass(v, type_):
+            return v
+        raise errors.SubclassError(expected_class=type_)
+
+    return class_validator
+
+
+def any_class_validator(v: Any) -> Type[T]:
+    if isinstance(v, type):
+        return v
+    raise errors.ClassError()
+
+
+def none_validator(v: Any) -> 'Literal[None]':
+    if v is None:
+        return v
+    raise errors.NotNoneError()
+
+
+def pattern_validator(v: Any) -> Pattern[str]:
+    if isinstance(v, Pattern):
+        return v
+
+    str_value = str_validator(v)
+
+    try:
+        return re.compile(str_value)
+    except re.error:
+        raise errors.PatternError()
+
+
+NamedTupleT = TypeVar('NamedTupleT', bound=NamedTuple)
+
+
+def make_namedtuple_validator(
+    namedtuple_cls: Type[NamedTupleT], config: Type['BaseConfig']
+) -> Callable[[Tuple[Any, ...]], NamedTupleT]:
+    from pydantic.v1.annotated_types import create_model_from_namedtuple
+
+    NamedTupleModel = create_model_from_namedtuple(
+        namedtuple_cls,
+        __config__=config,
+        __module__=namedtuple_cls.__module__,
+    )
+    namedtuple_cls.__pydantic_model__ = NamedTupleModel  # type: ignore[attr-defined]
+
+    def namedtuple_validator(values: Tuple[Any, ...]) -> NamedTupleT:
+        annotations = NamedTupleModel.__annotations__
+
+        if len(values) > len(annotations):
+            raise errors.ListMaxLengthError(limit_value=len(annotations))
+
+        dict_values: Dict[str, Any] = dict(zip(annotations, values))
+        validated_dict_values: Dict[str, Any] = dict(NamedTupleModel(**dict_values))
+        return namedtuple_cls(**validated_dict_values)
+
+    return namedtuple_validator
+
+
+def make_typeddict_validator(
+    typeddict_cls: Type['TypedDict'], config: Type['BaseConfig']  # type: ignore[valid-type]
+) -> Callable[[Any], Dict[str, Any]]:
+    from pydantic.v1.annotated_types import create_model_from_typeddict
+
+    TypedDictModel = create_model_from_typeddict(
+        typeddict_cls,
+        __config__=config,
+        __module__=typeddict_cls.__module__,
+    )
+    typeddict_cls.__pydantic_model__ = TypedDictModel  # type: ignore[attr-defined]
+
+    def typeddict_validator(values: 'TypedDict') -> Dict[str, Any]:  # type: ignore[valid-type]
+        return TypedDictModel.parse_obj(values).dict(exclude_unset=True)
+
+    return typeddict_validator
+
+
+class IfConfig:
+    def __init__(self, validator: AnyCallable, *config_attr_names: str, ignored_value: Any = False) -> None:
+        self.validator = validator
+        self.config_attr_names = config_attr_names
+        self.ignored_value = ignored_value
+
+    def check(self, config: Type['BaseConfig']) -> bool:
+        return any(getattr(config, name) not in {None, self.ignored_value} for name in self.config_attr_names)
+
+
+# order is important here, for example: bool is a subclass of int so has to come first, datetime before date same,
+# IPv4Interface before IPv4Address, etc
+_VALIDATORS: List[Tuple[Type[Any], List[Any]]] = [
+    (IntEnum, [int_validator, enum_member_validator]),
+    (Enum, [enum_member_validator]),
+    (
+        str,
+        [
+            str_validator,
+            IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
+            IfConfig(anystr_upper, 'anystr_upper'),
+            IfConfig(anystr_lower, 'anystr_lower'),
+            IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
+        ],
+    ),
+    (
+        bytes,
+        [
+            bytes_validator,
+            IfConfig(anystr_strip_whitespace, 'anystr_strip_whitespace'),
+            IfConfig(anystr_upper, 'anystr_upper'),
+            IfConfig(anystr_lower, 'anystr_lower'),
+            IfConfig(anystr_length_validator, 'min_anystr_length', 'max_anystr_length'),
+        ],
+    ),
+    (bool, [bool_validator]),
+    (int, [int_validator]),
+    (float, [float_validator, IfConfig(float_finite_validator, 'allow_inf_nan', ignored_value=True)]),
+    (Path, [path_validator]),
+    (datetime, [parse_datetime]),
+    (date, [parse_date]),
+    (time, [parse_time]),
+    (timedelta, [parse_duration]),
+    (OrderedDict, [ordered_dict_validator]),
+    (dict, [dict_validator]),
+    (list, [list_validator]),
+    (tuple, [tuple_validator]),
+    (set, [set_validator]),
+    (frozenset, [frozenset_validator]),
+    (deque, [deque_validator]),
+    (UUID, [uuid_validator]),
+    (Decimal, [decimal_validator]),
+    (IPv4Interface, [ip_v4_interface_validator]),
+    (IPv6Interface, [ip_v6_interface_validator]),
+    (IPv4Address, [ip_v4_address_validator]),
+    (IPv6Address, [ip_v6_address_validator]),
+    (IPv4Network, [ip_v4_network_validator]),
+    (IPv6Network, [ip_v6_network_validator]),
+]
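The ordering comment above is load-bearing: `find_validators` below returns the validators of the first `issubclass` match in `_VALIDATORS`, and several entries shadow each other. A small standalone check of the overlaps the comment names (stdlib only; `pairs` and `first` are illustrative names):

    from datetime import date, datetime
    from ipaddress import IPv4Address, IPv4Interface

    # each narrower type also matches the broader entry, so it must be listed first
    assert issubclass(bool, int)
    assert issubclass(datetime, date)
    assert issubclass(IPv4Interface, IPv4Address)

    # first-match scan, as find_validators performs it
    pairs = [(bool, 'bool_validator'), (int, 'int_validator')]
    first = next(name for tp, name in pairs if issubclass(bool, tp))
    assert first == 'bool_validator'  # swap the entries and bool values would be coerced as plain int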
+
+
+def find_validators(  # noqa: C901 (ignore complexity)
+    type_: Type[Any], config: Type['BaseConfig']
+) -> Generator[AnyCallable, None, None]:
+    from pydantic.v1.dataclasses import is_builtin_dataclass, make_dataclass_validator
+
+    if type_ is Any or type_ is object:
+        return
+    type_type = type_.__class__
+    if type_type == ForwardRef or type_type == TypeVar:
+        return
+
+    if is_none_type(type_):
+        yield none_validator
+        return
+    if type_ is Pattern or type_ is re.Pattern:
+        yield pattern_validator
+        return
+    if type_ is Hashable or type_ is CollectionsHashable:
+        yield hashable_validator
+        return
+    if is_callable_type(type_):
+        yield callable_validator
+        return
+    if is_literal_type(type_):
+        yield make_literal_validator(type_)
+        return
+    if is_builtin_dataclass(type_):
+        yield from make_dataclass_validator(type_, config)
+        return
+    if type_ is Enum:
+        yield enum_validator
+        return
+    if type_ is IntEnum:
+        yield int_enum_validator
+        return
+    if is_namedtuple(type_):
+        yield tuple_validator
+        yield make_namedtuple_validator(type_, config)
+        return
+    if is_typeddict(type_):
+        yield make_typeddict_validator(type_, config)
+        return
+
+    class_ = get_class(type_)
+    if class_ is not None:
+        if class_ is not Any and isinstance(class_, type):
+            yield make_class_validator(class_)
+        else:
+            yield any_class_validator
+        return
+
+    for val_type, validators in _VALIDATORS:
+        try:
+            if issubclass(type_, val_type):
+                for v in validators:
+                    if isinstance(v, IfConfig):
+                        if v.check(config):
+                            yield v.validator
+                    else:
+                        yield v
+                return
+        except TypeError:
+            raise RuntimeError(f'error checking inheritance of {type_!r} (type: {display_as_type(type_)})')
+
+    if config.arbitrary_types_allowed:
+        yield make_arbitrary_type_validator(type_)
+    else:
+        if hasattr(type_, '__pydantic_core_schema__'):
+            warn(f'Mixing V1 and V2 models is not supported. `{type_.__name__}` is a V2 model.', UserWarning)
+        raise RuntimeError(f'no validator found for {type_}, see `arbitrary_types_allowed` in Config')
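Since `find_validators` is the single entry point pydantic v1 uses to assemble a field's validator chain, it can also be driven directly for inspection. A hedged sketch, not a supported workflow (this is an internal API and may change; `Custom` and `ArbitraryConfig` are hypothetical; assumes pydantic v2 is installed, which exposes the module as `pydantic.v1.validators`):

    from pydantic.v1 import BaseConfig
    from pydantic.v1.validators import find_validators

    # a built-in type is resolved through the _VALIDATORS table
    print([v.__name__ for v in find_validators(int, BaseConfig)])  # ['int_validator']

    class Custom:
        pass

    class ArbitraryConfig(BaseConfig):
        arbitrary_types_allowed = True

    # an unknown type falls through to the isinstance-check validator
    validators = list(find_validators(Custom, ArbitraryConfig))
    instance = Custom()
    assert validators[0](instance) is instance  # the value is passed through unchanged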
diff --git a/venv/lib/python3.12/site-packages/pydantic/v1/version.py b/venv/lib/python3.12/site-packages/pydantic/v1/version.py
new file mode 100644
index 0000000000000000000000000000000000000000..6ba1cb08332b03fc85194d29ee76ab993ea951c9
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic/v1/version.py
@@ -0,0 +1,38 @@
+__all__ = 'compiled', 'VERSION', 'version_info'
+
+VERSION = '1.10.26'
+
+try:
+    import cython  # type: ignore
+except ImportError:
+    compiled: bool = False
+else:  # pragma: no cover
+    try:
+        compiled = cython.compiled
+    except AttributeError:
+        compiled = False
+
+
+def version_info() -> str:
+    import platform
+    import sys
+    from importlib import import_module
+    from pathlib import Path
+
+    optional_deps = []
+    for p in ('devtools', 'dotenv', 'email-validator', 'typing-extensions'):
+        try:
+            import_module(p.replace('-', '_'))
+        except ImportError:
+            continue
+        optional_deps.append(p)
+
+    info = {
+        'pydantic version': VERSION,
+        'pydantic compiled': compiled,
+        'install path': Path(__file__).resolve().parent,
+        'python version': sys.version,
+        'platform': platform.platform(),
+        'optional deps. installed': optional_deps,
+    }
+    return '\n'.join('{:>30} {}'.format(k + ':', str(v).replace('\n', ' ')) for k, v in info.items())
diff --git a/venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/licenses/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..0716871caabdbbb3e77a0371d49936cef1923ea1
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/licenses/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2022 Samuel Colvin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/sboms/pydantic-core.cyclonedx.json b/venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/sboms/pydantic-core.cyclonedx.json
new file mode 100644
index 0000000000000000000000000000000000000000..4e2981647660a843afbd20dc5058c1b17dbab57c
--- /dev/null
+++ b/venv/lib/python3.12/site-packages/pydantic_core-2.46.4.dist-info/sboms/pydantic-core.cyclonedx.json
@@ -0,0 +1,3961 @@
+{
+  "bomFormat": "CycloneDX",
+  "specVersion": "1.5",
+  "version": 1,
+  "serialNumber": "urn:uuid:8283bc1b-853e-4e6a-8cb6-f9a3b5452a74",
+  "metadata": {
+    "timestamp": "2026-05-06T08:31:06.718616147Z",
+    "tools": [
+      {
+        "vendor": "CycloneDX",
+        "name": "cargo-cyclonedx",
+        "version": "0.5.9"
+      }
+    ],
+    "component": {
+      "type": "library",
+      "bom-ref": "path+file:///home/runner/work/pydantic/pydantic/pydantic-core#2.46.4",
+      "name": "pydantic-core",
+      "version": "2.46.4",
+      "scope": "required",
+      "licenses": [
+        {
+          "expression": "MIT"
+        }
+      ],
+      "purl": "pkg:cargo/pydantic-core@2.46.4?download_url=file://.",
+      "externalReferences": [
+        {
+          "type": "website",
+          "url": "https://github.com/pydantic/pydantic"
+        },
+        {
+          "type": "vcs",
+          "url": "https://github.com/pydantic/pydantic.git"
+        }
+      ],
+      "components": [
+        {
+          "type": "library",
+          "bom-ref": "path+file:///home/runner/work/pydantic/pydantic/pydantic-core#2.46.4 bin-target-0",
+          "name": "_pydantic_core",
+          "version": "2.46.4",
+          "purl": "pkg:cargo/pydantic-core@2.46.4?download_url=file://.#src/lib.rs"
+        }
+      ]
+    },
+    "properties": [
+      {
+        "name": "cdx:rustc:sbom:target:all_targets",
+        "value": "true"
+      }
+    ]
+  },
+  "components": [
+    {
+      "type": "library",
+      "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#ahash@0.8.12",
+      "author": "Tom Kaitchuck ",
+      "name": "ahash",
+      "version": "0.8.12",
+      "description": "A non-cryptographic hash function using AES-NI for
high performance", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/ahash@0.8.12", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/ahash" + }, + { + "type": "vcs", + "url": "https://github.com/tkaitchuck/ahash" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#aho-corasick@1.1.3", + "author": "Andrew Gallant ", + "name": "aho-corasick", + "version": "1.1.3", + "description": "Fast multiple substring searching.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" + } + ], + "licenses": [ + { + "expression": "Unlicense OR MIT" + } + ], + "purl": "pkg:cargo/aho-corasick@1.1.3", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/BurntSushi/aho-corasick" + }, + { + "type": "vcs", + "url": "https://github.com/BurntSushi/aho-corasick" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#allocator-api2@0.2.21", + "author": "Zakarum ", + "name": "allocator-api2", + "version": "0.2.21", + "description": "Mirror of Rust's allocator API", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/allocator-api2@0.2.21", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/allocator-api2" + }, + { + "type": "website", + "url": "https://github.com/zakarumych/allocator-api2" + }, + { + "type": "vcs", + "url": "https://github.com/zakarumych/allocator-api2" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#autocfg@1.3.0", + "author": "Josh Stone ", + "name": "autocfg", + "version": "1.3.0", + "description": "Automatic cfg for Rust compiler features", + "scope": "excluded", + "hashes": [ + { + "alg": "SHA-256", + "content": "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/autocfg@1.3.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/autocfg/" + }, + { + "type": "vcs", + "url": "https://github.com/cuviper/autocfg" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#base64@0.22.1", + "author": "Marshall Pierce ", + "name": "base64", + "version": "0.22.1", + "description": "encodes and decodes base64 as bytes or utf8", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/base64@0.22.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/base64" + }, + { + "type": "vcs", + "url": "https://github.com/marshallpierce/rust-base64" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#bitflags@2.9.1", + "author": "The Rust Project Developers", + "name": "bitflags", + "version": "2.9.1", + "description": "A macro to generate structures which behave like 
bitflags. ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/bitflags@2.9.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/bitflags" + }, + { + "type": "website", + "url": "https://github.com/bitflags/bitflags" + }, + { + "type": "vcs", + "url": "https://github.com/bitflags/bitflags" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#bitvec@1.0.1", + "name": "bitvec", + "version": "1.0.1", + "description": "Addresses memory by bits, for packed collections and bitfields", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/bitvec@1.0.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/bitvec/latest/bitvec" + }, + { + "type": "website", + "url": "https://bitvecto-rs.github.io/bitvec" + }, + { + "type": "vcs", + "url": "https://github.com/bitvecto-rs/bitvec" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#bumpalo@3.19.0", + "author": "Nick Fitzgerald ", + "name": "bumpalo", + "version": "3.19.0", + "description": "A fast bump allocation arena for Rust.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/bumpalo@3.19.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/bumpalo" + }, + { + "type": "vcs", + "url": "https://github.com/fitzgen/bumpalo" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#cc@1.0.101", + "author": "Alex Crichton ", + "name": "cc", + "version": "1.0.101", + "description": "A build-time dependency for Cargo build scripts to assist in invoking the native C compiler to compile native C code into a static archive to be linked into Rust code. ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "ac367972e516d45567c7eafc73d24e1c193dcf200a8d94e9db7b3d38b349572d" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/cc@1.0.101", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/cc" + }, + { + "type": "website", + "url": "https://github.com/rust-lang/cc-rs" + }, + { + "type": "vcs", + "url": "https://github.com/rust-lang/cc-rs" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#cfg-if@1.0.0", + "author": "Alex Crichton ", + "name": "cfg-if", + "version": "1.0.0", + "description": "A macro to ergonomically define an item depending on a large number of #[cfg] parameters. Structured like an if-else chain, the first matching branch is the item that gets emitted. 
", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/cfg-if@1.0.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/cfg-if" + }, + { + "type": "website", + "url": "https://github.com/alexcrichton/cfg-if" + }, + { + "type": "vcs", + "url": "https://github.com/alexcrichton/cfg-if" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "author": "Jane Lusby ", + "name": "displaydoc", + "version": "0.2.5", + "description": "A derive macro for implementing the display Trait via a doc comment and string interpolation ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/displaydoc@0.2.5", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/displaydoc" + }, + { + "type": "website", + "url": "https://github.com/yaahc/displaydoc" + }, + { + "type": "vcs", + "url": "https://github.com/yaahc/displaydoc" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#enum_dispatch@0.3.13", + "author": "Anton Lazarev ", + "name": "enum_dispatch", + "version": "0.3.13", + "description": "Near drop-in replacement for dynamic-dispatched method calls with up to 10x the speed", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/enum_dispatch@0.3.13", + "externalReferences": [ + { + "type": "vcs", + "url": "https://gitlab.com/antonok/enum_dispatch" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#equivalent@1.0.2", + "name": "equivalent", + "version": "1.0.2", + "description": "Traits for key comparison in maps.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/equivalent@1.0.2", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/indexmap-rs/equivalent" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#foldhash@0.2.0", + "author": "Orson Peters ", + "name": "foldhash", + "version": "0.2.0", + "description": "A fast, non-cryptographic, minimally DoS-resistant hashing algorithm.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + } + ], + "licenses": [ + { + "expression": "Zlib" + } + ], + "purl": "pkg:cargo/foldhash@0.2.0", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/orlp/foldhash" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#form_urlencoded@1.2.2", + "author": "The rust-url developers", + "name": "form_urlencoded", + "version": "1.2.2", + "description": "Parser and serializer for the application/x-www-form-urlencoded syntax, as used by HTML forms.", + "scope": "required", + "hashes": 
[ + { + "alg": "SHA-256", + "content": "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/form_urlencoded@1.2.2", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/servo/rust-url" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#funty@2.0.0", + "author": "myrrlyn ", + "name": "funty", + "version": "2.0.0", + "description": "Trait generalization over the primitive types", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/funty@2.0.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/funty" + }, + { + "type": "vcs", + "url": "https://github.com/myrrlyn/funty" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#getrandom@0.3.3", + "author": "The Rand Project Developers", + "name": "getrandom", + "version": "0.3.3", + "description": "A small cross-platform library for retrieving random data from system source", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/getrandom@0.3.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/getrandom" + }, + { + "type": "vcs", + "url": "https://github.com/rust-random/getrandom" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#hashbrown@0.16.1", + "author": "Amanieu d'Antras ", + "name": "hashbrown", + "version": "0.16.1", + "description": "A Rust port of Google's SwissTable hash map", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/hashbrown@0.16.1", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/rust-lang/hashbrown" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#heck@0.5.0", + "name": "heck", + "version": "0.5.0", + "description": "heck is a case conversion library.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/heck@0.5.0", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/withoutboats/heck" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#hex@0.4.3", + "author": "KokaKiwi ", + "name": "hex", + "version": "0.4.3", + "description": "Encoding and decoding data into/from hexadecimal representation.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/hex@0.4.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/hex/" + }, + { + "type": "vcs", + "url": "https://github.com/KokaKiwi/rust-hex" 
+ } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_collections@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_collections", + "version": "1.5.0", + "description": "Collection of API for use in ICU libraries.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_collections@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_locid@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_locid", + "version": "1.5.0", + "description": "API for managing Unicode Language and Locale Identifiers", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_locid@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_locid_transform@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_locid_transform", + "version": "1.5.0", + "description": "API for Unicode Language and Locale Identifiers canonicalization", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_locid_transform@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_locid_transform_data@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_locid_transform_data", + "version": "1.5.0", + "description": "Data for the icu_locid_transform crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_locid_transform_data@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_normalizer@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_normalizer", + "version": "1.5.0", + "description": "API for normalizing text into Unicode Normalization Forms", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_normalizer@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": 
"https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_normalizer_data@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_normalizer_data", + "version": "1.5.0", + "description": "Data for the icu_normalizer crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_normalizer_data@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_properties@1.5.1", + "author": "The ICU4X Project Developers", + "name": "icu_properties", + "version": "1.5.1", + "description": "Definitions for Unicode properties", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "93d6020766cfc6302c15dbbc9c8778c37e62c14427cb7f6e601d849e092aeef5" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_properties@1.5.1", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_properties_data@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_properties_data", + "version": "1.5.0", + "description": "Data for the icu_properties crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_properties_data@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_provider@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_provider", + "version": "1.5.0", + "description": "Trait and struct definitions for the ICU data provider", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_provider@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#icu_provider_macros@1.5.0", + "author": "The ICU4X Project Developers", + "name": "icu_provider_macros", + "version": "1.5.0", + "description": "Proc macros for ICU data providers", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/icu_provider_macros@1.5.0", + "externalReferences": [ + { + "type": "website", + "url": "https://icu4x.unicode.org" + }, + { + "type": "vcs", + "url": 
"https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#idna@1.1.0", + "author": "The rust-url developers", + "name": "idna", + "version": "1.1.0", + "description": "IDNA (Internationalizing Domain Names in Applications) and Punycode.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/idna@1.1.0", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/servo/rust-url/" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#idna_adapter@1.2.0", + "author": "The rust-url developers", + "name": "idna_adapter", + "version": "1.2.0", + "description": "Back end adapter for idna", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/idna_adapter@1.2.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/idna_adapter/latest/idna_adapter/" + }, + { + "type": "website", + "url": "https://docs.rs/crate/idna_adapter/latest" + }, + { + "type": "vcs", + "url": "https://github.com/hsivonen/idna_adapter" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#itoa@1.0.11", + "author": "David Tolnay ", + "name": "itoa", + "version": "1.0.11", + "description": "Fast integer primitive to string conversion", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/itoa@1.0.11", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/itoa" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/itoa" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#jiter@0.14.0", + "author": "Samuel Colvin ", + "name": "jiter", + "version": "0.14.0", + "description": "Fast Iterable JSON parser", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "b6f3b5d3f84b36f4ad09fd1da896d23d9852a1aa86556578dd0289f43dce311d" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/jiter@0.14.0", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/pydantic/jiter/" + }, + { + "type": "vcs", + "url": "https://github.com/pydantic/jiter/" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#js-sys@0.3.77", + "author": "The wasm-bindgen Developers", + "name": "js-sys", + "version": "0.3.77", + "description": "Bindings for all JS global objects and functions in all JS environments like Node.js and browsers, built on `#[wasm_bindgen]` using the `wasm-bindgen` crate. 
", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/js-sys@0.3.77", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/js-sys" + }, + { + "type": "website", + "url": "https://rustwasm.github.io/wasm-bindgen/" + }, + { + "type": "vcs", + "url": "https://github.com/rustwasm/wasm-bindgen/tree/master/crates/js-sys" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-float@1.0.5", + "author": "Alex Huszagh ", + "name": "lexical-parse-float", + "version": "1.0.5", + "description": "Efficient parsing of floats from strings.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "de6f9cb01fb0b08060209a057c048fcbab8717b4c1ecd2eac66ebfe39a65b0f2" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/lexical-parse-float@1.0.5", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/Alexhuszagh/rust-lexical" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-integer@1.0.5", + "author": "Alex Huszagh ", + "name": "lexical-parse-integer", + "version": "1.0.5", + "description": "Efficient parsing of integers from strings.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "72207aae22fc0a121ba7b6d479e42cbfea549af1479c3f3a4f12c70dd66df12e" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/lexical-parse-integer@1.0.5", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/Alexhuszagh/rust-lexical" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#lexical-util@1.0.6", + "author": "Alex Huszagh ", + "name": "lexical-util", + "version": "1.0.6", + "description": "Shared utilities for lexical creates.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "5a82e24bf537fd24c177ffbbdc6ebcc8d54732c35b50a3f28cc3f4e4c949a0b3" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/lexical-util@1.0.6", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/Alexhuszagh/rust-lexical" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#libc@0.2.185", + "author": "The Rust Project Developers", + "name": "libc", + "version": "0.2.185", + "description": "Raw FFI bindings to platform libraries like libc.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "52ff2c0fe9bc6cb6b14a0592c2ff4fa9ceb83eea9db979b0487cd054946a2b8f" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/libc@0.2.185", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/rust-lang/libc" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#litemap@0.7.3", + "author": "The ICU4X Project Developers", + "name": "litemap", + "version": "0.7.3", + "description": "A key-value Map implementation based on a flat, sorted Vec.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704" + } + ], + "licenses": [ + { + "expression": 
"Unicode-3.0" + } + ], + "purl": "pkg:cargo/litemap@0.7.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/litemap" + }, + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#log@0.4.27", + "author": "The Rust Project Developers", + "name": "log", + "version": "0.4.27", + "description": "A lightweight logging facade for Rust ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/log@0.4.27", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/log" + }, + { + "type": "vcs", + "url": "https://github.com/rust-lang/log" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#lru@0.16.3", + "author": "Jerome Froelich ", + "name": "lru", + "version": "0.16.3", + "description": "A LRU cache implementation", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "a1dc47f592c06f33f8e3aea9591776ec7c9f9e4124778ff8a3c3b87159f7e593" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/lru@0.16.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/lru/" + }, + { + "type": "website", + "url": "https://github.com/jeromefroe/lru-rs" + }, + { + "type": "vcs", + "url": "https://github.com/jeromefroe/lru-rs.git" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#memchr@2.7.4", + "author": "Andrew Gallant , bluss", + "name": "memchr", + "version": "2.7.4", + "description": "Provides extremely fast (uses SIMD on x86_64, aarch64 and wasm32) routines for 1, 2 or 3 byte search and single substring search. 
", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + } + ], + "licenses": [ + { + "expression": "Unlicense OR MIT" + } + ], + "purl": "pkg:cargo/memchr@2.7.4", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/memchr/" + }, + { + "type": "website", + "url": "https://github.com/BurntSushi/memchr" + }, + { + "type": "vcs", + "url": "https://github.com/BurntSushi/memchr" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#num-bigint@0.4.6", + "author": "The Rust Project Developers", + "name": "num-bigint", + "version": "0.4.6", + "description": "Big integer implementation for Rust", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/num-bigint@0.4.6", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/num-bigint" + }, + { + "type": "website", + "url": "https://github.com/rust-num/num-bigint" + }, + { + "type": "vcs", + "url": "https://github.com/rust-num/num-bigint" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#num-integer@0.1.46", + "author": "The Rust Project Developers", + "name": "num-integer", + "version": "0.1.46", + "description": "Integer traits and functions", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/num-integer@0.1.46", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/num-integer" + }, + { + "type": "website", + "url": "https://github.com/rust-num/num-integer" + }, + { + "type": "vcs", + "url": "https://github.com/rust-num/num-integer" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19", + "author": "The Rust Project Developers", + "name": "num-traits", + "version": "0.2.19", + "description": "Numeric traits for generic mathematics", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/num-traits@0.2.19", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/num-traits" + }, + { + "type": "website", + "url": "https://github.com/rust-num/num-traits" + }, + { + "type": "vcs", + "url": "https://github.com/rust-num/num-traits" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3", + "author": "Aleksey Kladov ", + "name": "once_cell", + "version": "1.21.3", + "description": "Single assignment cells and lazy values.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/once_cell@1.21.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/once_cell" + }, + { + "type": "vcs", + "url": "https://github.com/matklad/once_cell" + } + ] + }, + { + "type": 
"library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#percent-encoding@2.3.2", + "author": "The rust-url developers", + "name": "percent-encoding", + "version": "2.3.2", + "description": "Percent encoding and decoding", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/percent-encoding@2.3.2", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/servo/rust-url/" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#portable-atomic@1.6.0", + "name": "portable-atomic", + "version": "1.6.0", + "description": "Portable atomic types including support for 128-bit atomics, atomic float, etc. ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/portable-atomic@1.6.0", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/taiki-e/portable-atomic" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "author": "David Tolnay , Alex Crichton ", + "name": "proc-macro2", + "version": "1.0.86", + "description": "A substitute implementation of the compiler's `proc_macro` API to decouple token-based libraries from the procedural macro use case.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/proc-macro2@1.0.86", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/proc-macro2" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/proc-macro2" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3", + "author": "PyO3 Project and Contributors ", + "name": "pyo3-build-config", + "version": "0.28.3", + "description": "Build configuration for the PyO3 ecosystem", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "e368e7ddfdeb98c9bca7f8383be1648fd84ab466bf2bc015e94008db6d35611e" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/pyo3-build-config@0.28.3", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/pyo3/pyo3" + }, + { + "type": "vcs", + "url": "https://github.com/pyo3/pyo3" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-ffi@0.28.3", + "author": "PyO3 Project and Contributors ", + "name": "pyo3-ffi", + "version": "0.28.3", + "description": "Python-API bindings for the PyO3 ecosystem", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "7f29e10af80b1f7ccaf7f69eace800a03ecd13e883acfacc1e5d0988605f651e" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/pyo3-ffi@0.28.3", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/pyo3/pyo3" + }, + { + "type": "other", + "url": "python" + }, + { + "type": "vcs", + "url": "https://github.com/pyo3/pyo3" + } + ] + }, + { + "type": "library", + "bom-ref": 
"registry+https://github.com/rust-lang/crates.io-index#pyo3-macros-backend@0.28.3", + "author": "PyO3 Project and Contributors ", + "name": "pyo3-macros-backend", + "version": "0.28.3", + "description": "Code generation for PyO3 package", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "c4cdc218d835738f81c2338f822078af45b4afdf8b2e33cbb5916f108b813acb" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/pyo3-macros-backend@0.28.3", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/pyo3/pyo3" + }, + { + "type": "vcs", + "url": "https://github.com/pyo3/pyo3" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-macros@0.28.3", + "author": "PyO3 Project and Contributors ", + "name": "pyo3-macros", + "version": "0.28.3", + "description": "Proc macros for PyO3 package", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "df6e520eff47c45997d2fc7dd8214b25dd1310918bbb2642156ef66a67f29813" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/pyo3-macros@0.28.3", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/pyo3/pyo3" + }, + { + "type": "vcs", + "url": "https://github.com/pyo3/pyo3" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3@0.28.3", + "author": "PyO3 Project and Contributors ", + "name": "pyo3", + "version": "0.28.3", + "description": "Bindings to Python interpreter", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "91fd8e38a3b50ed1167fb981cd6fd60147e091784c427b8f7183a7ee32c31c12" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/pyo3@0.28.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/crate/pyo3/" + }, + { + "type": "website", + "url": "https://github.com/pyo3/pyo3" + }, + { + "type": "vcs", + "url": "https://github.com/pyo3/pyo3" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#python3-dll-a@0.2.14", + "author": "Sergey Kvachonok , messense , Adam Reichold ", + "name": "python3-dll-a", + "version": "0.2.14", + "description": "Standalone python3(y)(t).dll import library generator", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "d381ef313ae70b4da5f95f8a4de773c6aa5cd28f73adec4b4a31df70b66780d8" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/python3-dll-a@0.2.14", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/PyO3/python3-dll-a" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "author": "David Tolnay ", + "name": "quote", + "version": "1.0.44", + "description": "Quasi-quoting macro quote!(...)", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/quote@1.0.44", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/quote/" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/quote" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#r-efi@5.2.0", + "name": "r-efi", + "version": 
"5.2.0", + "description": "UEFI Reference Specification Protocol Constants and Definitions", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "74765f6d916ee2faa39bc8e68e4f3ed8949b48cccdac59983d287a7cb71ce9c5" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0 OR LGPL-2.1-or-later" + } + ], + "purl": "pkg:cargo/r-efi@5.2.0", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/r-efi/r-efi/wiki" + }, + { + "type": "vcs", + "url": "https://github.com/r-efi/r-efi" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#radium@0.7.0", + "author": "Nika Layzell , myrrlyn ", + "name": "radium", + "version": "0.7.0", + "description": "Portable interfaces for maybe-atomic types", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/radium@0.7.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/radium" + }, + { + "type": "website", + "url": "https://github.com/bitvecto-rs/radium" + }, + { + "type": "vcs", + "url": "https://github.com/bitvecto-rs/radium" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#regex-automata@0.4.13", + "author": "The Rust Project Developers, Andrew Gallant ", + "name": "regex-automata", + "version": "0.4.13", + "description": "Automata construction and matching using regular expressions.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/regex-automata@0.4.13", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/regex-automata" + }, + { + "type": "website", + "url": "https://github.com/rust-lang/regex/tree/master/regex-automata" + }, + { + "type": "vcs", + "url": "https://github.com/rust-lang/regex" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#regex-syntax@0.8.5", + "author": "The Rust Project Developers, Andrew Gallant ", + "name": "regex-syntax", + "version": "0.8.5", + "description": "A regular expression parser.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/regex-syntax@0.8.5", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/regex-syntax" + }, + { + "type": "vcs", + "url": "https://github.com/rust-lang/regex/tree/master/regex-syntax" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#regex@1.12.3", + "author": "The Rust Project Developers, Andrew Gallant ", + "name": "regex", + "version": "1.12.3", + "description": "An implementation of regular expressions for Rust. This implementation uses finite automata and guarantees linear time matching on all inputs. 
", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/regex@1.12.3", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/regex" + }, + { + "type": "website", + "url": "https://github.com/rust-lang/regex" + }, + { + "type": "vcs", + "url": "https://github.com/rust-lang/regex" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#rustversion@1.0.17", + "author": "David Tolnay ", + "name": "rustversion", + "version": "1.0.17", + "description": "Conditional compilation according to rustc compiler version", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/rustversion@1.0.17", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/rustversion" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/rustversion" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#serde@1.0.228", + "author": "Erick Tryzelaar , David Tolnay ", + "name": "serde", + "version": "1.0.228", + "description": "A generic serialization/deserialization framework", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/serde@1.0.228", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/serde" + }, + { + "type": "website", + "url": "https://serde.rs" + }, + { + "type": "vcs", + "url": "https://github.com/serde-rs/serde" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#serde_core@1.0.228", + "author": "Erick Tryzelaar , David Tolnay ", + "name": "serde_core", + "version": "1.0.228", + "description": "Serde traits only, with no support for derive -- use the `serde` crate instead", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/serde_core@1.0.228", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/serde_core" + }, + { + "type": "website", + "url": "https://serde.rs" + }, + { + "type": "vcs", + "url": "https://github.com/serde-rs/serde" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#serde_derive@1.0.228", + "author": "Erick Tryzelaar , David Tolnay ", + "name": "serde_derive", + "version": "1.0.228", + "description": "Macros 1.1 implementation of #[derive(Serialize, Deserialize)]", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/serde_derive@1.0.228", + "externalReferences": [ + { + "type": "documentation", + "url": "https://serde.rs/derive.html" + }, + { + "type": "website", + "url": "https://serde.rs" + }, + { + "type": "vcs", + "url": "https://github.com/serde-rs/serde" 
+ } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#serde_json@1.0.149", + "author": "Erick Tryzelaar , David Tolnay ", + "name": "serde_json", + "version": "1.0.149", + "description": "A JSON serialization file format", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/serde_json@1.0.149", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/serde_json" + }, + { + "type": "vcs", + "url": "https://github.com/serde-rs/json" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1", + "author": "The Servo Project Developers", + "name": "smallvec", + "version": "1.15.1", + "description": "'Small vector' optimization: store up to a small number of items on the stack", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/smallvec@1.15.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/smallvec/" + }, + { + "type": "vcs", + "url": "https://github.com/servo/rust-smallvec" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#speedate@0.17.0", + "author": "Samuel Colvin ", + "name": "speedate", + "version": "0.17.0", + "description": "Fast and simple datetime, date, time and duration parsing", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "aba069c070b5e213f2a094deb7e5ed50ecb092be36102a4f4042e8d2056d060e" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/speedate@0.17.0", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/pydantic/speedate/" + }, + { + "type": "vcs", + "url": "https://github.com/pydantic/speedate/" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#stable_deref_trait@1.2.0", + "author": "Robert Grosse ", + "name": "stable_deref_trait", + "version": "1.2.0", + "description": "An unsafe marker trait for types like Box and Rc that dereference to a stable address even when moved, and hence can be used with libraries such as owning_ref and rental. 
", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/stable_deref_trait@1.2.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/stable_deref_trait/1.2.0/stable_deref_trait" + }, + { + "type": "vcs", + "url": "https://github.com/storyyeller/stable_deref_trait" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#static_assertions@1.1.0", + "author": "Nikolai Vazquez", + "name": "static_assertions", + "version": "1.1.0", + "description": "Compile-time assertions to ensure that invariants are met.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/static_assertions@1.1.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/static_assertions/" + }, + { + "type": "website", + "url": "https://github.com/nvzqz/static-assertions-rs" + }, + { + "type": "vcs", + "url": "https://github.com/nvzqz/static-assertions-rs" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#strum@0.27.2", + "author": "Peter Glotfelty ", + "name": "strum", + "version": "0.27.2", + "description": "Helpful macros for working with enums and strings", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/strum@0.27.2", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/strum" + }, + { + "type": "website", + "url": "https://github.com/Peternator7/strum" + }, + { + "type": "vcs", + "url": "https://github.com/Peternator7/strum" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#strum_macros@0.27.2", + "author": "Peter Glotfelty ", + "name": "strum_macros", + "version": "0.27.2", + "description": "Helpful macros for working with enums and strings", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/strum_macros@0.27.2", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/strum" + }, + { + "type": "website", + "url": "https://github.com/Peternator7/strum" + }, + { + "type": "vcs", + "url": "https://github.com/Peternator7/strum" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82", + "author": "David Tolnay ", + "name": "syn", + "version": "2.0.82", + "description": "Parser for Rust source code", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "83540f837a8afc019423a8edb95b52a8effe46957ee402287f4292fae35be021" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/syn@2.0.82", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/syn" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/syn" + } + ] + }, + { + "type": "library", + "bom-ref": 
"registry+https://github.com/rust-lang/crates.io-index#synstructure@0.13.1", + "author": "Nika Layzell ", + "name": "synstructure", + "version": "0.13.1", + "description": "Helper methods and macros for custom derives", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/synstructure@0.13.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/synstructure" + }, + { + "type": "vcs", + "url": "https://github.com/mystor/synstructure" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#tap@1.0.1", + "author": "Elliott Linder , myrrlyn ", + "name": "tap", + "version": "1.0.1", + "description": "Generic extensions for tapping values in Rust", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/tap@1.0.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/tap" + }, + { + "type": "website", + "url": "https://github.com/myrrlyn/tap" + }, + { + "type": "vcs", + "url": "https://github.com/myrrlyn/tap" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#target-lexicon@0.13.4", + "author": "Dan Gohman ", + "name": "target-lexicon", + "version": "0.13.4", + "description": "LLVM target triple types", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "b1dd07eb858a2067e2f3c7155d54e929265c264e6f37efe3ee7a8d1b5a1dd0ba" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 WITH LLVM-exception" + } + ], + "purl": "pkg:cargo/target-lexicon@0.13.4", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/target-lexicon/" + }, + { + "type": "vcs", + "url": "https://github.com/bytecodealliance/target-lexicon" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#tinystr@0.7.6", + "author": "The ICU4X Project Developers", + "name": "tinystr", + "version": "0.7.6", + "description": "A small ASCII-only bounded length string representation.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/tinystr@0.7.6", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#unicode-ident@1.0.12", + "author": "David Tolnay ", + "name": "unicode-ident", + "version": "1.0.12", + "description": "Determine whether characters have the XID_Start or XID_Continue properties according to Unicode Standard Annex #31", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" + } + ], + "licenses": [ + { + "expression": "(MIT OR Apache-2.0) AND Unicode-DFS-2016" + } + ], + "purl": "pkg:cargo/unicode-ident@1.0.12", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/unicode-ident" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/unicode-ident" + } + ] + }, + { + "type": "library", + 
"bom-ref": "registry+https://github.com/rust-lang/crates.io-index#url@2.5.8", + "author": "The rust-url developers", + "name": "url", + "version": "2.5.8", + "description": "URL library for Rust, based on the WHATWG URL Standard", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "ff67a8a4397373c3ef660812acab3268222035010ab8680ec4215f38ba3d0eed" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/url@2.5.8", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/url" + }, + { + "type": "vcs", + "url": "https://github.com/servo/rust-url" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#utf16_iter@1.0.5", + "author": "Henri Sivonen ", + "name": "utf16_iter", + "version": "1.0.5", + "description": "Iterator by char over potentially-invalid UTF-16 in &[u16]", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/utf16_iter@1.0.5", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/utf16_iter/" + }, + { + "type": "website", + "url": "https://docs.rs/utf16_iter/" + }, + { + "type": "vcs", + "url": "https://github.com/hsivonen/utf16_iter" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#utf8_iter@1.0.4", + "author": "Henri Sivonen ", + "name": "utf8_iter", + "version": "1.0.4", + "description": "Iterator by char over potentially-invalid UTF-8 in &[u8]", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/utf8_iter@1.0.4", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/utf8_iter/" + }, + { + "type": "website", + "url": "https://docs.rs/utf8_iter/" + }, + { + "type": "vcs", + "url": "https://github.com/hsivonen/utf8_iter" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#uuid@1.23.0", + "author": "Ashley Mannix, Dylan DPC, Hunar Roop Kahlon", + "name": "uuid", + "version": "1.23.0", + "description": "A library to generate and parse UUIDs.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "5ac8b6f42ead25368cf5b098aeb3dc8a1a2c05a3eee8a9a1a68c640edbfc79d9" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/uuid@1.23.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/uuid" + }, + { + "type": "website", + "url": "https://github.com/uuid-rs/uuid" + }, + { + "type": "vcs", + "url": "https://github.com/uuid-rs/uuid" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#version_check@0.9.5", + "author": "Sergio Benitez ", + "name": "version_check", + "version": "0.9.5", + "description": "Tiny crate to check the version of the installed/running rustc.", + "scope": "excluded", + "hashes": [ + { + "alg": "SHA-256", + "content": "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/version_check@0.9.5", + "externalReferences": [ + { + "type": "documentation", + 
"url": "https://docs.rs/version_check/" + }, + { + "type": "vcs", + "url": "https://github.com/SergioBenitez/version_check" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wasi@0.14.2+wasi-0.2.4", + "author": "The Cranelift Project Developers", + "name": "wasi", + "version": "0.14.2+wasi-0.2.4", + "description": "WASI API bindings for Rust", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/wasi@0.14.2+wasi-0.2.4", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wasi" + }, + { + "type": "vcs", + "url": "https://github.com/bytecodealliance/wasi-rs" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-backend@0.2.100", + "author": "The wasm-bindgen Developers", + "name": "wasm-bindgen-backend", + "version": "0.2.100", + "description": "Backend code generation of the wasm-bindgen tool ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/wasm-bindgen-backend@0.2.100", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wasm-bindgen-backend" + }, + { + "type": "website", + "url": "https://rustwasm.github.io/wasm-bindgen/" + }, + { + "type": "vcs", + "url": "https://github.com/rustwasm/wasm-bindgen/tree/master/crates/backend" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-macro-support@0.2.100", + "author": "The wasm-bindgen Developers", + "name": "wasm-bindgen-macro-support", + "version": "0.2.100", + "description": "The part of the implementation of the `#[wasm_bindgen]` attribute that is not in the shared backend crate ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/wasm-bindgen-macro-support@0.2.100", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wasm-bindgen" + }, + { + "type": "website", + "url": "https://rustwasm.github.io/wasm-bindgen/" + }, + { + "type": "vcs", + "url": "https://github.com/rustwasm/wasm-bindgen/tree/master/crates/macro-support" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-macro@0.2.100", + "author": "The wasm-bindgen Developers", + "name": "wasm-bindgen-macro", + "version": "0.2.100", + "description": "Definition of the `#[wasm_bindgen]` attribute, an internal dependency ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/wasm-bindgen-macro@0.2.100", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wasm-bindgen" + }, + { + "type": "website", + "url": "https://rustwasm.github.io/wasm-bindgen/" + }, + { + "type": "vcs", + "url": "https://github.com/rustwasm/wasm-bindgen/tree/master/crates/macro" + } + ] 
+ }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-shared@0.2.100", + "author": "The wasm-bindgen Developers", + "name": "wasm-bindgen-shared", + "version": "0.2.100", + "description": "Shared support between wasm-bindgen and wasm-bindgen cli, an internal dependency. ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/wasm-bindgen-shared@0.2.100", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wasm-bindgen-shared" + }, + { + "type": "website", + "url": "https://rustwasm.github.io/wasm-bindgen/" + }, + { + "type": "other", + "url": "wasm_bindgen" + }, + { + "type": "vcs", + "url": "https://github.com/rustwasm/wasm-bindgen/tree/master/crates/shared" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen@0.2.100", + "author": "The wasm-bindgen Developers", + "name": "wasm-bindgen", + "version": "0.2.100", + "description": "Easy support for interacting between JS and Rust. ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" + } + ], + "licenses": [ + { + "expression": "MIT OR Apache-2.0" + } + ], + "purl": "pkg:cargo/wasm-bindgen@0.2.100", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wasm-bindgen" + }, + { + "type": "website", + "url": "https://rustwasm.github.io/" + }, + { + "type": "vcs", + "url": "https://github.com/rustwasm/wasm-bindgen" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wit-bindgen-rt@0.39.0", + "name": "wit-bindgen-rt", + "version": "0.39.0", + "description": "Runtime support for the `wit-bindgen` crate ", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/wit-bindgen-rt@0.39.0", + "externalReferences": [ + { + "type": "website", + "url": "https://github.com/bytecodealliance/wit-bindgen" + }, + { + "type": "vcs", + "url": "https://github.com/bytecodealliance/wit-bindgen" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#write16@1.0.0", + "name": "write16", + "version": "1.0.0", + "description": "A UTF-16 analog of the Write trait", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" + } + ], + "licenses": [ + { + "expression": "Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/write16@1.0.0", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/write16/" + }, + { + "type": "website", + "url": "https://docs.rs/write16/" + }, + { + "type": "vcs", + "url": "https://github.com/hsivonen/write16" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#writeable@0.5.5", + "author": "The ICU4X Project Developers", + "name": "writeable", + "version": "0.5.5", + "description": "A more efficient alternative to fmt::Display", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": 
"1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/writeable@0.5.5", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#wyz@0.5.1", + "author": "myrrlyn ", + "name": "wyz", + "version": "0.5.1", + "description": "myrrlyn’s utility collection", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/wyz@0.5.1", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/wyz" + }, + { + "type": "website", + "url": "https://myrrlyn.net/crates/wyz" + }, + { + "type": "vcs", + "url": "https://github.com/myrrlyn/wyz" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#yoke-derive@0.7.4", + "author": "Manish Goregaokar ", + "name": "yoke-derive", + "version": "0.7.4", + "description": "Custom derive for the yoke crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/yoke-derive@0.7.4", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#yoke@0.7.4", + "author": "Manish Goregaokar ", + "name": "yoke", + "version": "0.7.4", + "description": "Abstraction allowing borrowed data to be carried along with the backing data it borrows from", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/yoke@0.7.4", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zerocopy-derive@0.8.25", + "author": "Joshua Liebow-Feeser , Jack Wrenn ", + "name": "zerocopy-derive", + "version": "0.8.25", + "description": "Custom derive for traits from the zerocopy crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "28a6e20d751156648aa063f3800b706ee209a32c0b4d9f24be3d980b01be55ef" + } + ], + "licenses": [ + { + "expression": "BSD-2-Clause OR Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/zerocopy-derive@0.8.25", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/google/zerocopy" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zerocopy@0.8.25", + "author": "Joshua Liebow-Feeser , Jack Wrenn ", + "name": "zerocopy", + "version": "0.8.25", + "description": "Zerocopy makes zero-cost memory manipulation effortless. 
We write \"unsafe\" so you don't have to.", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "a1702d9583232ddb9174e01bb7c15a2ab8fb1bc6f227aa1233858c351a3ba0cb" + } + ], + "licenses": [ + { + "expression": "BSD-2-Clause OR Apache-2.0 OR MIT" + } + ], + "purl": "pkg:cargo/zerocopy@0.8.25", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/google/zerocopy" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zerofrom-derive@0.1.4", + "author": "Manish Goregaokar ", + "name": "zerofrom-derive", + "version": "0.1.4", + "description": "Custom derive for the zerofrom crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/zerofrom-derive@0.1.4", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zerofrom@0.1.4", + "author": "Manish Goregaokar ", + "name": "zerofrom", + "version": "0.1.4", + "description": "ZeroFrom trait for constructing", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/zerofrom@0.1.4", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zerovec-derive@0.10.3", + "author": "Manish Goregaokar ", + "name": "zerovec-derive", + "version": "0.10.3", + "description": "Custom derive for the zerovec crate", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/zerovec-derive@0.10.3", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4", + "author": "The ICU4X Project Developers", + "name": "zerovec", + "version": "0.10.4", + "description": "Zero-copy vector backed by a byte array", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "aa2b893d79df23bfb12d5461018d408ea19dfafe76c2c7ef6d4eba614f8ff079" + } + ], + "licenses": [ + { + "expression": "Unicode-3.0" + } + ], + "purl": "pkg:cargo/zerovec@0.10.4", + "externalReferences": [ + { + "type": "vcs", + "url": "https://github.com/unicode-org/icu4x" + } + ] + }, + { + "type": "library", + "bom-ref": "registry+https://github.com/rust-lang/crates.io-index#zmij@1.0.6", + "author": "David Tolnay ", + "name": "zmij", + "version": "1.0.6", + "description": "A double-to-string conversion algorithm based on Schubfach and yy", + "scope": "required", + "hashes": [ + { + "alg": "SHA-256", + "content": "aac060176f7020d62c3bcc1cdbcec619d54f48b07ad1963a3f80ce7a0c17755f" + } + ], + "licenses": [ + { + "expression": "MIT" + } + ], + "purl": "pkg:cargo/zmij@1.0.6", + "externalReferences": [ + { + "type": "documentation", + "url": "https://docs.rs/zmij" + }, + { + "type": "vcs", + "url": "https://github.com/dtolnay/zmij" + } + ] + } + ], + 
"dependencies": [ + { + "ref": "path+file:///home/runner/work/pydantic/pydantic/pydantic-core#2.46.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#ahash@0.8.12", + "registry+https://github.com/rust-lang/crates.io-index#base64@0.22.1", + "registry+https://github.com/rust-lang/crates.io-index#enum_dispatch@0.3.13", + "registry+https://github.com/rust-lang/crates.io-index#hashbrown@0.16.1", + "registry+https://github.com/rust-lang/crates.io-index#hex@0.4.3", + "registry+https://github.com/rust-lang/crates.io-index#idna@1.1.0", + "registry+https://github.com/rust-lang/crates.io-index#jiter@0.14.0", + "registry+https://github.com/rust-lang/crates.io-index#lru@0.16.3", + "registry+https://github.com/rust-lang/crates.io-index#num-bigint@0.4.6", + "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19", + "registry+https://github.com/rust-lang/crates.io-index#percent-encoding@2.3.2", + "registry+https://github.com/rust-lang/crates.io-index#pyo3@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#regex@1.12.3", + "registry+https://github.com/rust-lang/crates.io-index#serde@1.0.228", + "registry+https://github.com/rust-lang/crates.io-index#serde_json@1.0.149", + "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1", + "registry+https://github.com/rust-lang/crates.io-index#speedate@0.17.0", + "registry+https://github.com/rust-lang/crates.io-index#strum@0.27.2", + "registry+https://github.com/rust-lang/crates.io-index#strum_macros@0.27.2", + "registry+https://github.com/rust-lang/crates.io-index#url@2.5.8", + "registry+https://github.com/rust-lang/crates.io-index#uuid@1.23.0", + "registry+https://github.com/rust-lang/crates.io-index#version_check@0.9.5" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#ahash@0.8.12", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#cfg-if@1.0.0", + "registry+https://github.com/rust-lang/crates.io-index#getrandom@0.3.3", + "registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3", + "registry+https://github.com/rust-lang/crates.io-index#version_check@0.9.5", + "registry+https://github.com/rust-lang/crates.io-index#zerocopy@0.8.25" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#aho-corasick@1.1.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#memchr@2.7.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#allocator-api2@0.2.21" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#autocfg@1.3.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#base64@0.22.1" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#bitflags@2.9.1" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#bitvec@1.0.1", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#funty@2.0.0", + "registry+https://github.com/rust-lang/crates.io-index#radium@0.7.0", + "registry+https://github.com/rust-lang/crates.io-index#tap@1.0.1", + "registry+https://github.com/rust-lang/crates.io-index#wyz@0.5.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#bumpalo@3.19.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#cc@1.0.101" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#cfg-if@1.0.0" + }, + { + "ref": 
"registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#enum_dispatch@0.3.13", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3", + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#equivalent@1.0.2" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#foldhash@0.2.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#form_urlencoded@1.2.2", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#percent-encoding@2.3.2" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#funty@2.0.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#getrandom@0.3.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#cfg-if@1.0.0", + "registry+https://github.com/rust-lang/crates.io-index#libc@0.2.185", + "registry+https://github.com/rust-lang/crates.io-index#r-efi@5.2.0", + "registry+https://github.com/rust-lang/crates.io-index#wasi@0.14.2+wasi-0.2.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#hashbrown@0.16.1", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#allocator-api2@0.2.21", + "registry+https://github.com/rust-lang/crates.io-index#equivalent@1.0.2", + "registry+https://github.com/rust-lang/crates.io-index#foldhash@0.2.0" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#heck@0.5.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#hex@0.4.3" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_collections@1.5.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#yoke@0.7.4", + "registry+https://github.com/rust-lang/crates.io-index#zerofrom@0.1.4", + "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_locid@1.5.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#litemap@0.7.3", + "registry+https://github.com/rust-lang/crates.io-index#tinystr@0.7.6", + "registry+https://github.com/rust-lang/crates.io-index#writeable@0.5.5", + "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_locid_transform@1.5.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#icu_locid@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_locid_transform_data@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_provider@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#tinystr@0.7.6", + 
"registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_locid_transform_data@1.5.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_normalizer@1.5.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#icu_collections@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_normalizer_data@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_properties@1.5.1", + "registry+https://github.com/rust-lang/crates.io-index#icu_provider@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1", + "registry+https://github.com/rust-lang/crates.io-index#utf16_iter@1.0.5", + "registry+https://github.com/rust-lang/crates.io-index#utf8_iter@1.0.4", + "registry+https://github.com/rust-lang/crates.io-index#write16@1.0.0", + "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_normalizer_data@1.5.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_properties@1.5.1", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#icu_collections@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_locid_transform@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_properties_data@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_provider@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#tinystr@0.7.6", + "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_properties_data@1.5.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_provider@1.5.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#icu_locid@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_provider_macros@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#stable_deref_trait@1.2.0", + "registry+https://github.com/rust-lang/crates.io-index#tinystr@0.7.6", + "registry+https://github.com/rust-lang/crates.io-index#writeable@0.5.5", + "registry+https://github.com/rust-lang/crates.io-index#yoke@0.7.4", + "registry+https://github.com/rust-lang/crates.io-index#zerofrom@0.1.4", + "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#icu_provider_macros@1.5.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#idna@1.1.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#idna_adapter@1.2.0", + "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1", + "registry+https://github.com/rust-lang/crates.io-index#utf8_iter@1.0.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#idna_adapter@1.2.0", + "dependsOn": [ + 
"registry+https://github.com/rust-lang/crates.io-index#icu_normalizer@1.5.0", + "registry+https://github.com/rust-lang/crates.io-index#icu_properties@1.5.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#itoa@1.0.11" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#jiter@0.14.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#ahash@0.8.12", + "registry+https://github.com/rust-lang/crates.io-index#bitvec@1.0.1", + "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-float@1.0.5", + "registry+https://github.com/rust-lang/crates.io-index#num-bigint@0.4.6", + "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19", + "registry+https://github.com/rust-lang/crates.io-index#pyo3@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#js-sys@0.3.77", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen@0.2.100" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-float@1.0.5", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-integer@1.0.5", + "registry+https://github.com/rust-lang/crates.io-index#lexical-util@1.0.6", + "registry+https://github.com/rust-lang/crates.io-index#static_assertions@1.1.0" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-integer@1.0.5", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#lexical-util@1.0.6", + "registry+https://github.com/rust-lang/crates.io-index#static_assertions@1.1.0" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#lexical-util@1.0.6", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#static_assertions@1.1.0" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#libc@0.2.185" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#litemap@0.7.3" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#log@0.4.27" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#lru@0.16.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#hashbrown@0.16.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#memchr@2.7.4" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#num-bigint@0.4.6", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#num-integer@0.1.46", + "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#num-integer@0.1.46", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#autocfg@1.3.0" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#percent-encoding@2.3.2" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#portable-atomic@1.6.0" + }, + { + 
"ref": "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#unicode-ident@1.0.12" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#python3-dll-a@0.2.14", + "registry+https://github.com/rust-lang/crates.io-index#target-lexicon@0.13.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-ffi@0.28.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#libc@0.2.185", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-macros-backend@0.28.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#heck@0.5.0", + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3-macros@0.28.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-macros-backend@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#pyo3@0.28.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#libc@0.2.185", + "registry+https://github.com/rust-lang/crates.io-index#num-bigint@0.4.6", + "registry+https://github.com/rust-lang/crates.io-index#num-traits@0.2.19", + "registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3", + "registry+https://github.com/rust-lang/crates.io-index#portable-atomic@1.6.0", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-build-config@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-ffi@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#pyo3-macros@0.28.3", + "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#python3-dll-a@0.2.14", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#cc@1.0.101" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#r-efi@5.2.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#radium@0.7.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#regex-automata@0.4.13", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#aho-corasick@1.1.3", + "registry+https://github.com/rust-lang/crates.io-index#memchr@2.7.4", + "registry+https://github.com/rust-lang/crates.io-index#regex-syntax@0.8.5" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#regex-syntax@0.8.5" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#regex@1.12.3", + "dependsOn": [ + 
"registry+https://github.com/rust-lang/crates.io-index#aho-corasick@1.1.3", + "registry+https://github.com/rust-lang/crates.io-index#memchr@2.7.4", + "registry+https://github.com/rust-lang/crates.io-index#regex-automata@0.4.13", + "registry+https://github.com/rust-lang/crates.io-index#regex-syntax@0.8.5" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#rustversion@1.0.17" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#serde@1.0.228", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#serde_core@1.0.228", + "registry+https://github.com/rust-lang/crates.io-index#serde_derive@1.0.228" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#serde_core@1.0.228", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#serde_derive@1.0.228" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#serde_derive@1.0.228", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#serde_json@1.0.149", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#itoa@1.0.11", + "registry+https://github.com/rust-lang/crates.io-index#memchr@2.7.4", + "registry+https://github.com/rust-lang/crates.io-index#serde@1.0.228", + "registry+https://github.com/rust-lang/crates.io-index#serde_core@1.0.228", + "registry+https://github.com/rust-lang/crates.io-index#zmij@1.0.6" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#smallvec@1.15.1" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#speedate@0.17.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#lexical-parse-float@1.0.5", + "registry+https://github.com/rust-lang/crates.io-index#strum@0.27.2", + "registry+https://github.com/rust-lang/crates.io-index#strum_macros@0.27.2" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#stable_deref_trait@1.2.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#static_assertions@1.1.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#strum@0.27.2", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#strum_macros@0.27.2" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#strum_macros@0.27.2", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#heck@0.5.0", + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#unicode-ident@1.0.12" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#synstructure@0.13.1", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + 
"registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#tap@1.0.1" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#target-lexicon@0.13.4" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#tinystr@0.7.6", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#displaydoc@0.2.5", + "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#unicode-ident@1.0.12" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#url@2.5.8", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#form_urlencoded@1.2.2", + "registry+https://github.com/rust-lang/crates.io-index#idna@1.1.0", + "registry+https://github.com/rust-lang/crates.io-index#percent-encoding@2.3.2", + "registry+https://github.com/rust-lang/crates.io-index#serde@1.0.228" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#utf16_iter@1.0.5" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#utf8_iter@1.0.4" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#uuid@1.23.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#js-sys@0.3.77", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen@0.2.100" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#version_check@0.9.5" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wasi@0.14.2+wasi-0.2.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#wit-bindgen-rt@0.39.0" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-backend@0.2.100", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#bumpalo@3.19.0", + "registry+https://github.com/rust-lang/crates.io-index#log@0.4.27", + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-shared@0.2.100" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-macro-support@0.2.100", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-backend@0.2.100", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-shared@0.2.100" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-macro@0.2.100", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-macro-support@0.2.100" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-shared@0.2.100", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#unicode-ident@1.0.12" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen@0.2.100", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#cfg-if@1.0.0", + 
"registry+https://github.com/rust-lang/crates.io-index#once_cell@1.21.3", + "registry+https://github.com/rust-lang/crates.io-index#rustversion@1.0.17", + "registry+https://github.com/rust-lang/crates.io-index#wasm-bindgen-macro@0.2.100" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wit-bindgen-rt@0.39.0", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#bitflags@2.9.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#write16@1.0.0" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#writeable@0.5.5" + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#wyz@0.5.1", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#tap@1.0.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#yoke-derive@0.7.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82", + "registry+https://github.com/rust-lang/crates.io-index#synstructure@0.13.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#yoke@0.7.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#serde@1.0.228", + "registry+https://github.com/rust-lang/crates.io-index#stable_deref_trait@1.2.0", + "registry+https://github.com/rust-lang/crates.io-index#yoke-derive@0.7.4", + "registry+https://github.com/rust-lang/crates.io-index#zerofrom@0.1.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zerocopy-derive@0.8.25", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zerocopy@0.8.25", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#zerocopy-derive@0.8.25" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zerofrom-derive@0.1.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82", + "registry+https://github.com/rust-lang/crates.io-index#synstructure@0.13.1" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zerofrom@0.1.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#zerofrom-derive@0.1.4" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zerovec-derive@0.10.3", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#proc-macro2@1.0.86", + "registry+https://github.com/rust-lang/crates.io-index#quote@1.0.44", + "registry+https://github.com/rust-lang/crates.io-index#syn@2.0.82" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zerovec@0.10.4", + "dependsOn": [ + "registry+https://github.com/rust-lang/crates.io-index#yoke@0.7.4", + "registry+https://github.com/rust-lang/crates.io-index#zerofrom@0.1.4", + "registry+https://github.com/rust-lang/crates.io-index#zerovec-derive@0.10.3" + ] + }, + { + "ref": "registry+https://github.com/rust-lang/crates.io-index#zmij@1.0.6" + } + ] +} \ No newline at end of file diff --git 
diff --git a/venv/lib/python3.12/site-packages/pydantic_core/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/pydantic_core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6678f51105076415818be18d02f0a37e4e3bb508 Binary files /dev/null and b/venv/lib/python3.12/site-packages/pydantic_core/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..908445f6fafef335fd976270d5800cdacb2efc49 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/api_error.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/api_error.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0416ae60a4b54753ba1af1b7452a183d306353a0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/api_error.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/client_wrapper.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/client_wrapper.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..193f5e08c21b02de69d76aa42b218632e55dcea4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/client_wrapper.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/datetime_utils.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/datetime_utils.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc0e12dab6a75bccda079780fdb451f36e189063 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/datetime_utils.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/events.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/events.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..328d351c03368e276f820db841140cf247fa812e Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/events.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/file.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/file.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..03f627f946d15df4bbdc380b82a04a087332f4f1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/file.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/force_multipart.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/force_multipart.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c7a1286c754547ed18fe8ae9632296511504182 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/force_multipart.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/http_client.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/http_client.cpython-312.pyc new
file mode 100644 index 0000000000000000000000000000000000000000..3a18d13e80cea86afbd1749a6b71bfc9640967ed Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/http_client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/http_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/http_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..517a3c89ecebebd22ecf2e264977edb19e319e5a Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/http_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/jsonable_encoder.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/jsonable_encoder.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67b779f67b13ded818ced3f267d21f6d1b89e793 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/jsonable_encoder.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/logging.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/logging.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1dc2ba786d417e68f0a05621a9a9612332b84eb5 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/logging.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/pydantic_utilities.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/pydantic_utilities.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dc4dc0c6bc5ccae953e6821321531eb9823fd507 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/pydantic_utilities.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/query_encoder.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/query_encoder.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3fa14fe15282c06770875fb9490c7e0790355718 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/query_encoder.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/remove_none_from_dict.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/remove_none_from_dict.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2ca786a8fa6c4f871b023e94a2e208d47f6f1bb3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/remove_none_from_dict.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/request_options.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/request_options.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..885a1d031e108c3129f9c65c783825a075bd96f3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/request_options.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/serialization.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/serialization.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..b5e904419cd6144e1d37aaaa84203f1d067672f3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/serialization.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/websocket_compat.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/websocket_compat.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f6c11cf1915cc676c73230a81297e5a36e9bb34 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/__pycache__/websocket_compat.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__init__.py b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..730e5a3382ebb716c5bbd9159041430efea45a8c --- /dev/null +++ b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__init__.py @@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+# isort: skip_file
+
+import typing
+from importlib import import_module
+
+if typing.TYPE_CHECKING:
+    from ._api import EventSource, aconnect_sse, connect_sse
+    from ._exceptions import SSEError
+    from ._models import ServerSentEvent
+_dynamic_imports: typing.Dict[str, str] = {
+    "EventSource": "._api",
+    "SSEError": "._exceptions",
+    "ServerSentEvent": "._models",
+    "aconnect_sse": "._api",
+    "connect_sse": "._api",
+}
+
+
+def __getattr__(attr_name: str) -> typing.Any:
+    module_name = _dynamic_imports.get(attr_name)
+    if module_name is None:
+        raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
+    try:
+        module = import_module(module_name, __package__)
+        if module_name == f".{attr_name}":
+            return module
+        else:
+            return getattr(module, attr_name)
+    except ImportError as e:
+        raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e
+    except AttributeError as e:
+        raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e
+
+
+def __dir__():
+    lazy_attrs = list(_dynamic_imports.keys())
+    return sorted(lazy_attrs)
+
+
+__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"]
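The __init__.py above avoids importing its submodules at package-import time: names listed in _dynamic_imports are resolved on first attribute access through the module-level __getattr__ hook (PEP 562), while the typing.TYPE_CHECKING block keeps static type checkers happy. A minimal sketch of that behaviour, assuming the vendored sarvamai package is importable; nothing below is part of the diff itself:

import sarvamai.core.http_sse as http_sse

# dir() reports the lazily importable names via the custom __dir__,
# without actually importing ._api, ._exceptions, or ._models.
print(dir(http_sse))

# First attribute access falls through to the module-level __getattr__,
# which runs import_module("._api", ...) and returns the class.
EventSource = http_sse.EventSource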
diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a3b070cb2e7420d73abfbfb5893727fe4bf59826 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/_api.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/_api.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3b17a54fc756b407460be4ea8a999c2d4de0d334 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/_api.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/_decoders.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/_decoders.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7720941c01cbe806f08bdcf51f3a77c7fbc127dc Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/__pycache__/_decoders.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_api.py b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_api.py new file mode 100644 index 0000000000000000000000000000000000000000..f900b3b686de9701c1049f634b117403e76952ad --- /dev/null +++ b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_api.py @@ -0,0 +1,112 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import re
+from contextlib import asynccontextmanager, contextmanager
+from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast
+
+import httpx
+from ._decoders import SSEDecoder
+from ._exceptions import SSEError
+from ._models import ServerSentEvent
+
+
+class EventSource:
+    def __init__(self, response: httpx.Response) -> None:
+        self._response = response
+
+    def _check_content_type(self) -> None:
+        content_type = self._response.headers.get("content-type", "").partition(";")[0]
+        if "text/event-stream" not in content_type:
+            raise SSEError(
+                f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}"
+            )
+
+    def _get_charset(self) -> str:
+        """Extract charset from Content-Type header, fallback to UTF-8."""
+        content_type = self._response.headers.get("content-type", "")
+
+        # Parse charset parameter using regex
+        charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE)
+        if charset_match:
+            charset = charset_match.group(1).strip("\"'")
+            # Validate that it's a known encoding
+            try:
+                # Test if the charset is valid by trying to encode/decode
+                "test".encode(charset).decode(charset)
+                return charset
+            except (LookupError, UnicodeError):
+                # If charset is invalid, fall back to UTF-8
+                pass
+
+        # Default to UTF-8 if no charset specified or invalid charset
+        return "utf-8"
+
+    @property
+    def response(self) -> httpx.Response:
+        return self._response
+
+    def iter_sse(self) -> Iterator[ServerSentEvent]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        charset = self._get_charset()
+
+        buffer = ""
+        for chunk in self._response.iter_bytes():
+            # Decode chunk using detected charset
+            text_chunk = chunk.decode(charset, errors="replace")
+            buffer += text_chunk
+
+            # Process complete lines
+            while "\n" in buffer:
+                line, buffer = buffer.split("\n", 1)
+                line = line.rstrip("\r")
+                sse = decoder.decode(line)
+                # when we reach a "\n\n" => line = ''
+                # => decoder will attempt to return an SSE Event
+                if sse is not None:
+                    yield sse
+
+        # Process any remaining data in buffer
+        if buffer.strip():
+            line = buffer.rstrip("\r")
+            sse = decoder.decode(line)
+            if sse is not None:
+                yield sse
+
+    async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]:
+        self._check_content_type()
+        decoder = SSEDecoder()
+        lines = cast(AsyncGenerator[str, None], self._response.aiter_lines())
+        try:
+            async for line in lines:
+                line = line.rstrip("\n")
+                sse = decoder.decode(line)
+                if sse is not None:
+                    yield sse
+        finally:
+            await lines.aclose()
+
+
+@contextmanager
+def connect_sse(client: httpx.Client, method: str, url: str, **kwargs: Any) -> Iterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
+
+
+@asynccontextmanager
+async def aconnect_sse(
+    client: httpx.AsyncClient,
+    method: str,
+    url: str,
+    **kwargs: Any,
+) -> AsyncIterator[EventSource]:
+    headers = kwargs.pop("headers", {})
+    headers["Accept"] = "text/event-stream"
+    headers["Cache-Control"] = "no-store"
+
+    async with client.stream(method, url, headers=headers, **kwargs) as response:
+        yield EventSource(response)
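connect_sse and aconnect_sse wrap an httpx streaming request, force the Accept and Cache-Control headers an SSE endpoint expects, and hand back an EventSource whose iter_sse/aiter_sse methods yield decoded events. A minimal consumption sketch using only the names defined above; the endpoint URL and request body are hypothetical stand-ins, not part of the sarvamai API:

import httpx

from sarvamai.core.http_sse import connect_sse

URL = "https://example.com/v1/stream"  # hypothetical SSE endpoint, for illustration only

with httpx.Client() as client:
    # connect_sse opens the stream; iter_sse raises SSEError unless the
    # response Content-Type contains "text/event-stream".
    with connect_sse(client, "POST", URL, json={"text": "hello"}) as event_source:
        for sse in event_source.iter_sse():
            # Each ServerSentEvent carries event, data, id, and retry fields.
            print(sse.event, sse.data)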
diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_decoders.py b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_decoders.py new file mode 100644 index 0000000000000000000000000000000000000000..339b0890138112b349d7446798a7e5f04643cbb1 --- /dev/null +++ b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_decoders.py @@ -0,0 +1,61 @@
+# This file was auto-generated by Fern from our API Definition.
+
+from typing import List, Optional
+
+from ._models import ServerSentEvent
+
+
+class SSEDecoder:
+    def __init__(self) -> None:
+        self._event = ""
+        self._data: List[str] = []
+        self._last_event_id = ""
+        self._retry: Optional[int] = None
+
+    def decode(self, line: str) -> Optional[ServerSentEvent]:
+        # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation  # noqa: E501
+
+        if not line:
+            if not self._event and not self._data and not self._last_event_id and self._retry is None:
+                return None
+
+            sse = ServerSentEvent(
+                event=self._event,
+                data="\n".join(self._data),
+                id=self._last_event_id,
+                retry=self._retry,
+            )
+
+            # NOTE: as per the SSE spec, do not reset last_event_id.
+            self._event = ""
+            self._data = []
+            self._retry = None
+
+            return sse
+
+        if line.startswith(":"):
+            return None
+
+        fieldname, _, value = line.partition(":")
+
+        if value.startswith(" "):
+            value = value[1:]
+
+        if fieldname == "event":
+            self._event = value
+        elif fieldname == "data":
+            self._data.append(value)
+        elif fieldname == "id":
+            if "\0" in value:
+                pass
+            else:
+                self._last_event_id = value
+        elif fieldname == "retry":
+            try:
+                self._retry = int(value)
+            except (TypeError, ValueError):
+                pass
+        else:
+            pass  # Field is ignored.
+
+        return None
diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_exceptions.py b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..81605a8a65ed7f9cdae1a5aad58a8f57817b6886 --- /dev/null +++ b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_exceptions.py @@ -0,0 +1,7 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import httpx
+
+
+class SSEError(httpx.TransportError):
+    pass
diff --git a/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_models.py b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_models.py new file mode 100644 index 0000000000000000000000000000000000000000..1af57f8fd0d2f9872ae06f67a13416ea3fb848e0 --- /dev/null +++ b/venv/lib/python3.12/site-packages/sarvamai/core/http_sse/_models.py @@ -0,0 +1,17 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import json
+from dataclasses import dataclass
+from typing import Any, Optional
+
+
+@dataclass(frozen=True)
+class ServerSentEvent:
+    event: str = "message"
+    data: str = ""
+    id: str = ""
+    retry: Optional[int] = None
+
+    def json(self) -> Any:
+        """Parse the data field as JSON."""
+        return json.loads(self.data)
diff --git a/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a4d231ee59f1d9f472835cfb09e762786b5b9afb Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/client.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..64556a80dc66d25190149c547a88684b93842a6a Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/raw_client.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/raw_client.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2726f5a8f4691330b5128ed8830e6bb0731d7cac Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/text_to_speech/__pycache__/raw_client.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/audio_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/audio_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68a1a9800bdbf414bd36f54cda2c5a6799f3f6dd Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/audio_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/audio_output.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/audio_output.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d7ba16166a245241abe95ddebd9a1fdaccf7c89a Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/audio_output.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/base_job_parameters.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/base_job_parameters.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2d51dabf500a09ba4bf29c7939c2dcd88d8b77f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/base_job_parameters.cpython-312.pyc differ
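SSEDecoder.decode is a line-at-a-time state machine: event, data, id, and retry field lines only accumulate state and return None, and the blank line that terminates an SSE block flushes the accumulated state as a ServerSentEvent. A small sketch of that contract against the classes above; the sample field values are invented:

from sarvamai.core.http_sse._decoders import SSEDecoder

decoder = SSEDecoder()

# Field lines accumulate state and yield nothing.
assert decoder.decode("event: audio") is None
assert decoder.decode('data: {"chunk": 1}') is None
assert decoder.decode("id: 42") is None

# The empty line that ends an SSE block flushes the event.
sse = decoder.decode("")
assert sse is not None
print(sse.event, sse.id)  # audio 42
print(sse.json())         # {'chunk': 1} -- ServerSentEvent.json parses the data field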
diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/bulk_job_callback.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/bulk_job_callback.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..473bf196bf9769256a34a52dffcbd1bfaf1fa408 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/bulk_job_callback.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_message_tool_call.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_message_tool_call.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..43e5dc42488fe0fe07a64fd8dd1515e166badc76 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_message_tool_call.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_named_tool_choice_function.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_named_tool_choice_function.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..355b5ef42f181e21349d4a9565ceaf9496e62a79 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_named_tool_choice_function.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_request_message.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_request_message.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7dd52edd950696a0f8622147c3f2faab9746c7c Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_request_message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_request_tool_message.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_request_tool_message.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..368fc7d80505a68e1570941d734e25ea4bd1bf1b Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_request_tool_message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_response_message.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_response_message.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2019a167b3652f5d3d217461a37e2786de8a53f2 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chat_completion_response_message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/choice_delta.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/choice_delta.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cefb8e9112ca524aea8851334a5343d5b9ad3155 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/choice_delta.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/choice_delta_tool_call.cpython-312.pyc
b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/choice_delta_tool_call.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2b40e554ea87c44c8fa43fc847f64c82fc80b856 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/choice_delta_tool_call.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chunk_choice.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chunk_choice.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b8ccfa3ca1761005272abca7e541dc29afd78bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/chunk_choice.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/config_message.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/config_message.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a56085521a41dc8830e8a040b433400883f67846 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/config_message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f0a820e5441720b15b2d145ead70333242b73890 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5927ffc35fcae2153bdd116f7f1f5239d954b92e Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_model.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_model.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a67708a79fa3fb11857649dcccd1693e065b0380 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_output_audio_bitrate.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_output_audio_bitrate.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c32294360ba8eab931b5e7b7702b148e4232752a Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_output_audio_bitrate.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_speaker.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_speaker.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..745731886c597e55b8ccdd9243e4ed3b79b12e5d Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_speaker.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_target_language_code.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_target_language_code.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f4b8d0ede17774601548c1191ec808833610f08d Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/configure_connection_data_target_language_code.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/create_chat_completion_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/create_chat_completion_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b0505b0d0176a57f6b803c63e1395a5a9ca41932 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/create_chat_completion_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/diarized_entry.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/diarized_entry.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1690cf960ffa5084fb023f676ece8dae23f2343a Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/diarized_entry.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_error_code.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_error_code.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..69747ed98228945e8051d5402aaa57f8c364732b Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_error_code.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_error_message.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_error_message.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f1cd70bc5a3ebe61f93c133751c13c42b7a59fb6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_error_message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_job_detail.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_job_detail.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..be052d0271fa4025e753147abcba4f5dd4c49808 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_job_detail.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_job_status_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_job_status_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46a0ed9b0d4d8813c0418796828e7320227a4c9f Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_job_status_response.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_output_format.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_output_format.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9ed5891b28c9d9f3dd4c1ace6e37a108644f1580 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_output_format.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_upload_files_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_upload_files_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..df644a7d03033c793eb7dcbf1573616d60dad7bc Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_upload_files_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_webhook_callback.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_webhook_callback.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4ad23fda133f30ba17bc729bb86fa58bdd1a38a6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/doc_digitization_webhook_callback.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_code.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_code.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..344da5b19cf92f00fe32e14dbd51e163666da726 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_code.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_code2.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_code2.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..52e9c9fd39a7a77244087a12b82dd1bf0d32215b Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_code2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..99cc0355a45ab521ef07a1129925948c0cd22d9d Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_details2.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_details2.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..694d9e3d0360dc02d33db86bdeb4e98cd528dad1 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_details2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_message.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_message.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e7790ffbcb7b3aba2a256b09db4c87ae398e61b7 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_message.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_message2.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_message2.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dd4ff69bfe81bae17a85245d789ead06486bab1f Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_message2.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2abdd203982b8137549501d30d4f4841de3ef523 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/error_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/event_response_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/event_response_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5a7bf221c2e20d164b91181bfd3b9f9160e64510 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/event_response_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/events_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/events_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4bb37b04dc553ac3d688d94b985aea1be1fff65 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/events_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/events_data_signal_type.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/events_data_signal_type.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..586f6a55122436b72c522d47d02cceb9bbd55dd9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/events_data_signal_type.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/files_download_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/files_download_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..11c976a28491ea5d99c1b7663ef98f823330ffce Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/files_download_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/files_request.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/files_request.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..531672f588f59965f7ea1978a0a08ead91d72091 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/files_request.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/finish_reason.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/finish_reason.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b7328484bf572dcdd7ed80ae44a90dcd44f02532 Binary files /dev/null and 
b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/finish_reason.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/function_call.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/function_call.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b7de2a165bae4f88884709d310c72bee58b4c5b Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/function_call.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/input_audio_codec.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/input_audio_codec.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a142ab547d281d8a1f426dbcf7d4b8b33c140eb4 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/input_audio_codec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/job_status_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/job_status_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8078dd81287963f99565024998f23bc685224a25 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/job_status_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/language_identification_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/language_identification_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..95e3daf2ab8c75b63f6972ce24bf0af51138ad0e Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/language_identification_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/mode.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/mode.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e8fe7a72e4122adb543a16eee092cf339696c718 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/mode.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/numerals_format.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/numerals_format.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10266ed691dbd8f254b85f693ae96131578f5aa8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/numerals_format.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/ping_signal.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/ping_signal.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6a1759a251810b7fc79948706138ca9d0484c7c6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/ping_signal.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..57b5f291ed4ad910fe6761d5f47076167d0a14b0 Binary files 
/dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_delete_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_delete_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..79bd940d0fa8a2820aa47eafcdfab20192666f2e Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_delete_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_update_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_update_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0b6db5079c212fbe67f4cfc48599534afd3e1d0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/pronunciation_dictionary_update_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/reasoning_effort.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/reasoning_effort.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..60ddf9cfef7b5655bebcc44a445bf9faff371819 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/reasoning_effort.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/response_type.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/response_type.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..772ae943911e09306daacbcb7afe028cd9105d3f Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/response_type.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/sarvam_model_ids.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/sarvam_model_ids.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c4a713bb8ed77f5fa5ab1c4d0dcb2d74ad9b96c7 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/sarvam_model_ids.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_stream_bitrate.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_stream_bitrate.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c9b2c8507b72857e545dc1087ceef8e5b40e8348 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_stream_bitrate.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_job_parameters.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_job_parameters.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..429aaf3c9d84e8eeb5da0d6889e1400e0442da49 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_job_parameters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_language.cpython-312.pyc 
b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_language.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9b426cfcbfec362843a659c9d939c7e413f47f12 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_language.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_model.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_model.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2e449d55797b468db7df1ae02c7fd1f214cb774 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8f962de86cb9bebad9b65650d94571838696f6a6 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_response_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_response_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1c841a3e22a0a01635586f901004a72b9eaa5527 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_response_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_streaming_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_streaming_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6e78700569457bd264a73c6f7a7eb4710cae6148 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_streaming_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_job_parameters.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_job_parameters.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f7419740808393c084a0102eb880543cb467c423 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_job_parameters.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..35236df5afe0365939675367f8f84572a4faa996 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_streaming_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_streaming_response.cpython-312.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..a5afe4e8e917f915821537285feee320e628138b Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_streaming_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_transcription_data.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_transcription_data.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5ea7d4496f982bbdb10cb25acd964ef763121471 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/speech_to_text_translate_transcription_data.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/spoken_form_numerals_format.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/spoken_form_numerals_format.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5d3269f8fa3bba5fde0e7fe0d75189a7281682b8 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/spoken_form_numerals_format.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/task_detail.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/task_detail.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0e29522445d8ef4d51a61130563d3e0dbede5f1d Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/task_detail.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_language.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_language.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2af54a5ec5c4b4362637f2c57b8a07e8901ddc9c Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_language.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_model.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_model.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5b3ba75517a3a6dbd0763de194a642b8954ea9b9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_output_audio_codec.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_output_audio_codec.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d0262a07ad19dadb165941a37d1cfb27c67f83be Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_output_audio_codec.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_speaker.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_speaker.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dddbcfad83a6aa952fc76584cb5a7a43f23b711d Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/text_to_speech_speaker.cpython-312.pyc differ diff --git 
a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/transcription_metrics.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/transcription_metrics.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f6e9aab5dffe3884b33cb2e637013b0f32d2b396 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/transcription_metrics.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translate_mode.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translate_mode.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2b4aef3816e81c53bf7a063c394394b92bc6dd0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translate_mode.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translate_model.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translate_model.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..76ea31c96bf660b08746723de0b04ea1370ea21c Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translate_model.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translation_response.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translation_response.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..bfdde18abb19152b699fca45e57000cde404ae6e Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translation_response.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translatiterate_target_language.cpython-312.pyc b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translatiterate_target_language.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..47b70b4c7d27e1bcda3818b0bd5dbcd3304e66e9 Binary files /dev/null and b/venv/lib/python3.12/site-packages/sarvamai/types/__pycache__/translatiterate_target_language.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..f26bcf4d2de6eb136e31006ca3ab447d5e488adf --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_extensions-4.15.0.dist-info/licenses/LICENSE @@ -0,0 +1,279 @@ +A. HISTORY OF THE SOFTWARE +========================== + +Python was created in the early 1990s by Guido van Rossum at Stichting +Mathematisch Centrum (CWI, see https://www.cwi.nl) in the Netherlands +as a successor of a language called ABC. Guido remains Python's +principal author, although it includes many contributions from others. + +In 1995, Guido continued his work on Python at the Corporation for +National Research Initiatives (CNRI, see https://www.cnri.reston.va.us) +in Reston, Virginia where he released several versions of the +software. + +In May 2000, Guido and the Python core development team moved to +BeOpen.com to form the BeOpen PythonLabs team. In October of the same +year, the PythonLabs team moved to Digital Creations, which became +Zope Corporation. 
In 2001, the Python Software Foundation (PSF, see +https://www.python.org/psf/) was formed, a non-profit organization +created specifically to own Python-related Intellectual Property. +Zope Corporation was a sponsoring member of the PSF. + +All Python releases are Open Source (see https://opensource.org for +the Open Source Definition). Historically, most, but not all, Python +releases have also been GPL-compatible; the table below summarizes +the various releases. +
+    Release         Derived     Year        Owner       GPL-
+                    from                                compatible? (1)
+
+    0.9.0 thru 1.2              1991-1995   CWI         yes
+    1.3 thru 1.5.2  1.2         1995-1999   CNRI        yes
+    1.6             1.5.2       2000        CNRI        no
+    2.0             1.6         2000        BeOpen.com  no
+    1.6.1           1.6         2001        CNRI        yes (2)
+    2.1             2.0+1.6.1   2001        PSF         no
+    2.0.1           2.0+1.6.1   2001        PSF         yes
+    2.1.1           2.1+2.0.1   2001        PSF         yes
+    2.1.2           2.1.1       2002        PSF         yes
+    2.1.3           2.1.2       2002        PSF         yes
+    2.2 and above   2.1.1       2001-now    PSF         yes
+ +Footnotes: + +(1) GPL-compatible doesn't mean that we're distributing Python under + the GPL. All Python licenses, unlike the GPL, let you distribute + a modified version without making your changes open source. The + GPL-compatible licenses make it possible to combine Python with + other software that is released under the GPL; the others don't. + +(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, + because its license has a choice of law clause. According to + CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 + is "not incompatible" with the GPL. + +Thanks to the many outside volunteers who have worked under Guido's +direction to make these releases possible. + + +B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON +=============================================================== + +Python software and documentation are licensed under the +Python Software Foundation License Version 2. + +Starting with Python 3.8.6, examples, recipes, and other code in +the documentation are dual licensed under the PSF License Version 2 +and the Zero-Clause BSD license. + +Some software incorporated into Python is under different licenses. +The licenses are listed with code falling under that license. + + +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, +2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Python Software Foundation; +All Rights Reserved" are retained in Python alone or in any derivative version +prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis.
PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 +------------------------------------------- + +BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 + +1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an +office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the +Individual or Organization ("Licensee") accessing and otherwise using +this software in source or binary form and its associated +documentation ("the Software"). + +2. Subject to the terms and conditions of this BeOpen Python License +Agreement, BeOpen hereby grants Licensee a non-exclusive, +royalty-free, world-wide license to reproduce, analyze, test, perform +and/or display publicly, prepare derivative works, distribute, and +otherwise use the Software alone or in any derivative version, +provided, however, that the BeOpen Python License is retained in the +Software, alone or in any derivative version prepared by Licensee. + +3. BeOpen is making the Software available to Licensee on an "AS IS" +basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +4. BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE +SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS +AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY +DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +5. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +6. This License Agreement shall be governed by and interpreted in all +respects by the law of the State of California, excluding conflict of +law provisions. Nothing in this License Agreement shall be deemed to +create any relationship of agency, partnership, or joint venture +between BeOpen and Licensee. This License Agreement does not grant +permission to use BeOpen trademarks or trade names in a trademark +sense to endorse or promote products or services of Licensee, or any +third party. As an exception, the "BeOpen Python" logos available at +http://www.pythonlabs.com/logos.html may be used according to the +permissions granted on that web page. + +7. 
By copying, installing or otherwise using the software, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. + + +CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 +--------------------------------------- + +1. This LICENSE AGREEMENT is between the Corporation for National +Research Initiatives, having an office at 1895 Preston White Drive, +Reston, VA 20191 ("CNRI"), and the Individual or Organization +("Licensee") accessing and otherwise using Python 1.6.1 software in +source or binary form and its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, CNRI +hereby grants Licensee a nonexclusive, royalty-free, world-wide +license to reproduce, analyze, test, perform and/or display publicly, +prepare derivative works, distribute, and otherwise use Python 1.6.1 +alone or in any derivative version, provided, however, that CNRI's +License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) +1995-2001 Corporation for National Research Initiatives; All Rights +Reserved" are retained in Python 1.6.1 alone or in any derivative +version prepared by Licensee. Alternately, in lieu of CNRI's License +Agreement, Licensee may substitute the following text (omitting the +quotes): "Python 1.6.1 is made available subject to the terms and +conditions in CNRI's License Agreement. This Agreement together with +Python 1.6.1 may be located on the internet using the following +unique, persistent identifier (known as a handle): 1895.22/1013. This +Agreement may also be obtained from a proxy server on the internet +using the following URL: http://hdl.handle.net/1895.22/1013". + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python 1.6.1 or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python 1.6.1. + +4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" +basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. This License Agreement shall be governed by the federal +intellectual property law of the United States, including without +limitation the federal copyright law, and, to the extent such +U.S. federal law does not apply, by the law of the Commonwealth of +Virginia, excluding Virginia's conflict of law provisions. +Notwithstanding the foregoing, with regard to derivative works based +on Python 1.6.1 that incorporate non-separable material that was +previously distributed under the GNU General Public License (GPL), the +law of the Commonwealth of Virginia shall govern this License +Agreement only as to issues arising under or with respect to +Paragraphs 4, 5, and 7 of this License Agreement. 
Nothing in this +License Agreement shall be deemed to create any relationship of +agency, partnership, or joint venture between CNRI and Licensee. This +License Agreement does not grant permission to use CNRI trademarks or +trade name in a trademark sense to endorse or promote products or +services of Licensee, or any third party. + +8. By clicking on the "ACCEPT" button where indicated, or by copying, +installing or otherwise using Python 1.6.1, Licensee agrees to be +bound by the terms and conditions of this License Agreement. + + ACCEPT + + +CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 +-------------------------------------------------- + +Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, +The Netherlands. All rights reserved. + +Permission to use, copy, modify, and distribute this software and its +documentation for any purpose and without fee is hereby granted, +provided that the above copyright notice appear in all copies and that +both that copyright notice and this permission notice appear in +supporting documentation, and that the name of Stichting Mathematisch +Centrum or CWI not be used in advertising or publicity pertaining to +distribution of the software without specific, written prior +permission. + +STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO +THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE +FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +ZERO-CLAUSE BSD LICENSE FOR CODE IN THE PYTHON DOCUMENTATION +---------------------------------------------------------------------- + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/typing_inspection-0.4.2.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/typing_inspection-0.4.2.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..e825ad51621c5e34d370ba64f13f6854d89456a6 --- /dev/null +++ b/venv/lib/python3.12/site-packages/typing_inspection-0.4.2.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) Pydantic Services Inc. 
2025 to present + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/__init__.cpython-312.pyc b/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25210c4b633b805aceb0a662cc9d122ff69833b0 Binary files /dev/null and b/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/__init__.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/introspection.cpython-312.pyc b/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/introspection.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e5b1d0eb2ef40217173d8f4b7ef8a531365a306 Binary files /dev/null and b/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/introspection.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/typing_objects.cpython-312.pyc b/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/typing_objects.cpython-312.pyc new file mode 100644 index 0000000000000000000000000000000000000000..df51e060a6b5da8e3a457ce5361467e9503721a3 Binary files /dev/null and b/venv/lib/python3.12/site-packages/typing_inspection/__pycache__/typing_objects.cpython-312.pyc differ diff --git a/venv/lib/python3.12/site-packages/websockets-16.0.dist-info/licenses/LICENSE b/venv/lib/python3.12/site-packages/websockets-16.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..5d61ece22a75a759aed8e52af280eca28d35d6bf --- /dev/null +++ b/venv/lib/python3.12/site-packages/websockets-16.0.dist-info/licenses/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) Aymeric Augustin and contributors + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.