Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- venv/lib/python3.10/site-packages/anyio/__init__.py +108 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/to_interpreter.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/to_process.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_backends/__init__.py +0 -0
- venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py +0 -0
- venv/lib/python3.10/site-packages/anyio/_backends/_trio.py +1375 -0
- venv/lib/python3.10/site-packages/anyio/_core/__init__.py +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tempfile.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/_core/_asyncio_selector_thread.py +167 -0
- venv/lib/python3.10/site-packages/anyio/_core/_contextmanagers.py +200 -0
- venv/lib/python3.10/site-packages/anyio/_core/_eventloop.py +166 -0
- venv/lib/python3.10/site-packages/anyio/_core/_exceptions.py +134 -0
- venv/lib/python3.10/site-packages/anyio/_core/_fileio.py +740 -0
- venv/lib/python3.10/site-packages/anyio/_core/_resources.py +18 -0
- venv/lib/python3.10/site-packages/anyio/_core/_signals.py +27 -0
- venv/lib/python3.10/site-packages/anyio/_core/_sockets.py +934 -0
- venv/lib/python3.10/site-packages/anyio/_core/_streams.py +52 -0
- venv/lib/python3.10/site-packages/anyio/_core/_subprocesses.py +202 -0
- venv/lib/python3.10/site-packages/anyio/_core/_synchronization.py +732 -0
- venv/lib/python3.10/site-packages/anyio/_core/_tasks.py +158 -0
- venv/lib/python3.10/site-packages/anyio/_core/_tempfile.py +616 -0
- venv/lib/python3.10/site-packages/anyio/_core/_testing.py +78 -0
- venv/lib/python3.10/site-packages/anyio/_core/_typedattr.py +81 -0
- venv/lib/python3.10/site-packages/anyio/abc/__init__.py +58 -0
- venv/lib/python3.10/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_eventloop.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc +0 -0
venv/lib/python3.10/site-packages/anyio/__init__.py
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from ._core._contextmanagers import AsyncContextManagerMixin as AsyncContextManagerMixin
|
| 4 |
+
from ._core._contextmanagers import ContextManagerMixin as ContextManagerMixin
|
| 5 |
+
from ._core._eventloop import current_time as current_time
|
| 6 |
+
from ._core._eventloop import get_all_backends as get_all_backends
|
| 7 |
+
from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class
|
| 8 |
+
from ._core._eventloop import run as run
|
| 9 |
+
from ._core._eventloop import sleep as sleep
|
| 10 |
+
from ._core._eventloop import sleep_forever as sleep_forever
|
| 11 |
+
from ._core._eventloop import sleep_until as sleep_until
|
| 12 |
+
from ._core._exceptions import BrokenResourceError as BrokenResourceError
|
| 13 |
+
from ._core._exceptions import BrokenWorkerInterpreter as BrokenWorkerInterpreter
|
| 14 |
+
from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess
|
| 15 |
+
from ._core._exceptions import BusyResourceError as BusyResourceError
|
| 16 |
+
from ._core._exceptions import ClosedResourceError as ClosedResourceError
|
| 17 |
+
from ._core._exceptions import ConnectionFailed as ConnectionFailed
|
| 18 |
+
from ._core._exceptions import DelimiterNotFound as DelimiterNotFound
|
| 19 |
+
from ._core._exceptions import EndOfStream as EndOfStream
|
| 20 |
+
from ._core._exceptions import IncompleteRead as IncompleteRead
|
| 21 |
+
from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError
|
| 22 |
+
from ._core._exceptions import WouldBlock as WouldBlock
|
| 23 |
+
from ._core._fileio import AsyncFile as AsyncFile
|
| 24 |
+
from ._core._fileio import Path as Path
|
| 25 |
+
from ._core._fileio import open_file as open_file
|
| 26 |
+
from ._core._fileio import wrap_file as wrap_file
|
| 27 |
+
from ._core._resources import aclose_forcefully as aclose_forcefully
|
| 28 |
+
from ._core._signals import open_signal_receiver as open_signal_receiver
|
| 29 |
+
from ._core._sockets import TCPConnectable as TCPConnectable
|
| 30 |
+
from ._core._sockets import UNIXConnectable as UNIXConnectable
|
| 31 |
+
from ._core._sockets import as_connectable as as_connectable
|
| 32 |
+
from ._core._sockets import connect_tcp as connect_tcp
|
| 33 |
+
from ._core._sockets import connect_unix as connect_unix
|
| 34 |
+
from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket
|
| 35 |
+
from ._core._sockets import (
|
| 36 |
+
create_connected_unix_datagram_socket as create_connected_unix_datagram_socket,
|
| 37 |
+
)
|
| 38 |
+
from ._core._sockets import create_tcp_listener as create_tcp_listener
|
| 39 |
+
from ._core._sockets import create_udp_socket as create_udp_socket
|
| 40 |
+
from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket
|
| 41 |
+
from ._core._sockets import create_unix_listener as create_unix_listener
|
| 42 |
+
from ._core._sockets import getaddrinfo as getaddrinfo
|
| 43 |
+
from ._core._sockets import getnameinfo as getnameinfo
|
| 44 |
+
from ._core._sockets import notify_closing as notify_closing
|
| 45 |
+
from ._core._sockets import wait_readable as wait_readable
|
| 46 |
+
from ._core._sockets import wait_socket_readable as wait_socket_readable
|
| 47 |
+
from ._core._sockets import wait_socket_writable as wait_socket_writable
|
| 48 |
+
from ._core._sockets import wait_writable as wait_writable
|
| 49 |
+
from ._core._streams import create_memory_object_stream as create_memory_object_stream
|
| 50 |
+
from ._core._subprocesses import open_process as open_process
|
| 51 |
+
from ._core._subprocesses import run_process as run_process
|
| 52 |
+
from ._core._synchronization import CapacityLimiter as CapacityLimiter
|
| 53 |
+
from ._core._synchronization import (
|
| 54 |
+
CapacityLimiterStatistics as CapacityLimiterStatistics,
|
| 55 |
+
)
|
| 56 |
+
from ._core._synchronization import Condition as Condition
|
| 57 |
+
from ._core._synchronization import ConditionStatistics as ConditionStatistics
|
| 58 |
+
from ._core._synchronization import Event as Event
|
| 59 |
+
from ._core._synchronization import EventStatistics as EventStatistics
|
| 60 |
+
from ._core._synchronization import Lock as Lock
|
| 61 |
+
from ._core._synchronization import LockStatistics as LockStatistics
|
| 62 |
+
from ._core._synchronization import ResourceGuard as ResourceGuard
|
| 63 |
+
from ._core._synchronization import Semaphore as Semaphore
|
| 64 |
+
from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics
|
| 65 |
+
from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED
|
| 66 |
+
from ._core._tasks import CancelScope as CancelScope
|
| 67 |
+
from ._core._tasks import create_task_group as create_task_group
|
| 68 |
+
from ._core._tasks import current_effective_deadline as current_effective_deadline
|
| 69 |
+
from ._core._tasks import fail_after as fail_after
|
| 70 |
+
from ._core._tasks import move_on_after as move_on_after
|
| 71 |
+
from ._core._tempfile import NamedTemporaryFile as NamedTemporaryFile
|
| 72 |
+
from ._core._tempfile import SpooledTemporaryFile as SpooledTemporaryFile
|
| 73 |
+
from ._core._tempfile import TemporaryDirectory as TemporaryDirectory
|
| 74 |
+
from ._core._tempfile import TemporaryFile as TemporaryFile
|
| 75 |
+
from ._core._tempfile import gettempdir as gettempdir
|
| 76 |
+
from ._core._tempfile import gettempdirb as gettempdirb
|
| 77 |
+
from ._core._tempfile import mkdtemp as mkdtemp
|
| 78 |
+
from ._core._tempfile import mkstemp as mkstemp
|
| 79 |
+
from ._core._testing import TaskInfo as TaskInfo
|
| 80 |
+
from ._core._testing import get_current_task as get_current_task
|
| 81 |
+
from ._core._testing import get_running_tasks as get_running_tasks
|
| 82 |
+
from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked
|
| 83 |
+
from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider
|
| 84 |
+
from ._core._typedattr import TypedAttributeSet as TypedAttributeSet
|
| 85 |
+
from ._core._typedattr import typed_attribute as typed_attribute
|
| 86 |
+
|
| 87 |
+
# Re-export imports so they look like they live directly in this package
|
| 88 |
+
for __value in list(locals().values()):
|
| 89 |
+
if getattr(__value, "__module__", "").startswith("anyio."):
|
| 90 |
+
__value.__module__ = __name__
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
del __value
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
def __getattr__(attr: str) -> type[BrokenWorkerInterpreter]:
|
| 97 |
+
"""Support deprecated aliases."""
|
| 98 |
+
if attr == "BrokenWorkerIntepreter":
|
| 99 |
+
import warnings
|
| 100 |
+
|
| 101 |
+
warnings.warn(
|
| 102 |
+
"The 'BrokenWorkerIntepreter' alias is deprecated, use 'BrokenWorkerInterpreter' instead.",
|
| 103 |
+
DeprecationWarning,
|
| 104 |
+
stacklevel=2,
|
| 105 |
+
)
|
| 106 |
+
return BrokenWorkerInterpreter
|
| 107 |
+
|
| 108 |
+
raise AttributeError(f"module {__name__!r} has no attribute {attr!r}")
|
venv/lib/python3.10/site-packages/anyio/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (4.23 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/__pycache__/from_thread.cpython-310.pyc
ADDED
|
Binary file (17.5 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/__pycache__/lowlevel.cpython-310.pyc
ADDED
|
Binary file (5.23 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/__pycache__/pytest_plugin.cpython-310.pyc
ADDED
|
Binary file (8.82 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/__pycache__/to_interpreter.cpython-310.pyc
ADDED
|
Binary file (7.08 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/__pycache__/to_process.cpython-310.pyc
ADDED
|
Binary file (6.67 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/__pycache__/to_thread.cpython-310.pyc
ADDED
|
Binary file (2.68 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_backends/__init__.py
ADDED
|
File without changes
|
venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (251 Bytes). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_asyncio.cpython-310.pyc
ADDED
|
Binary file (79.6 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_backends/__pycache__/_trio.cpython-310.pyc
ADDED
|
Binary file (45.9 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_backends/_asyncio.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
venv/lib/python3.10/site-packages/anyio/_backends/_trio.py
ADDED
|
@@ -0,0 +1,1375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import array
|
| 4 |
+
import math
|
| 5 |
+
import os
|
| 6 |
+
import socket
|
| 7 |
+
import sys
|
| 8 |
+
import types
|
| 9 |
+
import weakref
|
| 10 |
+
from collections.abc import (
|
| 11 |
+
AsyncGenerator,
|
| 12 |
+
AsyncIterator,
|
| 13 |
+
Awaitable,
|
| 14 |
+
Callable,
|
| 15 |
+
Collection,
|
| 16 |
+
Coroutine,
|
| 17 |
+
Iterable,
|
| 18 |
+
Sequence,
|
| 19 |
+
)
|
| 20 |
+
from concurrent.futures import Future
|
| 21 |
+
from contextlib import AbstractContextManager
|
| 22 |
+
from dataclasses import dataclass
|
| 23 |
+
from functools import partial
|
| 24 |
+
from io import IOBase
|
| 25 |
+
from os import PathLike
|
| 26 |
+
from signal import Signals
|
| 27 |
+
from socket import AddressFamily, SocketKind
|
| 28 |
+
from types import TracebackType
|
| 29 |
+
from typing import (
|
| 30 |
+
IO,
|
| 31 |
+
TYPE_CHECKING,
|
| 32 |
+
Any,
|
| 33 |
+
Generic,
|
| 34 |
+
NoReturn,
|
| 35 |
+
TypeVar,
|
| 36 |
+
cast,
|
| 37 |
+
overload,
|
| 38 |
+
)
|
| 39 |
+
|
| 40 |
+
import trio.from_thread
|
| 41 |
+
import trio.lowlevel
|
| 42 |
+
from outcome import Error, Outcome, Value
|
| 43 |
+
from trio.lowlevel import (
|
| 44 |
+
current_root_task,
|
| 45 |
+
current_task,
|
| 46 |
+
notify_closing,
|
| 47 |
+
wait_readable,
|
| 48 |
+
wait_writable,
|
| 49 |
+
)
|
| 50 |
+
from trio.socket import SocketType as TrioSocketType
|
| 51 |
+
from trio.to_thread import run_sync
|
| 52 |
+
|
| 53 |
+
from .. import (
|
| 54 |
+
CapacityLimiterStatistics,
|
| 55 |
+
EventStatistics,
|
| 56 |
+
LockStatistics,
|
| 57 |
+
TaskInfo,
|
| 58 |
+
WouldBlock,
|
| 59 |
+
abc,
|
| 60 |
+
)
|
| 61 |
+
from .._core._eventloop import claim_worker_thread
|
| 62 |
+
from .._core._exceptions import (
|
| 63 |
+
BrokenResourceError,
|
| 64 |
+
BusyResourceError,
|
| 65 |
+
ClosedResourceError,
|
| 66 |
+
EndOfStream,
|
| 67 |
+
)
|
| 68 |
+
from .._core._sockets import convert_ipv6_sockaddr
|
| 69 |
+
from .._core._streams import create_memory_object_stream
|
| 70 |
+
from .._core._synchronization import (
|
| 71 |
+
CapacityLimiter as BaseCapacityLimiter,
|
| 72 |
+
)
|
| 73 |
+
from .._core._synchronization import Event as BaseEvent
|
| 74 |
+
from .._core._synchronization import Lock as BaseLock
|
| 75 |
+
from .._core._synchronization import (
|
| 76 |
+
ResourceGuard,
|
| 77 |
+
SemaphoreStatistics,
|
| 78 |
+
)
|
| 79 |
+
from .._core._synchronization import Semaphore as BaseSemaphore
|
| 80 |
+
from .._core._tasks import CancelScope as BaseCancelScope
|
| 81 |
+
from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType
|
| 82 |
+
from ..abc._eventloop import AsyncBackend, StrOrBytesPath
|
| 83 |
+
from ..streams.memory import MemoryObjectSendStream
|
| 84 |
+
|
| 85 |
+
if TYPE_CHECKING:
|
| 86 |
+
from _typeshed import FileDescriptorLike
|
| 87 |
+
|
| 88 |
+
if sys.version_info >= (3, 10):
|
| 89 |
+
from typing import ParamSpec
|
| 90 |
+
else:
|
| 91 |
+
from typing_extensions import ParamSpec
|
| 92 |
+
|
| 93 |
+
if sys.version_info >= (3, 11):
|
| 94 |
+
from typing import TypeVarTuple, Unpack
|
| 95 |
+
else:
|
| 96 |
+
from exceptiongroup import BaseExceptionGroup
|
| 97 |
+
from typing_extensions import TypeVarTuple, Unpack
|
| 98 |
+
|
| 99 |
+
T = TypeVar("T")
|
| 100 |
+
T_Retval = TypeVar("T_Retval")
|
| 101 |
+
T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType)
|
| 102 |
+
PosArgsT = TypeVarTuple("PosArgsT")
|
| 103 |
+
P = ParamSpec("P")
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
#
|
| 107 |
+
# Event loop
|
| 108 |
+
#
|
| 109 |
+
|
| 110 |
+
RunVar = trio.lowlevel.RunVar
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
#
|
| 114 |
+
# Timeouts and cancellation
|
| 115 |
+
#
|
| 116 |
+
|
| 117 |
+
|
| 118 |
+
class CancelScope(BaseCancelScope):
|
| 119 |
+
def __new__(
|
| 120 |
+
cls, original: trio.CancelScope | None = None, **kwargs: object
|
| 121 |
+
) -> CancelScope:
|
| 122 |
+
return object.__new__(cls)
|
| 123 |
+
|
| 124 |
+
def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None:
|
| 125 |
+
self.__original = original or trio.CancelScope(**kwargs)
|
| 126 |
+
|
| 127 |
+
def __enter__(self) -> CancelScope:
|
| 128 |
+
self.__original.__enter__()
|
| 129 |
+
return self
|
| 130 |
+
|
| 131 |
+
def __exit__(
|
| 132 |
+
self,
|
| 133 |
+
exc_type: type[BaseException] | None,
|
| 134 |
+
exc_val: BaseException | None,
|
| 135 |
+
exc_tb: TracebackType | None,
|
| 136 |
+
) -> bool:
|
| 137 |
+
return self.__original.__exit__(exc_type, exc_val, exc_tb)
|
| 138 |
+
|
| 139 |
+
def cancel(self) -> None:
|
| 140 |
+
self.__original.cancel()
|
| 141 |
+
|
| 142 |
+
@property
|
| 143 |
+
def deadline(self) -> float:
|
| 144 |
+
return self.__original.deadline
|
| 145 |
+
|
| 146 |
+
@deadline.setter
|
| 147 |
+
def deadline(self, value: float) -> None:
|
| 148 |
+
self.__original.deadline = value
|
| 149 |
+
|
| 150 |
+
@property
|
| 151 |
+
def cancel_called(self) -> bool:
|
| 152 |
+
return self.__original.cancel_called
|
| 153 |
+
|
| 154 |
+
@property
|
| 155 |
+
def cancelled_caught(self) -> bool:
|
| 156 |
+
return self.__original.cancelled_caught
|
| 157 |
+
|
| 158 |
+
@property
|
| 159 |
+
def shield(self) -> bool:
|
| 160 |
+
return self.__original.shield
|
| 161 |
+
|
| 162 |
+
@shield.setter
|
| 163 |
+
def shield(self, value: bool) -> None:
|
| 164 |
+
self.__original.shield = value
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
#
|
| 168 |
+
# Task groups
|
| 169 |
+
#
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class TaskGroup(abc.TaskGroup):
|
| 173 |
+
def __init__(self) -> None:
|
| 174 |
+
self._active = False
|
| 175 |
+
self._nursery_manager = trio.open_nursery(strict_exception_groups=True)
|
| 176 |
+
self.cancel_scope = None # type: ignore[assignment]
|
| 177 |
+
|
| 178 |
+
async def __aenter__(self) -> TaskGroup:
|
| 179 |
+
self._active = True
|
| 180 |
+
self._nursery = await self._nursery_manager.__aenter__()
|
| 181 |
+
self.cancel_scope = CancelScope(self._nursery.cancel_scope)
|
| 182 |
+
return self
|
| 183 |
+
|
| 184 |
+
async def __aexit__(
|
| 185 |
+
self,
|
| 186 |
+
exc_type: type[BaseException] | None,
|
| 187 |
+
exc_val: BaseException | None,
|
| 188 |
+
exc_tb: TracebackType | None,
|
| 189 |
+
) -> bool:
|
| 190 |
+
try:
|
| 191 |
+
# trio.Nursery.__exit__ returns bool; .open_nursery has wrong type
|
| 192 |
+
return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) # type: ignore[return-value]
|
| 193 |
+
except BaseExceptionGroup as exc:
|
| 194 |
+
if not exc.split(trio.Cancelled)[1]:
|
| 195 |
+
raise trio.Cancelled._create() from exc
|
| 196 |
+
|
| 197 |
+
raise
|
| 198 |
+
finally:
|
| 199 |
+
del exc_val, exc_tb
|
| 200 |
+
self._active = False
|
| 201 |
+
|
| 202 |
+
def start_soon(
|
| 203 |
+
self,
|
| 204 |
+
func: Callable[[Unpack[PosArgsT]], Awaitable[Any]],
|
| 205 |
+
*args: Unpack[PosArgsT],
|
| 206 |
+
name: object = None,
|
| 207 |
+
) -> None:
|
| 208 |
+
if not self._active:
|
| 209 |
+
raise RuntimeError(
|
| 210 |
+
"This task group is not active; no new tasks can be started."
|
| 211 |
+
)
|
| 212 |
+
|
| 213 |
+
self._nursery.start_soon(func, *args, name=name)
|
| 214 |
+
|
| 215 |
+
async def start(
|
| 216 |
+
self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None
|
| 217 |
+
) -> Any:
|
| 218 |
+
if not self._active:
|
| 219 |
+
raise RuntimeError(
|
| 220 |
+
"This task group is not active; no new tasks can be started."
|
| 221 |
+
)
|
| 222 |
+
|
| 223 |
+
return await self._nursery.start(func, *args, name=name)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
#
|
| 227 |
+
# Threads
|
| 228 |
+
#
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
class BlockingPortal(abc.BlockingPortal):
|
| 232 |
+
def __new__(cls) -> BlockingPortal:
|
| 233 |
+
return object.__new__(cls)
|
| 234 |
+
|
| 235 |
+
def __init__(self) -> None:
|
| 236 |
+
super().__init__()
|
| 237 |
+
self._token = trio.lowlevel.current_trio_token()
|
| 238 |
+
|
| 239 |
+
def _spawn_task_from_thread(
|
| 240 |
+
self,
|
| 241 |
+
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval],
|
| 242 |
+
args: tuple[Unpack[PosArgsT]],
|
| 243 |
+
kwargs: dict[str, Any],
|
| 244 |
+
name: object,
|
| 245 |
+
future: Future[T_Retval],
|
| 246 |
+
) -> None:
|
| 247 |
+
trio.from_thread.run_sync(
|
| 248 |
+
partial(self._task_group.start_soon, name=name),
|
| 249 |
+
self._call_func,
|
| 250 |
+
func,
|
| 251 |
+
args,
|
| 252 |
+
kwargs,
|
| 253 |
+
future,
|
| 254 |
+
trio_token=self._token,
|
| 255 |
+
)
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
#
|
| 259 |
+
# Subprocesses
|
| 260 |
+
#
|
| 261 |
+
|
| 262 |
+
|
| 263 |
+
@dataclass(eq=False)
|
| 264 |
+
class ReceiveStreamWrapper(abc.ByteReceiveStream):
|
| 265 |
+
_stream: trio.abc.ReceiveStream
|
| 266 |
+
|
| 267 |
+
async def receive(self, max_bytes: int | None = None) -> bytes:
|
| 268 |
+
try:
|
| 269 |
+
data = await self._stream.receive_some(max_bytes)
|
| 270 |
+
except trio.ClosedResourceError as exc:
|
| 271 |
+
raise ClosedResourceError from exc.__cause__
|
| 272 |
+
except trio.BrokenResourceError as exc:
|
| 273 |
+
raise BrokenResourceError from exc.__cause__
|
| 274 |
+
|
| 275 |
+
if data:
|
| 276 |
+
return bytes(data)
|
| 277 |
+
else:
|
| 278 |
+
raise EndOfStream
|
| 279 |
+
|
| 280 |
+
async def aclose(self) -> None:
|
| 281 |
+
await self._stream.aclose()
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
@dataclass(eq=False)
|
| 285 |
+
class SendStreamWrapper(abc.ByteSendStream):
|
| 286 |
+
_stream: trio.abc.SendStream
|
| 287 |
+
|
| 288 |
+
async def send(self, item: bytes) -> None:
|
| 289 |
+
try:
|
| 290 |
+
await self._stream.send_all(item)
|
| 291 |
+
except trio.ClosedResourceError as exc:
|
| 292 |
+
raise ClosedResourceError from exc.__cause__
|
| 293 |
+
except trio.BrokenResourceError as exc:
|
| 294 |
+
raise BrokenResourceError from exc.__cause__
|
| 295 |
+
|
| 296 |
+
async def aclose(self) -> None:
|
| 297 |
+
await self._stream.aclose()
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
@dataclass(eq=False)
class Process(abc.Process):
    """AnyIO wrapper around a :class:`trio.Process`.

    Exposes the child's standard pipes as AnyIO byte streams and delegates
    process control (signals, waiting) to the underlying trio process.
    """

    _process: trio.Process
    # Wrapped standard pipes; each is None when the corresponding pipe was
    # not requested when the process was opened
    _stdin: abc.ByteSendStream | None
    _stdout: abc.ByteReceiveStream | None
    _stderr: abc.ByteReceiveStream | None

    async def aclose(self) -> None:
        """Close the pipes and wait for the process to exit.

        If waiting is interrupted (e.g. by cancellation), the child is killed
        and reaped under a shielded scope before the exception is re-raised.
        """
        # Shield the pipe closes so an outer cancellation cannot interrupt
        # cleanup halfway through
        with CancelScope(shield=True):
            if self._stdin:
                await self._stdin.aclose()
            if self._stdout:
                await self._stdout.aclose()
            if self._stderr:
                await self._stderr.aclose()

        try:
            await self.wait()
        except BaseException:
            # Don't leave a zombie/orphan behind: kill and reap under a shield
            self.kill()
            with CancelScope(shield=True):
                await self.wait()
            raise

    async def wait(self) -> int:
        """Wait for the process to exit and return its exit code."""
        return await self._process.wait()

    def terminate(self) -> None:
        self._process.terminate()

    def kill(self) -> None:
        self._process.kill()

    def send_signal(self, signal: Signals) -> None:
        self._process.send_signal(signal)

    @property
    def pid(self) -> int:
        return self._process.pid

    @property
    def returncode(self) -> int | None:
        # None while the process is still running
        return self._process.returncode

    @property
    def stdin(self) -> abc.ByteSendStream | None:
        return self._stdin

    @property
    def stdout(self) -> abc.ByteReceiveStream | None:
        return self._stdout

    @property
    def stderr(self) -> abc.ByteReceiveStream | None:
        return self._stderr
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
class _ProcessPoolShutdownInstrument(trio.abc.Instrument):
    """Trio instrument whose ``after_run`` hook only defers to the base class.

    The base hook is a no-op here; the subclass exists so it can be registered
    as an instrument object.
    """

    def after_run(self) -> None:
        super().after_run()
|
| 360 |
+
|
| 361 |
+
|
| 362 |
+
# Run-scoped (per trio event loop) storage for the default worker-process
# capacity limiter
current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar(
    "current_default_worker_process_limiter"
)
|
| 365 |
+
|
| 366 |
+
|
| 367 |
+
async def _shutdown_process_pool(workers: set[abc.Process]) -> None:
    """Kill and reap all worker processes when the event loop shuts down.

    Runs as a system task: it sleeps forever and relies on being cancelled at
    loop shutdown. On cancellation, any worker that has not yet exited is
    killed, then every worker is closed under a shielded scope so the cleanup
    itself cannot be cancelled.
    """
    try:
        await trio.sleep(math.inf)
    except trio.Cancelled:
        for process in workers:
            if process.returncode is None:
                process.kill()

        with CancelScope(shield=True):
            for process in workers:
                await process.aclose()
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
#
|
| 381 |
+
# Sockets and networking
|
| 382 |
+
#
|
| 383 |
+
|
| 384 |
+
|
| 385 |
+
class _TrioSocketMixin(Generic[T_SockAddr]):
    """Shared plumbing for trio socket wrappers: close tracking and error mapping."""

    def __init__(self, trio_socket: TrioSocketType) -> None:
        self._trio_socket = trio_socket
        # Set by aclose(); lets _convert_socket_error() distinguish an explicit
        # close from an unexpected breakage
        self._closed = False

    def _check_closed(self) -> None:
        """Raise the appropriate AnyIO exception if the socket is unusable."""
        if self._closed:
            raise ClosedResourceError
        if self._trio_socket.fileno() < 0:
            raise BrokenResourceError

    @property
    def _raw_socket(self) -> socket.socket:
        # Reach into trio's wrapper for the underlying stdlib socket object
        return self._trio_socket._sock  # type: ignore[attr-defined]

    async def aclose(self) -> None:
        # Only mark as closed if the fd was still valid, so that a socket that
        # broke beforehand keeps reporting BrokenResourceError
        if self._trio_socket.fileno() >= 0:
            self._closed = True
            self._trio_socket.close()

    def _convert_socket_error(self, exc: BaseException) -> NoReturn:
        """Translate a trio/OS-level error into AnyIO's exception hierarchy.

        Order matters: an explicit local close (``self._closed``) must map to
        :exc:`ClosedResourceError` even when the OS reported a plain OSError.
        """
        if isinstance(exc, trio.ClosedResourceError):
            raise ClosedResourceError from exc
        elif self._trio_socket.fileno() < 0 and self._closed:
            raise ClosedResourceError from None
        elif isinstance(exc, OSError):
            raise BrokenResourceError from exc
        else:
            # Not a resource error (e.g. cancellation): propagate unchanged
            raise exc
|
| 414 |
+
|
| 415 |
+
|
| 416 |
+
class SocketStream(_TrioSocketMixin, abc.SocketStream):
    """Stream socket wrapper with per-direction reentrancy guards."""

    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._receive_guard = ResourceGuard("reading from")
        self._send_guard = ResourceGuard("writing to")

    async def receive(self, max_bytes: int = 65536) -> bytes:
        with self._receive_guard:
            try:
                chunk = await self._trio_socket.recv(max_bytes)
            except BaseException as err:
                self._convert_socket_error(err)

            # recv() returning b"" means the peer closed the connection
            if not chunk:
                raise EndOfStream

            return chunk

    async def send(self, item: bytes) -> None:
        with self._send_guard:
            # Loop over a zero-copy view until the whole payload has been sent
            remaining = memoryview(item)
            while remaining:
                try:
                    sent = await self._trio_socket.send(remaining)
                except BaseException as err:
                    self._convert_socket_error(err)

                remaining = remaining[sent:]

    async def send_eof(self) -> None:
        # Half-close: no further writes, reads still possible
        self._trio_socket.shutdown(socket.SHUT_WR)
|
| 447 |
+
|
| 448 |
+
|
| 449 |
+
class UNIXSocketStream(SocketStream, abc.UNIXSocketStream):
    """UNIX-domain stream socket with SCM_RIGHTS file descriptor passing."""

    async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]:
        """Receive a message of up to *msglen* bytes plus up to *maxfds* fds.

        Raises :exc:`EndOfStream` when the peer has closed the connection
        (no message and no ancillary data), and :exc:`RuntimeError` when
        ancillary data other than SCM_RIGHTS arrives.
        """
        if not isinstance(msglen, int) or msglen < 0:
            raise ValueError("msglen must be a non-negative integer")
        if not isinstance(maxfds, int) or maxfds < 1:
            raise ValueError("maxfds must be a positive integer")

        # Native C ints, as SCM_RIGHTS payloads are arrays of int fds
        fds = array.array("i")
        await trio.lowlevel.checkpoint()
        with self._receive_guard:
            while True:
                try:
                    message, ancdata, flags, addr = await self._trio_socket.recvmsg(
                        msglen, socket.CMSG_LEN(maxfds * fds.itemsize)
                    )
                except BaseException as exc:
                    self._convert_socket_error(exc)
                else:
                    if not message and not ancdata:
                        raise EndOfStream

                    break

        for cmsg_level, cmsg_type, cmsg_data in ancdata:
            # Only fd-passing ancillary data is expected on this stream
            if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS:
                raise RuntimeError(
                    f"Received unexpected ancillary data; message = {message!r}, "
                    f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}"
                )

            # Drop any truncated trailing item before decoding the fd array
            fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)])

        return message, list(fds)

    async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None:
        """Send *message* along with the given file descriptors.

        *fds* may mix raw integer descriptors and file-like objects (their
        ``fileno()`` is used); other item types are silently ignored.
        """
        if not message:
            raise ValueError("message must not be empty")
        if not fds:
            raise ValueError("fds must not be empty")

        filenos: list[int] = []
        for fd in fds:
            if isinstance(fd, int):
                filenos.append(fd)
            elif isinstance(fd, IOBase):
                filenos.append(fd.fileno())

        fdarray = array.array("i", filenos)
        await trio.lowlevel.checkpoint()
        with self._send_guard:
            while True:
                try:
                    await self._trio_socket.sendmsg(
                        [message],
                        [
                            (
                                socket.SOL_SOCKET,
                                socket.SCM_RIGHTS,
                                fdarray,
                            )
                        ],
                    )
                    break
                except BaseException as exc:
                    self._convert_socket_error(exc)
|
| 514 |
+
|
| 515 |
+
|
| 516 |
+
class TCPSocketListener(_TrioSocketMixin, abc.SocketListener):
    """Listener that accepts incoming TCP connections as :class:`SocketStream`."""

    def __init__(self, raw_socket: socket.socket):
        super().__init__(trio.socket.from_stdlib_socket(raw_socket))
        self._accept_guard = ResourceGuard("accepting connections from")

    async def accept(self) -> SocketStream:
        with self._accept_guard:
            try:
                accepted_socket, _addr = await self._trio_socket.accept()
            except BaseException as err:
                self._convert_socket_error(err)

        # Disable Nagle's algorithm on the freshly accepted connection
        accepted_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        return SocketStream(accepted_socket)
|
| 530 |
+
|
| 531 |
+
|
| 532 |
+
class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener):
    """Listener that accepts incoming UNIX-domain connections."""

    def __init__(self, raw_socket: socket.socket):
        super().__init__(trio.socket.from_stdlib_socket(raw_socket))
        self._accept_guard = ResourceGuard("accepting connections from")

    async def accept(self) -> UNIXSocketStream:
        with self._accept_guard:
            try:
                accepted_socket, _addr = await self._trio_socket.accept()
            except BaseException as err:
                self._convert_socket_error(err)

        return UNIXSocketStream(accepted_socket)
|
| 545 |
+
|
| 546 |
+
|
| 547 |
+
class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket):
    """Unconnected UDP socket: every datagram carries an explicit peer address."""

    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._send_guard = ResourceGuard("writing to")
        self._receive_guard = ResourceGuard("reading from")

    async def receive(self) -> tuple[bytes, IPSockAddrType]:
        with self._receive_guard:
            try:
                payload, sender = await self._trio_socket.recvfrom(65536)
                # Normalize 4-tuple IPv6 addresses to AnyIO's address format
                return payload, convert_ipv6_sockaddr(sender)
            except BaseException as err:
                self._convert_socket_error(err)

    async def send(self, item: UDPPacketType) -> None:
        with self._send_guard:
            try:
                # item is a (data, address) pair
                await self._trio_socket.sendto(*item)
            except BaseException as err:
                self._convert_socket_error(err)
|
| 567 |
+
|
| 568 |
+
|
| 569 |
+
class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket):
    """Connected UDP socket: datagrams flow to and from a fixed peer."""

    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._send_guard = ResourceGuard("writing to")
        self._receive_guard = ResourceGuard("reading from")

    async def receive(self) -> bytes:
        with self._receive_guard:
            try:
                return await self._trio_socket.recv(65536)
            except BaseException as err:
                self._convert_socket_error(err)

    async def send(self, item: bytes) -> None:
        with self._send_guard:
            try:
                await self._trio_socket.send(item)
            except BaseException as err:
                self._convert_socket_error(err)
|
| 588 |
+
|
| 589 |
+
|
| 590 |
+
class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket):
    """Unconnected UNIX-domain datagram socket."""

    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._send_guard = ResourceGuard("writing to")
        self._receive_guard = ResourceGuard("reading from")

    async def receive(self) -> UNIXDatagramPacketType:
        with self._receive_guard:
            try:
                payload, sender = await self._trio_socket.recvfrom(65536)
                return payload, sender
            except BaseException as err:
                self._convert_socket_error(err)

    async def send(self, item: UNIXDatagramPacketType) -> None:
        with self._send_guard:
            try:
                # item is a (data, path) pair
                await self._trio_socket.sendto(*item)
            except BaseException as err:
                self._convert_socket_error(err)
|
| 610 |
+
|
| 611 |
+
|
| 612 |
+
class ConnectedUNIXDatagramSocket(
    _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket
):
    """Connected UNIX-domain datagram socket bound to a fixed peer path."""

    def __init__(self, trio_socket: TrioSocketType) -> None:
        super().__init__(trio_socket)
        self._send_guard = ResourceGuard("writing to")
        self._receive_guard = ResourceGuard("reading from")

    async def receive(self) -> bytes:
        with self._receive_guard:
            try:
                return await self._trio_socket.recv(65536)
            except BaseException as err:
                self._convert_socket_error(err)

    async def send(self, item: bytes) -> None:
        with self._send_guard:
            try:
                await self._trio_socket.send(item)
            except BaseException as err:
                self._convert_socket_error(err)
|
| 633 |
+
|
| 634 |
+
|
| 635 |
+
#
|
| 636 |
+
# Synchronization
|
| 637 |
+
#
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
class Event(BaseEvent):
    """AnyIO Event backed by a :class:`trio.Event`."""

    def __new__(cls) -> Event:
        # NOTE(review): bypasses BaseEvent.__new__ — presumably the base class
        # dispatches to a backend-specific class there; confirm against BaseEvent
        return object.__new__(cls)

    def __init__(self) -> None:
        self.__original = trio.Event()

    def is_set(self) -> bool:
        return self.__original.is_set()

    async def wait(self) -> None:
        return await self.__original.wait()

    def statistics(self) -> EventStatistics:
        # Convert trio's statistics object into AnyIO's
        orig_statistics = self.__original.statistics()
        return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting)

    def set(self) -> None:
        self.__original.set()
|
| 659 |
+
|
| 660 |
+
|
| 661 |
+
class Lock(BaseLock):
    """AnyIO Lock backed by a :class:`trio.Lock`.

    With ``fast_acquire=True``, the uncontended acquire skips trio's
    full-checkpoint behavior (only checking for cancellation), at the cost of
    not yielding to other tasks.
    """

    def __new__(cls, *, fast_acquire: bool = False) -> Lock:
        # NOTE(review): bypasses BaseLock.__new__ — presumably the base class
        # dispatches to a backend-specific class there; confirm against BaseLock
        return object.__new__(cls)

    def __init__(self, *, fast_acquire: bool = False) -> None:
        self._fast_acquire = fast_acquire
        self.__original = trio.Lock()

    @staticmethod
    def _convert_runtime_error_msg(exc: RuntimeError) -> None:
        # Rewrite trio's re-acquisition error message in place to match
        # AnyIO's wording; the exception is re-raised by the caller
        if exc.args == ("attempt to re-acquire an already held Lock",):
            exc.args = ("Attempted to acquire an already held Lock",)

    async def acquire(self) -> None:
        if not self._fast_acquire:
            try:
                await self.__original.acquire()
            except RuntimeError as exc:
                self._convert_runtime_error_msg(exc)
                raise

            return

        # This is the "fast path" where we don't let other tasks run
        await trio.lowlevel.checkpoint_if_cancelled()
        try:
            self.__original.acquire_nowait()
        except trio.WouldBlock:
            # Contended: park on the lock's internal ParkingLot until released
            await self.__original._lot.park()
        except RuntimeError as exc:
            self._convert_runtime_error_msg(exc)
            raise

    def acquire_nowait(self) -> None:
        try:
            self.__original.acquire_nowait()
        except trio.WouldBlock:
            # Translate trio's non-blocking failure into AnyIO's
            raise WouldBlock from None
        except RuntimeError as exc:
            self._convert_runtime_error_msg(exc)
            raise

    def locked(self) -> bool:
        return self.__original.locked()

    def release(self) -> None:
        self.__original.release()

    def statistics(self) -> LockStatistics:
        orig_statistics = self.__original.statistics()
        # Wrap the owning trio task (if any) in AnyIO's task-info type
        owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None
        return LockStatistics(
            orig_statistics.locked, owner, orig_statistics.tasks_waiting
        )
|
| 715 |
+
|
| 716 |
+
|
| 717 |
+
class Semaphore(BaseSemaphore):
    """AnyIO Semaphore backed by a :class:`trio.Semaphore`.

    With ``fast_acquire=True``, the uncontended acquire only checks for
    cancellation instead of passing through a full trio checkpoint.
    """

    def __new__(
        cls,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> Semaphore:
        # NOTE(review): bypasses BaseSemaphore.__new__ — presumably the base
        # class dispatches to a backend-specific class there; confirm
        return object.__new__(cls)

    def __init__(
        self,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> None:
        # The base class validates the arguments and stores _fast_acquire
        super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
        self.__original = trio.Semaphore(initial_value, max_value=max_value)

    async def acquire(self) -> None:
        if not self._fast_acquire:
            await self.__original.acquire()
            return

        # This is the "fast path" where we don't let other tasks run
        await trio.lowlevel.checkpoint_if_cancelled()
        try:
            self.__original.acquire_nowait()
        except trio.WouldBlock:
            # Contended: park on the semaphore's internal ParkingLot
            await self.__original._lot.park()

    def acquire_nowait(self) -> None:
        try:
            self.__original.acquire_nowait()
        except trio.WouldBlock:
            raise WouldBlock from None

    @property
    def max_value(self) -> int | None:
        return self.__original.max_value

    @property
    def value(self) -> int:
        return self.__original.value

    def release(self) -> None:
        self.__original.release()

    def statistics(self) -> SemaphoreStatistics:
        orig_statistics = self.__original.statistics()
        return SemaphoreStatistics(orig_statistics.tasks_waiting)
|
| 769 |
+
|
| 770 |
+
|
| 771 |
+
class CapacityLimiter(BaseCapacityLimiter):
    """AnyIO CapacityLimiter that delegates to a :class:`trio.CapacityLimiter`.

    Either constructed fresh from *total_tokens*, or wrapped around an
    existing trio limiter via the *original* keyword.
    """

    def __new__(
        cls,
        total_tokens: float | None = None,
        *,
        original: trio.CapacityLimiter | None = None,
    ) -> CapacityLimiter:
        # NOTE(review): bypasses BaseCapacityLimiter.__new__ — presumably the
        # base class dispatches to a backend-specific class there; confirm
        return object.__new__(cls)

    def __init__(
        self,
        total_tokens: float | None = None,
        *,
        original: trio.CapacityLimiter | None = None,
    ) -> None:
        if original is not None:
            # Wrap an existing trio limiter (total_tokens is ignored)
            self.__original = original
        else:
            assert total_tokens is not None
            self.__original = trio.CapacityLimiter(total_tokens)

    async def __aenter__(self) -> None:
        return await self.__original.__aenter__()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        await self.__original.__aexit__(exc_type, exc_val, exc_tb)

    @property
    def total_tokens(self) -> float:
        return self.__original.total_tokens

    @total_tokens.setter
    def total_tokens(self, value: float) -> None:
        self.__original.total_tokens = value

    @property
    def borrowed_tokens(self) -> int:
        return self.__original.borrowed_tokens

    @property
    def available_tokens(self) -> float:
        return self.__original.available_tokens

    def acquire_nowait(self) -> None:
        self.__original.acquire_nowait()

    def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
        self.__original.acquire_on_behalf_of_nowait(borrower)

    async def acquire(self) -> None:
        await self.__original.acquire()

    async def acquire_on_behalf_of(self, borrower: object) -> None:
        await self.__original.acquire_on_behalf_of(borrower)

    def release(self) -> None:
        return self.__original.release()

    def release_on_behalf_of(self, borrower: object) -> None:
        return self.__original.release_on_behalf_of(borrower)

    def statistics(self) -> CapacityLimiterStatistics:
        # Convert trio's statistics object into AnyIO's
        orig = self.__original.statistics()
        return CapacityLimiterStatistics(
            borrowed_tokens=orig.borrowed_tokens,
            total_tokens=orig.total_tokens,
            borrowers=tuple(orig.borrowers),
            tasks_waiting=orig.tasks_waiting,
        )
|
| 845 |
+
|
| 846 |
+
|
| 847 |
+
# Run-scoped (per trio event loop) storage for a wrapped capacity limiter
_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper")
|
| 848 |
+
|
| 849 |
+
|
| 850 |
+
#
|
| 851 |
+
# Signal handling
|
| 852 |
+
#
|
| 853 |
+
|
| 854 |
+
|
| 855 |
+
class _SignalReceiver:
    """Context manager / async iterator that yields signals as :class:`Signals`.

    Wraps ``trio.open_signal_receiver``, converting the raw integer signal
    numbers it produces into enum members.
    """

    _iterator: AsyncIterator[int]

    def __init__(self, signals: tuple[Signals, ...]):
        self._signals = signals

    def __enter__(self) -> _SignalReceiver:
        # Enter trio's receiver and keep its context manager for __exit__
        self._cm = trio.open_signal_receiver(*self._signals)
        self._iterator = self._cm.__enter__()
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool | None:
        return self._cm.__exit__(exc_type, exc_val, exc_tb)

    def __aiter__(self) -> _SignalReceiver:
        return self

    async def __anext__(self) -> Signals:
        raw_signum = await self._iterator.__anext__()
        return Signals(raw_signum)
|
| 880 |
+
|
| 881 |
+
|
| 882 |
+
#
|
| 883 |
+
# Testing and debugging
|
| 884 |
+
#
|
| 885 |
+
|
| 886 |
+
|
| 887 |
+
class TestRunner(abc.TestRunner):
    """Runs async tests/fixtures on a trio guest run driven from sync code.

    Trio runs in "guest mode" on the current thread: trio schedules work by
    putting callables on ``_call_queue`` and this runner pumps the queue
    synchronously until the awaited result arrives.
    """

    def __init__(self, **options: Any) -> None:
        from queue import Queue

        # Callables scheduled by the guest run, executed by the pump loops below
        self._call_queue: Queue[Callable[[], object]] = Queue()
        # None until the guest run has started; reset to None when it finishes
        self._send_stream: MemoryObjectSendStream | None = None
        # Extra keyword arguments passed through to start_guest_run()
        self._options = options

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: types.TracebackType | None,
    ) -> None:
        if self._send_stream:
            # Closing the stream ends _run_tests_and_fixtures(); pump the queue
            # until _main_task_finished() clears _send_stream
            self._send_stream.close()
            while self._send_stream is not None:
                self._call_queue.get()()

    async def _run_tests_and_fixtures(self) -> None:
        """Main task of the guest run: await submitted coroutines one by one."""
        self._send_stream, receive_stream = create_memory_object_stream(1)
        with receive_stream:
            async for coro, outcome_holder in receive_stream:
                try:
                    retval = await coro
                except BaseException as exc:
                    outcome_holder.append(Error(exc))
                else:
                    outcome_holder.append(Value(retval))

    def _main_task_finished(self, outcome: object) -> None:
        # done_callback of the guest run; signals the pump loops to stop
        self._send_stream = None

    def _call_in_runner_task(
        self,
        func: Callable[P, Awaitable[T_Retval]],
        *args: P.args,
        **kwargs: P.kwargs,
    ) -> T_Retval:
        """Run ``func(*args, **kwargs)`` on the guest run and return its result."""
        if self._send_stream is None:
            # Lazily start the guest run on first use
            trio.lowlevel.start_guest_run(
                self._run_tests_and_fixtures,
                run_sync_soon_threadsafe=self._call_queue.put,
                done_callback=self._main_task_finished,
                **self._options,
            )
            while self._send_stream is None:
                self._call_queue.get()()

        # Submit the coroutine, then pump the queue until an outcome appears
        outcome_holder: list[Outcome] = []
        self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder))
        while not outcome_holder:
            self._call_queue.get()()

        # unwrap() returns the value or re-raises the captured exception
        return outcome_holder[0].unwrap()

    def run_asyncgen_fixture(
        self,
        fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]],
        kwargs: dict[str, Any],
    ) -> Iterable[T_Retval]:
        """Drive an async generator fixture: yield its value, then finalize it."""
        asyncgen = fixture_func(**kwargs)
        fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None)

        yield fixturevalue

        try:
            self._call_in_runner_task(asyncgen.asend, None)
        except StopAsyncIteration:
            pass
        else:
            # A second yield is a fixture bug: close the generator and complain
            self._call_in_runner_task(asyncgen.aclose)
            raise RuntimeError("Async generator fixture did not stop")

    def run_fixture(
        self,
        fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]],
        kwargs: dict[str, Any],
    ) -> T_Retval:
        return self._call_in_runner_task(fixture_func, **kwargs)

    def run_test(
        self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any]
    ) -> None:
        self._call_in_runner_task(test_func, **kwargs)
|
| 972 |
+
|
| 973 |
+
|
| 974 |
+
class TrioTaskInfo(TaskInfo):
    """Task info snapshot backed by a weakly-referenced trio task."""

    def __init__(self, task: trio.lowlevel.Task):
        if task.parent_nursery and task.parent_nursery.parent_task:
            parent_id = id(task.parent_nursery.parent_task)
        else:
            parent_id = None

        super().__init__(id(task), parent_id, task.name, task.coro)
        # Hold only a weak proxy so this object does not keep the task alive
        self._task = weakref.proxy(task)

    def has_pending_cancellation(self) -> bool:
        try:
            return self._task._cancel_status.effectively_cancelled
        except ReferenceError:
            # If the task is no longer around, it surely doesn't have a
            # cancellation pending
            return False
|
| 990 |
+
|
| 991 |
+
|
| 992 |
+
class TrioBackend(AsyncBackend):
|
| 993 |
+
@classmethod
|
| 994 |
+
def run(
|
| 995 |
+
cls,
|
| 996 |
+
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
| 997 |
+
args: tuple[Unpack[PosArgsT]],
|
| 998 |
+
kwargs: dict[str, Any],
|
| 999 |
+
options: dict[str, Any],
|
| 1000 |
+
) -> T_Retval:
|
| 1001 |
+
return trio.run(func, *args)
|
| 1002 |
+
|
| 1003 |
+
@classmethod
|
| 1004 |
+
def current_token(cls) -> object:
|
| 1005 |
+
return trio.lowlevel.current_trio_token()
|
| 1006 |
+
|
| 1007 |
+
@classmethod
|
| 1008 |
+
def current_time(cls) -> float:
|
| 1009 |
+
return trio.current_time()
|
| 1010 |
+
|
| 1011 |
+
@classmethod
|
| 1012 |
+
def cancelled_exception_class(cls) -> type[BaseException]:
|
| 1013 |
+
return trio.Cancelled
|
| 1014 |
+
|
| 1015 |
+
@classmethod
|
| 1016 |
+
async def checkpoint(cls) -> None:
|
| 1017 |
+
await trio.lowlevel.checkpoint()
|
| 1018 |
+
|
| 1019 |
+
@classmethod
|
| 1020 |
+
async def checkpoint_if_cancelled(cls) -> None:
|
| 1021 |
+
await trio.lowlevel.checkpoint_if_cancelled()
|
| 1022 |
+
|
| 1023 |
+
@classmethod
|
| 1024 |
+
async def cancel_shielded_checkpoint(cls) -> None:
|
| 1025 |
+
await trio.lowlevel.cancel_shielded_checkpoint()
|
| 1026 |
+
|
| 1027 |
+
@classmethod
|
| 1028 |
+
async def sleep(cls, delay: float) -> None:
|
| 1029 |
+
await trio.sleep(delay)
|
| 1030 |
+
|
| 1031 |
+
@classmethod
|
| 1032 |
+
def create_cancel_scope(
|
| 1033 |
+
cls, *, deadline: float = math.inf, shield: bool = False
|
| 1034 |
+
) -> abc.CancelScope:
|
| 1035 |
+
return CancelScope(deadline=deadline, shield=shield)
|
| 1036 |
+
|
| 1037 |
+
@classmethod
|
| 1038 |
+
def current_effective_deadline(cls) -> float:
|
| 1039 |
+
return trio.current_effective_deadline()
|
| 1040 |
+
|
| 1041 |
+
@classmethod
|
| 1042 |
+
def create_task_group(cls) -> abc.TaskGroup:
|
| 1043 |
+
return TaskGroup()
|
| 1044 |
+
|
| 1045 |
+
@classmethod
|
| 1046 |
+
def create_event(cls) -> abc.Event:
|
| 1047 |
+
return Event()
|
| 1048 |
+
|
| 1049 |
+
@classmethod
|
| 1050 |
+
def create_lock(cls, *, fast_acquire: bool) -> Lock:
|
| 1051 |
+
return Lock(fast_acquire=fast_acquire)
|
| 1052 |
+
|
| 1053 |
+
@classmethod
|
| 1054 |
+
def create_semaphore(
|
| 1055 |
+
cls,
|
| 1056 |
+
initial_value: int,
|
| 1057 |
+
*,
|
| 1058 |
+
max_value: int | None = None,
|
| 1059 |
+
fast_acquire: bool = False,
|
| 1060 |
+
) -> abc.Semaphore:
|
| 1061 |
+
return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire)
|
| 1062 |
+
|
| 1063 |
+
@classmethod
|
| 1064 |
+
def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter:
|
| 1065 |
+
return CapacityLimiter(total_tokens)
|
| 1066 |
+
|
| 1067 |
+
@classmethod
|
| 1068 |
+
async def run_sync_in_worker_thread(
|
| 1069 |
+
cls,
|
| 1070 |
+
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
| 1071 |
+
args: tuple[Unpack[PosArgsT]],
|
| 1072 |
+
abandon_on_cancel: bool = False,
|
| 1073 |
+
limiter: abc.CapacityLimiter | None = None,
|
| 1074 |
+
) -> T_Retval:
|
| 1075 |
+
def wrapper() -> T_Retval:
|
| 1076 |
+
with claim_worker_thread(TrioBackend, token):
|
| 1077 |
+
return func(*args)
|
| 1078 |
+
|
| 1079 |
+
token = TrioBackend.current_token()
|
| 1080 |
+
return await run_sync(
|
| 1081 |
+
wrapper,
|
| 1082 |
+
abandon_on_cancel=abandon_on_cancel,
|
| 1083 |
+
limiter=cast(trio.CapacityLimiter, limiter),
|
| 1084 |
+
)
|
| 1085 |
+
|
| 1086 |
+
@classmethod
|
| 1087 |
+
def check_cancelled(cls) -> None:
|
| 1088 |
+
trio.from_thread.check_cancelled()
|
| 1089 |
+
|
| 1090 |
+
@classmethod
|
| 1091 |
+
def run_async_from_thread(
|
| 1092 |
+
cls,
|
| 1093 |
+
func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
|
| 1094 |
+
args: tuple[Unpack[PosArgsT]],
|
| 1095 |
+
token: object,
|
| 1096 |
+
) -> T_Retval:
|
| 1097 |
+
return trio.from_thread.run(func, *args)
|
| 1098 |
+
|
| 1099 |
+
@classmethod
|
| 1100 |
+
def run_sync_from_thread(
|
| 1101 |
+
cls,
|
| 1102 |
+
func: Callable[[Unpack[PosArgsT]], T_Retval],
|
| 1103 |
+
args: tuple[Unpack[PosArgsT]],
|
| 1104 |
+
token: object,
|
| 1105 |
+
) -> T_Retval:
|
| 1106 |
+
return trio.from_thread.run_sync(func, *args)
|
| 1107 |
+
|
| 1108 |
+
@classmethod
|
| 1109 |
+
def create_blocking_portal(cls) -> abc.BlockingPortal:
|
| 1110 |
+
return BlockingPortal()
|
| 1111 |
+
|
| 1112 |
+
@classmethod
|
| 1113 |
+
async def open_process(
|
| 1114 |
+
cls,
|
| 1115 |
+
command: StrOrBytesPath | Sequence[StrOrBytesPath],
|
| 1116 |
+
*,
|
| 1117 |
+
stdin: int | IO[Any] | None,
|
| 1118 |
+
stdout: int | IO[Any] | None,
|
| 1119 |
+
stderr: int | IO[Any] | None,
|
| 1120 |
+
**kwargs: Any,
|
| 1121 |
+
) -> Process:
|
| 1122 |
+
def convert_item(item: StrOrBytesPath) -> str:
|
| 1123 |
+
str_or_bytes = os.fspath(item)
|
| 1124 |
+
if isinstance(str_or_bytes, str):
|
| 1125 |
+
return str_or_bytes
|
| 1126 |
+
else:
|
| 1127 |
+
return os.fsdecode(str_or_bytes)
|
| 1128 |
+
|
| 1129 |
+
if isinstance(command, (str, bytes, PathLike)):
|
| 1130 |
+
process = await trio.lowlevel.open_process(
|
| 1131 |
+
convert_item(command),
|
| 1132 |
+
stdin=stdin,
|
| 1133 |
+
stdout=stdout,
|
| 1134 |
+
stderr=stderr,
|
| 1135 |
+
shell=True,
|
| 1136 |
+
**kwargs,
|
| 1137 |
+
)
|
| 1138 |
+
else:
|
| 1139 |
+
process = await trio.lowlevel.open_process(
|
| 1140 |
+
[convert_item(item) for item in command],
|
| 1141 |
+
stdin=stdin,
|
| 1142 |
+
stdout=stdout,
|
| 1143 |
+
stderr=stderr,
|
| 1144 |
+
shell=False,
|
| 1145 |
+
**kwargs,
|
| 1146 |
+
)
|
| 1147 |
+
|
| 1148 |
+
stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None
|
| 1149 |
+
stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None
|
| 1150 |
+
stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None
|
| 1151 |
+
return Process(process, stdin_stream, stdout_stream, stderr_stream)
|
| 1152 |
+
|
| 1153 |
+
@classmethod
|
| 1154 |
+
def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None:
|
| 1155 |
+
trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers)
|
| 1156 |
+
|
| 1157 |
+
@classmethod
|
| 1158 |
+
async def connect_tcp(
|
| 1159 |
+
cls, host: str, port: int, local_address: IPSockAddrType | None = None
|
| 1160 |
+
) -> SocketStream:
|
| 1161 |
+
family = socket.AF_INET6 if ":" in host else socket.AF_INET
|
| 1162 |
+
trio_socket = trio.socket.socket(family)
|
| 1163 |
+
trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
| 1164 |
+
if local_address:
|
| 1165 |
+
await trio_socket.bind(local_address)
|
| 1166 |
+
|
| 1167 |
+
try:
|
| 1168 |
+
await trio_socket.connect((host, port))
|
| 1169 |
+
except BaseException:
|
| 1170 |
+
trio_socket.close()
|
| 1171 |
+
raise
|
| 1172 |
+
|
| 1173 |
+
return SocketStream(trio_socket)
|
| 1174 |
+
|
| 1175 |
+
    @classmethod
    async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream:
        """Connect to the UNIX domain socket at *path*."""
        trio_socket = trio.socket.socket(socket.AF_UNIX)
        try:
            await trio_socket.connect(path)
        except BaseException:
            # Also covers cancellation: never leak the socket
            trio_socket.close()
            raise

        return UNIXSocketStream(trio_socket)
|
| 1185 |
+
|
| 1186 |
+
    @classmethod
    def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener:
        """Wrap an already-bound/listening TCP socket in a listener object."""
        return TCPSocketListener(sock)
|
| 1189 |
+
|
| 1190 |
+
    @classmethod
    def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener:
        """Wrap an already-bound/listening UNIX socket in a listener object."""
        return UNIXSocketListener(sock)
|
| 1193 |
+
|
| 1194 |
+
    @classmethod
    async def create_udp_socket(
        cls,
        family: socket.AddressFamily,
        local_address: IPSockAddrType | None,
        remote_address: IPSockAddrType | None,
        reuse_port: bool,
    ) -> UDPSocket | ConnectedUDPSocket:
        """Create a UDP socket, optionally bound and/or connected.

        :return: a ConnectedUDPSocket when *remote_address* is given, otherwise
            an unconnected UDPSocket
        """
        trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM)

        if reuse_port:
            # NOTE(review): SO_REUSEPORT is not available on all platforms — verify
            # callers gate this option appropriately
            trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)

        if local_address:
            await trio_socket.bind(local_address)

        if remote_address:
            await trio_socket.connect(remote_address)
            return ConnectedUDPSocket(trio_socket)
        else:
            return UDPSocket(trio_socket)
|
| 1215 |
+
|
| 1216 |
+
    @classmethod
    @overload
    async def create_unix_datagram_socket(
        cls, raw_socket: socket.socket, remote_path: None
    ) -> abc.UNIXDatagramSocket: ...

    @classmethod
    @overload
    async def create_unix_datagram_socket(
        cls, raw_socket: socket.socket, remote_path: str | bytes
    ) -> abc.ConnectedUNIXDatagramSocket: ...

    @classmethod
    async def create_unix_datagram_socket(
        cls, raw_socket: socket.socket, remote_path: str | bytes | None
    ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket:
        """Wrap a raw UNIX datagram socket, connecting it when a remote path is given."""
        trio_socket = trio.socket.from_stdlib_socket(raw_socket)

        if remote_path:
            await trio_socket.connect(remote_path)
            return ConnectedUNIXDatagramSocket(trio_socket)
        else:
            return UNIXDatagramSocket(trio_socket)
|
| 1239 |
+
|
| 1240 |
+
    @classmethod
    async def getaddrinfo(
        cls,
        host: bytes | str | None,
        port: str | int | None,
        *,
        family: int | AddressFamily = 0,
        type: int | SocketKind = 0,
        proto: int = 0,
        flags: int = 0,
    ) -> Sequence[
        tuple[
            AddressFamily,
            SocketKind,
            int,
            str,
            tuple[str, int] | tuple[str, int, int, int] | tuple[int, bytes],
        ]
    ]:
        """Delegate name resolution to trio's non-blocking getaddrinfo()."""
        return await trio.socket.getaddrinfo(host, port, family, type, proto, flags)
|
| 1260 |
+
|
| 1261 |
+
    @classmethod
    async def getnameinfo(
        cls, sockaddr: IPSockAddrType, flags: int = 0
    ) -> tuple[str, str]:
        """Delegate reverse name resolution to trio's non-blocking getnameinfo()."""
        return await trio.socket.getnameinfo(sockaddr, flags)
|
| 1266 |
+
|
| 1267 |
+
    @classmethod
    async def wait_readable(cls, obj: FileDescriptorLike) -> None:
        """Wait until *obj* is readable, translating trio exceptions to anyio's.

        :raises ClosedResourceError: if the resource is closed while waiting
        :raises BusyResourceError: if another task is already waiting to read
        """
        try:
            await wait_readable(obj)
        except trio.ClosedResourceError as exc:
            # Keep the original traceback but present anyio's exception type
            raise ClosedResourceError().with_traceback(exc.__traceback__) from None
        except trio.BusyResourceError:
            raise BusyResourceError("reading from") from None
|
| 1275 |
+
|
| 1276 |
+
    @classmethod
    async def wait_writable(cls, obj: FileDescriptorLike) -> None:
        """Wait until *obj* is writable, translating trio exceptions to anyio's.

        :raises ClosedResourceError: if the resource is closed while waiting
        :raises BusyResourceError: if another task is already waiting to write
        """
        try:
            await wait_writable(obj)
        except trio.ClosedResourceError as exc:
            # Keep the original traceback but present anyio's exception type
            raise ClosedResourceError().with_traceback(exc.__traceback__) from None
        except trio.BusyResourceError:
            raise BusyResourceError("writing to") from None
|
| 1284 |
+
|
| 1285 |
+
    @classmethod
    def notify_closing(cls, obj: FileDescriptorLike) -> None:
        """Tell trio that *obj* is about to be closed so waiters are woken up."""
        notify_closing(obj)
|
| 1288 |
+
|
| 1289 |
+
    @classmethod
    async def wrap_listener_socket(cls, sock: socket.socket) -> abc.SocketListener:
        """Wrap an existing listening socket in a TCP listener object."""
        return TCPSocketListener(sock)
|
| 1292 |
+
|
| 1293 |
+
    @classmethod
    async def wrap_stream_socket(cls, sock: socket.socket) -> SocketStream:
        """Adopt an existing connected TCP socket as a SocketStream."""
        trio_sock = trio.socket.from_stdlib_socket(sock)
        return SocketStream(trio_sock)
|
| 1297 |
+
|
| 1298 |
+
    @classmethod
    async def wrap_unix_stream_socket(cls, sock: socket.socket) -> UNIXSocketStream:
        """Adopt an existing connected UNIX stream socket."""
        trio_sock = trio.socket.from_stdlib_socket(sock)
        return UNIXSocketStream(trio_sock)
|
| 1302 |
+
|
| 1303 |
+
    @classmethod
    async def wrap_udp_socket(cls, sock: socket.socket) -> UDPSocket:
        """Adopt an existing (unconnected) UDP socket."""
        trio_sock = trio.socket.from_stdlib_socket(sock)
        return UDPSocket(trio_sock)
|
| 1307 |
+
|
| 1308 |
+
    @classmethod
    async def wrap_connected_udp_socket(cls, sock: socket.socket) -> ConnectedUDPSocket:
        """Adopt an existing connected UDP socket."""
        trio_sock = trio.socket.from_stdlib_socket(sock)
        return ConnectedUDPSocket(trio_sock)
|
| 1312 |
+
|
| 1313 |
+
    @classmethod
    async def wrap_unix_datagram_socket(cls, sock: socket.socket) -> UNIXDatagramSocket:
        """Adopt an existing (unconnected) UNIX datagram socket."""
        trio_sock = trio.socket.from_stdlib_socket(sock)
        return UNIXDatagramSocket(trio_sock)
|
| 1317 |
+
|
| 1318 |
+
    @classmethod
    async def wrap_connected_unix_datagram_socket(
        cls, sock: socket.socket
    ) -> ConnectedUNIXDatagramSocket:
        """Adopt an existing connected UNIX datagram socket."""
        trio_sock = trio.socket.from_stdlib_socket(sock)
        return ConnectedUNIXDatagramSocket(trio_sock)
|
| 1324 |
+
|
| 1325 |
+
    @classmethod
    def current_default_thread_limiter(cls) -> CapacityLimiter:
        """Return anyio's wrapper for trio's default thread limiter.

        The wrapper is created lazily on first use and cached in a context
        variable so subsequent calls in the same context reuse it.
        """
        try:
            return _capacity_limiter_wrapper.get()
        except LookupError:
            limiter = CapacityLimiter(
                original=trio.to_thread.current_default_thread_limiter()
            )
            _capacity_limiter_wrapper.set(limiter)
            return limiter
|
| 1335 |
+
|
| 1336 |
+
    @classmethod
    def open_signal_receiver(
        cls, *signals: Signals
    ) -> AbstractContextManager[AsyncIterator[Signals]]:
        """Return a context manager yielding an async iterator of delivered signals."""
        return _SignalReceiver(signals)
|
| 1341 |
+
|
| 1342 |
+
    @classmethod
    def get_current_task(cls) -> TaskInfo:
        """Return a TaskInfo wrapper for the currently running trio task."""
        task = current_task()
        return TrioTaskInfo(task)
|
| 1346 |
+
|
| 1347 |
+
@classmethod
|
| 1348 |
+
def get_running_tasks(cls) -> Sequence[TaskInfo]:
|
| 1349 |
+
root_task = current_root_task()
|
| 1350 |
+
assert root_task
|
| 1351 |
+
task_infos = [TrioTaskInfo(root_task)]
|
| 1352 |
+
nurseries = root_task.child_nurseries
|
| 1353 |
+
while nurseries:
|
| 1354 |
+
new_nurseries: list[trio.Nursery] = []
|
| 1355 |
+
for nursery in nurseries:
|
| 1356 |
+
for task in nursery.child_tasks:
|
| 1357 |
+
task_infos.append(TrioTaskInfo(task))
|
| 1358 |
+
new_nurseries.extend(task.child_nurseries)
|
| 1359 |
+
|
| 1360 |
+
nurseries = new_nurseries
|
| 1361 |
+
|
| 1362 |
+
return task_infos
|
| 1363 |
+
|
| 1364 |
+
    @classmethod
    async def wait_all_tasks_blocked(cls) -> None:
        """Wait until all other tasks are blocked (testing helper)."""
        # Imported lazily so trio.testing is only needed when this is actually used
        from trio.testing import wait_all_tasks_blocked

        await wait_all_tasks_blocked()
|
| 1369 |
+
|
| 1370 |
+
    @classmethod
    def create_test_runner(cls, options: dict[str, Any]) -> TestRunner:
        """Instantiate the trio TestRunner with the given backend options."""
        return TestRunner(**options)
|
| 1373 |
+
|
| 1374 |
+
|
| 1375 |
+
# Entry point looked up by anyio's backend loader (get_async_backend() reads
# ``module.backend_class`` from anyio._backends._<name>)
backend_class = TrioBackend
|
venv/lib/python3.10/site-packages/anyio/_core/__init__.py
ADDED
|
File without changes
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (247 Bytes). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_asyncio_selector_thread.cpython-310.pyc
ADDED
|
Binary file (4.18 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_contextmanagers.cpython-310.pyc
ADDED
|
Binary file (7.05 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_eventloop.cpython-310.pyc
ADDED
|
Binary file (4.86 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_exceptions.cpython-310.pyc
ADDED
|
Binary file (5.21 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_fileio.cpython-310.pyc
ADDED
|
Binary file (27 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_resources.cpython-310.pyc
ADDED
|
Binary file (826 Bytes). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_signals.cpython-310.pyc
ADDED
|
Binary file (1.26 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_sockets.cpython-310.pyc
ADDED
|
Binary file (27.5 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_streams.cpython-310.pyc
ADDED
|
Binary file (2.06 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_subprocesses.cpython-310.pyc
ADDED
|
Binary file (7.72 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_synchronization.cpython-310.pyc
ADDED
|
Binary file (24 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tasks.cpython-310.pyc
ADDED
|
Binary file (5.9 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_tempfile.cpython-310.pyc
ADDED
|
Binary file (19.4 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_testing.cpython-310.pyc
ADDED
|
Binary file (2.98 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/__pycache__/_typedattr.cpython-310.pyc
ADDED
|
Binary file (3.39 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/_core/_asyncio_selector_thread.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import socket
|
| 5 |
+
import threading
|
| 6 |
+
from collections.abc import Callable
|
| 7 |
+
from selectors import EVENT_READ, EVENT_WRITE, DefaultSelector
|
| 8 |
+
from typing import TYPE_CHECKING, Any
|
| 9 |
+
|
| 10 |
+
if TYPE_CHECKING:
|
| 11 |
+
from _typeshed import FileDescriptorLike
|
| 12 |
+
|
| 13 |
+
# Guards lazy creation of the process-wide singleton below
_selector_lock = threading.Lock()
# The singleton Selector instance, created on the first get_selector() call
_selector: Selector | None = None
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class Selector:
    """A background thread running a ``selectors`` event loop.

    Provides ``add_reader()``/``add_writer()`` style readiness callbacks for
    asyncio event loops by watching file descriptors on a dedicated thread and
    delivering wakeups back to the owning loop via
    ``loop.call_soon_threadsafe()``. A socket pair acts as a self-pipe so the
    thread can be woken whenever the registration set changes or shutdown is
    requested.
    """

    def __init__(self) -> None:
        self._thread = threading.Thread(target=self.run, name="AnyIO socket selector")
        self._selector = DefaultSelector()
        # Self-pipe: writing to _send wakes up the select() call on _receive
        self._send, self._receive = socket.socketpair()
        self._send.setblocking(False)
        self._receive.setblocking(False)
        # This somewhat reduces the amount of memory wasted queueing up data
        # for wakeups. With these settings, maximum number of 1-byte sends
        # before getting BlockingIOError:
        # Linux 4.8: 6
        # macOS (darwin 15.5): 1
        # Windows 10: 525347
        # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send
        # blocking, even on non-blocking sockets, so don't do that.)
        self._receive.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1)
        self._send.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1)
        # On Windows this is a TCP socket so this might matter. On other
        # platforms this fails b/c AF_UNIX sockets aren't actually TCP.
        try:
            self._send.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        except OSError:
            pass

        self._selector.register(self._receive, EVENT_READ)
        self._closed = False

    def start(self) -> None:
        """Start the selector thread and arrange for shutdown at interpreter exit."""
        self._thread.start()
        threading._register_atexit(self._stop)  # type: ignore[attr-defined]

    def _stop(self) -> None:
        """Shut the selector thread down and tear down all resources."""
        global _selector
        self._closed = True
        self._notify_self()
        self._send.close()
        self._thread.join()
        self._selector.unregister(self._receive)
        self._receive.close()
        # BUGFIX: this leak check must run *before* close() — after close(),
        # selectors' get_map() returns None, so the assertion could never fail.
        assert not self._selector.get_map(), (
            "selector still has registered file descriptors after shutdown"
        )
        self._selector.close()
        _selector = None

    def _notify_self(self) -> None:
        """Wake the selector thread up; a full pipe already guarantees a wakeup."""
        try:
            self._send.send(b"\x00")
        except BlockingIOError:
            pass

    def add_reader(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
        """Register a one-shot read-readiness callback for *fd* on the calling loop.

        :raises ValueError: if *fd* is already registered for reading
        """
        loop = asyncio.get_running_loop()
        try:
            key = self._selector.get_key(fd)
        except KeyError:
            self._selector.register(fd, EVENT_READ, {EVENT_READ: (loop, callback)})
        else:
            if EVENT_READ in key.data:
                raise ValueError(
                    "this file descriptor is already registered for reading"
                )

            key.data[EVENT_READ] = loop, callback
            self._selector.modify(fd, key.events | EVENT_READ, key.data)

        # Wake the thread so the new registration takes effect immediately
        self._notify_self()

    def add_writer(self, fd: FileDescriptorLike, callback: Callable[[], Any]) -> None:
        """Register a one-shot write-readiness callback for *fd* on the calling loop.

        :raises ValueError: if *fd* is already registered for writing
        """
        loop = asyncio.get_running_loop()
        try:
            key = self._selector.get_key(fd)
        except KeyError:
            self._selector.register(fd, EVENT_WRITE, {EVENT_WRITE: (loop, callback)})
        else:
            if EVENT_WRITE in key.data:
                raise ValueError(
                    "this file descriptor is already registered for writing"
                )

            key.data[EVENT_WRITE] = loop, callback
            self._selector.modify(fd, key.events | EVENT_WRITE, key.data)

        # Wake the thread so the new registration takes effect immediately
        self._notify_self()

    def remove_reader(self, fd: FileDescriptorLike) -> bool:
        """Remove a read registration; return False if *fd* was not registered."""
        try:
            key = self._selector.get_key(fd)
        except KeyError:
            return False

        # XOR clears the read bit; if anything remains, keep the registration
        if new_events := key.events ^ EVENT_READ:
            del key.data[EVENT_READ]
            self._selector.modify(fd, new_events, key.data)
        else:
            self._selector.unregister(fd)

        return True

    def remove_writer(self, fd: FileDescriptorLike) -> bool:
        """Remove a write registration; return False if *fd* was not registered."""
        try:
            key = self._selector.get_key(fd)
        except KeyError:
            return False

        # XOR clears the write bit; if anything remains, keep the registration
        if new_events := key.events ^ EVENT_WRITE:
            del key.data[EVENT_WRITE]
            self._selector.modify(fd, new_events, key.data)
        else:
            self._selector.unregister(fd)

        return True

    def run(self) -> None:
        """Thread main loop: dispatch readiness events to their event loops."""
        while not self._closed:
            for key, events in self._selector.select():
                if key.fileobj is self._receive:
                    # Wakeup via _notify_self(): drain the pipe and re-select
                    try:
                        while self._receive.recv(4096):
                            pass
                    except BlockingIOError:
                        pass

                    continue

                if events & EVENT_READ:
                    loop, callback = key.data[EVENT_READ]
                    # One-shot semantics: deregister before dispatching
                    self.remove_reader(key.fd)
                    try:
                        loop.call_soon_threadsafe(callback)
                    except RuntimeError:
                        pass  # the loop was already closed

                if events & EVENT_WRITE:
                    loop, callback = key.data[EVENT_WRITE]
                    # One-shot semantics: deregister before dispatching
                    self.remove_writer(key.fd)
                    try:
                        loop.call_soon_threadsafe(callback)
                    except RuntimeError:
                        pass  # the loop was already closed
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
def get_selector() -> Selector:
    """Return the process-wide Selector singleton.

    Creates and starts it on first use; the lock makes concurrent first calls
    from multiple threads safe.
    """
    global _selector

    with _selector_lock:
        if _selector is None:
            _selector = Selector()
            _selector.start()

    return _selector
|
venv/lib/python3.10/site-packages/anyio/_core/_contextmanagers.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from abc import abstractmethod
|
| 4 |
+
from contextlib import AbstractAsyncContextManager, AbstractContextManager
|
| 5 |
+
from inspect import isasyncgen, iscoroutine, isgenerator
|
| 6 |
+
from types import TracebackType
|
| 7 |
+
from typing import Protocol, TypeVar, cast, final
|
| 8 |
+
|
| 9 |
+
# Covariant type variables mirroring contextlib's context manager generics:
# _T_co is the value yielded by the manager, _ExitT_co the __exit__ return type
_T_co = TypeVar("_T_co", covariant=True)
_ExitT_co = TypeVar("_ExitT_co", covariant=True, bound="bool | None")
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class _SupportsCtxMgr(Protocol[_T_co, _ExitT_co]):
    # Structural type used to give __enter__/__exit__ precise generic types
    def __contextmanager__(self) -> AbstractContextManager[_T_co, _ExitT_co]: ...
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class _SupportsAsyncCtxMgr(Protocol[_T_co, _ExitT_co]):
    # Structural type used to give __aenter__/__aexit__ precise generic types
    def __asynccontextmanager__(
        self,
    ) -> AbstractAsyncContextManager[_T_co, _ExitT_co]: ...
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
class ContextManagerMixin:
    """
    Mixin class providing context manager functionality via a generator-based
    implementation.

    This class allows you to implement a context manager via :meth:`__contextmanager__`
    which should return a generator. The mechanics are meant to mirror those of
    :func:`@contextmanager <contextlib.contextmanager>`.

    .. note:: Classes using this mix-in are not reentrant as context managers, meaning
        that once you enter it, you can't re-enter before first exiting it.

    .. seealso:: :doc:`contextmanagers`
    """

    # Name-mangled so subclasses cannot accidentally clash with this attribute;
    # non-None while the context manager is entered
    __cm: AbstractContextManager[object, bool | None] | None = None

    @final
    def __enter__(self: _SupportsCtxMgr[_T_co, bool | None]) -> _T_co:
        # Needed for mypy to assume self still has the __cm member
        assert isinstance(self, ContextManagerMixin)
        if self.__cm is not None:
            raise RuntimeError(
                f"this {self.__class__.__qualname__} has already been entered"
            )

        cm = self.__contextmanager__()
        # Give targeted error messages for the most likely implementation mistakes
        if not isinstance(cm, AbstractContextManager):
            if isgenerator(cm):
                raise TypeError(
                    "__contextmanager__() returned a generator object instead of "
                    "a context manager. Did you forget to add the @contextmanager "
                    "decorator?"
                )

            raise TypeError(
                f"__contextmanager__() did not return a context manager object, "
                f"but {cm.__class__!r}"
            )

        if cm is self:
            raise TypeError(
                f"{self.__class__.__qualname__}.__contextmanager__() returned "
                f"self. Did you forget to add the @contextmanager decorator and a "
                f"'yield' statement?"
            )

        value = cm.__enter__()
        # Only record the manager after a successful __enter__()
        self.__cm = cm
        return value

    @final
    def __exit__(
        self: _SupportsCtxMgr[object, _ExitT_co],
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> _ExitT_co:
        # Needed for mypy to assume self still has the __cm member
        assert isinstance(self, ContextManagerMixin)
        if self.__cm is None:
            raise RuntimeError(
                f"this {self.__class__.__qualname__} has not been entered yet"
            )

        # Prevent circular references
        cm = self.__cm
        del self.__cm

        return cast(_ExitT_co, cm.__exit__(exc_type, exc_val, exc_tb))

    @abstractmethod
    def __contextmanager__(self) -> AbstractContextManager[object, bool | None]:
        """
        Implement your context manager logic here.

        This method **must** be decorated with
        :func:`@contextmanager <contextlib.contextmanager>`.

        .. note:: Remember that the ``yield`` will raise any exception raised in the
            enclosed context block, so use a ``finally:`` block to clean up resources!

        :return: a context manager object
        """
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
class AsyncContextManagerMixin:
    """
    Mixin class providing async context manager functionality via a generator-based
    implementation.

    This class allows you to implement a context manager via
    :meth:`__asynccontextmanager__`. The mechanics are meant to mirror those of
    :func:`@asynccontextmanager <contextlib.asynccontextmanager>`.

    .. note:: Classes using this mix-in are not reentrant as context managers, meaning
        that once you enter it, you can't re-enter before first exiting it.

    .. seealso:: :doc:`contextmanagers`
    """

    # Name-mangled so subclasses cannot accidentally clash with this attribute;
    # non-None while the async context manager is entered
    __cm: AbstractAsyncContextManager[object, bool | None] | None = None

    @final
    async def __aenter__(self: _SupportsAsyncCtxMgr[_T_co, bool | None]) -> _T_co:
        # Needed for mypy to assume self still has the __cm member
        assert isinstance(self, AsyncContextManagerMixin)
        if self.__cm is not None:
            raise RuntimeError(
                f"this {self.__class__.__qualname__} has already been entered"
            )

        cm = self.__asynccontextmanager__()
        # Give targeted error messages for the most likely implementation mistakes
        if not isinstance(cm, AbstractAsyncContextManager):
            if isasyncgen(cm):
                raise TypeError(
                    "__asynccontextmanager__() returned an async generator instead of "
                    "an async context manager. Did you forget to add the "
                    "@asynccontextmanager decorator?"
                )
            elif iscoroutine(cm):
                # Close the coroutine to avoid a "never awaited" warning
                cm.close()
                raise TypeError(
                    "__asynccontextmanager__() returned a coroutine object instead of "
                    "an async context manager. Did you forget to add the "
                    "@asynccontextmanager decorator and a 'yield' statement?"
                )

            raise TypeError(
                f"__asynccontextmanager__() did not return an async context manager, "
                f"but {cm.__class__!r}"
            )

        if cm is self:
            raise TypeError(
                f"{self.__class__.__qualname__}.__asynccontextmanager__() returned "
                f"self. Did you forget to add the @asynccontextmanager decorator and a "
                f"'yield' statement?"
            )

        value = await cm.__aenter__()
        # Only record the manager after a successful __aenter__()
        self.__cm = cm
        return value

    @final
    async def __aexit__(
        self: _SupportsAsyncCtxMgr[object, _ExitT_co],
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> _ExitT_co:
        # Needed for mypy to assume self still has the __cm member
        assert isinstance(self, AsyncContextManagerMixin)
        if self.__cm is None:
            raise RuntimeError(
                f"this {self.__class__.__qualname__} has not been entered yet"
            )

        # Prevent circular references
        cm = self.__cm
        del self.__cm

        return cast(_ExitT_co, await cm.__aexit__(exc_type, exc_val, exc_tb))

    @abstractmethod
    def __asynccontextmanager__(
        self,
    ) -> AbstractAsyncContextManager[object, bool | None]:
        """
        Implement your async context manager logic here.

        This method **must** be decorated with
        :func:`@asynccontextmanager <contextlib.asynccontextmanager>`.

        .. note:: Remember that the ``yield`` will raise any exception raised in the
            enclosed context block, so use a ``finally:`` block to clean up resources!

        :return: an async context manager object
        """
|
venv/lib/python3.10/site-packages/anyio/_core/_eventloop.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
import sys
|
| 5 |
+
import threading
|
| 6 |
+
from collections.abc import Awaitable, Callable, Generator
|
| 7 |
+
from contextlib import contextmanager
|
| 8 |
+
from importlib import import_module
|
| 9 |
+
from typing import TYPE_CHECKING, Any, TypeVar
|
| 10 |
+
|
| 11 |
+
import sniffio
|
| 12 |
+
|
| 13 |
+
if sys.version_info >= (3, 11):
|
| 14 |
+
from typing import TypeVarTuple, Unpack
|
| 15 |
+
else:
|
| 16 |
+
from typing_extensions import TypeVarTuple, Unpack
|
| 17 |
+
|
| 18 |
+
if TYPE_CHECKING:
|
| 19 |
+
from ..abc import AsyncBackend
|
| 20 |
+
|
| 21 |
+
# This must be updated when new backends are introduced
BACKENDS = "asyncio", "trio"

T_Retval = TypeVar("T_Retval")
PosArgsT = TypeVarTuple("PosArgsT")

# Per-worker-thread state, set by claim_worker_thread() while a worker runs
threadlocals = threading.local()
# Cache of already-imported backend classes keyed by backend name
loaded_backends: dict[str, type[AsyncBackend]] = {}
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def run(
    func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]],
    *args: Unpack[PosArgsT],
    backend: str = "asyncio",
    backend_options: dict[str, Any] | None = None,
) -> T_Retval:
    """
    Run the given coroutine function in an asynchronous event loop.

    The current thread must not be already running an event loop.

    :param func: a coroutine function
    :param args: positional arguments to ``func``
    :param backend: name of the asynchronous event loop implementation – currently
        either ``asyncio`` or ``trio``
    :param backend_options: keyword arguments to call the backend ``run()``
        implementation with (documented :ref:`here <backend options>`)
    :return: the return value of the coroutine function
    :raises RuntimeError: if an asynchronous event loop is already running in this
        thread
    :raises LookupError: if the named backend is not found

    """
    # Refuse to run if some async library is already active on this thread
    try:
        asynclib_name = sniffio.current_async_library()
    except sniffio.AsyncLibraryNotFoundError:
        pass
    else:
        raise RuntimeError(f"Already running {asynclib_name} in this thread")

    try:
        async_backend = get_async_backend(backend)
    except ImportError as exc:
        raise LookupError(f"No such backend: {backend}") from exc

    token = None
    if sniffio.current_async_library_cvar.get(None) is None:
        # Since we're in control of the event loop, we can cache the name of the async
        # library
        token = sniffio.current_async_library_cvar.set(backend)

    try:
        backend_options = backend_options or {}
        return async_backend.run(func, args, {}, backend_options)
    finally:
        # Restore the sniffio context variable if we set it above
        if token:
            sniffio.current_async_library_cvar.reset(token)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
async def sleep(delay: float) -> None:
    """
    Pause the current task for the specified duration.

    :param delay: the duration, in seconds

    """
    # Delegates to the currently running backend's sleep implementation
    return await get_async_backend().sleep(delay)
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
async def sleep_forever() -> None:
    """
    Pause the current task until it's cancelled.

    This is a shortcut for ``sleep(math.inf)``.

    .. versionadded:: 3.1

    """
    await sleep(math.inf)
|
| 100 |
+
|
| 101 |
+
|
| 102 |
+
async def sleep_until(deadline: float) -> None:
    """
    Pause the current task until the given time.

    :param deadline: the absolute time to wake up at (according to the internal
        monotonic clock of the event loop)

    .. versionadded:: 3.1

    """
    # A deadline already in the past results in a zero-length sleep
    remaining = deadline - current_time()
    await sleep(remaining if remaining > 0 else 0)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def current_time() -> float:
    """
    Return the current value of the event loop's internal clock.

    :return: the clock value (seconds)

    """
    # Delegates to the clock of the currently running backend
    return get_async_backend().current_time()
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
def get_all_backends() -> tuple[str, ...]:
    """Return a tuple of the names of all built-in backends."""
    # BACKENDS is presumably a module-level tuple constant defined earlier in
    # this module (not visible in this chunk)
    return BACKENDS
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def get_cancelled_exc_class() -> type[BaseException]:
    """Return the current async library's cancellation exception class."""
    # The concrete class depends on which backend is running this task
    # (resolved via get_async_backend())
    return get_async_backend().cancelled_exception_class()
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
#
|
| 137 |
+
# Private API
|
| 138 |
+
#
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
@contextmanager
def claim_worker_thread(
    backend_class: type[AsyncBackend], token: object
) -> Generator[Any, None, None]:
    """
    Mark the current worker thread as serving the given backend for the
    duration of the context.

    Stores the backend class and token in thread-local storage on entry, and
    always removes them again on exit (even if the body raises).
    """
    threadlocals.current_async_backend = backend_class
    threadlocals.current_token = token
    try:
        yield
    finally:
        # Clean up in reverse order of assignment; the two deletions are
        # independent of each other
        del threadlocals.current_token
        del threadlocals.current_async_backend
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]:
    """
    Return the backend class for the named async library.

    :param asynclib_name: name of the async library (detected via sniffio when
        omitted)
    :return: the backend class exported by ``anyio._backends._<name>``
    """
    if asynclib_name is None:
        asynclib_name = sniffio.current_async_library()

    # We use our own dict instead of sys.modules to get the already imported
    # back-end class because the appropriate modules in sys.modules could
    # potentially be only partially initialized
    if asynclib_name not in loaded_backends:
        module = import_module(f"anyio._backends._{asynclib_name}")
        loaded_backends[asynclib_name] = module.backend_class

    return loaded_backends[asynclib_name]
|
venv/lib/python3.10/site-packages/anyio/_core/_exceptions.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
from collections.abc import Generator
|
| 5 |
+
from textwrap import dedent
|
| 6 |
+
from typing import Any
|
| 7 |
+
|
| 8 |
+
if sys.version_info < (3, 11):
|
| 9 |
+
from exceptiongroup import BaseExceptionGroup
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class BrokenResourceError(Exception):
    """
    Raised when trying to use a resource that has been rendered unusable due to external
    causes (e.g. a send stream whose peer has disconnected).
    """
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class BrokenWorkerProcess(Exception):
    """
    Raised by :meth:`~anyio.to_process.run_sync` if the worker process terminates abruptly or
    otherwise misbehaves.
    """
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class BrokenWorkerInterpreter(Exception):
    """
    Raised by :meth:`~anyio.to_interpreter.run_sync` if an unexpected exception is
    raised in the subinterpreter.
    """

    def __init__(self, excinfo: Any):
        # This was adapted from concurrent.futures.interpreter.ExecutionFailed
        # NOTE(review): assumes excinfo exposes .formatted, .type, .msg and
        # .errdisplay attributes — confirm against the interpreters module's
        # exception-info object
        msg = excinfo.formatted
        if not msg:
            if excinfo.type and excinfo.msg:
                msg = f"{excinfo.type.__name__}: {excinfo.msg}"
            else:
                # Fall back to whichever of the two pieces is non-empty
                msg = excinfo.type.__name__ or excinfo.msg

        super().__init__(msg)
        self.excinfo = excinfo

    def __str__(self) -> str:
        try:
            formatted = self.excinfo.errdisplay
        except Exception:
            # No rendered traceback available; use the plain message
            return super().__str__()
        else:
            return dedent(
                f"""
                {super().__str__()}

                Uncaught in the interpreter:

                {formatted}
                """.strip()
            )
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class BusyResourceError(Exception):
    """
    Raised when two tasks are trying to read from or write to the same resource
    concurrently.
    """

    def __init__(self, action: str):
        """:param action: verb phrase describing the conflicting operation (e.g. ``"reading from"``)"""
        message = f"Another task is already {action} this resource"
        super().__init__(message)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class ClosedResourceError(Exception):
    """Raised when trying to use a resource that has been closed."""
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class ConnectionFailed(OSError):
    """
    Raised when a connection attempt fails.

    .. note:: This class inherits from :exc:`OSError` for backwards compatibility.
    """
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
def iterate_exceptions(
|
| 84 |
+
exception: BaseException,
|
| 85 |
+
) -> Generator[BaseException, None, None]:
|
| 86 |
+
if isinstance(exception, BaseExceptionGroup):
|
| 87 |
+
for exc in exception.exceptions:
|
| 88 |
+
yield from iterate_exceptions(exc)
|
| 89 |
+
else:
|
| 90 |
+
yield exception
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class DelimiterNotFound(Exception):
    """
    Raised during
    :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
    maximum number of bytes has been read without the delimiter being found.
    """

    def __init__(self, max_bytes: int) -> None:
        """:param max_bytes: the read limit that was hit before the delimiter appeared"""
        message = f"The delimiter was not found among the first {max_bytes} bytes"
        super().__init__(message)
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class EndOfStream(Exception):
    """
    Raised when trying to read from a stream that has been closed from the other end.
    """
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class IncompleteRead(Exception):
    """
    Raised during
    :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or
    :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the
    connection is closed before the requested amount of bytes has been read.
    """

    def __init__(self) -> None:
        message = "The stream was closed before the read operation could be completed"
        super().__init__(message)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class TypedAttributeLookupError(LookupError):
    """
    Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute
    is not found and no default value has been given.
    """
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class WouldBlock(Exception):
    """Raised by ``X_nowait`` functions if ``X()`` would block."""
|
venv/lib/python3.10/site-packages/anyio/_core/_fileio.py
ADDED
|
@@ -0,0 +1,740 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import sys
|
| 6 |
+
from collections.abc import (
|
| 7 |
+
AsyncIterator,
|
| 8 |
+
Callable,
|
| 9 |
+
Iterable,
|
| 10 |
+
Iterator,
|
| 11 |
+
Sequence,
|
| 12 |
+
)
|
| 13 |
+
from dataclasses import dataclass
|
| 14 |
+
from functools import partial
|
| 15 |
+
from os import PathLike
|
| 16 |
+
from typing import (
|
| 17 |
+
IO,
|
| 18 |
+
TYPE_CHECKING,
|
| 19 |
+
Any,
|
| 20 |
+
AnyStr,
|
| 21 |
+
ClassVar,
|
| 22 |
+
Final,
|
| 23 |
+
Generic,
|
| 24 |
+
overload,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
from .. import to_thread
|
| 28 |
+
from ..abc import AsyncResource
|
| 29 |
+
|
| 30 |
+
if TYPE_CHECKING:
|
| 31 |
+
from types import ModuleType
|
| 32 |
+
|
| 33 |
+
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
|
| 34 |
+
else:
|
| 35 |
+
ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
class AsyncFile(AsyncResource, Generic[AnyStr]):
    """
    An asynchronous file object.

    This class wraps a standard file object and provides async friendly versions of the
    following blocking methods (where available on the original file object):

    * read
    * read1
    * readline
    * readlines
    * readinto
    * readinto1
    * write
    * writelines
    * truncate
    * seek
    * tell
    * flush

    All other methods are directly passed through.

    This class supports the asynchronous context manager protocol which closes the
    underlying file at the end of the context block.

    This class also supports asynchronous iteration::

        async with await open_file(...) as f:
            async for line in f:
                print(line)
    """

    def __init__(self, fp: IO[AnyStr]) -> None:
        # The wrapped synchronous file object; typed Any so arbitrary
        # attributes can be delegated without type errors
        self._fp: Any = fp

    def __getattr__(self, name: str) -> object:
        # Any attribute not explicitly wrapped below is delegated to the
        # underlying (synchronous) file object as-is
        return getattr(self._fp, name)

    @property
    def wrapped(self) -> IO[AnyStr]:
        """The wrapped file object."""
        return self._fp

    async def __aiter__(self) -> AsyncIterator[AnyStr]:
        # Iterate line by line; readline() returning an empty value signals EOF
        while True:
            line = await self.readline()
            if line:
                yield line
            else:
                break

    async def aclose(self) -> None:
        # Run the blocking close() in a worker thread
        return await to_thread.run_sync(self._fp.close)

    async def read(self, size: int = -1) -> AnyStr:
        return await to_thread.run_sync(self._fp.read, size)

    async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes:
        return await to_thread.run_sync(self._fp.read1, size)

    async def readline(self) -> AnyStr:
        return await to_thread.run_sync(self._fp.readline)

    async def readlines(self) -> list[AnyStr]:
        return await to_thread.run_sync(self._fp.readlines)

    async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
        return await to_thread.run_sync(self._fp.readinto, b)

    async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> int:
        return await to_thread.run_sync(self._fp.readinto1, b)

    @overload
    async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ...

    @overload
    async def write(self: AsyncFile[str], b: str) -> int: ...

    async def write(self, b: ReadableBuffer | str) -> int:
        return await to_thread.run_sync(self._fp.write, b)

    @overload
    async def writelines(
        self: AsyncFile[bytes], lines: Iterable[ReadableBuffer]
    ) -> None: ...

    @overload
    async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ...

    async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None:
        return await to_thread.run_sync(self._fp.writelines, lines)

    async def truncate(self, size: int | None = None) -> int:
        return await to_thread.run_sync(self._fp.truncate, size)

    # NOTE(review): whence is annotated "int | None" but passing None would
    # fail in the underlying seek(); the annotation likely should be plain int.
    # Left unchanged to preserve the published signature.
    async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int:
        return await to_thread.run_sync(self._fp.seek, offset, whence)

    async def tell(self) -> int:
        return await to_thread.run_sync(self._fp.tell)

    async def flush(self) -> None:
        return await to_thread.run_sync(self._fp.flush)
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
@overload
async def open_file(
    file: str | PathLike[str] | int,
    mode: OpenBinaryMode,
    buffering: int = ...,
    encoding: str | None = ...,
    errors: str | None = ...,
    newline: str | None = ...,
    closefd: bool = ...,
    opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[bytes]: ...


@overload
async def open_file(
    file: str | PathLike[str] | int,
    mode: OpenTextMode = ...,
    buffering: int = ...,
    encoding: str | None = ...,
    errors: str | None = ...,
    newline: str | None = ...,
    closefd: bool = ...,
    opener: Callable[[str, int], int] | None = ...,
) -> AsyncFile[str]: ...


async def open_file(
    file: str | PathLike[str] | int,
    mode: str = "r",
    buffering: int = -1,
    encoding: str | None = None,
    errors: str | None = None,
    newline: str | None = None,
    closefd: bool = True,
    opener: Callable[[str, int], int] | None = None,
) -> AsyncFile[Any]:
    """
    Open a file asynchronously.

    The arguments are exactly the same as for the builtin :func:`open`.

    :return: an asynchronous file object

    """
    # Bind all the open() arguments up front, then run the blocking call in a
    # worker thread and wrap the result
    open_call = partial(
        open, file, mode, buffering, encoding, errors, newline, closefd, opener
    )
    raw_file = await to_thread.run_sync(open_call)
    return AsyncFile(raw_file)
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]:
    """
    Wrap an existing file as an asynchronous file.

    :param file: an existing file-like object
    :return: an asynchronous file object

    """
    return AsyncFile(file)
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
@dataclass(eq=False)
class _PathIterator(AsyncIterator["Path"]):
    """Adapt a synchronous iterator of path-likes into an async iterator of Path objects."""

    iterator: Iterator[PathLike[str]]

    async def __anext__(self) -> Path:
        # Pull the next item in a worker thread; None acts as the exhaustion
        # sentinel so that StopIteration never crosses the thread boundary
        value = await to_thread.run_sync(
            next, self.iterator, None, abandon_on_cancel=True
        )
        if value is None:
            raise StopAsyncIteration from None

        return Path(value)
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class Path:
|
| 219 |
+
"""
|
| 220 |
+
An asynchronous version of :class:`pathlib.Path`.
|
| 221 |
+
|
| 222 |
+
This class cannot be substituted for :class:`pathlib.Path` or
|
| 223 |
+
:class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike`
|
| 224 |
+
interface.
|
| 225 |
+
|
| 226 |
+
It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for
|
| 227 |
+
the deprecated :meth:`~pathlib.Path.link_to` method.
|
| 228 |
+
|
| 229 |
+
Some methods may be unavailable or have limited functionality, based on the Python
|
| 230 |
+
version:
|
| 231 |
+
|
| 232 |
+
* :meth:`~pathlib.Path.copy` (available on Python 3.14 or later)
|
| 233 |
+
* :meth:`~pathlib.Path.copy_into` (available on Python 3.14 or later)
|
| 234 |
+
* :meth:`~pathlib.Path.from_uri` (available on Python 3.13 or later)
|
| 235 |
+
* :meth:`~pathlib.PurePath.full_match` (available on Python 3.13 or later)
|
| 236 |
+
* :attr:`~pathlib.Path.info` (available on Python 3.14 or later)
|
| 237 |
+
* :meth:`~pathlib.Path.is_junction` (available on Python 3.12 or later)
|
| 238 |
+
* :meth:`~pathlib.PurePath.match` (the ``case_sensitive`` parameter is only
|
| 239 |
+
available on Python 3.13 or later)
|
| 240 |
+
* :meth:`~pathlib.Path.move` (available on Python 3.14 or later)
|
| 241 |
+
* :meth:`~pathlib.Path.move_into` (available on Python 3.14 or later)
|
| 242 |
+
* :meth:`~pathlib.PurePath.relative_to` (the ``walk_up`` parameter is only available
|
| 243 |
+
on Python 3.12 or later)
|
| 244 |
+
* :meth:`~pathlib.Path.walk` (available on Python 3.12 or later)
|
| 245 |
+
|
| 246 |
+
Any methods that do disk I/O need to be awaited on. These methods are:
|
| 247 |
+
|
| 248 |
+
* :meth:`~pathlib.Path.absolute`
|
| 249 |
+
* :meth:`~pathlib.Path.chmod`
|
| 250 |
+
* :meth:`~pathlib.Path.cwd`
|
| 251 |
+
* :meth:`~pathlib.Path.exists`
|
| 252 |
+
* :meth:`~pathlib.Path.expanduser`
|
| 253 |
+
* :meth:`~pathlib.Path.group`
|
| 254 |
+
* :meth:`~pathlib.Path.hardlink_to`
|
| 255 |
+
* :meth:`~pathlib.Path.home`
|
| 256 |
+
* :meth:`~pathlib.Path.is_block_device`
|
| 257 |
+
* :meth:`~pathlib.Path.is_char_device`
|
| 258 |
+
* :meth:`~pathlib.Path.is_dir`
|
| 259 |
+
* :meth:`~pathlib.Path.is_fifo`
|
| 260 |
+
* :meth:`~pathlib.Path.is_file`
|
| 261 |
+
* :meth:`~pathlib.Path.is_junction`
|
| 262 |
+
* :meth:`~pathlib.Path.is_mount`
|
| 263 |
+
* :meth:`~pathlib.Path.is_socket`
|
| 264 |
+
* :meth:`~pathlib.Path.is_symlink`
|
| 265 |
+
* :meth:`~pathlib.Path.lchmod`
|
| 266 |
+
* :meth:`~pathlib.Path.lstat`
|
| 267 |
+
* :meth:`~pathlib.Path.mkdir`
|
| 268 |
+
* :meth:`~pathlib.Path.open`
|
| 269 |
+
* :meth:`~pathlib.Path.owner`
|
| 270 |
+
* :meth:`~pathlib.Path.read_bytes`
|
| 271 |
+
* :meth:`~pathlib.Path.read_text`
|
| 272 |
+
* :meth:`~pathlib.Path.readlink`
|
| 273 |
+
* :meth:`~pathlib.Path.rename`
|
| 274 |
+
* :meth:`~pathlib.Path.replace`
|
| 275 |
+
* :meth:`~pathlib.Path.resolve`
|
| 276 |
+
* :meth:`~pathlib.Path.rmdir`
|
| 277 |
+
* :meth:`~pathlib.Path.samefile`
|
| 278 |
+
* :meth:`~pathlib.Path.stat`
|
| 279 |
+
* :meth:`~pathlib.Path.symlink_to`
|
| 280 |
+
* :meth:`~pathlib.Path.touch`
|
| 281 |
+
* :meth:`~pathlib.Path.unlink`
|
| 282 |
+
* :meth:`~pathlib.Path.walk`
|
| 283 |
+
* :meth:`~pathlib.Path.write_bytes`
|
| 284 |
+
* :meth:`~pathlib.Path.write_text`
|
| 285 |
+
|
| 286 |
+
Additionally, the following methods return an async iterator yielding
|
| 287 |
+
:class:`~.Path` objects:
|
| 288 |
+
|
| 289 |
+
* :meth:`~pathlib.Path.glob`
|
| 290 |
+
* :meth:`~pathlib.Path.iterdir`
|
| 291 |
+
* :meth:`~pathlib.Path.rglob`
|
| 292 |
+
"""
|
| 293 |
+
|
| 294 |
+
__slots__ = "_path", "__weakref__"
|
| 295 |
+
|
| 296 |
+
__weakref__: Any
|
| 297 |
+
|
| 298 |
+
def __init__(self, *args: str | PathLike[str]) -> None:
|
| 299 |
+
self._path: Final[pathlib.Path] = pathlib.Path(*args)
|
| 300 |
+
|
| 301 |
+
def __fspath__(self) -> str:
|
| 302 |
+
return self._path.__fspath__()
|
| 303 |
+
|
| 304 |
+
def __str__(self) -> str:
|
| 305 |
+
return self._path.__str__()
|
| 306 |
+
|
| 307 |
+
def __repr__(self) -> str:
|
| 308 |
+
return f"{self.__class__.__name__}({self.as_posix()!r})"
|
| 309 |
+
|
| 310 |
+
def __bytes__(self) -> bytes:
|
| 311 |
+
return self._path.__bytes__()
|
| 312 |
+
|
| 313 |
+
def __hash__(self) -> int:
|
| 314 |
+
return self._path.__hash__()
|
| 315 |
+
|
| 316 |
+
def __eq__(self, other: object) -> bool:
|
| 317 |
+
target = other._path if isinstance(other, Path) else other
|
| 318 |
+
return self._path.__eq__(target)
|
| 319 |
+
|
| 320 |
+
def __lt__(self, other: pathlib.PurePath | Path) -> bool:
|
| 321 |
+
target = other._path if isinstance(other, Path) else other
|
| 322 |
+
return self._path.__lt__(target)
|
| 323 |
+
|
| 324 |
+
def __le__(self, other: pathlib.PurePath | Path) -> bool:
|
| 325 |
+
target = other._path if isinstance(other, Path) else other
|
| 326 |
+
return self._path.__le__(target)
|
| 327 |
+
|
| 328 |
+
def __gt__(self, other: pathlib.PurePath | Path) -> bool:
|
| 329 |
+
target = other._path if isinstance(other, Path) else other
|
| 330 |
+
return self._path.__gt__(target)
|
| 331 |
+
|
| 332 |
+
def __ge__(self, other: pathlib.PurePath | Path) -> bool:
|
| 333 |
+
target = other._path if isinstance(other, Path) else other
|
| 334 |
+
return self._path.__ge__(target)
|
| 335 |
+
|
| 336 |
+
def __truediv__(self, other: str | PathLike[str]) -> Path:
|
| 337 |
+
return Path(self._path / other)
|
| 338 |
+
|
| 339 |
+
def __rtruediv__(self, other: str | PathLike[str]) -> Path:
|
| 340 |
+
return Path(other) / self
|
| 341 |
+
|
| 342 |
+
@property
|
| 343 |
+
def parts(self) -> tuple[str, ...]:
|
| 344 |
+
return self._path.parts
|
| 345 |
+
|
| 346 |
+
@property
|
| 347 |
+
def drive(self) -> str:
|
| 348 |
+
return self._path.drive
|
| 349 |
+
|
| 350 |
+
@property
|
| 351 |
+
def root(self) -> str:
|
| 352 |
+
return self._path.root
|
| 353 |
+
|
| 354 |
+
@property
|
| 355 |
+
def anchor(self) -> str:
|
| 356 |
+
return self._path.anchor
|
| 357 |
+
|
| 358 |
+
@property
|
| 359 |
+
def parents(self) -> Sequence[Path]:
|
| 360 |
+
return tuple(Path(p) for p in self._path.parents)
|
| 361 |
+
|
| 362 |
+
@property
|
| 363 |
+
def parent(self) -> Path:
|
| 364 |
+
return Path(self._path.parent)
|
| 365 |
+
|
| 366 |
+
@property
|
| 367 |
+
def name(self) -> str:
|
| 368 |
+
return self._path.name
|
| 369 |
+
|
| 370 |
+
@property
|
| 371 |
+
def suffix(self) -> str:
|
| 372 |
+
return self._path.suffix
|
| 373 |
+
|
| 374 |
+
@property
|
| 375 |
+
def suffixes(self) -> list[str]:
|
| 376 |
+
return self._path.suffixes
|
| 377 |
+
|
| 378 |
+
@property
|
| 379 |
+
def stem(self) -> str:
|
| 380 |
+
return self._path.stem
|
| 381 |
+
|
| 382 |
+
async def absolute(self) -> Path:
|
| 383 |
+
path = await to_thread.run_sync(self._path.absolute)
|
| 384 |
+
return Path(path)
|
| 385 |
+
|
| 386 |
+
def as_posix(self) -> str:
|
| 387 |
+
return self._path.as_posix()
|
| 388 |
+
|
| 389 |
+
def as_uri(self) -> str:
|
| 390 |
+
return self._path.as_uri()
|
| 391 |
+
|
| 392 |
+
if sys.version_info >= (3, 13):
|
| 393 |
+
parser: ClassVar[ModuleType] = pathlib.Path.parser
|
| 394 |
+
|
| 395 |
+
@classmethod
|
| 396 |
+
def from_uri(cls, uri: str) -> Path:
|
| 397 |
+
return Path(pathlib.Path.from_uri(uri))
|
| 398 |
+
|
| 399 |
+
def full_match(
|
| 400 |
+
self, path_pattern: str, *, case_sensitive: bool | None = None
|
| 401 |
+
) -> bool:
|
| 402 |
+
return self._path.full_match(path_pattern, case_sensitive=case_sensitive)
|
| 403 |
+
|
| 404 |
+
def match(
|
| 405 |
+
self, path_pattern: str, *, case_sensitive: bool | None = None
|
| 406 |
+
) -> bool:
|
| 407 |
+
return self._path.match(path_pattern, case_sensitive=case_sensitive)
|
| 408 |
+
else:
|
| 409 |
+
|
| 410 |
+
def match(self, path_pattern: str) -> bool:
|
| 411 |
+
return self._path.match(path_pattern)
|
| 412 |
+
|
| 413 |
+
if sys.version_info >= (3, 14):
|
| 414 |
+
|
| 415 |
+
@property
|
| 416 |
+
def info(self) -> Any: # TODO: add return type annotation when Typeshed gets it
|
| 417 |
+
return self._path.info
|
| 418 |
+
|
| 419 |
+
async def copy(
|
| 420 |
+
self,
|
| 421 |
+
target: str | os.PathLike[str],
|
| 422 |
+
*,
|
| 423 |
+
follow_symlinks: bool = True,
|
| 424 |
+
preserve_metadata: bool = False,
|
| 425 |
+
) -> Path:
|
| 426 |
+
func = partial(
|
| 427 |
+
self._path.copy,
|
| 428 |
+
follow_symlinks=follow_symlinks,
|
| 429 |
+
preserve_metadata=preserve_metadata,
|
| 430 |
+
)
|
| 431 |
+
return Path(await to_thread.run_sync(func, pathlib.Path(target)))
|
| 432 |
+
|
| 433 |
+
async def copy_into(
|
| 434 |
+
self,
|
| 435 |
+
target_dir: str | os.PathLike[str],
|
| 436 |
+
*,
|
| 437 |
+
follow_symlinks: bool = True,
|
| 438 |
+
preserve_metadata: bool = False,
|
| 439 |
+
) -> Path:
|
| 440 |
+
func = partial(
|
| 441 |
+
self._path.copy_into,
|
| 442 |
+
follow_symlinks=follow_symlinks,
|
| 443 |
+
preserve_metadata=preserve_metadata,
|
| 444 |
+
)
|
| 445 |
+
return Path(await to_thread.run_sync(func, pathlib.Path(target_dir)))
|
| 446 |
+
|
| 447 |
+
async def move(self, target: str | os.PathLike[str]) -> Path:
|
| 448 |
+
# Upstream does not handle anyio.Path properly as a PathLike
|
| 449 |
+
target = pathlib.Path(target)
|
| 450 |
+
return Path(await to_thread.run_sync(self._path.move, target))
|
| 451 |
+
|
| 452 |
+
async def move_into(
|
| 453 |
+
self,
|
| 454 |
+
target_dir: str | os.PathLike[str],
|
| 455 |
+
) -> Path:
|
| 456 |
+
return Path(await to_thread.run_sync(self._path.move_into, target_dir))
|
| 457 |
+
|
| 458 |
+
def is_relative_to(self, other: str | PathLike[str]) -> bool:
|
| 459 |
+
try:
|
| 460 |
+
self.relative_to(other)
|
| 461 |
+
return True
|
| 462 |
+
except ValueError:
|
| 463 |
+
return False
|
| 464 |
+
|
| 465 |
+
async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None:
|
| 466 |
+
func = partial(os.chmod, follow_symlinks=follow_symlinks)
|
| 467 |
+
return await to_thread.run_sync(func, self._path, mode)
|
| 468 |
+
|
| 469 |
+
@classmethod
|
| 470 |
+
async def cwd(cls) -> Path:
|
| 471 |
+
path = await to_thread.run_sync(pathlib.Path.cwd)
|
| 472 |
+
return cls(path)
|
| 473 |
+
|
| 474 |
+
async def exists(self) -> bool:
|
| 475 |
+
return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True)
|
| 476 |
+
|
| 477 |
+
async def expanduser(self) -> Path:
|
| 478 |
+
return Path(
|
| 479 |
+
await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True)
|
| 480 |
+
)
|
| 481 |
+
|
| 482 |
+
def glob(self, pattern: str) -> AsyncIterator[Path]:
|
| 483 |
+
gen = self._path.glob(pattern)
|
| 484 |
+
return _PathIterator(gen)
|
| 485 |
+
|
| 486 |
+
async def group(self) -> str:
|
| 487 |
+
return await to_thread.run_sync(self._path.group, abandon_on_cancel=True)
|
| 488 |
+
|
| 489 |
+
async def hardlink_to(
|
| 490 |
+
self, target: str | bytes | PathLike[str] | PathLike[bytes]
|
| 491 |
+
) -> None:
|
| 492 |
+
if isinstance(target, Path):
|
| 493 |
+
target = target._path
|
| 494 |
+
|
| 495 |
+
await to_thread.run_sync(os.link, target, self)
|
| 496 |
+
|
| 497 |
+
@classmethod
|
| 498 |
+
async def home(cls) -> Path:
|
| 499 |
+
home_path = await to_thread.run_sync(pathlib.Path.home)
|
| 500 |
+
return cls(home_path)
|
| 501 |
+
|
| 502 |
+
def is_absolute(self) -> bool:
|
| 503 |
+
return self._path.is_absolute()
|
| 504 |
+
|
| 505 |
+
async def is_block_device(self) -> bool:
|
| 506 |
+
return await to_thread.run_sync(
|
| 507 |
+
self._path.is_block_device, abandon_on_cancel=True
|
| 508 |
+
)
|
| 509 |
+
|
| 510 |
+
async def is_char_device(self) -> bool:
|
| 511 |
+
return await to_thread.run_sync(
|
| 512 |
+
self._path.is_char_device, abandon_on_cancel=True
|
| 513 |
+
)
|
| 514 |
+
|
| 515 |
+
async def is_dir(self) -> bool:
|
| 516 |
+
return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True)
|
| 517 |
+
|
| 518 |
+
async def is_fifo(self) -> bool:
|
| 519 |
+
return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True)
|
| 520 |
+
|
| 521 |
+
async def is_file(self) -> bool:
|
| 522 |
+
return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True)
|
| 523 |
+
|
| 524 |
+
    if sys.version_info >= (3, 12):
        # pathlib.Path.is_junction() only exists on Python 3.12+

        async def is_junction(self) -> bool:
            """Return True if this path points to a Windows junction (3.12+ only)."""
            return await to_thread.run_sync(self._path.is_junction)

    async def is_mount(self) -> bool:
        """Return True if this path is a mount point (``os.path.ismount`` in a thread)."""
        return await to_thread.run_sync(
            os.path.ismount, self._path, abandon_on_cancel=True
        )

    def is_reserved(self) -> bool:
        """Return True if the path is reserved (purely computational, no thread needed)."""
        return self._path.is_reserved()

    async def is_socket(self) -> bool:
        """Return True if this path points to a UNIX socket."""
        return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True)

    async def is_symlink(self) -> bool:
        """Return True if this path points to a symbolic link."""
        return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True)

    async def iterdir(self) -> AsyncIterator[Path]:
        """Asynchronously yield this directory's entries as async Paths."""
        # NOTE: on 3.13+ iterdir() itself does filesystem work at call time, so the
        # call is made in a worker thread there; on older versions it is lazy
        gen = (
            self._path.iterdir()
            if sys.version_info < (3, 13)
            else await to_thread.run_sync(self._path.iterdir, abandon_on_cancel=True)
        )
        async for path in _PathIterator(gen):
            yield path

    def joinpath(self, *args: str | PathLike[str]) -> Path:
        """Combine this path with the given segments (no filesystem access)."""
        return Path(self._path.joinpath(*args))

    async def lchmod(self, mode: int) -> None:
        """Change the mode of the path itself rather than its target (if a symlink)."""
        await to_thread.run_sync(self._path.lchmod, mode)

    async def lstat(self) -> os.stat_result:
        """Like :meth:`stat`, but does not follow symlinks."""
        return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True)

    async def mkdir(
        self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False
    ) -> None:
        """Create a directory at this path (``pathlib.Path.mkdir`` in a worker thread)."""
        await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok)
|
| 565 |
+
|
| 566 |
+
    @overload
    async def open(
        self,
        mode: OpenBinaryMode,
        buffering: int = ...,
        encoding: str | None = ...,
        errors: str | None = ...,
        newline: str | None = ...,
    ) -> AsyncFile[bytes]: ...

    @overload
    async def open(
        self,
        mode: OpenTextMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        errors: str | None = ...,
        newline: str | None = ...,
    ) -> AsyncFile[str]: ...

    async def open(
        self,
        mode: str = "r",
        buffering: int = -1,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> AsyncFile[Any]:
        """Open the file and wrap it in an :class:`AsyncFile`.

        The potentially blocking open call itself runs in a worker thread.
        """
        fp = await to_thread.run_sync(
            self._path.open, mode, buffering, encoding, errors, newline
        )
        return AsyncFile(fp)

    async def owner(self) -> str:
        """Return the user name of the file's owner."""
        return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True)

    async def read_bytes(self) -> bytes:
        """Read the entire file as bytes in a worker thread."""
        return await to_thread.run_sync(self._path.read_bytes)

    async def read_text(
        self, encoding: str | None = None, errors: str | None = None
    ) -> str:
        """Read the entire file as text in a worker thread."""
        return await to_thread.run_sync(self._path.read_text, encoding, errors)
|
| 609 |
+
|
| 610 |
+
    if sys.version_info >= (3, 12):
        # Python 3.12 added the walk_up keyword to pathlib.Path.relative_to()

        def relative_to(
            self, *other: str | PathLike[str], walk_up: bool = False
        ) -> Path:
            """Return this path relative to *other* (no filesystem access)."""
            # relative_to() should work with any PathLike but it doesn't
            others = [pathlib.Path(other) for other in other]
            return Path(self._path.relative_to(*others, walk_up=walk_up))

    else:

        def relative_to(self, *other: str | PathLike[str]) -> Path:
            """Return this path relative to *other* (no filesystem access)."""
            return Path(self._path.relative_to(*other))
|
| 623 |
+
|
| 624 |
+
async def readlink(self) -> Path:
|
| 625 |
+
target = await to_thread.run_sync(os.readlink, self._path)
|
| 626 |
+
return Path(target)
|
| 627 |
+
|
| 628 |
+
    async def rename(self, target: str | pathlib.PurePath | Path) -> Path:
        """Rename this file or directory to *target* and return the new path."""
        # Unwrap an async Path so pathlib gets a plain path object
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.rename, target)
        return Path(target)

    async def replace(self, target: str | pathlib.PurePath | Path) -> Path:
        """Like :meth:`rename`, but overwrite *target* if it already exists."""
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.replace, target)
        return Path(target)

    async def resolve(self, strict: bool = False) -> Path:
        """Resolve symlinks and make the path absolute, in a worker thread."""
        # partial() is needed because run_sync() passes only positional arguments
        func = partial(self._path.resolve, strict=strict)
        return Path(await to_thread.run_sync(func, abandon_on_cancel=True))

    def rglob(self, pattern: str) -> AsyncIterator[Path]:
        """Recursive :meth:`glob`: iterate matches anywhere under this path."""
        gen = self._path.rglob(pattern)
        return _PathIterator(gen)
|
| 649 |
+
|
| 650 |
+
    async def rmdir(self) -> None:
        """Remove this (empty) directory."""
        await to_thread.run_sync(self._path.rmdir)

    async def samefile(self, other_path: str | PathLike[str]) -> bool:
        """Return True if this path and *other_path* refer to the same file."""
        if isinstance(other_path, Path):
            other_path = other_path._path

        return await to_thread.run_sync(
            self._path.samefile, other_path, abandon_on_cancel=True
        )

    async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result:
        """Return filesystem metadata for this path (``os.stat`` in a worker thread)."""
        # partial() is needed because run_sync() passes only positional arguments
        func = partial(os.stat, follow_symlinks=follow_symlinks)
        return await to_thread.run_sync(func, self._path, abandon_on_cancel=True)

    async def symlink_to(
        self,
        target: str | bytes | PathLike[str] | PathLike[bytes],
        target_is_directory: bool = False,
    ) -> None:
        """Make this path a symbolic link pointing to *target*."""
        if isinstance(target, Path):
            target = target._path

        await to_thread.run_sync(self._path.symlink_to, target, target_is_directory)
|
| 674 |
+
|
| 675 |
+
    async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None:
        """Create the file if it doesn't exist, or update its modification time."""
        await to_thread.run_sync(self._path.touch, mode, exist_ok)

    async def unlink(self, missing_ok: bool = False) -> None:
        """Remove this file or symlink; ``missing_ok=True`` suppresses FileNotFoundError."""
        try:
            await to_thread.run_sync(self._path.unlink)
        except FileNotFoundError:
            if not missing_ok:
                raise

    if sys.version_info >= (3, 12):
        # pathlib.Path.walk() only exists on Python 3.12+

        async def walk(
            self,
            top_down: bool = True,
            on_error: Callable[[OSError], object] | None = None,
            follow_symlinks: bool = False,
        ) -> AsyncIterator[tuple[Path, list[str], list[str]]]:
            """Async version of :meth:`pathlib.Path.walk`.

            Each step of the underlying generator is advanced in a worker
            thread, and (root, dirs, files) triples are yielded with the root
            wrapped in an async Path.
            """

            def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None:
                # Runs in a worker thread; None signals exhaustion because
                # StopIteration cannot usefully cross the thread boundary
                try:
                    return next(gen)
                except StopIteration:
                    return None

            gen = self._path.walk(top_down, on_error, follow_symlinks)
            while True:
                value = await to_thread.run_sync(get_next_value)
                if value is None:
                    return

                root, dirs, paths = value
                yield Path(root), dirs, paths
|
| 707 |
+
|
| 708 |
+
    def with_name(self, name: str) -> Path:
        """Return a new path with the final component replaced by *name*."""
        return Path(self._path.with_name(name))

    def with_stem(self, stem: str) -> Path:
        """Return a new path with the stem replaced but the suffix preserved."""
        # Implemented via with_name(), presumably for compatibility with Pythons
        # that lack Path.with_stem() — confirm before changing
        return Path(self._path.with_name(stem + self._path.suffix))

    def with_suffix(self, suffix: str) -> Path:
        """Return a new path with the suffix replaced by *suffix*."""
        return Path(self._path.with_suffix(suffix))

    def with_segments(self, *pathsegments: str | PathLike[str]) -> Path:
        """Construct a new async Path from the given segments."""
        return Path(*pathsegments)

    async def write_bytes(self, data: bytes) -> int:
        """Write *data* to the file, returning the number of bytes written."""
        return await to_thread.run_sync(self._path.write_bytes, data)

    async def write_text(
        self,
        data: str,
        encoding: str | None = None,
        errors: str | None = None,
        newline: str | None = None,
    ) -> int:
        """Write *data* as text, returning the number of characters written."""

        # Path.write_text() does not support the "newline" parameter before Python 3.10
        def sync_write_text() -> int:
            with self._path.open(
                "w", encoding=encoding, errors=errors, newline=newline
            ) as fp:
                return fp.write(data)

        return await to_thread.run_sync(sync_write_text)
|
| 738 |
+
|
| 739 |
+
|
| 740 |
+
PathLike.register(Path)
|
venv/lib/python3.10/site-packages/anyio/_core/_resources.py
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from ..abc import AsyncResource
|
| 4 |
+
from ._tasks import CancelScope
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
async def aclose_forcefully(resource: AsyncResource) -> None:
    """
    Close an asynchronous resource in a cancelled scope.

    Doing this closes the resource without waiting on anything.

    :param resource: the resource to close

    """
    # Cancel the scope before awaiting so that aclose() runs inside an already
    # cancelled scope and therefore cannot wait on anything
    with CancelScope() as scope:
        scope.cancel()
        await resource.aclose()
|
venv/lib/python3.10/site-packages/anyio/_core/_signals.py
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from collections.abc import AsyncIterator
|
| 4 |
+
from contextlib import AbstractContextManager
|
| 5 |
+
from signal import Signals
|
| 6 |
+
|
| 7 |
+
from ._eventloop import get_async_backend
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def open_signal_receiver(
    *signals: Signals,
) -> AbstractContextManager[AsyncIterator[Signals]]:
    """
    Start receiving operating system signals.

    :param signals: signals to receive (e.g. ``signal.SIGINT``)
    :return: an asynchronous context manager for an asynchronous iterator which yields
        signal numbers

    .. warning:: Windows does not support signals natively so it is best to avoid
        relying on this in cross-platform applications.

    .. warning:: On asyncio, this permanently replaces any previous signal handler for
        the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`.

    """
    # Delegates entirely to the currently running async backend's implementation
    return get_async_backend().open_signal_receiver(*signals)
|
venv/lib/python3.10/site-packages/anyio/_core/_sockets.py
ADDED
|
@@ -0,0 +1,934 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import errno
|
| 4 |
+
import os
|
| 5 |
+
import socket
|
| 6 |
+
import ssl
|
| 7 |
+
import stat
|
| 8 |
+
import sys
|
| 9 |
+
from collections.abc import Awaitable
|
| 10 |
+
from dataclasses import dataclass
|
| 11 |
+
from ipaddress import IPv4Address, IPv6Address, ip_address
|
| 12 |
+
from os import PathLike, chmod
|
| 13 |
+
from socket import AddressFamily, SocketKind
|
| 14 |
+
from typing import TYPE_CHECKING, Any, Literal, cast, overload
|
| 15 |
+
|
| 16 |
+
from .. import ConnectionFailed, to_thread
|
| 17 |
+
from ..abc import (
|
| 18 |
+
ByteStreamConnectable,
|
| 19 |
+
ConnectedUDPSocket,
|
| 20 |
+
ConnectedUNIXDatagramSocket,
|
| 21 |
+
IPAddressType,
|
| 22 |
+
IPSockAddrType,
|
| 23 |
+
SocketListener,
|
| 24 |
+
SocketStream,
|
| 25 |
+
UDPSocket,
|
| 26 |
+
UNIXDatagramSocket,
|
| 27 |
+
UNIXSocketStream,
|
| 28 |
+
)
|
| 29 |
+
from ..streams.stapled import MultiListener
|
| 30 |
+
from ..streams.tls import TLSConnectable, TLSStream
|
| 31 |
+
from ._eventloop import get_async_backend
|
| 32 |
+
from ._resources import aclose_forcefully
|
| 33 |
+
from ._synchronization import Event
|
| 34 |
+
from ._tasks import create_task_group, move_on_after
|
| 35 |
+
|
| 36 |
+
if TYPE_CHECKING:
|
| 37 |
+
from _typeshed import FileDescriptorLike
|
| 38 |
+
else:
|
| 39 |
+
FileDescriptorLike = object
|
| 40 |
+
|
| 41 |
+
if sys.version_info < (3, 11):
|
| 42 |
+
from exceptiongroup import ExceptionGroup
|
| 43 |
+
|
| 44 |
+
if sys.version_info >= (3, 12):
|
| 45 |
+
from typing import override
|
| 46 |
+
else:
|
| 47 |
+
from typing_extensions import override
|
| 48 |
+
|
| 49 |
+
if sys.version_info < (3, 13):
|
| 50 |
+
from typing_extensions import deprecated
|
| 51 |
+
else:
|
| 52 |
+
from warnings import deprecated
|
| 53 |
+
|
| 54 |
+
IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515
|
| 55 |
+
|
| 56 |
+
AnyIPAddressFamily = Literal[
|
| 57 |
+
AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6
|
| 58 |
+
]
|
| 59 |
+
IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6]
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# tls_hostname given
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str,
    happy_eyeballs_delay: float = ...,
) -> TLSStream: ...


# ssl_context given
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    ssl_context: ssl.SSLContext,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> TLSStream: ...


# tls=True
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    tls: Literal[True],
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> TLSStream: ...


# tls=False
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    tls: Literal[False],
    ssl_context: ssl.SSLContext | None = ...,
    tls_standard_compatible: bool = ...,
    tls_hostname: str | None = ...,
    happy_eyeballs_delay: float = ...,
) -> SocketStream: ...


# No TLS arguments
@overload
async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = ...,
    happy_eyeballs_delay: float = ...,
) -> SocketStream: ...


async def connect_tcp(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    local_host: IPAddressType | None = None,
    tls: bool = False,
    ssl_context: ssl.SSLContext | None = None,
    tls_standard_compatible: bool = True,
    tls_hostname: str | None = None,
    happy_eyeballs_delay: float = 0.25,
) -> SocketStream | TLSStream:
    """
    Connect to a host using the TCP protocol.

    This function implements the stateless version of the Happy Eyeballs algorithm (RFC
    6555). If ``remote_host`` is a host name that resolves to multiple IP addresses,
    each one is tried until one connection attempt succeeds. If the first attempt is
    not connected within 250 milliseconds, a second attempt is started using the next
    address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if
    available) is tried first.

    When the connection has been established, a TLS handshake will be done if either
    ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``.

    :param remote_host: the IP address or host name to connect to
    :param remote_port: port on the target host to connect to
    :param local_host: the interface address or name to bind the socket to before
        connecting
    :param tls: ``True`` to do a TLS handshake with the connected stream and return a
        :class:`~anyio.streams.tls.TLSStream` instead
    :param ssl_context: the SSL context object to use (if omitted, a default context is
        created)
    :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake
        before closing the stream and requires that the server does this as well.
        Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream.
        Some protocols, such as HTTP, require this option to be ``False``.
        See :meth:`~ssl.SSLContext.wrap_socket` for details.
    :param tls_hostname: host name to check the server certificate against (defaults to
        the value of ``remote_host``)
    :param happy_eyeballs_delay: delay (in seconds) before starting the next connection
        attempt
    :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream
    :raises ConnectionFailed: if the connection fails

    """
    # Placed here due to https://github.com/python/mypy/issues/7057
    connected_stream: SocketStream | None = None

    async def try_connect(remote_host: str, event: Event) -> None:
        # One Happy Eyeballs attempt: the first attempt to succeed wins and
        # cancels the rest; later winners close their redundant stream
        nonlocal connected_stream
        try:
            stream = await asynclib.connect_tcp(remote_host, remote_port, local_address)
        except OSError as exc:
            oserrors.append(exc)
            return
        else:
            if connected_stream is None:
                connected_stream = stream
                tg.cancel_scope.cancel()
            else:
                await stream.aclose()
        finally:
            event.set()

    asynclib = get_async_backend()
    local_address: IPSockAddrType | None = None
    family = socket.AF_UNSPEC
    if local_host:
        gai_res = await getaddrinfo(str(local_host), None)
        family, *_, local_address = gai_res[0]

    target_host = str(remote_host)
    try:
        addr_obj = ip_address(remote_host)
    except ValueError:
        addr_obj = None

    if addr_obj is not None:
        # A literal IP address was given: no name resolution needed
        if isinstance(addr_obj, IPv6Address):
            target_addrs = [(socket.AF_INET6, addr_obj.compressed)]
        else:
            target_addrs = [(socket.AF_INET, addr_obj.compressed)]
    else:
        # getaddrinfo() will raise an exception if name resolution fails
        gai_res = await getaddrinfo(
            target_host, remote_port, family=family, type=socket.SOCK_STREAM
        )

        # Organize the list so that the first address is an IPv6 address (if available)
        # and the second one is an IPv4 addresses. The rest can be in whatever order.
        v6_found = v4_found = False
        target_addrs = []
        for af, *_, sa in gai_res:
            if af == socket.AF_INET6 and not v6_found:
                v6_found = True
                target_addrs.insert(0, (af, sa[0]))
            elif af == socket.AF_INET and not v4_found and v6_found:
                v4_found = True
                target_addrs.insert(1, (af, sa[0]))
            else:
                target_addrs.append((af, sa[0]))

    oserrors: list[OSError] = []
    try:
        async with create_task_group() as tg:
            # Stagger the attempts: start the next one only after the previous
            # attempt has either settled or happy_eyeballs_delay has elapsed
            for _af, addr in target_addrs:
                event = Event()
                tg.start_soon(try_connect, addr, event)
                with move_on_after(happy_eyeballs_delay):
                    await event.wait()

        if connected_stream is None:
            cause = (
                oserrors[0]
                if len(oserrors) == 1
                else ExceptionGroup("multiple connection attempts failed", oserrors)
            )
            # NOTE(review): the docstring documents ConnectionFailed, but this
            # raises a plain OSError — confirm ConnectionFailed derives from
            # OSError or that callers catch the right type
            raise OSError("All connection attempts failed") from cause
    finally:
        oserrors.clear()

    if tls or tls_hostname or ssl_context:
        try:
            return await TLSStream.wrap(
                connected_stream,
                server_side=False,
                hostname=tls_hostname or str(remote_host),
                ssl_context=ssl_context,
                standard_compatible=tls_standard_compatible,
            )
        except BaseException:
            # Don't leak the raw TCP stream if the TLS handshake fails
            await aclose_forcefully(connected_stream)
            raise

    return connected_stream
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream:
    """
    Connect to the given UNIX socket.

    Not available on Windows.

    :param path: path to the socket
    :return: a socket stream object
    :raises ConnectionFailed: if the connection fails

    """
    # Normalize PathLike objects to str/bytes before handing off to the backend
    path = os.fspath(path)
    return await get_async_backend().connect_unix(path)
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
async def create_tcp_listener(
    *,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC,
    backlog: int = 65536,
    reuse_port: bool = False,
) -> MultiListener[SocketStream]:
    """
    Create a TCP socket listener.

    :param local_port: port number to listen on
    :param local_host: IP address of the interface to listen on. If omitted, listen on
        all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address
        family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6.
    :param family: address family (used if ``local_host`` was omitted)
    :param backlog: maximum number of queued incoming connections (up to a maximum of
        2**16, or 65536)
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a multi-listener object containing one or more socket listeners

    """
    asynclib = get_async_backend()
    backlog = min(backlog, 65536)
    local_host = str(local_host) if local_host is not None else None
    # One getaddrinfo() call may yield several (family, sockaddr) combinations;
    # a separate listening socket is created for each one
    gai_res = await getaddrinfo(
        local_host,
        local_port,
        family=family,
        type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0,
        flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
    )
    listeners: list[SocketListener] = []
    try:
        # The set() is here to work around a glibc bug:
        # https://sourceware.org/bugzilla/show_bug.cgi?id=14969
        sockaddr: tuple[str, int] | tuple[str, int, int, int]
        for fam, kind, *_, sockaddr in sorted(set(gai_res)):
            # Workaround for an uvloop bug where we don't get the correct scope ID for
            # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to
            # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539
            if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM:
                continue

            raw_socket = socket.socket(fam)
            raw_socket.setblocking(False)

            # For Windows, enable exclusive address use. For others, enable address
            # reuse.
            if sys.platform == "win32":
                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1)
            else:
                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            if reuse_port:
                raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)

            # If only IPv6 was requested, disable dual stack operation
            if fam == socket.AF_INET6:
                raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1)

            # Workaround for #554
            if "%" in sockaddr[0]:
                addr, scope_id = sockaddr[0].split("%", 1)
                sockaddr = (addr, sockaddr[1], 0, int(scope_id))

            raw_socket.bind(sockaddr)
            raw_socket.listen(backlog)
            listener = asynclib.create_tcp_listener(raw_socket)
            listeners.append(listener)
    except BaseException:
        # Don't leak already-created listeners if a later bind/listen fails
        for listener in listeners:
            await listener.aclose()

        raise

    return MultiListener(listeners)
|
| 361 |
+
|
| 362 |
+
|
| 363 |
+
async def create_unix_listener(
    path: str | bytes | PathLike[Any],
    *,
    mode: int | None = None,
    backlog: int = 65536,
) -> SocketListener:
    """
    Create a UNIX socket listener.

    Not available on Windows.

    :param path: path of the socket
    :param mode: permissions to set on the socket
    :param backlog: maximum number of queued incoming connections (up to a maximum of
        2**16, or 65536)
    :return: a listener object

    .. versionchanged:: 3.0
        If a socket already exists on the file system in the given path, it will be
        removed first.

    """
    backlog = min(backlog, 65536)
    # setup_unix_local_socket() creates + binds the raw socket (and sets the mode)
    raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM)
    try:
        raw_socket.listen(backlog)
        return get_async_backend().create_unix_listener(raw_socket)
    except BaseException:
        # Don't leak the bound socket if listen() or listener creation fails
        raw_socket.close()
        raise
|
| 393 |
+
|
| 394 |
+
|
| 395 |
+
async def create_udp_socket(
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    *,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    reuse_port: bool = False,
) -> UDPSocket:
    """
    Create an unconnected UDP socket.

    When ``local_port`` is given, the socket is bound to that port on the local
    machine, which makes it suitable for providing UDP based services.

    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
        determined from ``local_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a UDP socket

    """
    if family is AddressFamily.AF_UNSPEC and not local_host:
        raise ValueError('Either "family" or "local_host" must be given')

    if local_host:
        # Resolving the local host also pins down the address family
        addrinfo = await getaddrinfo(
            str(local_host),
            local_port,
            family=family,
            type=socket.SOCK_DGRAM,
            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
        )
        family = cast(AnyIPAddressFamily, addrinfo[0][0])
        bind_address = addrinfo[0][-1]
    else:
        # No local host given: use the wildcard address for the chosen family
        bind_address = (
            ("::", 0) if family is AddressFamily.AF_INET6 else ("0.0.0.0", 0)
        )

    backend_sock = await get_async_backend().create_udp_socket(
        family, bind_address, None, reuse_port
    )
    return cast(UDPSocket, backend_sock)
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
async def create_connected_udp_socket(
    remote_host: IPAddressType,
    remote_port: int,
    *,
    family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC,
    local_host: IPAddressType | None = None,
    local_port: int = 0,
    reuse_port: bool = False,
) -> ConnectedUDPSocket:
    """
    Create a connected UDP socket.

    Connected UDP sockets can only communicate with the specified remote host/port, and
    any packets sent from other sources are dropped.

    :param remote_host: remote host to set as the default target
    :param remote_port: port on the remote host to set as the default target
    :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically
        determined from ``local_host`` or ``remote_host`` if omitted
    :param local_host: IP address or host name of the local interface to bind to
    :param local_port: local port to bind to
    :param reuse_port: ``True`` to allow multiple sockets to bind to the same
        address/port (not supported on Windows)
    :return: a connected UDP socket

    """
    local_address = None
    if local_host:
        # Resolving the local host also narrows down the address family
        gai_res = await getaddrinfo(
            str(local_host),
            local_port,
            family=family,
            type=socket.SOCK_DGRAM,
            flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG,
        )
        family = cast(AnyIPAddressFamily, gai_res[0][0])
        local_address = gai_res[0][-1]

    # Resolve the remote address using the (possibly narrowed) family
    gai_res = await getaddrinfo(
        str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM
    )
    family = cast(AnyIPAddressFamily, gai_res[0][0])
    remote_address = gai_res[0][-1]

    sock = await get_async_backend().create_udp_socket(
        family, local_address, remote_address, reuse_port
    )
    return cast(ConnectedUDPSocket, sock)
|
| 489 |
+
|
| 490 |
+
|
| 491 |
+
async def create_unix_datagram_socket(
    *,
    local_path: None | str | bytes | PathLike[Any] = None,
    local_mode: int | None = None,
) -> UNIXDatagramSocket:
    """
    Create a UNIX datagram socket.

    Not available on Windows.

    When ``local_path`` is given, the socket is bound to that path so that other
    processes can send datagrams to it; without a ``local_path``, the socket can
    only send. Any stale socket file already present at ``local_path`` is removed
    before binding.

    :param local_path: the path on which to bind to
    :param local_mode: permissions to set on the local socket
    :return: a UNIX datagram socket

    """
    bound_socket = await setup_unix_local_socket(
        local_path, local_mode, socket.SOCK_DGRAM
    )
    backend = get_async_backend()
    return await backend.create_unix_datagram_socket(bound_socket, None)
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
async def create_connected_unix_datagram_socket(
    remote_path: str | bytes | PathLike[Any],
    *,
    local_path: None | str | bytes | PathLike[Any] = None,
    local_mode: int | None = None,
) -> ConnectedUNIXDatagramSocket:
    """
    Create a connected UNIX datagram socket.

    A connected datagram socket exchanges datagrams with the given remote path
    only.

    When ``local_path`` is given, the socket is bound to that path so that other
    processes can send datagrams to it; without a ``local_path``, the socket can
    only send. Any stale socket file already present at ``local_path`` is removed
    before binding.

    :param remote_path: the path to set as the default target
    :param local_path: the path on which to bind to
    :param local_mode: permissions to set on the local socket
    :return: a connected UNIX datagram socket

    """
    target_path = os.fspath(remote_path)
    bound_socket = await setup_unix_local_socket(
        local_path, local_mode, socket.SOCK_DGRAM
    )
    backend = get_async_backend()
    return await backend.create_unix_datagram_socket(bound_socket, target_path)
|
| 550 |
+
|
| 551 |
+
|
| 552 |
+
async def getaddrinfo(
    host: bytes | str | None,
    port: str | int | None,
    *,
    family: int | AddressFamily = 0,
    type: int | SocketKind = 0,
    proto: int = 0,
    flags: int = 0,
) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]:
    """
    Look up a numeric IP address given a host name.

    Internationalized domain names are translated according to the (non-transitional)
    IDNA 2008 standard.

    .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of
        (host, port), unlike what :func:`socket.getaddrinfo` does.

    :param host: host name
    :param port: port number
    :param family: socket family (``AF_INET``, ...)
    :param type: socket type (``SOCK_STREAM``, ...)
    :param proto: protocol number
    :param flags: flags to pass to upstream ``getaddrinfo()``
    :return: list of tuples containing (family, type, proto, canonname, sockaddr)

    .. seealso:: :func:`socket.getaddrinfo`

    """
    # Handle unicode hostnames
    if isinstance(host, str):
        try:
            encoded_host: bytes | None = host.encode("ascii")
        except UnicodeEncodeError:
            # Non-ASCII host names are encoded per IDNA 2008 (UTS #46)
            import idna

            encoded_host = idna.encode(host, uts46=True)
    else:
        encoded_host = host

    gai_res = await get_async_backend().getaddrinfo(
        encoded_host, port, family=family, type=type, proto=proto, flags=flags
    )
    return [
        (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr))
        for family, type, proto, canonname, sockaddr in gai_res
        # filter out IPv6 results when IPv6 is disabled
        if not isinstance(sockaddr[0], int)
    ]
|
| 601 |
+
|
| 602 |
+
|
| 603 |
+
def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]:
    """
    Look up the host name of an IP address.

    :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4)
    :param flags: flags to pass to upstream ``getnameinfo()``
    :return: a tuple of (host name, service name)

    .. seealso:: :func:`socket.getnameinfo`

    """
    # Returns the backend's awaitable directly instead of awaiting it, so this
    # function does not itself need to be a coroutine
    return get_async_backend().getnameinfo(sockaddr, flags)
|
| 615 |
+
|
| 616 |
+
|
| 617 |
+
@deprecated("This function is deprecated; use `wait_readable` instead")
def wait_socket_readable(sock: socket.socket) -> Awaitable[None]:
    """
    .. deprecated:: 4.7.0
        Use :func:`wait_readable` instead.

    Wait until the given socket has data to be read.

    .. warning:: Only use this on raw sockets that have not been wrapped by any higher
        level constructs like socket streams!

    :param sock: a socket object
    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
        socket to become readable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
        to become readable

    """
    # Delegates to the backend, watching the socket's file descriptor
    return get_async_backend().wait_readable(sock.fileno())
|
| 636 |
+
|
| 637 |
+
|
| 638 |
+
@deprecated("This function is deprecated; use `wait_writable` instead")
def wait_socket_writable(sock: socket.socket) -> Awaitable[None]:
    """
    .. deprecated:: 4.7.0
        Use :func:`wait_writable` instead.

    Wait until the given socket can be written to.

    This does **NOT** work on Windows when using the asyncio backend with a proactor
    event loop (default on py3.8+).

    .. warning:: Only use this on raw sockets that have not been wrapped by any higher
        level constructs like socket streams!

    :param sock: a socket object
    :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the
        socket to become writable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the socket
        to become writable

    """
    # Delegates to the backend, watching the socket's file descriptor
    return get_async_backend().wait_writable(sock.fileno())
|
| 660 |
+
|
| 661 |
+
|
| 662 |
+
def wait_readable(obj: FileDescriptorLike) -> Awaitable[None]:
    """
    Wait until the given object has data to be read.

    On Unix systems, ``obj`` must either be an integer file descriptor, or else an
    object with a ``.fileno()`` method which returns an integer file descriptor. Any
    kind of file descriptor can be passed, though the exact semantics will depend on
    your kernel. For example, this probably won't do anything useful for on-disk files.

    On Windows systems, ``obj`` must either be an integer ``SOCKET`` handle, or else an
    object with a ``.fileno()`` method which returns an integer ``SOCKET`` handle. File
    descriptors aren't supported, and neither are handles that refer to anything besides
    a ``SOCKET``.

    On backends where this functionality is not natively provided (asyncio
    ``ProactorEventLoop`` on Windows), it is provided using a separate selector thread
    which is set to shut down when the interpreter shuts down.

    .. warning:: Don't use this on raw sockets that have been wrapped by any higher
        level constructs like socket streams!

    :param obj: an object with a ``.fileno()`` method or an integer handle
    :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the
        object to become readable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the object
        to become readable

    """
    # Returns the backend's awaitable directly; the object/handle is passed
    # through unchanged for the backend to interpret
    return get_async_backend().wait_readable(obj)
|
| 691 |
+
|
| 692 |
+
|
| 693 |
+
def wait_writable(obj: FileDescriptorLike) -> Awaitable[None]:
    """
    Wait until the given object can be written to.

    :param obj: an object with a ``.fileno()`` method or an integer handle
    :raises ~anyio.ClosedResourceError: if the object was closed while waiting for the
        object to become writable
    :raises ~anyio.BusyResourceError: if another task is already waiting for the object
        to become writable

    .. seealso:: See the documentation of :func:`wait_readable` for the definition of
        ``obj`` and notes on backend compatibility.

    .. warning:: Don't use this on raw sockets that have been wrapped by any higher
        level constructs like socket streams!

    """
    # Returns the backend's awaitable directly; the object/handle is passed
    # through unchanged for the backend to interpret
    return get_async_backend().wait_writable(obj)
|
| 711 |
+
|
| 712 |
+
|
| 713 |
+
def notify_closing(obj: FileDescriptorLike) -> None:
    """
    Call this before closing a file descriptor (on Unix) or socket (on
    Windows). This will cause any `wait_readable` or `wait_writable`
    calls on the given object to immediately wake up and raise
    `~anyio.ClosedResourceError`.

    This doesn't actually close the object – you still have to do that
    yourself afterwards. Also, you want to be careful to make sure no
    new tasks start waiting on the object in between when you call this
    and when it's actually closed. So to close something properly, you
    usually want to do these steps in order:

    1. Explicitly mark the object as closed, so that any new attempts
       to use it will abort before they start.
    2. Call `notify_closing` to wake up any already-existing users.
    3. Actually close the object.

    It's also possible to do them in a different order if that's more
    convenient, *but only if* you make sure not to have any checkpoints in
    between the steps. This way they all happen in a single atomic
    step, so other tasks won't be able to tell what order they happened
    in anyway.

    :param obj: an object with a ``.fileno()`` method or an integer handle

    """
    # Synchronous pass-through to the backend's notification mechanism
    get_async_backend().notify_closing(obj)
|
| 741 |
+
|
| 742 |
+
|
| 743 |
+
#
|
| 744 |
+
# Private API
|
| 745 |
+
#
|
| 746 |
+
|
| 747 |
+
|
| 748 |
+
def convert_ipv6_sockaddr(
    sockaddr: tuple[str, int, int, int] | tuple[str, int],
) -> tuple[str, int]:
    """
    Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format.

    A nonzero scope ID is appended to the address, separated with ``%``;
    otherwise the flow info and scope ID fields are simply dropped. Socket
    addresses that are not 4-tuples are returned unchanged.

    :param sockaddr: the result of :meth:`~socket.socket.getsockname`
    :return: the converted socket address

    """
    # The explicit isinstance/len checks are kept to satisfy MyPy's narrowing
    if not (isinstance(sockaddr, tuple) and len(sockaddr) == 4):
        return sockaddr

    host, port, _flowinfo, scope_id = sockaddr
    if not scope_id:
        return host, port

    # PyPy (as of v7.3.11) leaves the interface name in the result, so keep
    # only the address part and append the numeric scope ID ourselves
    # (https://foss.heptapod.net/pypy/pypy/-/issues/3938)
    address = host.partition("%")[0]
    return f"{address}%{scope_id}", port
|
| 777 |
+
|
| 778 |
+
|
| 779 |
+
async def setup_unix_local_socket(
    path: None | str | bytes | PathLike[Any],
    mode: int | None,
    socktype: int,
) -> socket.socket:
    """
    Create a UNIX local socket object, deleting the socket at the given path if it
    exists.

    Not available on Windows.

    :param path: path of the socket (``None`` for an unbound socket)
    :param mode: permissions to set on the socket
    :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM

    """
    path_str: str | None
    if path is not None:
        path_str = os.fsdecode(path)

        # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call
        if not path_str.startswith("\0"):
            # Copied from pathlib...
            try:
                stat_result = os.stat(path)
            except OSError as e:
                # These errors just mean there is no stale socket file to remove
                if e.errno not in (
                    errno.ENOENT,
                    errno.ENOTDIR,
                    errno.EBADF,
                    errno.ELOOP,
                ):
                    raise
            else:
                # Only remove the existing file if it actually is a socket
                if stat.S_ISSOCK(stat_result.st_mode):
                    os.unlink(path)
    else:
        path_str = None

    raw_socket = socket.socket(socket.AF_UNIX, socktype)
    raw_socket.setblocking(False)

    if path_str is not None:
        try:
            # bind() and chmod() can block on the file system, so run them in a
            # worker thread; close the socket if either step fails
            await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True)
            if mode is not None:
                await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True)
        except BaseException:
            raw_socket.close()
            raise

    return raw_socket
|
| 831 |
+
|
| 832 |
+
|
| 833 |
+
@dataclass
class TCPConnectable(ByteStreamConnectable):
    """
    Connectable that opens TCP connections to a fixed host and port.

    :param host: host name or IP address of the server
    :param port: TCP port number of the server
    """

    host: str | IPv4Address | IPv6Address
    port: int

    def __post_init__(self) -> None:
        # Valid TCP ports are 1..65535
        if not 1 <= self.port <= 65535:
            raise ValueError("TCP port number out of range")

    @override
    async def connect(self) -> SocketStream:
        try:
            return await connect_tcp(self.host, self.port)
        except OSError as exc:
            # Translate OS-level errors into the library's ConnectionFailed
            message = f"error connecting to {self.host}:{self.port}: {exc}"
            raise ConnectionFailed(message) from exc
|
| 857 |
+
|
| 858 |
+
|
| 859 |
+
@dataclass
class UNIXConnectable(ByteStreamConnectable):
    """
    Connects to a UNIX domain socket at the given path.

    :param path: the file system path of the socket
    """

    # File system path of the UNIX domain socket to connect to
    path: str | bytes | PathLike[str] | PathLike[bytes]

    @override
    async def connect(self) -> UNIXSocketStream:
        try:
            return await connect_unix(self.path)
        except OSError as exc:
            # Translate OS-level errors into the library's ConnectionFailed
            raise ConnectionFailed(f"error connecting to {self.path!r}: {exc}") from exc
|
| 875 |
+
|
| 876 |
+
|
| 877 |
+
def as_connectable(
    remote: ByteStreamConnectable
    | tuple[str | IPv4Address | IPv6Address, int]
    | str
    | bytes
    | PathLike[str],
    /,
    *,
    tls: bool = False,
    ssl_context: ssl.SSLContext | None = None,
    tls_hostname: str | None = None,
    tls_standard_compatible: bool = True,
) -> ByteStreamConnectable:
    """
    Coerce the given object into a byte stream connectable.

    A :class:`ByteStreamConnectable` is returned unchanged, a ``(host, port)``
    tuple becomes a TCP connectable, and a string/bytes/path becomes a UNIX
    socket connectable.

    With ``tls=True``, the resulting connectable is wrapped in a
    :class:`~.streams.tls.TLSConnectable`.

    :param remote: a connectable, a tuple of (host, port) or a path to a UNIX socket
    :param tls: if ``True``, wrap the plaintext connectable in a
        :class:`~.streams.tls.TLSConnectable` using the provided TLS settings
    :param ssl_context: if ``tls=True``, the SSLContext object to use (a secure
        default is created if not provided)
    :param tls_hostname: if ``tls=True``, host name of the server used for checking
        the server certificate (defaults to the host portion of the address for TCP
        connectables)
    :param tls_standard_compatible: if ``False`` and ``tls=True``, makes the TLS
        stream skip the closing handshake when closing the connection, so it won't
        raise an exception if the server does the same

    """
    if isinstance(remote, ByteStreamConnectable):
        return remote

    plain: TCPConnectable | UNIXConnectable
    if isinstance(remote, tuple) and len(remote) == 2:
        plain = TCPConnectable(*remote)
    elif isinstance(remote, (str, bytes, PathLike)):
        plain = UNIXConnectable(remote)
    else:
        raise TypeError(f"cannot convert {remote!r} to a connectable")

    if not tls:
        return plain

    # Default the certificate check host name to the TCP target host
    hostname = tls_hostname
    if not hostname and isinstance(plain, TCPConnectable):
        hostname = str(plain.host)

    return TLSConnectable(
        plain,
        ssl_context=ssl_context,
        hostname=hostname,
        standard_compatible=tls_standard_compatible,
    )
|
venv/lib/python3.10/site-packages/anyio/_core/_streams.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
from typing import TypeVar
|
| 5 |
+
from warnings import warn
|
| 6 |
+
|
| 7 |
+
from ..streams.memory import (
|
| 8 |
+
MemoryObjectReceiveStream,
|
| 9 |
+
MemoryObjectSendStream,
|
| 10 |
+
MemoryObjectStreamState,
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
T_Item = TypeVar("T_Item")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class create_memory_object_stream(
    tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]],
):
    """
    Create a memory object stream.

    The stream's item type can be annotated like
    :func:`create_memory_object_stream[T_Item]`.

    :param max_buffer_size: number of items held in the buffer until ``send()`` starts
        blocking
    :param item_type: old way of marking the streams with the right generic type for
        static typing (does nothing on AnyIO 4)

    .. deprecated:: 4.0
        Use ``create_memory_object_stream[YourItemType](...)`` instead.
    :return: a tuple of (send stream, receive stream)

    """

    def __new__(  # type: ignore[misc]
        cls, max_buffer_size: float = 0, item_type: object = None
    ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]:
        # The buffer size must be either math.inf or a (nonnegative) integer
        if not (max_buffer_size == math.inf or isinstance(max_buffer_size, int)):
            raise ValueError("max_buffer_size must be either an integer or math.inf")

        if max_buffer_size < 0:
            raise ValueError("max_buffer_size cannot be negative")

        if item_type is not None:
            warn(
                "The item_type argument has been deprecated in AnyIO 4.0. "
                "Use create_memory_object_stream[YourItemType](...) instead.",
                DeprecationWarning,
                stacklevel=2,
            )

        # Both ends share a single state object
        shared_state = MemoryObjectStreamState[T_Item](max_buffer_size)
        return (
            MemoryObjectSendStream(shared_state),
            MemoryObjectReceiveStream(shared_state),
        )
|
venv/lib/python3.10/site-packages/anyio/_core/_subprocesses.py
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import sys
|
| 4 |
+
from collections.abc import AsyncIterable, Iterable, Mapping, Sequence
|
| 5 |
+
from io import BytesIO
|
| 6 |
+
from os import PathLike
|
| 7 |
+
from subprocess import PIPE, CalledProcessError, CompletedProcess
|
| 8 |
+
from typing import IO, Any, Union, cast
|
| 9 |
+
|
| 10 |
+
from ..abc import Process
|
| 11 |
+
from ._eventloop import get_async_backend
|
| 12 |
+
from ._tasks import create_task_group
|
| 13 |
+
|
| 14 |
+
if sys.version_info >= (3, 10):
|
| 15 |
+
from typing import TypeAlias
|
| 16 |
+
else:
|
| 17 |
+
from typing_extensions import TypeAlias
|
| 18 |
+
|
| 19 |
+
StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
async def run_process(
    command: StrOrBytesPath | Sequence[StrOrBytesPath],
    *,
    input: bytes | None = None,
    stdin: int | IO[Any] | None = None,
    stdout: int | IO[Any] | None = PIPE,
    stderr: int | IO[Any] | None = PIPE,
    check: bool = True,
    cwd: StrOrBytesPath | None = None,
    env: Mapping[str, str] | None = None,
    startupinfo: Any = None,
    creationflags: int = 0,
    start_new_session: bool = False,
    pass_fds: Sequence[int] = (),
    user: str | int | None = None,
    group: str | int | None = None,
    extra_groups: Iterable[str | int] | None = None,
    umask: int = -1,
) -> CompletedProcess[bytes]:
    """
    Run an external command in a subprocess and wait until it completes.

    .. seealso:: :func:`subprocess.run`

    :param command: either a string to pass to the shell, or an iterable of strings
        containing the executable name or path and its arguments
    :param input: bytes passed to the standard input of the subprocess
    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or `None`; ``input`` overrides this
    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or `None`
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        :data:`subprocess.STDOUT`, a file-like object, or `None`
    :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the
        process terminates with a return code other than 0
    :param cwd: If not ``None``, change the working directory to this before running the
        command
    :param env: if not ``None``, this mapping replaces the inherited environment
        variables from the parent process
    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
        to specify process startup parameters (Windows only)
    :param creationflags: flags that can be used to control the creation of the
        subprocess (see :class:`subprocess.Popen` for the specifics)
    :param start_new_session: if ``true`` the setsid() system call will be made in the
        child process prior to the execution of the subprocess. (POSIX only)
    :param pass_fds: sequence of file descriptors to keep open between the parent and
        child processes. (POSIX only)
    :param user: effective user to run the process as (Python >= 3.9, POSIX only)
    :param group: effective group to run the process as (Python >= 3.9, POSIX only)
    :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9,
        POSIX only)
    :param umask: if not negative, this umask is applied in the child process before
        running the given command (Python >= 3.9, POSIX only)
    :return: an object representing the completed process
    :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process
        exits with a nonzero return code

    """

    async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None:
        # Collect the full contents of one output stream into the slot
        # reserved for it (0 = stdout, 1 = stderr)
        buffer = BytesIO()
        async for chunk in stream:
            buffer.write(chunk)

        stream_contents[index] = buffer.getvalue()

    if stdin is not None and input is not None:
        raise ValueError("only one of stdin and input is allowed")

    async with await open_process(
        command,
        # A pipe is required to feed ``input`` to the child
        stdin=PIPE if input else stdin,
        stdout=stdout,
        stderr=stderr,
        cwd=cwd,
        env=env,
        startupinfo=startupinfo,
        creationflags=creationflags,
        start_new_session=start_new_session,
        pass_fds=pass_fds,
        user=user,
        group=group,
        extra_groups=extra_groups,
        umask=umask,
    ) as process:
        stream_contents: list[bytes | None] = [None, None]
        async with create_task_group() as tg:
            # Drain stdout and stderr concurrently to avoid pipe-buffer
            # deadlocks while the process runs
            if process.stdout:
                tg.start_soon(drain_stream, process.stdout, 0)

            if process.stderr:
                tg.start_soon(drain_stream, process.stderr, 1)

            if process.stdin and input:
                await process.stdin.send(input)
                await process.stdin.aclose()

            await process.wait()

    output, errors = stream_contents
    if check and process.returncode != 0:
        raise CalledProcessError(cast(int, process.returncode), command, output, errors)

    return CompletedProcess(command, cast(int, process.returncode), output, errors)
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
async def open_process(
    command: StrOrBytesPath | Sequence[StrOrBytesPath],
    *,
    stdin: int | IO[Any] | None = PIPE,
    stdout: int | IO[Any] | None = PIPE,
    stderr: int | IO[Any] | None = PIPE,
    cwd: StrOrBytesPath | None = None,
    env: Mapping[str, str] | None = None,
    startupinfo: Any = None,
    creationflags: int = 0,
    start_new_session: bool = False,
    pass_fds: Sequence[int] = (),
    user: str | int | None = None,
    group: str | int | None = None,
    extra_groups: Iterable[str | int] | None = None,
    umask: int = -1,
) -> Process:
    """
    Start an external command in a subprocess.

    .. seealso:: :class:`subprocess.Popen`

    :param command: either a string to pass to the shell, or an iterable of strings
        containing the executable name or path and its arguments
    :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a
        file-like object, or ``None``
    :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        a file-like object, or ``None``
    :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`,
        :data:`subprocess.STDOUT`, a file-like object, or ``None``
    :param cwd: If not ``None``, the working directory is changed before executing
    :param env: If env is not ``None``, it must be a mapping that defines the
        environment variables for the new process
    :param creationflags: flags that can be used to control the creation of the
        subprocess (see :class:`subprocess.Popen` for the specifics)
    :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used
        to specify process startup parameters (Windows only)
    :param start_new_session: if ``true`` the setsid() system call will be made in the
        child process prior to the execution of the subprocess. (POSIX only)
    :param pass_fds: sequence of file descriptors to keep open between the parent and
        child processes. (POSIX only)
    :param user: effective user to run the process as (POSIX only)
    :param group: effective group to run the process as (POSIX only)
    :param extra_groups: supplementary groups to set in the subprocess (POSIX only)
    :param umask: if not negative, this umask is applied in the child process before
        running the given command (POSIX only)
    :return: an asynchronous process object

    """
    # Only forward the POSIX-only / version-gated options that were actually
    # given, so backends running on platforms without them never see the
    # keyword at all.
    kwargs: dict[str, Any] = {}
    if user is not None:
        kwargs["user"] = user

    if group is not None:
        kwargs["group"] = group

    if extra_groups is not None:
        # BUG FIX: previously assigned ``group`` here, so the caller's
        # ``extra_groups`` value was silently replaced by the effective group.
        kwargs["extra_groups"] = extra_groups

    if umask >= 0:
        kwargs["umask"] = umask

    return await get_async_backend().open_process(
        command,
        stdin=stdin,
        stdout=stdout,
        stderr=stderr,
        cwd=cwd,
        env=env,
        startupinfo=startupinfo,
        creationflags=creationflags,
        start_new_session=start_new_session,
        pass_fds=pass_fds,
        **kwargs,
    )
|
venv/lib/python3.10/site-packages/anyio/_core/_synchronization.py
ADDED
|
@@ -0,0 +1,732 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
from collections import deque
|
| 5 |
+
from dataclasses import dataclass
|
| 6 |
+
from types import TracebackType
|
| 7 |
+
|
| 8 |
+
from sniffio import AsyncLibraryNotFoundError
|
| 9 |
+
|
| 10 |
+
from ..lowlevel import checkpoint
|
| 11 |
+
from ._eventloop import get_async_backend
|
| 12 |
+
from ._exceptions import BusyResourceError
|
| 13 |
+
from ._tasks import CancelScope
|
| 14 |
+
from ._testing import TaskInfo, get_current_task
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
@dataclass(frozen=True)
class EventStatistics:
    """
    Snapshot of an :class:`Event`'s current state, as returned by
    :meth:`Event.statistics`.

    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait`
    """

    # Number of tasks currently blocked in Event.wait()
    tasks_waiting: int
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
@dataclass(frozen=True)
class CapacityLimiterStatistics:
    """
    Snapshot of a :class:`CapacityLimiter`'s current state, as returned by
    :meth:`CapacityLimiter.statistics`.

    :ivar int borrowed_tokens: number of tokens currently borrowed by tasks
    :ivar float total_tokens: total number of available tokens
    :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from
        this limiter
    :ivar int tasks_waiting: number of tasks waiting on
        :meth:`~.CapacityLimiter.acquire` or
        :meth:`~.CapacityLimiter.acquire_on_behalf_of`
    """

    # Tokens currently checked out
    borrowed_tokens: int
    # Total capacity (float so that math.inf can express "unbounded")
    total_tokens: float
    # The entities (usually tasks) holding tokens right now
    borrowers: tuple[object, ...]
    # Tasks blocked waiting for a token to become available
    tasks_waiting: int
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@dataclass(frozen=True)
class LockStatistics:
    """
    Snapshot of a :class:`Lock`'s current state, as returned by
    :meth:`Lock.statistics`.

    :ivar bool locked: flag indicating if this lock is locked or not
    :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the
        lock is not held by any task)
    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire`
    """

    # True while some task holds the lock
    locked: bool
    # Holder of the lock, or None when unlocked
    owner: TaskInfo | None
    # Tasks blocked in Lock.acquire()
    tasks_waiting: int
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@dataclass(frozen=True)
class ConditionStatistics:
    """
    Snapshot of a :class:`Condition`'s current state, as returned by
    :meth:`Condition.statistics`.

    :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait`
    :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying
        :class:`~.Lock`
    """

    # Tasks blocked in Condition.wait()
    tasks_waiting: int
    # State of the lock that guards this condition
    lock_statistics: LockStatistics
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
@dataclass(frozen=True)
class SemaphoreStatistics:
    """
    Snapshot of a :class:`Semaphore`'s current state, as returned by
    :meth:`Semaphore.statistics`.

    :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire`

    """

    # Tasks blocked in Semaphore.acquire()
    tasks_waiting: int
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class Event:
    """
    Backend-agnostic event facade.

    ``Event()`` never returns an instance of this class: ``__new__`` delegates
    to the currently running async backend's native event implementation, or
    to :class:`EventAdapter` when no backend is running yet.  The methods here
    therefore only document the contract and raise :exc:`NotImplementedError`.
    """

    def __new__(cls) -> Event:
        try:
            # Dispatch to the running backend's native event class
            return get_async_backend().create_event()
        except AsyncLibraryNotFoundError:
            # No event loop running yet; fall back to the lazy adapter
            return EventAdapter()

    def set(self) -> None:
        """Set the flag, notifying all listeners."""
        raise NotImplementedError

    def is_set(self) -> bool:
        """Return ``True`` if the flag is set, ``False`` if not."""
        raise NotImplementedError

    async def wait(self) -> None:
        """
        Wait until the flag has been set.

        If the flag has already been set when this method is called, it returns
        immediately.

        """
        raise NotImplementedError

    def statistics(self) -> EventStatistics:
        """Return statistics about the current state of this event."""
        raise NotImplementedError
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class EventAdapter(Event):
    """
    Stand-in event used when no async backend is running at construction time.

    The real backend event is created lazily on first use; a ``set()`` issued
    before that point is remembered and replayed onto the backend event.
    """

    _internal_event: Event | None = None
    _is_set: bool = False

    def __new__(cls) -> EventAdapter:
        # Skip Event.__new__ so no backend lookup is attempted here.
        return object.__new__(cls)

    @property
    def _event(self) -> Event:
        # Create the backend event on first access, replaying any earlier set().
        if (backend_event := self._internal_event) is None:
            backend_event = self._internal_event = get_async_backend().create_event()
            if self._is_set:
                backend_event.set()

        return backend_event

    def set(self) -> None:
        if self._internal_event is not None:
            self._event.set()
        else:
            # Backend event not created yet; just record the flag.
            self._is_set = True

    def is_set(self) -> bool:
        if self._internal_event is not None:
            return self._internal_event.is_set()

        return self._is_set

    async def wait(self) -> None:
        # Awaiting implies a backend is running, so _event can be materialized.
        await self._event.wait()

    def statistics(self) -> EventStatistics:
        if self._internal_event is not None:
            return self._internal_event.statistics()

        # No backend event yet means nothing can be waiting on it.
        return EventStatistics(tasks_waiting=0)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
class Lock:
    """
    Backend-agnostic mutex facade.

    ``Lock()`` never returns an instance of this class: ``__new__`` delegates
    to the running async backend's native lock, or to :class:`LockAdapter`
    when no backend is running yet.  The methods here only document the
    contract and raise :exc:`NotImplementedError`.
    """

    def __new__(cls, *, fast_acquire: bool = False) -> Lock:
        try:
            # Dispatch to the running backend's native lock class
            return get_async_backend().create_lock(fast_acquire=fast_acquire)
        except AsyncLibraryNotFoundError:
            # No event loop running yet; fall back to the lazy adapter
            return LockAdapter(fast_acquire=fast_acquire)

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    async def acquire(self) -> None:
        """Acquire the lock."""
        raise NotImplementedError

    def acquire_nowait(self) -> None:
        """
        Acquire the lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        raise NotImplementedError

    def release(self) -> None:
        """Release the lock."""
        raise NotImplementedError

    def locked(self) -> bool:
        """Return True if the lock is currently held."""
        raise NotImplementedError

    def statistics(self) -> LockStatistics:
        """
        Return statistics about the current state of this lock.

        .. versionadded:: 3.0
        """
        raise NotImplementedError
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
class LockAdapter(Lock):
    """
    Stand-in lock used when no async backend is running at construction time.

    The real backend lock is created lazily the first time any operation
    needs it.
    """

    _internal_lock: Lock | None = None

    def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter:
        # Skip Lock.__new__ so no backend lookup is attempted here.
        return object.__new__(cls)

    def __init__(self, *, fast_acquire: bool = False):
        self._fast_acquire = fast_acquire

    @property
    def _lock(self) -> Lock:
        # Create the backend lock on first access.
        if (backend_lock := self._internal_lock) is None:
            backend_lock = self._internal_lock = get_async_backend().create_lock(
                fast_acquire=self._fast_acquire
            )

        return backend_lock

    async def __aenter__(self) -> None:
        await self._lock.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        # Only release if the backend lock was ever materialized.
        if self._internal_lock is not None:
            self._internal_lock.release()

    async def acquire(self) -> None:
        """Acquire the lock."""
        await self._lock.acquire()

    def acquire_nowait(self) -> None:
        """
        Acquire the lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        self._lock.acquire_nowait()

    def release(self) -> None:
        """Release the lock."""
        self._lock.release()

    def locked(self) -> bool:
        """Return True if the lock is currently held."""
        return self._lock.locked()

    def statistics(self) -> LockStatistics:
        """
        Return statistics about the current state of this lock.

        .. versionadded:: 3.0

        """
        if self._internal_lock is not None:
            return self._internal_lock.statistics()

        # Backend lock never created: definitely unlocked with no waiters.
        return LockStatistics(False, None, 0)
|
| 258 |
+
|
| 259 |
+
|
| 260 |
+
class Condition:
    """
    A condition variable built on top of a :class:`Lock` and per-waiter
    :class:`Event` objects.

    The underlying lock must be held when calling :meth:`notify`,
    :meth:`notify_all` or :meth:`wait` (enforced via ``_owner_task``).
    """

    # Task currently holding the underlying lock through this condition
    _owner_task: TaskInfo | None = None

    def __init__(self, lock: Lock | None = None):
        # Use the caller-supplied lock, or create a private one
        self._lock = lock or Lock()
        # One Event per task currently blocked in wait()
        self._waiters: deque[Event] = deque()

    async def __aenter__(self) -> None:
        await self.acquire()

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    def _check_acquired(self) -> None:
        # Guard: notify()/notify_all() are only valid while holding the lock
        if self._owner_task != get_current_task():
            raise RuntimeError("The current task is not holding the underlying lock")

    async def acquire(self) -> None:
        """Acquire the underlying lock."""
        await self._lock.acquire()
        self._owner_task = get_current_task()

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        self._lock.acquire_nowait()
        self._owner_task = get_current_task()

    def release(self) -> None:
        """Release the underlying lock."""
        self._lock.release()

    def locked(self) -> bool:
        """Return True if the lock is set."""
        return self._lock.locked()

    def notify(self, n: int = 1) -> None:
        """Notify exactly n listeners."""
        self._check_acquired()
        for _ in range(n):
            try:
                # FIFO: wake the longest-waiting task first
                event = self._waiters.popleft()
            except IndexError:
                # Fewer than n waiters; nothing more to wake
                break

            event.set()

    def notify_all(self) -> None:
        """Notify all the listeners."""
        self._check_acquired()
        for event in self._waiters:
            event.set()

        self._waiters.clear()

    async def wait(self) -> None:
        """Wait for a notification."""
        # Explicit checkpoint so a pre-set notification still yields control
        await checkpoint()
        event = Event()
        self._waiters.append(event)
        # Release the lock while waiting; it is reacquired in the finally block
        self.release()
        try:
            await event.wait()
        except BaseException:
            # Cancelled (or otherwise interrupted) before being notified:
            # withdraw our waiter unless a notification already consumed it
            if not event.is_set():
                self._waiters.remove(event)

            raise
        finally:
            # Shielded so reacquiring the lock cannot itself be cancelled,
            # preserving the invariant that wait() returns with the lock held
            with CancelScope(shield=True):
                await self.acquire()

    def statistics(self) -> ConditionStatistics:
        """
        Return statistics about the current state of this condition.

        .. versionadded:: 3.0
        """
        return ConditionStatistics(len(self._waiters), self._lock.statistics())
|
| 348 |
+
|
| 349 |
+
|
| 350 |
+
class Semaphore:
    """
    Backend-agnostic semaphore facade.

    ``Semaphore(...)`` never returns an instance of this class: ``__new__``
    delegates to the running async backend's native semaphore, or to
    :class:`SemaphoreAdapter` when no backend is running yet.  ``__init__``
    only validates the arguments; the abstract methods raise
    :exc:`NotImplementedError`.
    """

    def __new__(
        cls,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> Semaphore:
        try:
            # Dispatch to the running backend's native semaphore class
            return get_async_backend().create_semaphore(
                initial_value, max_value=max_value, fast_acquire=fast_acquire
            )
        except AsyncLibraryNotFoundError:
            # No event loop running yet; fall back to the lazy adapter
            return SemaphoreAdapter(initial_value, max_value=max_value)

    def __init__(
        self,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ):
        # Argument validation only; type errors take precedence over
        # value errors, matching the order of the checks below.
        if not isinstance(initial_value, int):
            raise TypeError("initial_value must be an integer")
        if initial_value < 0:
            raise ValueError("initial_value must be >= 0")
        if max_value is not None:
            if not isinstance(max_value, int):
                raise TypeError("max_value must be an integer or None")
            if max_value < initial_value:
                raise ValueError(
                    "max_value must be equal to or higher than initial_value"
                )

        self._fast_acquire = fast_acquire

    async def __aenter__(self) -> Semaphore:
        await self.acquire()
        return self

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self.release()

    async def acquire(self) -> None:
        """Decrement the semaphore value, blocking if necessary."""
        raise NotImplementedError

    def acquire_nowait(self) -> None:
        """
        Acquire the underlying lock, without blocking.

        :raises ~anyio.WouldBlock: if the operation would block

        """
        raise NotImplementedError

    def release(self) -> None:
        """Increment the semaphore value."""
        raise NotImplementedError

    @property
    def value(self) -> int:
        """The current value of the semaphore."""
        raise NotImplementedError

    @property
    def max_value(self) -> int | None:
        """The maximum value of the semaphore."""
        raise NotImplementedError

    def statistics(self) -> SemaphoreStatistics:
        """
        Return statistics about the current state of this semaphore.

        .. versionadded:: 3.0
        """
        raise NotImplementedError
|
| 432 |
+
|
| 433 |
+
|
| 434 |
+
class SemaphoreAdapter(Semaphore):
    """
    Stand-in semaphore used when no async backend is running at construction
    time.  The real backend semaphore is created lazily on first use.
    """

    _internal_semaphore: Semaphore | None = None

    def __new__(
        cls,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> SemaphoreAdapter:
        # Skip Semaphore.__new__ so no backend lookup is attempted here.
        return object.__new__(cls)

    def __init__(
        self,
        initial_value: int,
        *,
        max_value: int | None = None,
        fast_acquire: bool = False,
    ) -> None:
        # Reuse the base class's argument validation
        super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire)
        self._initial_value = initial_value
        self._max_value = max_value

    @property
    def _semaphore(self) -> Semaphore:
        # Create the backend semaphore on first access.
        # NOTE(review): self._fast_acquire (stored by Semaphore.__init__) is
        # not forwarded to create_semaphore here — confirm this is intentional.
        if self._internal_semaphore is None:
            self._internal_semaphore = get_async_backend().create_semaphore(
                self._initial_value, max_value=self._max_value
            )

        return self._internal_semaphore

    async def acquire(self) -> None:
        await self._semaphore.acquire()

    def acquire_nowait(self) -> None:
        self._semaphore.acquire_nowait()

    def release(self) -> None:
        self._semaphore.release()

    @property
    def value(self) -> int:
        # Before the backend semaphore exists, nothing can have acquired it
        if self._internal_semaphore is None:
            return self._initial_value

        return self._semaphore.value

    @property
    def max_value(self) -> int | None:
        return self._max_value

    def statistics(self) -> SemaphoreStatistics:
        if self._internal_semaphore is None:
            return SemaphoreStatistics(tasks_waiting=0)

        return self._semaphore.statistics()
|
| 491 |
+
|
| 492 |
+
|
| 493 |
+
class CapacityLimiter:
    """
    Backend-agnostic capacity limiter facade.

    ``CapacityLimiter(...)`` never returns an instance of this class:
    ``__new__`` delegates to the running async backend's native limiter, or
    to :class:`CapacityLimiterAdapter` when no backend is running yet.  The
    methods here only document the contract and raise
    :exc:`NotImplementedError`.
    """

    def __new__(cls, total_tokens: float) -> CapacityLimiter:
        try:
            # Dispatch to the running backend's native limiter class
            return get_async_backend().create_capacity_limiter(total_tokens)
        except AsyncLibraryNotFoundError:
            # No event loop running yet; fall back to the lazy adapter
            return CapacityLimiterAdapter(total_tokens)

    async def __aenter__(self) -> None:
        raise NotImplementedError

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        raise NotImplementedError

    @property
    def total_tokens(self) -> float:
        """
        The total number of tokens available for borrowing.

        This is a read-write property. If the total number of tokens is increased, the
        proportionate number of tasks waiting on this limiter will be granted their
        tokens.

        .. versionchanged:: 3.0
            The property is now writable.

        """
        raise NotImplementedError

    @total_tokens.setter
    def total_tokens(self, value: float) -> None:
        raise NotImplementedError

    @property
    def borrowed_tokens(self) -> int:
        """The number of tokens that have currently been borrowed."""
        raise NotImplementedError

    @property
    def available_tokens(self) -> float:
        """The number of tokens currently available to be borrowed"""
        raise NotImplementedError

    def acquire_nowait(self) -> None:
        """
        Acquire a token for the current task without waiting for one to become
        available.

        :raises ~anyio.WouldBlock: if there are no tokens available for borrowing

        """
        raise NotImplementedError

    def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
        """
        Acquire a token without waiting for one to become available.

        :param borrower: the entity borrowing a token
        :raises ~anyio.WouldBlock: if there are no tokens available for borrowing

        """
        raise NotImplementedError

    async def acquire(self) -> None:
        """
        Acquire a token for the current task, waiting if necessary for one to become
        available.

        """
        raise NotImplementedError

    async def acquire_on_behalf_of(self, borrower: object) -> None:
        """
        Acquire a token, waiting if necessary for one to become available.

        :param borrower: the entity borrowing a token

        """
        raise NotImplementedError

    def release(self) -> None:
        """
        Release the token held by the current task.

        :raises RuntimeError: if the current task has not borrowed a token from this
            limiter.

        """
        raise NotImplementedError

    def release_on_behalf_of(self, borrower: object) -> None:
        """
        Release the token held by the given borrower.

        :raises RuntimeError: if the borrower has not borrowed a token from this
            limiter.

        """
        raise NotImplementedError

    def statistics(self) -> CapacityLimiterStatistics:
        """
        Return statistics about the current state of this limiter.

        .. versionadded:: 3.0

        """
        raise NotImplementedError
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
class CapacityLimiterAdapter(CapacityLimiter):
|
| 608 |
+
_internal_limiter: CapacityLimiter | None = None
|
| 609 |
+
|
| 610 |
+
def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter:
|
| 611 |
+
return object.__new__(cls)
|
| 612 |
+
|
| 613 |
+
def __init__(self, total_tokens: float) -> None:
|
| 614 |
+
self.total_tokens = total_tokens
|
| 615 |
+
|
| 616 |
+
@property
|
| 617 |
+
def _limiter(self) -> CapacityLimiter:
|
| 618 |
+
if self._internal_limiter is None:
|
| 619 |
+
self._internal_limiter = get_async_backend().create_capacity_limiter(
|
| 620 |
+
self._total_tokens
|
| 621 |
+
)
|
| 622 |
+
|
| 623 |
+
return self._internal_limiter
|
| 624 |
+
|
| 625 |
+
async def __aenter__(self) -> None:
|
| 626 |
+
await self._limiter.__aenter__()
|
| 627 |
+
|
| 628 |
+
async def __aexit__(
|
| 629 |
+
self,
|
| 630 |
+
exc_type: type[BaseException] | None,
|
| 631 |
+
exc_val: BaseException | None,
|
| 632 |
+
exc_tb: TracebackType | None,
|
| 633 |
+
) -> None:
|
| 634 |
+
return await self._limiter.__aexit__(exc_type, exc_val, exc_tb)
|
| 635 |
+
|
| 636 |
+
@property
|
| 637 |
+
def total_tokens(self) -> float:
|
| 638 |
+
if self._internal_limiter is None:
|
| 639 |
+
return self._total_tokens
|
| 640 |
+
|
| 641 |
+
return self._internal_limiter.total_tokens
|
| 642 |
+
|
| 643 |
+
@total_tokens.setter
|
| 644 |
+
def total_tokens(self, value: float) -> None:
|
| 645 |
+
if not isinstance(value, int) and value is not math.inf:
|
| 646 |
+
raise TypeError("total_tokens must be an int or math.inf")
|
| 647 |
+
elif value < 1:
|
| 648 |
+
raise ValueError("total_tokens must be >= 1")
|
| 649 |
+
|
| 650 |
+
if self._internal_limiter is None:
|
| 651 |
+
self._total_tokens = value
|
| 652 |
+
return
|
| 653 |
+
|
| 654 |
+
self._limiter.total_tokens = value
|
| 655 |
+
|
| 656 |
+
@property
|
| 657 |
+
def borrowed_tokens(self) -> int:
|
| 658 |
+
if self._internal_limiter is None:
|
| 659 |
+
return 0
|
| 660 |
+
|
| 661 |
+
return self._internal_limiter.borrowed_tokens
|
| 662 |
+
|
| 663 |
+
@property
|
| 664 |
+
def available_tokens(self) -> float:
|
| 665 |
+
if self._internal_limiter is None:
|
| 666 |
+
return self._total_tokens
|
| 667 |
+
|
| 668 |
+
return self._internal_limiter.available_tokens
|
| 669 |
+
|
| 670 |
+
def acquire_nowait(self) -> None:
|
| 671 |
+
self._limiter.acquire_nowait()
|
| 672 |
+
|
| 673 |
+
def acquire_on_behalf_of_nowait(self, borrower: object) -> None:
|
| 674 |
+
self._limiter.acquire_on_behalf_of_nowait(borrower)
|
| 675 |
+
|
| 676 |
+
async def acquire(self) -> None:
|
| 677 |
+
await self._limiter.acquire()
|
| 678 |
+
|
| 679 |
+
async def acquire_on_behalf_of(self, borrower: object) -> None:
|
| 680 |
+
await self._limiter.acquire_on_behalf_of(borrower)
|
| 681 |
+
|
| 682 |
+
    def release(self) -> None:
        # Forward to the backing limiter (created lazily via ``_limiter``).
        self._limiter.release()
|
| 684 |
+
|
| 685 |
+
    def release_on_behalf_of(self, borrower: object) -> None:
        # Forward to the backing limiter (created lazily via ``_limiter``).
        self._limiter.release_on_behalf_of(borrower)
|
| 687 |
+
|
| 688 |
+
    def statistics(self) -> CapacityLimiterStatistics:
        # Without a backend limiter, no tokens are borrowed and no tasks are
        # waiting, so synthesize an empty statistics object.
        if self._internal_limiter is None:
            return CapacityLimiterStatistics(
                borrowed_tokens=0,
                total_tokens=self.total_tokens,
                borrowers=(),
                tasks_waiting=0,
            )

        return self._internal_limiter.statistics()
|
| 698 |
+
|
| 699 |
+
|
| 700 |
+
class ResourceGuard:
    """
    Ensures that a resource is used by no more than one task at a time.

    Entering this context manager while a previous entry has not yet exited
    raises :exc:`BusyResourceError`.

    :param action: the action to guard against (visible in the
        :exc:`BusyResourceError` when triggered, e.g. "Another task is already
        {action} this resource")

    .. versionadded:: 4.1
    """

    __slots__ = "action", "_guarded"

    def __init__(self, action: str = "using"):
        self.action: str = action
        # True while some task is inside the guarded section
        self._guarded = False

    def __enter__(self) -> None:
        if not self._guarded:
            self._guarded = True
            return

        raise BusyResourceError(self.action)

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        self._guarded = False
|
venv/lib/python3.10/site-packages/anyio/_core/_tasks.py
ADDED
|
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import math
|
| 4 |
+
from collections.abc import Generator
|
| 5 |
+
from contextlib import contextmanager
|
| 6 |
+
from types import TracebackType
|
| 7 |
+
|
| 8 |
+
from ..abc._tasks import TaskGroup, TaskStatus
|
| 9 |
+
from ._eventloop import get_async_backend
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class _IgnoredTaskStatus(TaskStatus[object]):
    # A no-op TaskStatus: calls to started() are silently discarded.  Used
    # when the caller does not care about the startup notification.
    def started(self, value: object = None) -> None:
        pass
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
# Shared sentinel passed as the default ``task_status`` argument when the
# caller does not supply one.
TASK_STATUS_IGNORED = _IgnoredTaskStatus()
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class CancelScope:
    """
    Wraps a unit of work that can be made separately cancellable.

    :param deadline: The time (clock value) when this scope is cancelled automatically
    :param shield: ``True`` to shield the cancel scope from external cancellation
    """

    def __new__(
        cls, *, deadline: float = math.inf, shield: bool = False
    ) -> CancelScope:
        # Instantiation is delegated to the active async backend, which
        # returns a backend-specific CancelScope implementation; the methods
        # below are therefore abstract stubs that are never called directly.
        return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline)

    def cancel(self) -> None:
        """Cancel this scope immediately."""
        raise NotImplementedError

    @property
    def deadline(self) -> float:
        """
        The time (clock value) when this scope is cancelled automatically.

        Will be ``float('inf')`` if no timeout has been set.

        """
        raise NotImplementedError

    @deadline.setter
    def deadline(self, value: float) -> None:
        raise NotImplementedError

    @property
    def cancel_called(self) -> bool:
        """``True`` if :meth:`cancel` has been called."""
        raise NotImplementedError

    @property
    def cancelled_caught(self) -> bool:
        """
        ``True`` if this scope suppressed a cancellation exception it itself raised.

        This is typically used to check if any work was interrupted, or to see if the
        scope was cancelled due to its deadline being reached. The value will, however,
        only be ``True`` if the cancellation was triggered by the scope itself (and not
        an outer scope).

        """
        raise NotImplementedError

    @property
    def shield(self) -> bool:
        """
        ``True`` if this scope is shielded from external cancellation.

        While a scope is shielded, it will not receive cancellations from outside.

        """
        raise NotImplementedError

    @shield.setter
    def shield(self, value: bool) -> None:
        raise NotImplementedError

    def __enter__(self) -> CancelScope:
        raise NotImplementedError

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> bool:
        raise NotImplementedError
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
@contextmanager
def fail_after(
    delay: float | None, shield: bool = False
) -> Generator[CancelScope, None, None]:
    """
    Create a context manager which raises a :class:`TimeoutError` if does not finish in
    time.

    :param delay: maximum allowed time (in seconds) before raising the exception, or
        ``None`` to disable the timeout
    :param shield: ``True`` to shield the cancel scope from external cancellation
    :return: a context manager that yields a cancel scope
    :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\]

    """
    clock = get_async_backend().current_time
    if delay is None:
        deadline = math.inf
    else:
        deadline = clock() + delay

    with get_async_backend().create_cancel_scope(
        deadline=deadline, shield=shield
    ) as scope:
        yield scope

    # Only raise if the scope itself caught a cancellation AND the deadline
    # has actually passed (i.e. the timeout fired, not an explicit cancel()).
    if scope.cancelled_caught and clock() >= scope.deadline:
        raise TimeoutError
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
def move_on_after(delay: float | None, shield: bool = False) -> CancelScope:
    """
    Create a cancel scope with a deadline that expires after the given delay.

    :param delay: maximum allowed time (in seconds) before exiting the context block, or
        ``None`` to disable the timeout
    :param shield: ``True`` to shield the cancel scope from external cancellation
    :return: a cancel scope

    """
    if delay is None:
        deadline = math.inf
    else:
        deadline = get_async_backend().current_time() + delay

    return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield)
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
def current_effective_deadline() -> float:
    """
    Return the nearest deadline among all the cancel scopes effective for the current
    task.

    :return: a clock value from the event loop's internal clock (or ``float('inf')`` if
        there is no deadline in effect, or ``float('-inf')`` if the current scope has
        been cancelled)
    :rtype: float

    """
    # Delegated entirely to the active async backend.
    return get_async_backend().current_effective_deadline()
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
def create_task_group() -> TaskGroup:
    """
    Create a task group.

    :return: a task group

    """
    # Delegated entirely to the active async backend.
    return get_async_backend().create_task_group()
|
venv/lib/python3.10/site-packages/anyio/_core/_tempfile.py
ADDED
|
@@ -0,0 +1,616 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
import tempfile
|
| 6 |
+
from collections.abc import Iterable
|
| 7 |
+
from io import BytesIO, TextIOWrapper
|
| 8 |
+
from types import TracebackType
|
| 9 |
+
from typing import (
|
| 10 |
+
TYPE_CHECKING,
|
| 11 |
+
Any,
|
| 12 |
+
AnyStr,
|
| 13 |
+
Generic,
|
| 14 |
+
overload,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
from .. import to_thread
|
| 18 |
+
from .._core._fileio import AsyncFile
|
| 19 |
+
from ..lowlevel import checkpoint_if_cancelled
|
| 20 |
+
|
| 21 |
+
if TYPE_CHECKING:
|
| 22 |
+
from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class TemporaryFile(Generic[AnyStr]):
    """
    An asynchronous temporary file that is automatically created and cleaned up.

    This class provides an asynchronous context manager interface to a temporary file.
    The file is created using Python's standard `tempfile.TemporaryFile` function in a
    background thread, and is wrapped as an asynchronous file using `AsyncFile`.

    :param mode: The mode in which the file is opened. Defaults to "w+b".
    :param buffering: The buffering policy (-1 means the default buffering).
    :param encoding: The encoding used to decode or encode the file. Only applicable in
        text mode.
    :param newline: Controls how universal newlines mode works (only applicable in text
        mode).
    :param suffix: The suffix for the temporary file name.
    :param prefix: The prefix for the temporary file name.
    :param dir: The directory in which the temporary file is created.
    :param errors: The error handling scheme used for encoding/decoding errors.
    """

    # Set in __aenter__; wraps the underlying synchronous file object.
    _async_file: AsyncFile[AnyStr]

    @overload
    def __init__(
        self: TemporaryFile[bytes],
        mode: OpenBinaryMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        newline: str | None = ...,
        suffix: str | None = ...,
        prefix: str | None = ...,
        dir: str | None = ...,
        *,
        errors: str | None = ...,
    ): ...
    @overload
    def __init__(
        self: TemporaryFile[str],
        mode: OpenTextMode,
        buffering: int = ...,
        encoding: str | None = ...,
        newline: str | None = ...,
        suffix: str | None = ...,
        prefix: str | None = ...,
        dir: str | None = ...,
        *,
        errors: str | None = ...,
    ): ...

    def __init__(
        self,
        mode: OpenTextMode | OpenBinaryMode = "w+b",
        buffering: int = -1,
        encoding: str | None = None,
        newline: str | None = None,
        suffix: str | None = None,
        prefix: str | None = None,
        dir: str | None = None,
        *,
        errors: str | None = None,
    ) -> None:
        # Remember the arguments; the file itself is only opened (in a worker
        # thread) when the context manager is entered.
        self.mode = mode
        self.buffering = buffering
        self.encoding = encoding
        self.newline = newline
        self.suffix: str | None = suffix
        self.prefix: str | None = prefix
        self.dir: str | None = dir
        self.errors = errors

    async def __aenter__(self) -> AsyncFile[AnyStr]:
        def open_file() -> Any:
            # Runs in a worker thread; file creation can block on disk I/O.
            return tempfile.TemporaryFile(
                self.mode,
                self.buffering,
                self.encoding,
                self.newline,
                self.suffix,
                self.prefix,
                self.dir,
                errors=self.errors,
            )

        raw_file = await to_thread.run_sync(open_file)
        self._async_file = AsyncFile(raw_file)
        return self._async_file

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Closing also deletes the file, since TemporaryFile is anonymous.
        await self._async_file.aclose()
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class NamedTemporaryFile(Generic[AnyStr]):
    """
    An asynchronous named temporary file that is automatically created and cleaned up.

    This class provides an asynchronous context manager for a temporary file with a
    visible name in the file system. It uses Python's standard
    :func:`~tempfile.NamedTemporaryFile` function and wraps the file object with
    :class:`AsyncFile` for asynchronous operations.

    :param mode: The mode in which the file is opened. Defaults to "w+b".
    :param buffering: The buffering policy (-1 means the default buffering).
    :param encoding: The encoding used to decode or encode the file. Only applicable in
        text mode.
    :param newline: Controls how universal newlines mode works (only applicable in text
        mode).
    :param suffix: The suffix for the temporary file name.
    :param prefix: The prefix for the temporary file name.
    :param dir: The directory in which the temporary file is created.
    :param delete: Whether to delete the file when it is closed.
    :param errors: The error handling scheme used for encoding/decoding errors.
    :param delete_on_close: (Python 3.12+) Whether to delete the file on close.
    """

    # Set in __aenter__; wraps the underlying synchronous file object.
    _async_file: AsyncFile[AnyStr]

    @overload
    def __init__(
        self: NamedTemporaryFile[bytes],
        mode: OpenBinaryMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        newline: str | None = ...,
        suffix: str | None = ...,
        prefix: str | None = ...,
        dir: str | None = ...,
        delete: bool = ...,
        *,
        errors: str | None = ...,
        delete_on_close: bool = ...,
    ): ...
    @overload
    def __init__(
        self: NamedTemporaryFile[str],
        mode: OpenTextMode,
        buffering: int = ...,
        encoding: str | None = ...,
        newline: str | None = ...,
        suffix: str | None = ...,
        prefix: str | None = ...,
        dir: str | None = ...,
        delete: bool = ...,
        *,
        errors: str | None = ...,
        delete_on_close: bool = ...,
    ): ...

    def __init__(
        self,
        mode: OpenBinaryMode | OpenTextMode = "w+b",
        buffering: int = -1,
        encoding: str | None = None,
        newline: str | None = None,
        suffix: str | None = None,
        prefix: str | None = None,
        dir: str | None = None,
        delete: bool = True,
        *,
        errors: str | None = None,
        delete_on_close: bool = True,
    ) -> None:
        # Collect the keyword arguments for tempfile.NamedTemporaryFile() so
        # they can be applied in a worker thread in __aenter__().
        self._params: dict[str, Any] = {
            "mode": mode,
            "buffering": buffering,
            "encoding": encoding,
            "newline": newline,
            "suffix": suffix,
            "prefix": prefix,
            "dir": dir,
            "delete": delete,
            "errors": errors,
        }
        # The delete_on_close parameter only exists on Python 3.12+, so only
        # forward it there; on older versions it is silently ignored.
        if sys.version_info >= (3, 12):
            self._params["delete_on_close"] = delete_on_close

    async def __aenter__(self) -> AsyncFile[AnyStr]:
        # File creation can block on disk I/O, so do it in a worker thread.
        fp = await to_thread.run_sync(
            lambda: tempfile.NamedTemporaryFile(**self._params)
        )
        self._async_file = AsyncFile(fp)
        return self._async_file

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        # Closing deletes the file when delete=True (the default).
        await self._async_file.aclose()
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class SpooledTemporaryFile(AsyncFile[AnyStr]):
    """
    An asynchronous spooled temporary file that starts in memory and is spooled to disk.

    This class provides an asynchronous interface to a spooled temporary file, much like
    Python's standard :class:`~tempfile.SpooledTemporaryFile`. It supports asynchronous
    write operations and provides a method to force a rollover to disk.

    :param max_size: Maximum size in bytes before the file is rolled over to disk.
    :param mode: The mode in which the file is opened. Defaults to "w+b".
    :param buffering: The buffering policy (-1 means the default buffering).
    :param encoding: The encoding used to decode or encode the file (text mode only).
    :param newline: Controls how universal newlines mode works (text mode only).
    :param suffix: The suffix for the temporary file name.
    :param prefix: The prefix for the temporary file name.
    :param dir: The directory in which the temporary file is created.
    :param errors: The error handling scheme used for encoding/decoding errors.
    """

    # True once the in-memory buffer has been replaced with an on-disk file.
    # While False, reads/writes hit the in-memory buffer directly (no worker
    # thread), after a cancellation checkpoint.
    _rolled: bool = False

    @overload
    def __init__(
        self: SpooledTemporaryFile[bytes],
        max_size: int = ...,
        mode: OpenBinaryMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        newline: str | None = ...,
        suffix: str | None = ...,
        prefix: str | None = ...,
        dir: str | None = ...,
        *,
        errors: str | None = ...,
    ): ...
    @overload
    def __init__(
        self: SpooledTemporaryFile[str],
        max_size: int = ...,
        mode: OpenTextMode = ...,
        buffering: int = ...,
        encoding: str | None = ...,
        newline: str | None = ...,
        suffix: str | None = ...,
        prefix: str | None = ...,
        dir: str | None = ...,
        *,
        errors: str | None = ...,
    ): ...

    def __init__(
        self,
        max_size: int = 0,
        mode: OpenBinaryMode | OpenTextMode = "w+b",
        buffering: int = -1,
        encoding: str | None = None,
        newline: str | None = None,
        suffix: str | None = None,
        prefix: str | None = None,
        dir: str | None = None,
        *,
        errors: str | None = None,
    ) -> None:
        # Saved so rollover() can create the on-disk file with the same
        # parameters the caller asked for.
        self._tempfile_params: dict[str, Any] = {
            "mode": mode,
            "buffering": buffering,
            "encoding": encoding,
            "newline": newline,
            "suffix": suffix,
            "prefix": prefix,
            "dir": dir,
            "errors": errors,
        }
        self._max_size = max_size
        if "b" in mode:
            super().__init__(BytesIO())  # type: ignore[arg-type]
        else:
            # Text mode: wrap an in-memory bytes buffer; write_through ensures
            # tell() reflects what has actually been encoded into the buffer.
            super().__init__(
                TextIOWrapper(  # type: ignore[arg-type]
                    BytesIO(),
                    encoding=encoding,
                    errors=errors,
                    newline=newline,
                    write_through=True,
                )
            )

    async def aclose(self) -> None:
        # Before rollover the backing object is an in-memory buffer; closing
        # it cannot block, so no worker thread is needed.
        if not self._rolled:
            self._fp.close()
            return

        await super().aclose()

    async def _check(self) -> None:
        """Roll over to disk if the in-memory buffer has outgrown max_size."""
        if self._rolled or self._fp.tell() <= self._max_size:
            return

        await self.rollover()

    async def rollover(self) -> None:
        """Force the file's contents to be moved from memory onto disk."""
        if self._rolled:
            return

        # Mark as rolled first so the write() below takes the thread-based
        # path against the new on-disk file.
        self._rolled = True
        buffer = self._fp
        buffer.seek(0)
        self._fp = await to_thread.run_sync(
            lambda: tempfile.TemporaryFile(**self._tempfile_params)
        )
        await self.write(buffer.read())
        buffer.close()

    @property
    def closed(self) -> bool:
        return self._fp.closed

    async def read(self, size: int = -1) -> AnyStr:
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.read(size)

        return await super().read(size)  # type: ignore[return-value]

    async def read1(self: SpooledTemporaryFile[bytes], size: int = -1) -> bytes:
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.read1(size)

        return await super().read1(size)

    async def readline(self) -> AnyStr:
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.readline()

        return await super().readline()  # type: ignore[return-value]

    async def readlines(self) -> list[AnyStr]:
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.readlines()

        return await super().readlines()  # type: ignore[return-value]

    async def readinto(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
        if not self._rolled:
            await checkpoint_if_cancelled()
            # Bug fix: return the in-memory read's byte count instead of
            # discarding it and falling through to a second read (which
            # overwrote ``b`` with the next chunk of the stream).
            return self._fp.readinto(b)

        return await super().readinto(b)

    async def readinto1(self: SpooledTemporaryFile[bytes], b: WriteableBuffer) -> int:
        if not self._rolled:
            await checkpoint_if_cancelled()
            # Bug fix: call readinto1() (not readinto()) on the buffer and
            # return its result instead of performing a second read.
            return self._fp.readinto1(b)

        return await super().readinto1(b)

    async def seek(self, offset: int, whence: int = os.SEEK_SET) -> int:
        # Annotation fix: ``whence`` must be an int; None was never a valid
        # value for the underlying seek() call.
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.seek(offset, whence)

        return await super().seek(offset, whence)

    async def tell(self) -> int:
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.tell()

        return await super().tell()

    async def truncate(self, size: int | None = None) -> int:
        if not self._rolled:
            await checkpoint_if_cancelled()
            return self._fp.truncate(size)

        return await super().truncate(size)

    @overload
    async def write(self: SpooledTemporaryFile[bytes], b: ReadableBuffer) -> int: ...
    @overload
    async def write(self: SpooledTemporaryFile[str], b: str) -> int: ...

    async def write(self, b: ReadableBuffer | str) -> int:
        """
        Asynchronously write data to the spooled temporary file.

        If the file has not yet been rolled over, the data is written synchronously,
        and a rollover is triggered if the size exceeds the maximum size.

        :param b: The data to write.
        :return: The number of bytes written.
        :raises RuntimeError: If the underlying file is not initialized.

        """
        if not self._rolled:
            await checkpoint_if_cancelled()
            result = self._fp.write(b)
            await self._check()
            return result

        return await super().write(b)  # type: ignore[misc]

    @overload
    async def writelines(
        self: SpooledTemporaryFile[bytes], lines: Iterable[ReadableBuffer]
    ) -> None: ...
    @overload
    async def writelines(
        self: SpooledTemporaryFile[str], lines: Iterable[str]
    ) -> None: ...

    async def writelines(self, lines: Iterable[str] | Iterable[ReadableBuffer]) -> None:
        """
        Asynchronously write a list of lines to the spooled temporary file.

        If the file has not yet been rolled over, the lines are written synchronously,
        and a rollover is triggered if the size exceeds the maximum size.

        :param lines: An iterable of lines to write.
        :raises RuntimeError: If the underlying file is not initialized.

        """
        if not self._rolled:
            await checkpoint_if_cancelled()
            result = self._fp.writelines(lines)
            await self._check()
            return result

        return await super().writelines(lines)  # type: ignore[misc]
|
| 452 |
+
|
| 453 |
+
|
| 454 |
+
class TemporaryDirectory(Generic[AnyStr]):
    """
    An asynchronous temporary directory that is created and cleaned up automatically.

    This class provides an asynchronous context manager for creating a temporary
    directory. It wraps Python's standard :class:`~tempfile.TemporaryDirectory` to
    perform directory creation and cleanup operations in a background thread.

    :param suffix: Suffix to be added to the temporary directory name.
    :param prefix: Prefix to be added to the temporary directory name.
    :param dir: The parent directory where the temporary directory is created.
    :param ignore_cleanup_errors: Whether to ignore errors during cleanup
        (Python 3.10+).
    :param delete: Whether to delete the directory upon closing (Python 3.12+).
    """

    def __init__(
        self,
        suffix: AnyStr | None = None,
        prefix: AnyStr | None = None,
        dir: AnyStr | None = None,
        *,
        ignore_cleanup_errors: bool = False,
        delete: bool = True,
    ) -> None:
        self.suffix: AnyStr | None = suffix
        self.prefix: AnyStr | None = prefix
        self.dir: AnyStr | None = dir
        self.ignore_cleanup_errors = ignore_cleanup_errors
        self.delete = delete

        # Set in __aenter__; None means the directory has not been created.
        self._tempdir: tempfile.TemporaryDirectory | None = None

    async def __aenter__(self) -> str:
        params: dict[str, Any] = {
            "suffix": self.suffix,
            "prefix": self.prefix,
            "dir": self.dir,
        }
        # ignore_cleanup_errors was added to TemporaryDirectory in 3.10, and
        # delete in 3.12; only forward them where supported (silently ignored
        # otherwise).
        if sys.version_info >= (3, 10):
            params["ignore_cleanup_errors"] = self.ignore_cleanup_errors

        if sys.version_info >= (3, 12):
            params["delete"] = self.delete

        self._tempdir = await to_thread.run_sync(
            lambda: tempfile.TemporaryDirectory(**params)
        )
        return await to_thread.run_sync(self._tempdir.__enter__)

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        if self._tempdir is not None:
            await to_thread.run_sync(
                self._tempdir.__exit__, exc_type, exc_value, traceback
            )

    async def cleanup(self) -> None:
        # Explicit cleanup, mirroring tempfile.TemporaryDirectory.cleanup().
        if self._tempdir is not None:
            await to_thread.run_sync(self._tempdir.cleanup)
|
| 518 |
+
|
| 519 |
+
|
| 520 |
+
@overload
async def mkstemp(
    suffix: str | None = None,
    prefix: str | None = None,
    dir: str | None = None,
    text: bool = False,
) -> tuple[int, str]: ...


@overload
async def mkstemp(
    suffix: bytes | None = None,
    prefix: bytes | None = None,
    dir: bytes | None = None,
    text: bool = False,
) -> tuple[int, bytes]: ...


async def mkstemp(
    suffix: AnyStr | None = None,
    prefix: AnyStr | None = None,
    dir: AnyStr | None = None,
    text: bool = False,
) -> tuple[int, str | bytes]:
    """
    Asynchronously create a temporary file and return an OS-level handle and the file
    name.

    This function wraps `tempfile.mkstemp` and executes it in a background thread.

    :param suffix: Suffix to be added to the file name.
    :param prefix: Prefix to be added to the file name.
    :param dir: Directory in which the temporary file is created.
    :param text: Whether the file is opened in text mode.
    :return: A tuple containing the file descriptor and the file name.

    """
    # tempfile.mkstemp performs blocking filesystem I/O, so it is dispatched to a
    # worker thread.
    fd_and_path: tuple[int, str | bytes] = await to_thread.run_sync(
        tempfile.mkstemp, suffix, prefix, dir, text
    )
    return fd_and_path
|
| 558 |
+
|
| 559 |
+
|
| 560 |
+
@overload
async def mkdtemp(
    suffix: str | None = None,
    prefix: str | None = None,
    dir: str | None = None,
) -> str: ...


@overload
async def mkdtemp(
    suffix: bytes | None = None,
    prefix: bytes | None = None,
    dir: bytes | None = None,
) -> bytes: ...


async def mkdtemp(
    suffix: AnyStr | None = None,
    prefix: AnyStr | None = None,
    dir: AnyStr | None = None,
) -> str | bytes:
    """
    Asynchronously create a temporary directory and return its path.

    This function wraps `tempfile.mkdtemp` and executes it in a background thread.

    :param suffix: Suffix to be added to the directory name.
    :param prefix: Prefix to be added to the directory name.
    :param dir: Parent directory where the temporary directory is created.
    :return: The path of the created temporary directory.

    """
    # tempfile.mkdtemp performs blocking filesystem I/O, so it is dispatched to a
    # worker thread.
    path: str | bytes = await to_thread.run_sync(tempfile.mkdtemp, suffix, prefix, dir)
    return path
|
| 593 |
+
|
| 594 |
+
|
| 595 |
+
async def gettempdir() -> str:
    """
    Asynchronously return the name of the directory used for temporary files.

    This function wraps `tempfile.gettempdir` and executes it in a background thread.

    :return: The path of the temporary directory as a string.

    """
    # The first call of tempfile.gettempdir may probe the filesystem, hence the
    # worker thread.
    tmpdir: str = await to_thread.run_sync(tempfile.gettempdir)
    return tmpdir
|
| 605 |
+
|
| 606 |
+
|
| 607 |
+
async def gettempdirb() -> bytes:
    """
    Asynchronously return the name of the directory used for temporary files in bytes.

    This function wraps `tempfile.gettempdirb` and executes it in a background thread.

    :return: The path of the temporary directory as bytes.

    """
    # The first call of tempfile.gettempdirb may probe the filesystem, hence the
    # worker thread.
    tmpdir: bytes = await to_thread.run_sync(tempfile.gettempdirb)
    return tmpdir
|
venv/lib/python3.10/site-packages/anyio/_core/_testing.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from collections.abc import Awaitable, Generator
|
| 4 |
+
from typing import Any, cast
|
| 5 |
+
|
| 6 |
+
from ._eventloop import get_async_backend
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TaskInfo:
    """
    Represents an asynchronous task.

    :ivar int id: the unique identifier of the task
    :ivar parent_id: the identifier of the parent task, if any
    :vartype parent_id: Optional[int]
    :ivar str name: the description of the task (if any)
    :ivar ~collections.abc.Coroutine coro: the coroutine object of the task
    """

    __slots__ = "_name", "id", "parent_id", "name", "coro"

    def __init__(
        self,
        id: int,
        parent_id: int | None,
        name: str | None,
        coro: Generator[Any, Any, Any] | Awaitable[Any],
    ):
        # _name holds the qualified name of get_current_task
        # (purpose not visible in this module — presumably used by subclasses/tools).
        func = get_current_task
        self._name = f"{func.__module__}.{func.__qualname__}"
        self.id: int = id
        self.parent_id: int | None = parent_id
        self.name: str | None = name
        self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro

    def __eq__(self, other: object) -> bool:
        # Tasks compare equal solely by their unique id.
        if not isinstance(other, TaskInfo):
            return NotImplemented

        return self.id == other.id

    def __hash__(self) -> int:
        return hash(self.id)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})"

    def has_pending_cancellation(self) -> bool:
        """
        Return ``True`` if the task has a cancellation pending, ``False`` otherwise.

        """
        # Backend-specific subclasses are expected to override this; the base
        # implementation reports no pending cancellation.
        return False
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def get_current_task() -> TaskInfo:
    """
    Return the current task.

    :return: a representation of the current task

    """
    backend = get_async_backend()
    return backend.get_current_task()
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
def get_running_tasks() -> list[TaskInfo]:
    """
    Return a list of running tasks in the current event loop.

    :return: a list of task info objects

    """
    # The cast narrows the backend's task-info list to this module's TaskInfo type.
    tasks = get_async_backend().get_running_tasks()
    return cast("list[TaskInfo]", tasks)
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
async def wait_all_tasks_blocked() -> None:
    """Wait until all other tasks are waiting for something."""
    backend = get_async_backend()
    await backend.wait_all_tasks_blocked()
|
venv/lib/python3.10/site-packages/anyio/_core/_typedattr.py
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from collections.abc import Callable, Mapping
|
| 4 |
+
from typing import Any, TypeVar, final, overload
|
| 5 |
+
|
| 6 |
+
from ._exceptions import TypedAttributeLookupError
|
| 7 |
+
|
| 8 |
+
# Type variables for the attribute value and the caller-supplied default value.
T_Attr = TypeVar("T_Attr")
T_Default = TypeVar("T_Default")
# Sentinel distinguishing "no default supplied" from an explicit default of None.
undefined = object()
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def typed_attribute() -> Any:
    """Return a unique object, used to mark typed attributes."""
    # Each call yields a distinct object, so every attribute marker is unique.
    marker = object()
    return marker
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class TypedAttributeSet:
    """
    Superclass for typed attribute collections.

    Checks that every public attribute of every subclass has a type annotation.
    """

    def __init_subclass__(cls) -> None:
        # Reject any public attribute assigned without a type annotation so the
        # attribute set stays fully typed.
        annotated: dict[str, Any] = getattr(cls, "__annotations__", {})
        for attrname in dir(cls):
            if attrname.startswith("_"):
                continue

            if attrname not in annotated:
                raise TypeError(
                    f"Attribute {attrname!r} is missing its type annotation"
                )

        super().__init_subclass__()
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class TypedAttributeProvider:
    """Base class for classes that wish to provide typed extra attributes."""

    @property
    def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]:
        """
        A mapping of the extra attributes to callables that return the corresponding
        values.

        If the provider wraps another provider, the attributes from that wrapper should
        also be included in the returned mapping (but the wrapper may override the
        callables from the wrapped instance).

        """
        return {}

    @overload
    def extra(self, attribute: T_Attr) -> T_Attr: ...

    @overload
    def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ...

    @final
    def extra(self, attribute: Any, default: object = undefined) -> object:
        """
        extra(attribute, default=undefined)

        Return the value of the given typed extra attribute.

        :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to
            look for
        :param default: the value that should be returned if no value is found for the
            attribute
        :raises ~anyio.TypedAttributeLookupError: if the search failed and no default
            value was given

        """
        try:
            getter = self.extra_attributes[attribute]
        except KeyError:
            # Fall back to the default if one was given; otherwise translate the
            # KeyError into the public lookup error (with the context suppressed).
            if default is not undefined:
                return default

            raise TypedAttributeLookupError("Attribute not found") from None
        else:
            return getter()
|
venv/lib/python3.10/site-packages/anyio/abc/__init__.py
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from __future__ import annotations
|
| 2 |
+
|
| 3 |
+
from ._eventloop import AsyncBackend as AsyncBackend
|
| 4 |
+
from ._resources import AsyncResource as AsyncResource
|
| 5 |
+
from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket
|
| 6 |
+
from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket
|
| 7 |
+
from ._sockets import IPAddressType as IPAddressType
|
| 8 |
+
from ._sockets import IPSockAddrType as IPSockAddrType
|
| 9 |
+
from ._sockets import SocketAttribute as SocketAttribute
|
| 10 |
+
from ._sockets import SocketListener as SocketListener
|
| 11 |
+
from ._sockets import SocketStream as SocketStream
|
| 12 |
+
from ._sockets import UDPPacketType as UDPPacketType
|
| 13 |
+
from ._sockets import UDPSocket as UDPSocket
|
| 14 |
+
from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType
|
| 15 |
+
from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket
|
| 16 |
+
from ._sockets import UNIXSocketStream as UNIXSocketStream
|
| 17 |
+
from ._streams import AnyByteReceiveStream as AnyByteReceiveStream
|
| 18 |
+
from ._streams import AnyByteSendStream as AnyByteSendStream
|
| 19 |
+
from ._streams import AnyByteStream as AnyByteStream
|
| 20 |
+
from ._streams import AnyByteStreamConnectable as AnyByteStreamConnectable
|
| 21 |
+
from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream
|
| 22 |
+
from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream
|
| 23 |
+
from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream
|
| 24 |
+
from ._streams import ByteReceiveStream as ByteReceiveStream
|
| 25 |
+
from ._streams import ByteSendStream as ByteSendStream
|
| 26 |
+
from ._streams import ByteStream as ByteStream
|
| 27 |
+
from ._streams import ByteStreamConnectable as ByteStreamConnectable
|
| 28 |
+
from ._streams import Listener as Listener
|
| 29 |
+
from ._streams import ObjectReceiveStream as ObjectReceiveStream
|
| 30 |
+
from ._streams import ObjectSendStream as ObjectSendStream
|
| 31 |
+
from ._streams import ObjectStream as ObjectStream
|
| 32 |
+
from ._streams import ObjectStreamConnectable as ObjectStreamConnectable
|
| 33 |
+
from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream
|
| 34 |
+
from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream
|
| 35 |
+
from ._streams import UnreliableObjectStream as UnreliableObjectStream
|
| 36 |
+
from ._subprocesses import Process as Process
|
| 37 |
+
from ._tasks import TaskGroup as TaskGroup
|
| 38 |
+
from ._tasks import TaskStatus as TaskStatus
|
| 39 |
+
from ._testing import TestRunner as TestRunner
|
| 40 |
+
|
| 41 |
+
# Re-exported here, for backwards compatibility
|
| 42 |
+
# isort: off
|
| 43 |
+
from .._core._synchronization import (
|
| 44 |
+
CapacityLimiter as CapacityLimiter,
|
| 45 |
+
Condition as Condition,
|
| 46 |
+
Event as Event,
|
| 47 |
+
Lock as Lock,
|
| 48 |
+
Semaphore as Semaphore,
|
| 49 |
+
)
|
| 50 |
+
from .._core._tasks import CancelScope as CancelScope
|
| 51 |
+
from ..from_thread import BlockingPortal as BlockingPortal
|
| 52 |
+
|
| 53 |
+
# Re-export imports so they look like they live directly in this package:
# each imported object whose __module__ starts with "anyio.abc." has it
# rewritten to this package's name, so reprs and docs show the public location.
for __value in list(locals().values()):
    if getattr(__value, "__module__", "").startswith("anyio.abc."):
        __value.__module__ = __name__

# Avoid leaking the loop variable as a module attribute.
del __value
|
venv/lib/python3.10/site-packages/anyio/abc/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (2.24 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_eventloop.cpython-310.pyc
ADDED
|
Binary file (13.9 kB). View file
|
|
|
venv/lib/python3.10/site-packages/anyio/abc/__pycache__/_resources.cpython-310.pyc
ADDED
|
Binary file (1.52 kB). View file
|
|
|