Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +6 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__init__.py +13 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/types.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/_staggered.py +202 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/impl.py +213 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/py.typed +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/types.py +12 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/utils.py +97 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/_find_header.pxd +2 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/client_exceptions.py +412 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/formdata.py +182 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/http_websocket.py +761 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/log.py +8 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/payload_streamer.py +78 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/py.typed +1 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/resolver.py +189 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/streams.py +687 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/typedefs.py +69 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/web.py +595 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/web_response.py +820 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/web_routedef.py +214 -0
- evalkit_tf437/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py +1299 -0
- evalkit_tf437/lib/python3.10/site-packages/pyasn1_modules/rfc5914.py +119 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/__init__.py +11 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/__init__.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/runners.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/taskgroups.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/tasks.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/timeouts.cpython-310.pyc +0 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/runners.py +214 -0
- evalkit_tf437/lib/python3.10/site-packages/taskgroup/timeouts.py +155 -0
- evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/__pycache__/_construct.cpython-310.pyc +0 -0
.gitattributes
CHANGED
|
@@ -1244,3 +1244,9 @@ evalkit_tf449/lib/python3.10/site-packages/scipy/optimize/_trlib/_trlib.cpython-
|
|
| 1244 |
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/linalg/_propack/_zpropack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1245 |
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/linalg/_propack/_spropack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1246 |
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/linalg/_propack/_dpropack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1244 |
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/linalg/_propack/_zpropack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1245 |
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/linalg/_propack/_spropack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1246 |
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/linalg/_propack/_dpropack.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1247 |
+
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/csgraph/_flow.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1248 |
+
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/csgraph/_matching.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1249 |
+
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/csgraph/_shortest_path.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1250 |
+
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/csgraph/_min_spanning_tree.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1251 |
+
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/csgraph/_traversal.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 1252 |
+
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/csgraph/_reordering.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__init__.py
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__version__ = "2.4.3"
|
| 2 |
+
|
| 3 |
+
from .impl import start_connection
|
| 4 |
+
from .types import AddrInfoType
|
| 5 |
+
from .utils import addr_to_addr_infos, pop_addr_infos_interleave, remove_addr_infos
|
| 6 |
+
|
| 7 |
+
__all__ = (
|
| 8 |
+
"start_connection",
|
| 9 |
+
"AddrInfoType",
|
| 10 |
+
"remove_addr_infos",
|
| 11 |
+
"pop_addr_infos_interleave",
|
| 12 |
+
"addr_to_addr_infos",
|
| 13 |
+
)
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (442 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/impl.cpython-310.pyc
ADDED
|
Binary file (5.43 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/__pycache__/types.cpython-310.pyc
ADDED
|
Binary file (391 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/_staggered.py
ADDED
|
@@ -0,0 +1,202 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import contextlib
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Awaitable,
|
| 7 |
+
Callable,
|
| 8 |
+
Iterable,
|
| 9 |
+
List,
|
| 10 |
+
Optional,
|
| 11 |
+
Set,
|
| 12 |
+
Tuple,
|
| 13 |
+
TypeVar,
|
| 14 |
+
Union,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
_T = TypeVar("_T")
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _set_result(wait_next: "asyncio.Future[None]") -> None:
|
| 21 |
+
"""Set the result of a future if it is not already done."""
|
| 22 |
+
if not wait_next.done():
|
| 23 |
+
wait_next.set_result(None)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
async def _wait_one(
|
| 27 |
+
futures: "Iterable[asyncio.Future[Any]]",
|
| 28 |
+
loop: asyncio.AbstractEventLoop,
|
| 29 |
+
) -> _T:
|
| 30 |
+
"""Wait for the first future to complete."""
|
| 31 |
+
wait_next = loop.create_future()
|
| 32 |
+
|
| 33 |
+
def _on_completion(fut: "asyncio.Future[Any]") -> None:
|
| 34 |
+
if not wait_next.done():
|
| 35 |
+
wait_next.set_result(fut)
|
| 36 |
+
|
| 37 |
+
for f in futures:
|
| 38 |
+
f.add_done_callback(_on_completion)
|
| 39 |
+
|
| 40 |
+
try:
|
| 41 |
+
return await wait_next
|
| 42 |
+
finally:
|
| 43 |
+
for f in futures:
|
| 44 |
+
f.remove_done_callback(_on_completion)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
async def staggered_race(
|
| 48 |
+
coro_fns: Iterable[Callable[[], Awaitable[_T]]],
|
| 49 |
+
delay: Optional[float],
|
| 50 |
+
*,
|
| 51 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 52 |
+
) -> Tuple[Optional[_T], Optional[int], List[Optional[BaseException]]]:
|
| 53 |
+
"""
|
| 54 |
+
Run coroutines with staggered start times and take the first to finish.
|
| 55 |
+
|
| 56 |
+
This method takes an iterable of coroutine functions. The first one is
|
| 57 |
+
started immediately. From then on, whenever the immediately preceding one
|
| 58 |
+
fails (raises an exception), or when *delay* seconds has passed, the next
|
| 59 |
+
coroutine is started. This continues until one of the coroutines complete
|
| 60 |
+
successfully, in which case all others are cancelled, or until all
|
| 61 |
+
coroutines fail.
|
| 62 |
+
|
| 63 |
+
The coroutines provided should be well-behaved in the following way:
|
| 64 |
+
|
| 65 |
+
* They should only ``return`` if completed successfully.
|
| 66 |
+
|
| 67 |
+
* They should always raise an exception if they did not complete
|
| 68 |
+
successfully. In particular, if they handle cancellation, they should
|
| 69 |
+
probably reraise, like this::
|
| 70 |
+
|
| 71 |
+
try:
|
| 72 |
+
# do work
|
| 73 |
+
except asyncio.CancelledError:
|
| 74 |
+
# undo partially completed work
|
| 75 |
+
raise
|
| 76 |
+
|
| 77 |
+
Args:
|
| 78 |
+
----
|
| 79 |
+
coro_fns: an iterable of coroutine functions, i.e. callables that
|
| 80 |
+
return a coroutine object when called. Use ``functools.partial`` or
|
| 81 |
+
lambdas to pass arguments.
|
| 82 |
+
|
| 83 |
+
delay: amount of time, in seconds, between starting coroutines. If
|
| 84 |
+
``None``, the coroutines will run sequentially.
|
| 85 |
+
|
| 86 |
+
loop: the event loop to use. If ``None``, the running loop is used.
|
| 87 |
+
|
| 88 |
+
Returns:
|
| 89 |
+
-------
|
| 90 |
+
tuple *(winner_result, winner_index, exceptions)* where
|
| 91 |
+
|
| 92 |
+
- *winner_result*: the result of the winning coroutine, or ``None``
|
| 93 |
+
if no coroutines won.
|
| 94 |
+
|
| 95 |
+
- *winner_index*: the index of the winning coroutine in
|
| 96 |
+
``coro_fns``, or ``None`` if no coroutines won. If the winning
|
| 97 |
+
coroutine may return None on success, *winner_index* can be used
|
| 98 |
+
to definitively determine whether any coroutine won.
|
| 99 |
+
|
| 100 |
+
- *exceptions*: list of exceptions returned by the coroutines.
|
| 101 |
+
``len(exceptions)`` is equal to the number of coroutines actually
|
| 102 |
+
started, and the order is the same as in ``coro_fns``. The winning
|
| 103 |
+
coroutine's entry is ``None``.
|
| 104 |
+
|
| 105 |
+
"""
|
| 106 |
+
loop = loop or asyncio.get_running_loop()
|
| 107 |
+
exceptions: List[Optional[BaseException]] = []
|
| 108 |
+
tasks: Set[asyncio.Task[Optional[Tuple[_T, int]]]] = set()
|
| 109 |
+
|
| 110 |
+
async def run_one_coro(
|
| 111 |
+
coro_fn: Callable[[], Awaitable[_T]],
|
| 112 |
+
this_index: int,
|
| 113 |
+
start_next: "asyncio.Future[None]",
|
| 114 |
+
) -> Optional[Tuple[_T, int]]:
|
| 115 |
+
"""
|
| 116 |
+
Run a single coroutine.
|
| 117 |
+
|
| 118 |
+
If the coroutine fails, set the exception in the exceptions list and
|
| 119 |
+
start the next coroutine by setting the result of the start_next.
|
| 120 |
+
|
| 121 |
+
If the coroutine succeeds, return the result and the index of the
|
| 122 |
+
coroutine in the coro_fns list.
|
| 123 |
+
|
| 124 |
+
If SystemExit or KeyboardInterrupt is raised, re-raise it.
|
| 125 |
+
"""
|
| 126 |
+
try:
|
| 127 |
+
result = await coro_fn()
|
| 128 |
+
except (SystemExit, KeyboardInterrupt):
|
| 129 |
+
raise
|
| 130 |
+
except BaseException as e:
|
| 131 |
+
exceptions[this_index] = e
|
| 132 |
+
_set_result(start_next) # Kickstart the next coroutine
|
| 133 |
+
return None
|
| 134 |
+
|
| 135 |
+
return result, this_index
|
| 136 |
+
|
| 137 |
+
start_next_timer: Optional[asyncio.TimerHandle] = None
|
| 138 |
+
start_next: Optional[asyncio.Future[None]]
|
| 139 |
+
task: asyncio.Task[Optional[Tuple[_T, int]]]
|
| 140 |
+
done: Union[asyncio.Future[None], asyncio.Task[Optional[Tuple[_T, int]]]]
|
| 141 |
+
coro_iter = iter(coro_fns)
|
| 142 |
+
this_index = -1
|
| 143 |
+
try:
|
| 144 |
+
while True:
|
| 145 |
+
if coro_fn := next(coro_iter, None):
|
| 146 |
+
this_index += 1
|
| 147 |
+
exceptions.append(None)
|
| 148 |
+
start_next = loop.create_future()
|
| 149 |
+
task = loop.create_task(run_one_coro(coro_fn, this_index, start_next))
|
| 150 |
+
tasks.add(task)
|
| 151 |
+
start_next_timer = (
|
| 152 |
+
loop.call_later(delay, _set_result, start_next) if delay else None
|
| 153 |
+
)
|
| 154 |
+
elif not tasks:
|
| 155 |
+
# We exhausted the coro_fns list and no tasks are running
|
| 156 |
+
# so we have no winner and all coroutines failed.
|
| 157 |
+
break
|
| 158 |
+
|
| 159 |
+
while tasks:
|
| 160 |
+
done = await _wait_one(
|
| 161 |
+
[*tasks, start_next] if start_next else tasks, loop
|
| 162 |
+
)
|
| 163 |
+
if done is start_next:
|
| 164 |
+
# The current task has failed or the timer has expired
|
| 165 |
+
# so we need to start the next task.
|
| 166 |
+
start_next = None
|
| 167 |
+
if start_next_timer:
|
| 168 |
+
start_next_timer.cancel()
|
| 169 |
+
start_next_timer = None
|
| 170 |
+
|
| 171 |
+
# Break out of the task waiting loop to start the next
|
| 172 |
+
# task.
|
| 173 |
+
break
|
| 174 |
+
|
| 175 |
+
if TYPE_CHECKING:
|
| 176 |
+
assert isinstance(done, asyncio.Task)
|
| 177 |
+
|
| 178 |
+
tasks.remove(done)
|
| 179 |
+
if winner := done.result():
|
| 180 |
+
return *winner, exceptions
|
| 181 |
+
finally:
|
| 182 |
+
# We either have:
|
| 183 |
+
# - a winner
|
| 184 |
+
# - all tasks failed
|
| 185 |
+
# - a KeyboardInterrupt or SystemExit.
|
| 186 |
+
|
| 187 |
+
#
|
| 188 |
+
# If the timer is still running, cancel it.
|
| 189 |
+
#
|
| 190 |
+
if start_next_timer:
|
| 191 |
+
start_next_timer.cancel()
|
| 192 |
+
|
| 193 |
+
#
|
| 194 |
+
# If there are any tasks left, cancel them and than
|
| 195 |
+
# wait them so they fill the exceptions list.
|
| 196 |
+
#
|
| 197 |
+
for task in tasks:
|
| 198 |
+
task.cancel()
|
| 199 |
+
with contextlib.suppress(asyncio.CancelledError):
|
| 200 |
+
await task
|
| 201 |
+
|
| 202 |
+
return None, None, exceptions
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/impl.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Base implementation."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import collections
|
| 5 |
+
import functools
|
| 6 |
+
import itertools
|
| 7 |
+
import socket
|
| 8 |
+
import sys
|
| 9 |
+
from typing import List, Optional, Sequence, Union
|
| 10 |
+
|
| 11 |
+
from . import _staggered
|
| 12 |
+
from .types import AddrInfoType
|
| 13 |
+
|
| 14 |
+
if sys.version_info < (3, 8, 2): # noqa: UP036
|
| 15 |
+
# asyncio.staggered is broken in Python 3.8.0 and 3.8.1
|
| 16 |
+
# so it must be patched:
|
| 17 |
+
# https://github.com/aio-libs/aiohttp/issues/8556
|
| 18 |
+
# https://bugs.python.org/issue39129
|
| 19 |
+
# https://github.com/python/cpython/pull/17693
|
| 20 |
+
import asyncio.futures
|
| 21 |
+
|
| 22 |
+
asyncio.futures.TimeoutError = asyncio.TimeoutError # type: ignore[attr-defined]
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
async def start_connection(
|
| 26 |
+
addr_infos: Sequence[AddrInfoType],
|
| 27 |
+
*,
|
| 28 |
+
local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
|
| 29 |
+
happy_eyeballs_delay: Optional[float] = None,
|
| 30 |
+
interleave: Optional[int] = None,
|
| 31 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 32 |
+
) -> socket.socket:
|
| 33 |
+
"""
|
| 34 |
+
Connect to a TCP server.
|
| 35 |
+
|
| 36 |
+
Create a socket connection to a specified destination. The
|
| 37 |
+
destination is specified as a list of AddrInfoType tuples as
|
| 38 |
+
returned from getaddrinfo().
|
| 39 |
+
|
| 40 |
+
The arguments are, in order:
|
| 41 |
+
|
| 42 |
+
* ``family``: the address family, e.g. ``socket.AF_INET`` or
|
| 43 |
+
``socket.AF_INET6``.
|
| 44 |
+
* ``type``: the socket type, e.g. ``socket.SOCK_STREAM`` or
|
| 45 |
+
``socket.SOCK_DGRAM``.
|
| 46 |
+
* ``proto``: the protocol, e.g. ``socket.IPPROTO_TCP`` or
|
| 47 |
+
``socket.IPPROTO_UDP``.
|
| 48 |
+
* ``canonname``: the canonical name of the address, e.g.
|
| 49 |
+
``"www.python.org"``.
|
| 50 |
+
* ``sockaddr``: the socket address
|
| 51 |
+
|
| 52 |
+
This method is a coroutine which will try to establish the connection
|
| 53 |
+
in the background. When successful, the coroutine returns a
|
| 54 |
+
socket.
|
| 55 |
+
|
| 56 |
+
The expected use case is to use this method in conjunction with
|
| 57 |
+
loop.create_connection() to establish a connection to a server::
|
| 58 |
+
|
| 59 |
+
socket = await start_connection(addr_infos)
|
| 60 |
+
transport, protocol = await loop.create_connection(
|
| 61 |
+
MyProtocol, sock=socket, ...)
|
| 62 |
+
"""
|
| 63 |
+
if not (current_loop := loop):
|
| 64 |
+
current_loop = asyncio.get_running_loop()
|
| 65 |
+
|
| 66 |
+
single_addr_info = len(addr_infos) == 1
|
| 67 |
+
|
| 68 |
+
if happy_eyeballs_delay is not None and interleave is None:
|
| 69 |
+
# If using happy eyeballs, default to interleave addresses by family
|
| 70 |
+
interleave = 1
|
| 71 |
+
|
| 72 |
+
if interleave and not single_addr_info:
|
| 73 |
+
addr_infos = _interleave_addrinfos(addr_infos, interleave)
|
| 74 |
+
|
| 75 |
+
sock: Optional[socket.socket] = None
|
| 76 |
+
# uvloop can raise RuntimeError instead of OSError
|
| 77 |
+
exceptions: List[List[Union[OSError, RuntimeError]]] = []
|
| 78 |
+
if happy_eyeballs_delay is None or single_addr_info:
|
| 79 |
+
# not using happy eyeballs
|
| 80 |
+
for addrinfo in addr_infos:
|
| 81 |
+
try:
|
| 82 |
+
sock = await _connect_sock(
|
| 83 |
+
current_loop, exceptions, addrinfo, local_addr_infos
|
| 84 |
+
)
|
| 85 |
+
break
|
| 86 |
+
except (RuntimeError, OSError):
|
| 87 |
+
continue
|
| 88 |
+
else: # using happy eyeballs
|
| 89 |
+
sock, _, _ = await _staggered.staggered_race(
|
| 90 |
+
(
|
| 91 |
+
functools.partial(
|
| 92 |
+
_connect_sock, current_loop, exceptions, addrinfo, local_addr_infos
|
| 93 |
+
)
|
| 94 |
+
for addrinfo in addr_infos
|
| 95 |
+
),
|
| 96 |
+
happy_eyeballs_delay,
|
| 97 |
+
)
|
| 98 |
+
|
| 99 |
+
if sock is None:
|
| 100 |
+
all_exceptions = [exc for sub in exceptions for exc in sub]
|
| 101 |
+
try:
|
| 102 |
+
first_exception = all_exceptions[0]
|
| 103 |
+
if len(all_exceptions) == 1:
|
| 104 |
+
raise first_exception
|
| 105 |
+
else:
|
| 106 |
+
# If they all have the same str(), raise one.
|
| 107 |
+
model = str(first_exception)
|
| 108 |
+
if all(str(exc) == model for exc in all_exceptions):
|
| 109 |
+
raise first_exception
|
| 110 |
+
# Raise a combined exception so the user can see all
|
| 111 |
+
# the various error messages.
|
| 112 |
+
msg = "Multiple exceptions: {}".format(
|
| 113 |
+
", ".join(str(exc) for exc in all_exceptions)
|
| 114 |
+
)
|
| 115 |
+
# If the errno is the same for all exceptions, raise
|
| 116 |
+
# an OSError with that errno.
|
| 117 |
+
if isinstance(first_exception, OSError):
|
| 118 |
+
first_errno = first_exception.errno
|
| 119 |
+
if all(
|
| 120 |
+
isinstance(exc, OSError) and exc.errno == first_errno
|
| 121 |
+
for exc in all_exceptions
|
| 122 |
+
):
|
| 123 |
+
raise OSError(first_errno, msg)
|
| 124 |
+
elif isinstance(first_exception, RuntimeError) and all(
|
| 125 |
+
isinstance(exc, RuntimeError) for exc in all_exceptions
|
| 126 |
+
):
|
| 127 |
+
raise RuntimeError(msg)
|
| 128 |
+
# We have a mix of OSError and RuntimeError
|
| 129 |
+
# so we have to pick which one to raise.
|
| 130 |
+
# and we raise OSError for compatibility
|
| 131 |
+
raise OSError(msg)
|
| 132 |
+
finally:
|
| 133 |
+
all_exceptions = None # type: ignore[assignment]
|
| 134 |
+
exceptions = None # type: ignore[assignment]
|
| 135 |
+
|
| 136 |
+
return sock
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
async def _connect_sock(
|
| 140 |
+
loop: asyncio.AbstractEventLoop,
|
| 141 |
+
exceptions: List[List[Union[OSError, RuntimeError]]],
|
| 142 |
+
addr_info: AddrInfoType,
|
| 143 |
+
local_addr_infos: Optional[Sequence[AddrInfoType]] = None,
|
| 144 |
+
) -> socket.socket:
|
| 145 |
+
"""Create, bind and connect one socket."""
|
| 146 |
+
my_exceptions: List[Union[OSError, RuntimeError]] = []
|
| 147 |
+
exceptions.append(my_exceptions)
|
| 148 |
+
family, type_, proto, _, address = addr_info
|
| 149 |
+
sock = None
|
| 150 |
+
try:
|
| 151 |
+
sock = socket.socket(family=family, type=type_, proto=proto)
|
| 152 |
+
sock.setblocking(False)
|
| 153 |
+
if local_addr_infos is not None:
|
| 154 |
+
for lfamily, _, _, _, laddr in local_addr_infos:
|
| 155 |
+
# skip local addresses of different family
|
| 156 |
+
if lfamily != family:
|
| 157 |
+
continue
|
| 158 |
+
try:
|
| 159 |
+
sock.bind(laddr)
|
| 160 |
+
break
|
| 161 |
+
except OSError as exc:
|
| 162 |
+
msg = (
|
| 163 |
+
f"error while attempting to bind on "
|
| 164 |
+
f"address {laddr!r}: "
|
| 165 |
+
f"{exc.strerror.lower()}"
|
| 166 |
+
)
|
| 167 |
+
exc = OSError(exc.errno, msg)
|
| 168 |
+
my_exceptions.append(exc)
|
| 169 |
+
else: # all bind attempts failed
|
| 170 |
+
if my_exceptions:
|
| 171 |
+
raise my_exceptions.pop()
|
| 172 |
+
else:
|
| 173 |
+
raise OSError(f"no matching local address with {family=} found")
|
| 174 |
+
await loop.sock_connect(sock, address)
|
| 175 |
+
return sock
|
| 176 |
+
except (RuntimeError, OSError) as exc:
|
| 177 |
+
my_exceptions.append(exc)
|
| 178 |
+
if sock is not None:
|
| 179 |
+
sock.close()
|
| 180 |
+
raise
|
| 181 |
+
except:
|
| 182 |
+
if sock is not None:
|
| 183 |
+
sock.close()
|
| 184 |
+
raise
|
| 185 |
+
finally:
|
| 186 |
+
exceptions = my_exceptions = None # type: ignore[assignment]
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
def _interleave_addrinfos(
|
| 190 |
+
addrinfos: Sequence[AddrInfoType], first_address_family_count: int = 1
|
| 191 |
+
) -> List[AddrInfoType]:
|
| 192 |
+
"""Interleave list of addrinfo tuples by family."""
|
| 193 |
+
# Group addresses by family
|
| 194 |
+
addrinfos_by_family: collections.OrderedDict[int, List[AddrInfoType]] = (
|
| 195 |
+
collections.OrderedDict()
|
| 196 |
+
)
|
| 197 |
+
for addr in addrinfos:
|
| 198 |
+
family = addr[0]
|
| 199 |
+
if family not in addrinfos_by_family:
|
| 200 |
+
addrinfos_by_family[family] = []
|
| 201 |
+
addrinfos_by_family[family].append(addr)
|
| 202 |
+
addrinfos_lists = list(addrinfos_by_family.values())
|
| 203 |
+
|
| 204 |
+
reordered: List[AddrInfoType] = []
|
| 205 |
+
if first_address_family_count > 1:
|
| 206 |
+
reordered.extend(addrinfos_lists[0][: first_address_family_count - 1])
|
| 207 |
+
del addrinfos_lists[0][: first_address_family_count - 1]
|
| 208 |
+
reordered.extend(
|
| 209 |
+
a
|
| 210 |
+
for a in itertools.chain.from_iterable(itertools.zip_longest(*addrinfos_lists))
|
| 211 |
+
if a is not None
|
| 212 |
+
)
|
| 213 |
+
return reordered
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/py.typed
ADDED
|
File without changes
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/types.py
ADDED
|
@@ -0,0 +1,12 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Types for aiohappyeyeballs."""
|
| 2 |
+
|
| 3 |
+
import socket
|
| 4 |
+
from typing import Tuple, Union
|
| 5 |
+
|
| 6 |
+
AddrInfoType = Tuple[
|
| 7 |
+
Union[int, socket.AddressFamily],
|
| 8 |
+
Union[int, socket.SocketKind],
|
| 9 |
+
int,
|
| 10 |
+
str,
|
| 11 |
+
Tuple, # type: ignore[type-arg]
|
| 12 |
+
]
|
evalkit_tf437/lib/python3.10/site-packages/aiohappyeyeballs/utils.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utility functions for aiohappyeyeballs."""
|
| 2 |
+
|
| 3 |
+
import ipaddress
|
| 4 |
+
import socket
|
| 5 |
+
from typing import Dict, List, Optional, Tuple, Union
|
| 6 |
+
|
| 7 |
+
from .types import AddrInfoType
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def addr_to_addr_infos(
|
| 11 |
+
addr: Optional[
|
| 12 |
+
Union[Tuple[str, int, int, int], Tuple[str, int, int], Tuple[str, int]]
|
| 13 |
+
],
|
| 14 |
+
) -> Optional[List[AddrInfoType]]:
|
| 15 |
+
"""Convert an address tuple to a list of addr_info tuples."""
|
| 16 |
+
if addr is None:
|
| 17 |
+
return None
|
| 18 |
+
host = addr[0]
|
| 19 |
+
port = addr[1]
|
| 20 |
+
is_ipv6 = ":" in host
|
| 21 |
+
if is_ipv6:
|
| 22 |
+
flowinfo = 0
|
| 23 |
+
scopeid = 0
|
| 24 |
+
addr_len = len(addr)
|
| 25 |
+
if addr_len >= 4:
|
| 26 |
+
scopeid = addr[3] # type: ignore[misc]
|
| 27 |
+
if addr_len >= 3:
|
| 28 |
+
flowinfo = addr[2] # type: ignore[misc]
|
| 29 |
+
addr = (host, port, flowinfo, scopeid)
|
| 30 |
+
family = socket.AF_INET6
|
| 31 |
+
else:
|
| 32 |
+
addr = (host, port)
|
| 33 |
+
family = socket.AF_INET
|
| 34 |
+
return [(family, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", addr)]
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
def pop_addr_infos_interleave(
|
| 38 |
+
addr_infos: List[AddrInfoType], interleave: Optional[int] = None
|
| 39 |
+
) -> None:
|
| 40 |
+
"""
|
| 41 |
+
Pop addr_info from the list of addr_infos by family up to interleave times.
|
| 42 |
+
|
| 43 |
+
The interleave parameter is used to know how many addr_infos for
|
| 44 |
+
each family should be popped of the top of the list.
|
| 45 |
+
"""
|
| 46 |
+
seen: Dict[int, int] = {}
|
| 47 |
+
if interleave is None:
|
| 48 |
+
interleave = 1
|
| 49 |
+
to_remove: List[AddrInfoType] = []
|
| 50 |
+
for addr_info in addr_infos:
|
| 51 |
+
family = addr_info[0]
|
| 52 |
+
if family not in seen:
|
| 53 |
+
seen[family] = 0
|
| 54 |
+
if seen[family] < interleave:
|
| 55 |
+
to_remove.append(addr_info)
|
| 56 |
+
seen[family] += 1
|
| 57 |
+
for addr_info in to_remove:
|
| 58 |
+
addr_infos.remove(addr_info)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
def _addr_tuple_to_ip_address(
|
| 62 |
+
addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
|
| 63 |
+
) -> Union[
|
| 64 |
+
Tuple[ipaddress.IPv4Address, int], Tuple[ipaddress.IPv6Address, int, int, int]
|
| 65 |
+
]:
|
| 66 |
+
"""Convert an address tuple to an IPv4Address."""
|
| 67 |
+
return (ipaddress.ip_address(addr[0]), *addr[1:])
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def remove_addr_infos(
|
| 71 |
+
addr_infos: List[AddrInfoType],
|
| 72 |
+
addr: Union[Tuple[str, int], Tuple[str, int, int, int]],
|
| 73 |
+
) -> None:
|
| 74 |
+
"""
|
| 75 |
+
Remove an address from the list of addr_infos.
|
| 76 |
+
|
| 77 |
+
The addr value is typically the return value of
|
| 78 |
+
sock.getpeername().
|
| 79 |
+
"""
|
| 80 |
+
bad_addrs_infos: List[AddrInfoType] = []
|
| 81 |
+
for addr_info in addr_infos:
|
| 82 |
+
if addr_info[-1] == addr:
|
| 83 |
+
bad_addrs_infos.append(addr_info)
|
| 84 |
+
if bad_addrs_infos:
|
| 85 |
+
for bad_addr_info in bad_addrs_infos:
|
| 86 |
+
addr_infos.remove(bad_addr_info)
|
| 87 |
+
return
|
| 88 |
+
# Slow path in case addr is formatted differently
|
| 89 |
+
match_addr = _addr_tuple_to_ip_address(addr)
|
| 90 |
+
for addr_info in addr_infos:
|
| 91 |
+
if match_addr == _addr_tuple_to_ip_address(addr_info[-1]):
|
| 92 |
+
bad_addrs_infos.append(addr_info)
|
| 93 |
+
if bad_addrs_infos:
|
| 94 |
+
for bad_addr_info in bad_addrs_infos:
|
| 95 |
+
addr_infos.remove(bad_addr_info)
|
| 96 |
+
return
|
| 97 |
+
raise ValueError(f"Address {addr} not found in addr_infos")
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (4.55 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc
ADDED
|
Binary file (33.6 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/client_reqrep.cpython-310.pyc
ADDED
|
Binary file (31.7 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/connector.cpython-310.pyc
ADDED
|
Binary file (39.7 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/formdata.cpython-310.pyc
ADDED
|
Binary file (4.83 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc
ADDED
|
Binary file (30.4 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/http_exceptions.cpython-310.pyc
ADDED
|
Binary file (4.42 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/http_parser.cpython-310.pyc
ADDED
|
Binary file (20.1 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/http_websocket.cpython-310.pyc
ADDED
|
Binary file (15.9 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/multipart.cpython-310.pyc
ADDED
|
Binary file (28.8 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc
ADDED
|
Binary file (18.9 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc
ADDED
|
Binary file (22.1 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/web_exceptions.cpython-310.pyc
ADDED
|
Binary file (11.6 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc
ADDED
|
Binary file (3.85 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc
ADDED
|
Binary file (24.9 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/__pycache__/worker.cpython-310.pyc
ADDED
|
Binary file (6.52 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/_find_header.pxd
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "_find_header.h":
|
| 2 |
+
int find_header(char *, int)
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/client_exceptions.py
ADDED
|
@@ -0,0 +1,412 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP related errors."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import TYPE_CHECKING, Optional, Tuple, Union
|
| 6 |
+
|
| 7 |
+
from multidict import MultiMapping
|
| 8 |
+
|
| 9 |
+
from .typedefs import StrOrURL
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import ssl
|
| 13 |
+
|
| 14 |
+
SSLContext = ssl.SSLContext
|
| 15 |
+
except ImportError: # pragma: no cover
|
| 16 |
+
ssl = SSLContext = None # type: ignore[assignment]
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
if TYPE_CHECKING:
|
| 20 |
+
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
| 21 |
+
from .http_parser import RawResponseMessage
|
| 22 |
+
else:
|
| 23 |
+
RequestInfo = ClientResponse = ConnectionKey = RawResponseMessage = None
|
| 24 |
+
|
| 25 |
+
__all__ = (
|
| 26 |
+
"ClientError",
|
| 27 |
+
"ClientConnectionError",
|
| 28 |
+
"ClientConnectionResetError",
|
| 29 |
+
"ClientOSError",
|
| 30 |
+
"ClientConnectorError",
|
| 31 |
+
"ClientProxyConnectionError",
|
| 32 |
+
"ClientSSLError",
|
| 33 |
+
"ClientConnectorDNSError",
|
| 34 |
+
"ClientConnectorSSLError",
|
| 35 |
+
"ClientConnectorCertificateError",
|
| 36 |
+
"ConnectionTimeoutError",
|
| 37 |
+
"SocketTimeoutError",
|
| 38 |
+
"ServerConnectionError",
|
| 39 |
+
"ServerTimeoutError",
|
| 40 |
+
"ServerDisconnectedError",
|
| 41 |
+
"ServerFingerprintMismatch",
|
| 42 |
+
"ClientResponseError",
|
| 43 |
+
"ClientHttpProxyError",
|
| 44 |
+
"WSServerHandshakeError",
|
| 45 |
+
"ContentTypeError",
|
| 46 |
+
"ClientPayloadError",
|
| 47 |
+
"InvalidURL",
|
| 48 |
+
"InvalidUrlClientError",
|
| 49 |
+
"RedirectClientError",
|
| 50 |
+
"NonHttpUrlClientError",
|
| 51 |
+
"InvalidUrlRedirectClientError",
|
| 52 |
+
"NonHttpUrlRedirectClientError",
|
| 53 |
+
)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class ClientError(Exception):
|
| 57 |
+
"""Base class for client connection errors."""
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class ClientResponseError(ClientError):
|
| 61 |
+
"""Base class for exceptions that occur after getting a response.
|
| 62 |
+
|
| 63 |
+
request_info: An instance of RequestInfo.
|
| 64 |
+
history: A sequence of responses, if redirects occurred.
|
| 65 |
+
status: HTTP status code.
|
| 66 |
+
message: Error message.
|
| 67 |
+
headers: Response headers.
|
| 68 |
+
"""
|
| 69 |
+
|
| 70 |
+
def __init__(
|
| 71 |
+
self,
|
| 72 |
+
request_info: RequestInfo,
|
| 73 |
+
history: Tuple[ClientResponse, ...],
|
| 74 |
+
*,
|
| 75 |
+
code: Optional[int] = None,
|
| 76 |
+
status: Optional[int] = None,
|
| 77 |
+
message: str = "",
|
| 78 |
+
headers: Optional[MultiMapping[str]] = None,
|
| 79 |
+
) -> None:
|
| 80 |
+
self.request_info = request_info
|
| 81 |
+
if code is not None:
|
| 82 |
+
if status is not None:
|
| 83 |
+
raise ValueError(
|
| 84 |
+
"Both code and status arguments are provided; "
|
| 85 |
+
"code is deprecated, use status instead"
|
| 86 |
+
)
|
| 87 |
+
warnings.warn(
|
| 88 |
+
"code argument is deprecated, use status instead",
|
| 89 |
+
DeprecationWarning,
|
| 90 |
+
stacklevel=2,
|
| 91 |
+
)
|
| 92 |
+
if status is not None:
|
| 93 |
+
self.status = status
|
| 94 |
+
elif code is not None:
|
| 95 |
+
self.status = code
|
| 96 |
+
else:
|
| 97 |
+
self.status = 0
|
| 98 |
+
self.message = message
|
| 99 |
+
self.headers = headers
|
| 100 |
+
self.history = history
|
| 101 |
+
self.args = (request_info, history)
|
| 102 |
+
|
| 103 |
+
def __str__(self) -> str:
|
| 104 |
+
return "{}, message={!r}, url={!r}".format(
|
| 105 |
+
self.status,
|
| 106 |
+
self.message,
|
| 107 |
+
str(self.request_info.real_url),
|
| 108 |
+
)
|
| 109 |
+
|
| 110 |
+
def __repr__(self) -> str:
|
| 111 |
+
args = f"{self.request_info!r}, {self.history!r}"
|
| 112 |
+
if self.status != 0:
|
| 113 |
+
args += f", status={self.status!r}"
|
| 114 |
+
if self.message != "":
|
| 115 |
+
args += f", message={self.message!r}"
|
| 116 |
+
if self.headers is not None:
|
| 117 |
+
args += f", headers={self.headers!r}"
|
| 118 |
+
return f"{type(self).__name__}({args})"
|
| 119 |
+
|
| 120 |
+
@property
|
| 121 |
+
def code(self) -> int:
|
| 122 |
+
warnings.warn(
|
| 123 |
+
"code property is deprecated, use status instead",
|
| 124 |
+
DeprecationWarning,
|
| 125 |
+
stacklevel=2,
|
| 126 |
+
)
|
| 127 |
+
return self.status
|
| 128 |
+
|
| 129 |
+
@code.setter
|
| 130 |
+
def code(self, value: int) -> None:
|
| 131 |
+
warnings.warn(
|
| 132 |
+
"code property is deprecated, use status instead",
|
| 133 |
+
DeprecationWarning,
|
| 134 |
+
stacklevel=2,
|
| 135 |
+
)
|
| 136 |
+
self.status = value
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
class ContentTypeError(ClientResponseError):
|
| 140 |
+
"""ContentType found is not valid."""
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
class WSServerHandshakeError(ClientResponseError):
|
| 144 |
+
"""websocket server handshake error."""
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class ClientHttpProxyError(ClientResponseError):
|
| 148 |
+
"""HTTP proxy error.
|
| 149 |
+
|
| 150 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
| 151 |
+
proxy responds with status other than ``200 OK``
|
| 152 |
+
on ``CONNECT`` request.
|
| 153 |
+
"""
|
| 154 |
+
|
| 155 |
+
|
| 156 |
+
class TooManyRedirects(ClientResponseError):
|
| 157 |
+
"""Client was redirected too many times."""
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
class ClientConnectionError(ClientError):
|
| 161 |
+
"""Base class for client socket errors."""
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class ClientConnectionResetError(ClientConnectionError, ConnectionResetError):
|
| 165 |
+
"""ConnectionResetError"""
|
| 166 |
+
|
| 167 |
+
|
| 168 |
+
class ClientOSError(ClientConnectionError, OSError):
|
| 169 |
+
"""OSError error."""
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
class ClientConnectorError(ClientOSError):
|
| 173 |
+
"""Client connector error.
|
| 174 |
+
|
| 175 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
| 176 |
+
a connection can not be established.
|
| 177 |
+
"""
|
| 178 |
+
|
| 179 |
+
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
|
| 180 |
+
self._conn_key = connection_key
|
| 181 |
+
self._os_error = os_error
|
| 182 |
+
super().__init__(os_error.errno, os_error.strerror)
|
| 183 |
+
self.args = (connection_key, os_error)
|
| 184 |
+
|
| 185 |
+
@property
|
| 186 |
+
def os_error(self) -> OSError:
|
| 187 |
+
return self._os_error
|
| 188 |
+
|
| 189 |
+
@property
|
| 190 |
+
def host(self) -> str:
|
| 191 |
+
return self._conn_key.host
|
| 192 |
+
|
| 193 |
+
@property
|
| 194 |
+
def port(self) -> Optional[int]:
|
| 195 |
+
return self._conn_key.port
|
| 196 |
+
|
| 197 |
+
@property
|
| 198 |
+
def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
|
| 199 |
+
return self._conn_key.ssl
|
| 200 |
+
|
| 201 |
+
def __str__(self) -> str:
|
| 202 |
+
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
|
| 203 |
+
self, "default" if self.ssl is True else self.ssl, self.strerror
|
| 204 |
+
)
|
| 205 |
+
|
| 206 |
+
# OSError.__reduce__ does too much black magick
|
| 207 |
+
__reduce__ = BaseException.__reduce__
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
class ClientConnectorDNSError(ClientConnectorError):
|
| 211 |
+
"""DNS resolution failed during client connection.
|
| 212 |
+
|
| 213 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
| 214 |
+
DNS resolution fails.
|
| 215 |
+
"""
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class ClientProxyConnectionError(ClientConnectorError):
|
| 219 |
+
"""Proxy connection error.
|
| 220 |
+
|
| 221 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
| 222 |
+
connection to proxy can not be established.
|
| 223 |
+
"""
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
class UnixClientConnectorError(ClientConnectorError):
|
| 227 |
+
"""Unix connector error.
|
| 228 |
+
|
| 229 |
+
Raised in :py:class:`aiohttp.connector.UnixConnector`
|
| 230 |
+
if connection to unix socket can not be established.
|
| 231 |
+
"""
|
| 232 |
+
|
| 233 |
+
def __init__(
|
| 234 |
+
self, path: str, connection_key: ConnectionKey, os_error: OSError
|
| 235 |
+
) -> None:
|
| 236 |
+
self._path = path
|
| 237 |
+
super().__init__(connection_key, os_error)
|
| 238 |
+
|
| 239 |
+
@property
|
| 240 |
+
def path(self) -> str:
|
| 241 |
+
return self._path
|
| 242 |
+
|
| 243 |
+
def __str__(self) -> str:
|
| 244 |
+
return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
|
| 245 |
+
self, "default" if self.ssl is True else self.ssl, self.strerror
|
| 246 |
+
)
|
| 247 |
+
|
| 248 |
+
|
| 249 |
+
class ServerConnectionError(ClientConnectionError):
|
| 250 |
+
"""Server connection errors."""
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
class ServerDisconnectedError(ServerConnectionError):
|
| 254 |
+
"""Server disconnected."""
|
| 255 |
+
|
| 256 |
+
def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
|
| 257 |
+
if message is None:
|
| 258 |
+
message = "Server disconnected"
|
| 259 |
+
|
| 260 |
+
self.args = (message,)
|
| 261 |
+
self.message = message
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
|
| 265 |
+
"""Server timeout error."""
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
class ConnectionTimeoutError(ServerTimeoutError):
|
| 269 |
+
"""Connection timeout error."""
|
| 270 |
+
|
| 271 |
+
|
| 272 |
+
class SocketTimeoutError(ServerTimeoutError):
|
| 273 |
+
"""Socket timeout error."""
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
class ServerFingerprintMismatch(ServerConnectionError):
|
| 277 |
+
"""SSL certificate does not match expected fingerprint."""
|
| 278 |
+
|
| 279 |
+
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
|
| 280 |
+
self.expected = expected
|
| 281 |
+
self.got = got
|
| 282 |
+
self.host = host
|
| 283 |
+
self.port = port
|
| 284 |
+
self.args = (expected, got, host, port)
|
| 285 |
+
|
| 286 |
+
def __repr__(self) -> str:
|
| 287 |
+
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
|
| 288 |
+
self.__class__.__name__, self.expected, self.got, self.host, self.port
|
| 289 |
+
)
|
| 290 |
+
|
| 291 |
+
|
| 292 |
+
class ClientPayloadError(ClientError):
|
| 293 |
+
"""Response payload error."""
|
| 294 |
+
|
| 295 |
+
|
| 296 |
+
class InvalidURL(ClientError, ValueError):
|
| 297 |
+
"""Invalid URL.
|
| 298 |
+
|
| 299 |
+
URL used for fetching is malformed, e.g. it doesn't contains host
|
| 300 |
+
part.
|
| 301 |
+
"""
|
| 302 |
+
|
| 303 |
+
# Derive from ValueError for backward compatibility
|
| 304 |
+
|
| 305 |
+
def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None:
|
| 306 |
+
# The type of url is not yarl.URL because the exception can be raised
|
| 307 |
+
# on URL(url) call
|
| 308 |
+
self._url = url
|
| 309 |
+
self._description = description
|
| 310 |
+
|
| 311 |
+
if description:
|
| 312 |
+
super().__init__(url, description)
|
| 313 |
+
else:
|
| 314 |
+
super().__init__(url)
|
| 315 |
+
|
| 316 |
+
@property
|
| 317 |
+
def url(self) -> StrOrURL:
|
| 318 |
+
return self._url
|
| 319 |
+
|
| 320 |
+
@property
|
| 321 |
+
def description(self) -> "str | None":
|
| 322 |
+
return self._description
|
| 323 |
+
|
| 324 |
+
def __repr__(self) -> str:
|
| 325 |
+
return f"<{self.__class__.__name__} {self}>"
|
| 326 |
+
|
| 327 |
+
def __str__(self) -> str:
|
| 328 |
+
if self._description:
|
| 329 |
+
return f"{self._url} - {self._description}"
|
| 330 |
+
return str(self._url)
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
class InvalidUrlClientError(InvalidURL):
|
| 334 |
+
"""Invalid URL client error."""
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class RedirectClientError(ClientError):
|
| 338 |
+
"""Client redirect error."""
|
| 339 |
+
|
| 340 |
+
|
| 341 |
+
class NonHttpUrlClientError(ClientError):
|
| 342 |
+
"""Non http URL client error."""
|
| 343 |
+
|
| 344 |
+
|
| 345 |
+
class InvalidUrlRedirectClientError(InvalidUrlClientError, RedirectClientError):
|
| 346 |
+
"""Invalid URL redirect client error."""
|
| 347 |
+
|
| 348 |
+
|
| 349 |
+
class NonHttpUrlRedirectClientError(NonHttpUrlClientError, RedirectClientError):
|
| 350 |
+
"""Non http URL redirect client error."""
|
| 351 |
+
|
| 352 |
+
|
| 353 |
+
class ClientSSLError(ClientConnectorError):
|
| 354 |
+
"""Base error for ssl.*Errors."""
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
if ssl is not None:
|
| 358 |
+
cert_errors = (ssl.CertificateError,)
|
| 359 |
+
cert_errors_bases = (
|
| 360 |
+
ClientSSLError,
|
| 361 |
+
ssl.CertificateError,
|
| 362 |
+
)
|
| 363 |
+
|
| 364 |
+
ssl_errors = (ssl.SSLError,)
|
| 365 |
+
ssl_error_bases = (ClientSSLError, ssl.SSLError)
|
| 366 |
+
else: # pragma: no cover
|
| 367 |
+
cert_errors = tuple()
|
| 368 |
+
cert_errors_bases = (
|
| 369 |
+
ClientSSLError,
|
| 370 |
+
ValueError,
|
| 371 |
+
)
|
| 372 |
+
|
| 373 |
+
ssl_errors = tuple()
|
| 374 |
+
ssl_error_bases = (ClientSSLError,)
|
| 375 |
+
|
| 376 |
+
|
| 377 |
+
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
|
| 378 |
+
"""Response ssl error."""
|
| 379 |
+
|
| 380 |
+
|
| 381 |
+
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
|
| 382 |
+
"""Response certificate error."""
|
| 383 |
+
|
| 384 |
+
def __init__(
|
| 385 |
+
self, connection_key: ConnectionKey, certificate_error: Exception
|
| 386 |
+
) -> None:
|
| 387 |
+
self._conn_key = connection_key
|
| 388 |
+
self._certificate_error = certificate_error
|
| 389 |
+
self.args = (connection_key, certificate_error)
|
| 390 |
+
|
| 391 |
+
@property
|
| 392 |
+
def certificate_error(self) -> Exception:
|
| 393 |
+
return self._certificate_error
|
| 394 |
+
|
| 395 |
+
@property
|
| 396 |
+
def host(self) -> str:
|
| 397 |
+
return self._conn_key.host
|
| 398 |
+
|
| 399 |
+
@property
|
| 400 |
+
def port(self) -> Optional[int]:
|
| 401 |
+
return self._conn_key.port
|
| 402 |
+
|
| 403 |
+
@property
|
| 404 |
+
def ssl(self) -> bool:
|
| 405 |
+
return self._conn_key.is_ssl
|
| 406 |
+
|
| 407 |
+
def __str__(self) -> str:
|
| 408 |
+
return (
|
| 409 |
+
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
|
| 410 |
+
"[{0.certificate_error.__class__.__name__}: "
|
| 411 |
+
"{0.certificate_error.args}]".format(self)
|
| 412 |
+
)
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/formdata.py
ADDED
|
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
import warnings
|
| 3 |
+
from typing import Any, Iterable, List, Optional
|
| 4 |
+
from urllib.parse import urlencode
|
| 5 |
+
|
| 6 |
+
from multidict import MultiDict, MultiDictProxy
|
| 7 |
+
|
| 8 |
+
from . import hdrs, multipart, payload
|
| 9 |
+
from .helpers import guess_filename
|
| 10 |
+
from .payload import Payload
|
| 11 |
+
|
| 12 |
+
__all__ = ("FormData",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class FormData:
|
| 16 |
+
"""Helper class for form body generation.
|
| 17 |
+
|
| 18 |
+
Supports multipart/form-data and application/x-www-form-urlencoded.
|
| 19 |
+
"""
|
| 20 |
+
|
| 21 |
+
def __init__(
|
| 22 |
+
self,
|
| 23 |
+
fields: Iterable[Any] = (),
|
| 24 |
+
quote_fields: bool = True,
|
| 25 |
+
charset: Optional[str] = None,
|
| 26 |
+
) -> None:
|
| 27 |
+
self._writer = multipart.MultipartWriter("form-data")
|
| 28 |
+
self._fields: List[Any] = []
|
| 29 |
+
self._is_multipart = False
|
| 30 |
+
self._is_processed = False
|
| 31 |
+
self._quote_fields = quote_fields
|
| 32 |
+
self._charset = charset
|
| 33 |
+
|
| 34 |
+
if isinstance(fields, dict):
|
| 35 |
+
fields = list(fields.items())
|
| 36 |
+
elif not isinstance(fields, (list, tuple)):
|
| 37 |
+
fields = (fields,)
|
| 38 |
+
self.add_fields(*fields)
|
| 39 |
+
|
| 40 |
+
@property
|
| 41 |
+
def is_multipart(self) -> bool:
|
| 42 |
+
return self._is_multipart
|
| 43 |
+
|
| 44 |
+
def add_field(
|
| 45 |
+
self,
|
| 46 |
+
name: str,
|
| 47 |
+
value: Any,
|
| 48 |
+
*,
|
| 49 |
+
content_type: Optional[str] = None,
|
| 50 |
+
filename: Optional[str] = None,
|
| 51 |
+
content_transfer_encoding: Optional[str] = None,
|
| 52 |
+
) -> None:
|
| 53 |
+
|
| 54 |
+
if isinstance(value, io.IOBase):
|
| 55 |
+
self._is_multipart = True
|
| 56 |
+
elif isinstance(value, (bytes, bytearray, memoryview)):
|
| 57 |
+
msg = (
|
| 58 |
+
"In v4, passing bytes will no longer create a file field. "
|
| 59 |
+
"Please explicitly use the filename parameter or pass a BytesIO object."
|
| 60 |
+
)
|
| 61 |
+
if filename is None and content_transfer_encoding is None:
|
| 62 |
+
warnings.warn(msg, DeprecationWarning)
|
| 63 |
+
filename = name
|
| 64 |
+
|
| 65 |
+
type_options: MultiDict[str] = MultiDict({"name": name})
|
| 66 |
+
if filename is not None and not isinstance(filename, str):
|
| 67 |
+
raise TypeError(
|
| 68 |
+
"filename must be an instance of str. " "Got: %s" % filename
|
| 69 |
+
)
|
| 70 |
+
if filename is None and isinstance(value, io.IOBase):
|
| 71 |
+
filename = guess_filename(value, name)
|
| 72 |
+
if filename is not None:
|
| 73 |
+
type_options["filename"] = filename
|
| 74 |
+
self._is_multipart = True
|
| 75 |
+
|
| 76 |
+
headers = {}
|
| 77 |
+
if content_type is not None:
|
| 78 |
+
if not isinstance(content_type, str):
|
| 79 |
+
raise TypeError(
|
| 80 |
+
"content_type must be an instance of str. " "Got: %s" % content_type
|
| 81 |
+
)
|
| 82 |
+
headers[hdrs.CONTENT_TYPE] = content_type
|
| 83 |
+
self._is_multipart = True
|
| 84 |
+
if content_transfer_encoding is not None:
|
| 85 |
+
if not isinstance(content_transfer_encoding, str):
|
| 86 |
+
raise TypeError(
|
| 87 |
+
"content_transfer_encoding must be an instance"
|
| 88 |
+
" of str. Got: %s" % content_transfer_encoding
|
| 89 |
+
)
|
| 90 |
+
msg = (
|
| 91 |
+
"content_transfer_encoding is deprecated. "
|
| 92 |
+
"To maintain compatibility with v4 please pass a BytesPayload."
|
| 93 |
+
)
|
| 94 |
+
warnings.warn(msg, DeprecationWarning)
|
| 95 |
+
self._is_multipart = True
|
| 96 |
+
|
| 97 |
+
self._fields.append((type_options, headers, value))
|
| 98 |
+
|
| 99 |
+
def add_fields(self, *fields: Any) -> None:
|
| 100 |
+
to_add = list(fields)
|
| 101 |
+
|
| 102 |
+
while to_add:
|
| 103 |
+
rec = to_add.pop(0)
|
| 104 |
+
|
| 105 |
+
if isinstance(rec, io.IOBase):
|
| 106 |
+
k = guess_filename(rec, "unknown")
|
| 107 |
+
self.add_field(k, rec) # type: ignore[arg-type]
|
| 108 |
+
|
| 109 |
+
elif isinstance(rec, (MultiDictProxy, MultiDict)):
|
| 110 |
+
to_add.extend(rec.items())
|
| 111 |
+
|
| 112 |
+
elif isinstance(rec, (list, tuple)) and len(rec) == 2:
|
| 113 |
+
k, fp = rec
|
| 114 |
+
self.add_field(k, fp) # type: ignore[arg-type]
|
| 115 |
+
|
| 116 |
+
else:
|
| 117 |
+
raise TypeError(
|
| 118 |
+
"Only io.IOBase, multidict and (name, file) "
|
| 119 |
+
"pairs allowed, use .add_field() for passing "
|
| 120 |
+
"more complex parameters, got {!r}".format(rec)
|
| 121 |
+
)
|
| 122 |
+
|
| 123 |
+
def _gen_form_urlencoded(self) -> payload.BytesPayload:
|
| 124 |
+
# form data (x-www-form-urlencoded)
|
| 125 |
+
data = []
|
| 126 |
+
for type_options, _, value in self._fields:
|
| 127 |
+
data.append((type_options["name"], value))
|
| 128 |
+
|
| 129 |
+
charset = self._charset if self._charset is not None else "utf-8"
|
| 130 |
+
|
| 131 |
+
if charset == "utf-8":
|
| 132 |
+
content_type = "application/x-www-form-urlencoded"
|
| 133 |
+
else:
|
| 134 |
+
content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset
|
| 135 |
+
|
| 136 |
+
return payload.BytesPayload(
|
| 137 |
+
urlencode(data, doseq=True, encoding=charset).encode(),
|
| 138 |
+
content_type=content_type,
|
| 139 |
+
)
|
| 140 |
+
|
| 141 |
+
def _gen_form_data(self) -> multipart.MultipartWriter:
|
| 142 |
+
"""Encode a list of fields using the multipart/form-data MIME format"""
|
| 143 |
+
if self._is_processed:
|
| 144 |
+
raise RuntimeError("Form data has been processed already")
|
| 145 |
+
for dispparams, headers, value in self._fields:
|
| 146 |
+
try:
|
| 147 |
+
if hdrs.CONTENT_TYPE in headers:
|
| 148 |
+
part = payload.get_payload(
|
| 149 |
+
value,
|
| 150 |
+
content_type=headers[hdrs.CONTENT_TYPE],
|
| 151 |
+
headers=headers,
|
| 152 |
+
encoding=self._charset,
|
| 153 |
+
)
|
| 154 |
+
else:
|
| 155 |
+
part = payload.get_payload(
|
| 156 |
+
value, headers=headers, encoding=self._charset
|
| 157 |
+
)
|
| 158 |
+
except Exception as exc:
|
| 159 |
+
raise TypeError(
|
| 160 |
+
"Can not serialize value type: %r\n "
|
| 161 |
+
"headers: %r\n value: %r" % (type(value), headers, value)
|
| 162 |
+
) from exc
|
| 163 |
+
|
| 164 |
+
if dispparams:
|
| 165 |
+
part.set_content_disposition(
|
| 166 |
+
"form-data", quote_fields=self._quote_fields, **dispparams
|
| 167 |
+
)
|
| 168 |
+
# FIXME cgi.FieldStorage doesn't likes body parts with
|
| 169 |
+
# Content-Length which were sent via chunked transfer encoding
|
| 170 |
+
assert part.headers is not None
|
| 171 |
+
part.headers.popall(hdrs.CONTENT_LENGTH, None)
|
| 172 |
+
|
| 173 |
+
self._writer.append_payload(part)
|
| 174 |
+
|
| 175 |
+
self._is_processed = True
|
| 176 |
+
return self._writer
|
| 177 |
+
|
| 178 |
+
def __call__(self) -> Payload:
|
| 179 |
+
if self._is_multipart:
|
| 180 |
+
return self._gen_form_data()
|
| 181 |
+
else:
|
| 182 |
+
return self._gen_form_urlencoded()
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/http_websocket.py
ADDED
|
@@ -0,0 +1,761 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""WebSocket protocol versions 13 and 8."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import functools
|
| 5 |
+
import json
|
| 6 |
+
import random
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
import zlib
|
| 10 |
+
from enum import IntEnum
|
| 11 |
+
from functools import partial
|
| 12 |
+
from struct import Struct
|
| 13 |
+
from typing import (
|
| 14 |
+
Any,
|
| 15 |
+
Callable,
|
| 16 |
+
Final,
|
| 17 |
+
List,
|
| 18 |
+
NamedTuple,
|
| 19 |
+
Optional,
|
| 20 |
+
Pattern,
|
| 21 |
+
Set,
|
| 22 |
+
Tuple,
|
| 23 |
+
Union,
|
| 24 |
+
cast,
|
| 25 |
+
)
|
| 26 |
+
|
| 27 |
+
from .base_protocol import BaseProtocol
|
| 28 |
+
from .client_exceptions import ClientConnectionResetError
|
| 29 |
+
from .compression_utils import ZLibCompressor, ZLibDecompressor
|
| 30 |
+
from .helpers import NO_EXTENSIONS, set_exception
|
| 31 |
+
from .streams import DataQueue
|
| 32 |
+
|
| 33 |
+
__all__ = (
|
| 34 |
+
"WS_CLOSED_MESSAGE",
|
| 35 |
+
"WS_CLOSING_MESSAGE",
|
| 36 |
+
"WS_KEY",
|
| 37 |
+
"WebSocketReader",
|
| 38 |
+
"WebSocketWriter",
|
| 39 |
+
"WSMessage",
|
| 40 |
+
"WebSocketError",
|
| 41 |
+
"WSMsgType",
|
| 42 |
+
"WSCloseCode",
|
| 43 |
+
)
|
| 44 |
+
|
| 45 |
+
|
| 46 |
+
class WSCloseCode(IntEnum):
|
| 47 |
+
OK = 1000
|
| 48 |
+
GOING_AWAY = 1001
|
| 49 |
+
PROTOCOL_ERROR = 1002
|
| 50 |
+
UNSUPPORTED_DATA = 1003
|
| 51 |
+
ABNORMAL_CLOSURE = 1006
|
| 52 |
+
INVALID_TEXT = 1007
|
| 53 |
+
POLICY_VIOLATION = 1008
|
| 54 |
+
MESSAGE_TOO_BIG = 1009
|
| 55 |
+
MANDATORY_EXTENSION = 1010
|
| 56 |
+
INTERNAL_ERROR = 1011
|
| 57 |
+
SERVICE_RESTART = 1012
|
| 58 |
+
TRY_AGAIN_LATER = 1013
|
| 59 |
+
BAD_GATEWAY = 1014
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
|
| 63 |
+
|
| 64 |
+
# For websockets, keeping latency low is extremely important as implementations
|
| 65 |
+
# generally expect to be able to send and receive messages quickly. We use a
|
| 66 |
+
# larger chunk size than the default to reduce the number of executor calls
|
| 67 |
+
# since the executor is a significant source of latency and overhead when
|
| 68 |
+
# the chunks are small. A size of 5KiB was chosen because it is also the
|
| 69 |
+
# same value python-zlib-ng choose to use as the threshold to release the GIL.
|
| 70 |
+
|
| 71 |
+
WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
class WSMsgType(IntEnum):
    """WebSocket frame opcodes plus aiohttp-internal pseudo message types."""

    # websocket spec types
    CONTINUATION = 0x0
    TEXT = 0x1
    BINARY = 0x2
    PING = 0x9
    PONG = 0xA
    CLOSE = 0x8

    # aiohttp specific types (never appear on the wire; values are above 0xF
    # so they cannot collide with real opcodes)
    CLOSING = 0x100
    CLOSED = 0x101
    ERROR = 0x102

    # Lower-case aliases kept for backwards compatibility.
    text = TEXT
    binary = BINARY
    ping = PING
    pong = PONG
    close = CLOSE
    closing = CLOSING
    closed = CLOSED
    error = ERROR
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
# Opcodes that carry (possibly fragmented) message payload.
MESSAGE_TYPES_WITH_CONTENT: Final = frozenset(
    {
        WSMsgType.BINARY,
        WSMsgType.TEXT,
        WSMsgType.CONTINUATION,
    }
)

# GUID appended to Sec-WebSocket-Key when computing the handshake accept hash.
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


# Pre-compiled struct helpers for the network-order frame header fields.
UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
PACK_RANDBITS = Struct("!L").pack
MSG_SIZE: Final[int] = 2**14  # above this, header and body are written separately
DEFAULT_LIMIT: Final[int] = 2**16  # buffered output bytes before draining
MASK_LEN: Final[int] = 4  # client-to-server frames carry a 4-byte mask
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class WSMessage(NamedTuple):
    """A single parsed WebSocket message: (type, data, extra)."""

    type: WSMsgType
    # To type correctly, this would need some kind of tagged union for each type.
    data: Any
    # For CLOSE messages this holds the close reason text; otherwise "".
    extra: Optional[str]

    def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
        """Return parsed JSON data.

        .. versionadded:: 0.22
        """
        return loads(self.data)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
# Shared sentinel messages delivered to readers when the connection is
# closed / closing; they carry no payload.
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class WebSocketError(Exception):
    """WebSocket protocol parser error.

    ``args`` is always the ``(code, message)`` pair, so pickling and
    ``repr()`` behave like a normal two-argument exception; ``str()``
    yields only the human-readable message.
    """

    def __init__(self, code: int, message: str) -> None:
        super().__init__(code, message)
        self.code = code

    def __str__(self) -> str:
        message: str = self.args[1]
        return message
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
class WSHandshakeError(Exception):
    """WebSocket protocol handshake error (bad extension negotiation)."""
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
# Host byte order ("little" or "big").
native_byteorder: Final[str] = sys.byteorder
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
# Used by _websocket_mask_python
|
| 159 |
+
@functools.lru_cache
|
| 160 |
+
def _xor_table() -> List[bytes]:
|
| 161 |
+
return [bytes(a ^ b for a in range(256)) for b in range(256)]
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
    """Websocket masking function.

    `mask` is a `bytes` object of length 4; `data` is a `bytearray`
    object of any length. The contents of `data` are masked with `mask`,
    as specified in section 5.3 of RFC 6455.

    Note that this function mutates the `data` argument.

    This pure-python implementation may be replaced by an optimized
    version when available.
    """
    assert isinstance(data, bytearray), data
    assert len(mask) == 4, mask

    if not data:
        return

    # Each byte of the payload is XOR-ed with mask[i % 4].  Doing it as
    # four strided translate() passes keeps the work in C code.
    tables = _xor_table()
    for offset, key_byte in enumerate(mask):
        data[offset::4] = data[offset::4].translate(tables[key_byte])
|
| 187 |
+
|
| 188 |
+
|
| 189 |
+
# Select the masking implementation: prefer the Cython extension unless
# extensions are disabled or the compiled module is unavailable.
if NO_EXTENSIONS:  # pragma: no cover
    _websocket_mask = _websocket_mask_python
else:
    try:
        from ._websocket import _websocket_mask_cython  # type: ignore[import-not-found]

        _websocket_mask = _websocket_mask_cython
    except ImportError:  # pragma: no cover
        # Fall back to the pure-Python implementation.
        _websocket_mask = _websocket_mask_python
|
| 198 |
+
|
| 199 |
+
# Trailing bytes a zlib sync-flush emits; stripped on send and re-appended
# before decompressing (per the permessage-deflate extension, RFC 7692).
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])


# Matches the parameter list of one permessage-deflate offer, e.g.
# "; server_max_window_bits=10; client_no_context_takeover".
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
    r"^(?:;\s*(?:"
    r"(server_no_context_takeover)|"
    r"(client_no_context_takeover)|"
    r"(server_max_window_bits(?:=(\d+))?)|"
    r"(client_max_window_bits(?:=(\d+))?)))*$"
)

# Splits a Sec-WebSocket-Extensions header into permessage-deflate offers.
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
|
| 211 |
+
|
| 212 |
+
|
| 213 |
+
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    """Parse a Sec-WebSocket-Extensions header for permessage-deflate.

    Returns ``(compress, notakeover)`` where ``compress`` is the negotiated
    window size (0 means the extension is not enabled, 15 is the zlib
    default) and ``notakeover`` tells whether context takeover is disabled.

    Raises:
        WSHandshakeError: on an invalid window size (client side) or an
            offer the client cannot understand.
    """
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # Return compress = 15 when get `permessage-deflate`
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # Server never fail to detect compress handshake.
                # Server does not need to send max wbit to client
                if match.group(4):
                    compress = int(match.group(4))
                    # Group3 must match if group4 matches
                    # Compress wbit 8 does not support in zlib
                    # If compress level not support,
                    # CONTINUE to next extension
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group5 must match if group6 matches
                    # Compress wbit 8 does not support in zlib
                    # If compress level not support,
                    # FAIL the parse progress
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
        # Return Fail if client side and not match
        elif not isserver:
            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))

    return compress, notakeover
|
| 262 |
+
|
| 263 |
+
|
| 264 |
+
def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    """Build a Sec-WebSocket-Extensions value offering permessage-deflate.

    ``compress`` is the server max window size (zlib wbits, 9..15).
    Clients also request ``client_max_window_bits``; a server offer may add
    ``server_no_context_takeover``.  The client_no_context_takeover option
    is intentionally not emitted.

    Raises:
        ValueError: if ``compress`` is outside the 9..15 range zlib supports.
    """
    # client_notakeover=False not used for server
    # compress wbit 8 does not support in zlib
    if not 9 <= compress <= 15:
        raise ValueError(
            "Compress wbits must between 9 and 15, zlib does not support wbits=8"
        )

    parts = ["permessage-deflate"]
    if not isserver:
        parts.append("client_max_window_bits")
    if compress < 15:
        parts.append(f"server_max_window_bits={compress}")
    if server_notakeover:
        parts.append("server_no_context_takeover")
    return "; ".join(parts)
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
class WSParserState(IntEnum):
    """States of the incremental frame parser in ``WebSocketReader``."""

    READ_HEADER = 1
    READ_PAYLOAD_LENGTH = 2
    READ_PAYLOAD_MASK = 3
    READ_PAYLOAD = 4
|
| 292 |
+
|
| 293 |
+
class WebSocketReader:
    """Incremental WebSocket frame parser.

    Raw bytes are fed via :meth:`feed_data`; complete messages are pushed
    into *queue* as :class:`WSMessage` instances.  The parser keeps all
    cross-call state (partial frames, fragmented messages, leftover bytes)
    on the instance so input may arrive in arbitrary chunks.
    """

    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        self.queue = queue
        # Maximum assembled message size in bytes; 0 disables the limit.
        self._max_msg_size = max_msg_size

        # First parsing error; once set, all further input is rejected.
        self._exc: Optional[BaseException] = None
        # Accumulated payload of a fragmented (non-fin) message.
        self._partial = bytearray()
        self._state = WSParserState.READ_HEADER

        # Opcode of the first frame of a fragmented message.
        self._opcode: Optional[int] = None
        self._frame_fin = False
        self._frame_opcode: Optional[int] = None
        # Payload of the frame currently being read (may span feed calls).
        self._frame_payload = bytearray()

        # Unconsumed bytes left over from the previous parse_frame() call.
        self._tail: bytes = b""
        self._has_mask = False
        self._frame_mask: Optional[bytes] = None
        self._payload_length = 0
        self._payload_length_flag = 0
        # Whether the current message is permessage-deflate compressed.
        self._compressed: Optional[bool] = None
        self._decompressobj: Optional[ZLibDecompressor] = None
        # Whether the compression extension was negotiated (RSV1 allowed).
        self._compress = compress

    def feed_eof(self) -> None:
        """Signal end-of-stream to the message queue."""
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Feed raw bytes into the parser.

        Returns ``(done, unconsumed)``: ``done`` is True once an error has
        occurred, in which case the input is returned unconsumed.
        """
        if self._exc:
            return True, data

        try:
            self._feed_data(data)
        except Exception as exc:
            # Remember the error and propagate it to queue consumers.
            self._exc = exc
            set_exception(self.queue, exc)
            return True, b""

        return False, b""

    def _feed_data(self, data: bytes) -> None:
        """Parse *data* into frames, assemble messages, and enqueue them.

        Raises:
            WebSocketError: on any protocol violation (bad opcode, oversized
                message, invalid UTF-8, invalid close code, ...).
        """
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if opcode in MESSAGE_TYPES_WITH_CONTENT:
                # load text/binary
                is_continuation = opcode == WSMsgType.CONTINUATION
                if not fin:
                    # got partial frame payload
                    if not is_continuation:
                        self._opcode = opcode
                    self._partial += payload
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                    continue

                has_partial = bool(self._partial)
                if is_continuation:
                    if self._opcode is None:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Continuation frame for non started message",
                        )
                    opcode = self._opcode
                    self._opcode = None
                # previous frame was non finished
                # we should get continuation opcode
                elif has_partial:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "The opcode in non-fin frame is expected "
                        "to be zero, got {!r}".format(opcode),
                    )

                if has_partial:
                    assembled_payload = self._partial + payload
                    self._partial.clear()
                else:
                    assembled_payload = payload

                if self._max_msg_size and len(assembled_payload) >= self._max_msg_size:
                    raise WebSocketError(
                        WSCloseCode.MESSAGE_TOO_BIG,
                        "Message size {} exceeds limit {}".format(
                            len(assembled_payload), self._max_msg_size
                        ),
                    )

                # Decompress process must to be done after all packets
                # received.
                if compressed:
                    if not self._decompressobj:
                        self._decompressobj = ZLibDecompressor(
                            suppress_deflate_header=True
                        )
                    # Re-append the sync-flush trailer stripped by the sender
                    # (RFC 7692) before inflating.
                    payload_merged = self._decompressobj.decompress_sync(
                        assembled_payload + _WS_DEFLATE_TRAILING, self._max_msg_size
                    )
                    if self._decompressobj.unconsumed_tail:
                        left = len(self._decompressobj.unconsumed_tail)
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Decompressed message size {} exceeds limit {}".format(
                                self._max_msg_size + left, self._max_msg_size
                            ),
                        )
                else:
                    payload_merged = bytes(assembled_payload)

                if opcode == WSMsgType.TEXT:
                    try:
                        text = payload_merged.decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc

                    self.queue.feed_data(WSMessage(WSMsgType.TEXT, text, ""), len(text))
                    continue

                self.queue.feed_data(
                    WSMessage(WSMsgType.BINARY, payload_merged, ""), len(payload_merged)
                )
            elif opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    # A 1-byte close payload is malformed: the code is 2 bytes.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            else:
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket.

        Runs the state machine over ``self._tail + buf`` and returns a list
        of complete ``(fin, opcode, payload, compressed)`` tuples; any
        incomplete trailing bytes are stashed in ``self._tail`` for the
        next call.
        """
        frames: List[Tuple[bool, Optional[int], bytearray, Optional[bool]]] = []
        if self._tail:
            buf, self._tail = self._tail + buf, b""

        start_pos: int = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state is WSParserState.READ_HEADER:
                if buf_length - start_pos < 2:
                    break
                data = buf[start_pos : start_pos + 2]
                start_pos += 2
                first_byte, second_byte = data

                fin = (first_byte >> 7) & 1
                rsv1 = (first_byte >> 6) & 1
                rsv2 = (first_byte >> 5) & 1
                rsv3 = (first_byte >> 4) & 1
                opcode = first_byte & 0xF

                # frame-fin = %x0 ; more frames of this message follow
                # / %x1 ; final frame of this message
                # frame-rsv1 = %x0 ;
                # 1 bit, MUST be 0 unless negotiated otherwise
                # frame-rsv2 = %x0 ;
                # 1 bit, MUST be 0 unless negotiated otherwise
                # frame-rsv3 = %x0 ;
                # 1 bit, MUST be 0 unless negotiated otherwise
                #
                # Remove rsv1 from this test for deflate development
                if rsv2 or rsv3 or (rsv1 and not self._compress):
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received frame with non-zero reserved bits",
                    )

                if opcode > 0x7 and fin == 0:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received fragmented control frame",
                    )

                has_mask = (second_byte >> 7) & 1
                length = second_byte & 0x7F

                # Control frames MUST have a payload
                # length of 125 bytes or less
                if opcode > 0x7 and length > 125:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Control frame payload cannot be " "larger than 125 bytes",
                    )

                # Set compress status if last package is FIN
                # OR set compress status if this is first fragment
                # Raise error if not first fragment with rsv1 = 0x1
                if self._frame_fin or self._compressed is None:
                    self._compressed = True if rsv1 else False
                elif rsv1:
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        "Received frame with non-zero reserved bits",
                    )

                self._frame_fin = bool(fin)
                self._frame_opcode = opcode
                self._has_mask = bool(has_mask)
                self._payload_length_flag = length
                self._state = WSParserState.READ_PAYLOAD_LENGTH

            # read payload length
            if self._state is WSParserState.READ_PAYLOAD_LENGTH:
                length_flag = self._payload_length_flag
                if length_flag == 126:
                    # Extended 16-bit length follows.
                    if buf_length - start_pos < 2:
                        break
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    self._payload_length = UNPACK_LEN2(data)[0]
                elif length_flag > 126:
                    # Extended 64-bit length follows.
                    if buf_length - start_pos < 8:
                        break
                    data = buf[start_pos : start_pos + 8]
                    start_pos += 8
                    self._payload_length = UNPACK_LEN3(data)[0]
                else:
                    self._payload_length = length_flag

                self._state = (
                    WSParserState.READ_PAYLOAD_MASK
                    if self._has_mask
                    else WSParserState.READ_PAYLOAD
                )

            # read payload mask
            if self._state is WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos < 4:
                    break
                self._frame_mask = buf[start_pos : start_pos + 4]
                start_pos += 4
                self._state = WSParserState.READ_PAYLOAD

            if self._state is WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    # Frame continues past this buffer; take what we have.
                    self._payload_length = length - chunk_len
                    payload += buf[start_pos:]
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload += buf[start_pos : start_pos + length]
                    start_pos = start_pos + length

                if self._payload_length != 0:
                    break

                if self._has_mask:
                    assert self._frame_mask is not None
                    _websocket_mask(self._frame_mask, payload)

                frames.append(
                    (self._frame_fin, self._frame_opcode, payload, self._compressed)
                )
                self._frame_payload = bytearray()
                self._state = WSParserState.READ_HEADER

        self._tail = buf[start_pos:]

        return frames
|
| 598 |
+
|
| 599 |
+
|
| 600 |
+
class WebSocketWriter:
    """Serializes WebSocket frames onto an asyncio transport.

    Handles optional permessage-deflate compression, client-side masking,
    and simple output flow control via the protocol's drain helper.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: random.Random = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        self.protocol = protocol
        self.transport = transport
        # Clients mask outgoing frames; servers do not.
        self.use_mask = use_mask
        # Bound method producing 32 random bits for the frame mask.
        self.get_random_bits = partial(random.getrandbits, 32)
        # Negotiated deflate window size; 0 disables compression.
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        # Buffered-output threshold before awaiting the drain helper.
        self._limit = limit
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload.

        Raises:
            ClientConnectionResetError: if the writer/transport is closing.
        """
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ClientConnectionResetError("Cannot write to closing transport")

        # RSV are the reserved bits in the frame header. They are used to
        # indicate that the frame is using an extension.
        # https://datatracker.ietf.org/doc/html/rfc6455#section-5.2
        rsv = 0
        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        if (compress or self.compress) and opcode < 8:
            # RSV1 (rsv = 0x40) is set for compressed frames
            # https://datatracker.ietf.org/doc/html/rfc7692#section-7.2.3.1
            rsv = 0x40

            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = self._make_compress_obj(compress)
            else:  # self.compress
                if not self._compressobj:
                    self._compressobj = self._make_compress_obj(self.compress)
                compressobj = self._compressobj

            message = await compressobj.compress(message)
            # Its critical that we do not return control to the event
            # loop until we have finished sending all the compressed
            # data. Otherwise we could end up mixing compressed frames
            # if there are multiple coroutines compressing data.
            message += compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            # Strip the 4-byte sync-flush trailer (re-added by the receiver).
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]

        msg_length = len(message)

        use_mask = self.use_mask
        mask_bit = 0x80 if use_mask else 0

        # Depending on the message length, the header is assembled differently.
        # The first byte is reserved for the opcode and the RSV bits.
        first_byte = 0x80 | rsv | opcode
        if msg_length < 126:
            header = PACK_LEN1(first_byte, msg_length | mask_bit)
            header_len = 2
        elif msg_length < (1 << 16):
            header = PACK_LEN2(first_byte, 126 | mask_bit, msg_length)
            header_len = 4
        else:
            header = PACK_LEN3(first_byte, 127 | mask_bit, msg_length)
            header_len = 10

        # https://datatracker.ietf.org/doc/html/rfc6455#section-5.3
        # If we are using a mask, we need to generate it randomly
        # and apply it to the message before sending it. A mask is
        # a 32-bit value that is applied to the message using a
        # bitwise XOR operation. It is used to prevent certain types
        # of attacks on the websocket protocol. The mask is only used
        # when aiohttp is acting as a client. Servers do not use a mask.
        if use_mask:
            mask = PACK_RANDBITS(self.get_random_bits())
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += header_len + MASK_LEN + msg_length

        else:
            if msg_length > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += header_len + msg_length

        # It is safe to return control to the event loop when using compression
        # after this point as we have already sent or buffered all the data.

        # Once we have written output_size up to the limit, we call the
        # drain helper which waits for the transport to be ready to accept
        # more data. This is a flow control mechanism to prevent the buffer
        # from growing too large. The drain helper will return right away
        # if the writer is not paused.
        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _make_compress_obj(self, compress: int) -> ZLibCompressor:
        """Create a raw-deflate compressor for window size *compress*."""
        return ZLibCompressor(
            level=zlib.Z_BEST_SPEED,
            wbits=-compress,
            max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
        )

    def _write(self, data: bytes) -> None:
        """Write raw bytes to the transport, failing fast if it is closing."""
        if self.transport is None or self.transport.is_closing():
            raise ClientConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: Union[bytes, str] = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: Union[bytes, str] = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if the send failed so no further writes occur.
            self._closing = True
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/log.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging

# Per-subsystem loggers used throughout aiohttp; configure them by name
# (e.g. "aiohttp.access") from the application's logging setup.
access_logger = logging.getLogger("aiohttp.access")
client_logger = logging.getLogger("aiohttp.client")
internal_logger = logging.getLogger("aiohttp.internal")
server_logger = logging.getLogger("aiohttp.server")
web_logger = logging.getLogger("aiohttp.web")
ws_logger = logging.getLogger("aiohttp.websocket")
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/payload_streamer.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Payload implementation for coroutines as data provider.
|
| 3 |
+
|
| 4 |
+
As a simple case, you can upload data from file::
|
| 5 |
+
|
| 6 |
+
@aiohttp.streamer
|
| 7 |
+
async def file_sender(writer, file_name=None):
|
| 8 |
+
with open(file_name, 'rb') as f:
|
| 9 |
+
chunk = f.read(2**16)
|
| 10 |
+
while chunk:
|
| 11 |
+
await writer.write(chunk)
|
| 12 |
+
|
| 13 |
+
chunk = f.read(2**16)
|
| 14 |
+
|
| 15 |
+
Then you can use `file_sender` like this:
|
| 16 |
+
|
| 17 |
+
async with session.post('http://httpbin.org/post',
|
| 18 |
+
data=file_sender(file_name='huge_file')) as resp:
|
| 19 |
+
print(await resp.text())
|
| 20 |
+
|
| 21 |
+
..note:: Coroutine must accept `writer` as first argument
|
| 22 |
+
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
import types
|
| 26 |
+
import warnings
|
| 27 |
+
from typing import Any, Awaitable, Callable, Dict, Tuple
|
| 28 |
+
|
| 29 |
+
from .abc import AbstractStreamWriter
|
| 30 |
+
from .payload import Payload, payload_type
|
| 31 |
+
|
| 32 |
+
__all__ = ("streamer",)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class _stream_wrapper:
    """Bind a streaming coroutine to its call arguments.

    Instances are awaitable-callables: ``await wrapper(writer)`` runs the
    wrapped coroutine as ``coro(writer, *args, **kwargs)``.
    """

    def __init__(
        self,
        coro: Callable[..., Awaitable[None]],
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> None:
        self.args = args
        self.kwargs = kwargs
        # types.coroutine() also accepts generator-based coroutines.
        self.coro = types.coroutine(coro)

    async def __call__(self, writer: AbstractStreamWriter) -> None:
        """Stream the payload into *writer*."""
        coro = self.coro
        await coro(writer, *self.args, **self.kwargs)
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class streamer:
    """Deprecated decorator turning a coroutine into a streaming payload factory.

    Calling the decorated object returns a :class:`_stream_wrapper` that
    aiohttp can use as request body.  Prefer async generators instead.
    """

    def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
        warnings.warn(
            "@streamer is deprecated, use async generators instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.coro = coro

    def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
        """Bind *args*/*kwargs* and return the awaitable wrapper."""
        wrapped = _stream_wrapper(self.coro, args, kwargs)
        return wrapped
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
    """Payload adapter for :class:`_stream_wrapper` values."""

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the wrapped coroutine's output into *writer*."""
        await self._value(writer)

    def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
        """Streaming payloads cannot be decoded to text."""
        raise TypeError("Unable to decode.")
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
    """Payload adapter for bare :class:`streamer` objects (no bound args)."""

    def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
        # Call the streamer with no arguments to obtain its wrapper.
        super().__init__(value(), *args, **kwargs)

    async def write(self, writer: AbstractStreamWriter) -> None:
        """Stream the wrapped coroutine's output into *writer*."""
        await self._value(writer)
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Marker
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/resolver.py
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import socket
|
| 3 |
+
import sys
|
| 4 |
+
from typing import Any, Dict, List, Optional, Tuple, Type, Union
|
| 5 |
+
|
| 6 |
+
from .abc import AbstractResolver, ResolveResult
|
| 7 |
+
|
| 8 |
+
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
try:
|
| 12 |
+
import aiodns
|
| 13 |
+
|
| 14 |
+
aiodns_default = hasattr(aiodns.DNSResolver, "getaddrinfo")
|
| 15 |
+
except ImportError: # pragma: no cover
|
| 16 |
+
aiodns = None # type: ignore[assignment]
|
| 17 |
+
aiodns_default = False
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
_NUMERIC_SOCKET_FLAGS = socket.AI_NUMERICHOST | socket.AI_NUMERICSERV
|
| 21 |
+
_NAME_SOCKET_FLAGS = socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
|
| 22 |
+
_SUPPORTS_SCOPE_ID = sys.version_info >= (3, 9, 0)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class ThreadedResolver(AbstractResolver):
    """Threaded resolver.

    Uses an Executor for synchronous getaddrinfo() calls.
    concurrent.futures.ThreadPoolExecutor is used by default.
    """

    def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        # Without an explicit loop this must be instantiated from within a
        # running event loop (get_running_loop() raises otherwise).
        self._loop = loop or asyncio.get_running_loop()

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host*/*port* via the loop's executor-backed getaddrinfo().

        Returns one ResolveResult per address returned by getaddrinfo();
        propagates OSError from getaddrinfo() on lookup failure.
        """
        infos = await self._loop.getaddrinfo(
            host,
            port,
            type=socket.SOCK_STREAM,
            family=family,
            flags=socket.AI_ADDRCONFIG,
        )

        hosts: List[ResolveResult] = []
        for family, _, proto, _, address in infos:
            if family == socket.AF_INET6:
                if len(address) < 3:
                    # IPv6 is not supported by Python build,
                    # or IPv6 is not enabled in the host
                    continue
                if address[3] and _SUPPORTS_SCOPE_ID:
                    # address[3] is the IPv6 scope id (non-zero for link-local).
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    resolved_host, _port = await self._loop.getnameinfo(
                        address, _NAME_SOCKET_FLAGS
                    )
                    port = int(_port)
                else:
                    resolved_host, port = address[:2]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host, port = address  # type: ignore[misc]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=proto,
                    # Results are numeric, so later connects can skip DNS.
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        return hosts

    async def close(self) -> None:
        # Nothing to release: the executor used by getaddrinfo() belongs to
        # the event loop, not to this resolver.
        pass
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
class AsyncResolver(AbstractResolver):
    """Use the `aiodns` package to make asynchronous DNS lookups"""

    def __init__(
        self,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        *args: Any,
        **kwargs: Any
    ) -> None:
        """Create an aiodns-backed resolver.

        Extra positional/keyword arguments are forwarded verbatim to
        aiodns.DNSResolver.  Raises RuntimeError when aiodns is missing.
        """
        if aiodns is None:
            raise RuntimeError("Resolver requires aiodns library")

        self._resolver = aiodns.DNSResolver(*args, **kwargs)

        if not hasattr(self._resolver, "gethostbyname"):
            # aiodns 1.1 is not available, fallback to DNSResolver.query
            self.resolve = self._resolve_with_query  # type: ignore

    async def resolve(
        self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET
    ) -> List[ResolveResult]:
        """Resolve *host* with aiodns getaddrinfo(), mirroring ThreadedResolver.

        Raises OSError (wrapping the underlying DNSError) on lookup failure
        or when no usable addresses were returned.
        """
        try:
            resp = await self._resolver.getaddrinfo(
                host,
                port=port,
                type=socket.SOCK_STREAM,
                family=family,
                flags=socket.AI_ADDRCONFIG,
            )
        except aiodns.error.DNSError as exc:
            # BUGFIX: args[1] only exists when there are at least two args;
            # the previous `>= 1` guard could raise IndexError for a
            # single-argument DNSError instead of reporting the failure.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc
        hosts: List[ResolveResult] = []
        for node in resp.nodes:
            address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr
            family = node.family
            if family == socket.AF_INET6:
                if len(address) > 3 and address[3] and _SUPPORTS_SCOPE_ID:
                    # address[3] is the IPv6 scope id (non-zero for link-local).
                    # This is essential for link-local IPv6 addresses.
                    # LL IPv6 is a VERY rare case. Strictly speaking, we should use
                    # getnameinfo() unconditionally, but performance makes sense.
                    result = await self._resolver.getnameinfo(
                        (address[0].decode("ascii"), *address[1:]),
                        _NAME_SOCKET_FLAGS,
                    )
                    resolved_host = result.node
                else:
                    resolved_host = address[0].decode("ascii")
                    port = address[1]
            else:  # IPv4
                assert family == socket.AF_INET
                resolved_host = address[0].decode("ascii")
                port = address[1]
            hosts.append(
                ResolveResult(
                    hostname=host,
                    host=resolved_host,
                    port=port,
                    family=family,
                    proto=0,
                    flags=_NUMERIC_SOCKET_FLAGS,
                )
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def _resolve_with_query(
        self, host: str, port: int = 0, family: int = socket.AF_INET
    ) -> List[Dict[str, Any]]:
        """Legacy fallback using DNSResolver.query() (aiodns older than 1.1)."""
        if family == socket.AF_INET6:
            qtype = "AAAA"
        else:
            qtype = "A"

        try:
            resp = await self._resolver.query(host, qtype)
        except aiodns.error.DNSError as exc:
            # Same guard as in resolve(): args[1] requires len(args) >= 2.
            msg = exc.args[1] if len(exc.args) >= 2 else "DNS lookup failed"
            raise OSError(None, msg) from exc

        hosts = []
        for rr in resp:
            hosts.append(
                {
                    "hostname": host,
                    "host": rr.host,
                    "port": port,
                    "family": family,
                    "proto": 0,
                    "flags": socket.AI_NUMERICHOST,
                }
            )

        if not hosts:
            raise OSError(None, "DNS lookup failed")

        return hosts

    async def close(self) -> None:
        # Cancel all outstanding aiodns queries.
        self._resolver.cancel()
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
# Alias for the type of the resolver class selected at import time.
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
# Prefer the aiodns-based resolver only when aiodns exposes getaddrinfo()
# (aiodns_default, checked at import above); otherwise fall back to the
# executor-backed ThreadedResolver.
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/streams.py
ADDED
|
@@ -0,0 +1,687 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections
|
| 3 |
+
import warnings
|
| 4 |
+
from typing import (
|
| 5 |
+
Awaitable,
|
| 6 |
+
Callable,
|
| 7 |
+
Deque,
|
| 8 |
+
Final,
|
| 9 |
+
Generic,
|
| 10 |
+
List,
|
| 11 |
+
Optional,
|
| 12 |
+
Tuple,
|
| 13 |
+
TypeVar,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from .base_protocol import BaseProtocol
|
| 17 |
+
from .helpers import (
|
| 18 |
+
_EXC_SENTINEL,
|
| 19 |
+
BaseTimerContext,
|
| 20 |
+
TimerNoop,
|
| 21 |
+
set_exception,
|
| 22 |
+
set_result,
|
| 23 |
+
)
|
| 24 |
+
from .log import internal_logger
|
| 25 |
+
|
| 26 |
+
__all__ = (
|
| 27 |
+
"EMPTY_PAYLOAD",
|
| 28 |
+
"EofStream",
|
| 29 |
+
"StreamReader",
|
| 30 |
+
"DataQueue",
|
| 31 |
+
"FlowControlDataQueue",
|
| 32 |
+
)
|
| 33 |
+
|
| 34 |
+
_T = TypeVar("_T")
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class EofStream(Exception):
    """Signals that the end of the underlying stream has been reached."""
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class AsyncStreamIterator(Generic[_T]):
    """Adapt a zero-argument read coroutine into an async iterator.

    Iteration stops either when the read callable raises EofStream or when
    it returns an empty bytes object.
    """

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            item = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        # An empty bytes result is the other EOF signal used by readers.
        if item == b"":
            raise StopAsyncIteration
        return item
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
class ChunkTupleAsyncStreamIterator:
    """Async iterator over (data, end_of_http_chunk) tuples from a reader.

    Stops when the wrapped stream's readchunk() yields the EOF sentinel
    ``(b"", False)``.
    """

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        chunk = await self._stream.readchunk()
        if chunk == (b"", False):
            raise StopAsyncIteration
        return chunk
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class AsyncStreamReaderMixin:
    """Add async-iteration helpers to a stream reader.

    The host class is expected to provide ``readline``, ``read``,
    ``readany`` and ``readchunk`` coroutines.
    """

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        # Default iteration goes line by line.
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Iterate over the stream in chunks of at most *n* bytes."""
        return AsyncStreamIterator(lambda: self.read(n))  # type: ignore[attr-defined]

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield whatever data is available as soon as it is received."""
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Iterate over (bytes, bool) tuples as produced by readchunk().

        The boolean flags whether the data ends on an HTTP chunk boundary.
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    # Total number of bytes ever fed via feed_data(); never decremented.
    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        # Flow-control watermarks: pause the protocol above high water,
        # resume it below low water.
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits: Optional[List[int]] = None
        self._buffer: Deque[bytes] = collections.deque()
        self._buffer_offset = 0
        self._eof = False
        self._waiter: Optional[asyncio.Future[None]] = None
        self._eof_waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        self._timer = TimerNoop() if timer is None else timer
        self._eof_callbacks: List[Callable[[], None]] = []

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2**16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low, high) flow-control watermarks."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Store *exc* so pending and future reads raise it."""
        self._exception = exc
        # Once the stream failed, EOF callbacks must not run.
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc, exc_cause)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run at EOF (immediately if already at EOF)."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Mark end of stream, wake pending waiters, fire EOF callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until feed_eof() is called (return at once if already EOF)."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            # Normalize the head chunk so appendleft keeps offsets valid.
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data*, wake any pending reader, maybe pause the protocol."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        """Start tracking HTTP chunk boundaries; must precede any feed_data()."""
        if self._http_chunk_splits is None:
            if self.total_bytes:
                # BUGFIX: the two adjacent string literals previously lacked a
                # separating space, producing "...whensome data...".
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        """Record the end of an HTTP chunk at the current logical position."""
        if self._http_chunk_splits is None:
            # BUGFIX: the message referred to nonexistent method names
            # ("end_chunk_receiving"/"begin_chunk_receiving").
            raise RuntimeError(
                "Called end_http_chunk_receiving without calling "
                "begin_http_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        """Suspend until feed_data()/feed_eof() wakes us, under the timer."""
        if not self._protocol.connected:
            raise RuntimeError("Connection closed.")

        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            with self._timer:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        """Read up to and including *separator*, or to EOF if never found.

        Raises ValueError when the separator is empty or the accumulated
        data exceeds the high-water mark.
        """
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                # NOTE(review): only the head buffer chunk is searched, so a
                # multi-byte separator split across two fed chunks is not
                # matched at the boundary — confirm this is acceptable.
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(
                    ichar - offset + seplen - 1 if ichar else -1
                )
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        """Read up to *n* bytes, or everything until EOF when n < 0."""
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        """Return whatever buffered data exists, waiting for some if empty."""
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
                # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        """Read exactly *n* bytes or raise IncompleteReadError at early EOF."""
        if self._exception is not None:
            raise self._exception

        blocks: List[bytes] = []
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        """Consume up to *n* bytes from the head buffer chunk (all if n == -1)."""
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            # Partial consumption: advance the offset, keep the chunk queued.
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        self._timer.assert_timeout()

        chunks = []
        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """A StreamReader that is permanently at EOF and never yields data.

    Deliberately skips StreamReader.__init__: no protocol, loop or buffer
    is required because every read returns immediately.
    """

    def __init__(self) -> None:
        # Tracks whether readchunk() has already emitted its one EOF marker.
        self._read_eof_chunk = False

    def __repr__(self) -> str:
        return "<%s>" % self.__class__.__name__

    def exception(self) -> Optional[BaseException]:
        return None

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Ignored: an empty stream cannot fail."""

    def on_eof(self, callback: Callable[[], None]) -> None:
        # Already at EOF, so the callback runs right away.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        """No-op: the stream is born at EOF."""

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        """Ignored: an empty stream accepts no data."""

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        # First call yields the (b"", False) EOF marker, later calls (b"", True).
        if not self._read_eof_chunk:
            self._read_eof_chunk = True
            return (b"", False)

        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
|
| 580 |
+
|
| 581 |
+
|
| 582 |
+
# Shared module-level singleton used where a response/request has no body.
# NOTE(review): readchunk() mutates _read_eof_chunk on this shared instance —
# confirm that cross-request sharing of that flag is intended.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
|
| 583 |
+
|
| 584 |
+
|
| 585 |
+
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader.

    Items are fed in as ``(data, size)`` pairs and a single consumer
    awaits :meth:`read`.  ``size`` is pure accounting (summed into
    ``_size`` for subclasses such as flow-control queues); the queue
    itself never inspects it.
    """

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        # Future the (single) reader parks on while the buffer is empty.
        self._waiter: Optional[asyncio.Future[None]] = None
        self._exception: Optional[BaseException] = None
        # Running total of the ``size`` values currently buffered.
        self._size = 0
        self._buffer: Deque[Tuple[_T, int]] = collections.deque()

    def __len__(self) -> int:
        # Number of buffered items (not bytes).
        return len(self._buffer)

    def is_eof(self) -> bool:
        # True once feed_eof() or set_exception() has been called.
        return self._eof

    def at_eof(self) -> bool:
        # True only when EOF was signalled AND the buffer is drained.
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        # Put the queue into an error state and wake the parked reader.
        # NOTE(review): ``set_exception`` / ``_EXC_SENTINEL`` are
        # module-level helpers not visible in this chunk; the sentinel
        # presumably means "leave __cause__ untouched" -- confirm.
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc, exc_cause)

    def feed_data(self, data: _T, size: int = 0) -> None:
        # Append one item and wake the reader if it is waiting.
        self._size += size
        self._buffer.append((data, size))

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def feed_eof(self) -> None:
        # Mark the stream finished; already-buffered items remain readable.
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def read(self) -> _T:
        """Return the next item, waiting for data if necessary.

        Raises the stored exception (if any) or ``EofStream`` once the
        buffer is exhausted after EOF.
        """
        if not self._buffer and not self._eof:
            assert not self._waiter  # single-reader invariant
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                # Drop the stale waiter so a later read() can park again.
                self._waiter = None
                raise

        if self._buffer:
            data, size = self._buffer.popleft()
            self._size -= size
            return data
        else:
            if self._exception is not None:
                raise self._exception
            else:
                raise EofStream

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        # Async-for support: iterate items until EofStream ends iteration.
        return AsyncStreamIterator(self.read)
|
| 660 |
+
|
| 661 |
+
|
| 662 |
+
class FlowControlDataQueue(DataQueue[_T]):
    """A DataQueue that applies backpressure to the feeding protocol.

    Reading on the underlying transport is paused once the buffered size
    crosses a high-water mark, and resumed once the consumer drains it
    back below that mark.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)

        self._protocol = protocol
        # High-water mark: twice the configured limit.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        super().feed_data(data, size)

        over_limit = self._size > self._limit
        if over_limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        try:
            return await super().read()
        finally:
            # Resume the transport once we have drained below the mark.
            if self._protocol._reading_paused and self._size < self._limit:
                self._protocol.resume_reading()
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/typedefs.py
ADDED
|
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import os
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Awaitable,
|
| 7 |
+
Callable,
|
| 8 |
+
Iterable,
|
| 9 |
+
Mapping,
|
| 10 |
+
Protocol,
|
| 11 |
+
Tuple,
|
| 12 |
+
Union,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
|
| 16 |
+
from yarl import URL, Query as _Query
|
| 17 |
+
|
| 18 |
+
# Public alias for yarl's query type.
Query = _Query

# Library-wide default JSON (de)serializers; overridable per call site.
DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:
    # Parameterized forms for static type checkers only.
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    # At runtime the bare classes are used; presumably to avoid
    # subscripting at import time -- NOTE(review): confirm against the
    # supported multidict versions.
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

# Raw binary payload types accepted by the library.
Byteish = Union[bytes, bytearray, memoryview]
# Signatures for pluggable JSON encoder/decoder callables.
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
# Any form callers may pass HTTP headers in.
LooseHeaders = Union[
    Mapping[str, str],
    Mapping[istr, str],
    _CIMultiDict,
    _CIMultiDictProxy,
    Iterable[Tuple[Union[str, istr], str]],
]
# Headers as parsed off the wire: (name, value) byte pairs.
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# Cookie collections callers may pass: a mapping, an iterable of pairs,
# or a BaseCookie instance.
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# A web handler: a coroutine taking a Request, producing a StreamResponse.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
class Middleware(Protocol):
    """Structural protocol matched by aiohttp middleware callables.

    A middleware receives the request and a handler, and returns an
    awaitable producing the final StreamResponse.
    """

    def __call__(
        self, request: "Request", handler: Handler
    ) -> Awaitable["StreamResponse"]: ...
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
# A filesystem path: a plain string or any str-based os.PathLike.
PathLike = Union[str, "os.PathLike[str]"]
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/web.py
ADDED
|
@@ -0,0 +1,595 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import os
|
| 4 |
+
import socket
|
| 5 |
+
import sys
|
| 6 |
+
import warnings
|
| 7 |
+
from argparse import ArgumentParser
|
| 8 |
+
from collections.abc import Iterable
|
| 9 |
+
from contextlib import suppress
|
| 10 |
+
from importlib import import_module
|
| 11 |
+
from typing import (
|
| 12 |
+
Any,
|
| 13 |
+
Awaitable,
|
| 14 |
+
Callable,
|
| 15 |
+
Iterable as TypingIterable,
|
| 16 |
+
List,
|
| 17 |
+
Optional,
|
| 18 |
+
Set,
|
| 19 |
+
Type,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from .abc import AbstractAccessLogger
|
| 25 |
+
from .helpers import AppKey as AppKey
|
| 26 |
+
from .log import access_logger
|
| 27 |
+
from .typedefs import PathLike
|
| 28 |
+
from .web_app import Application as Application, CleanupError as CleanupError
|
| 29 |
+
from .web_exceptions import (
|
| 30 |
+
HTTPAccepted as HTTPAccepted,
|
| 31 |
+
HTTPBadGateway as HTTPBadGateway,
|
| 32 |
+
HTTPBadRequest as HTTPBadRequest,
|
| 33 |
+
HTTPClientError as HTTPClientError,
|
| 34 |
+
HTTPConflict as HTTPConflict,
|
| 35 |
+
HTTPCreated as HTTPCreated,
|
| 36 |
+
HTTPError as HTTPError,
|
| 37 |
+
HTTPException as HTTPException,
|
| 38 |
+
HTTPExpectationFailed as HTTPExpectationFailed,
|
| 39 |
+
HTTPFailedDependency as HTTPFailedDependency,
|
| 40 |
+
HTTPForbidden as HTTPForbidden,
|
| 41 |
+
HTTPFound as HTTPFound,
|
| 42 |
+
HTTPGatewayTimeout as HTTPGatewayTimeout,
|
| 43 |
+
HTTPGone as HTTPGone,
|
| 44 |
+
HTTPInsufficientStorage as HTTPInsufficientStorage,
|
| 45 |
+
HTTPInternalServerError as HTTPInternalServerError,
|
| 46 |
+
HTTPLengthRequired as HTTPLengthRequired,
|
| 47 |
+
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
|
| 48 |
+
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
|
| 49 |
+
HTTPMove as HTTPMove,
|
| 50 |
+
HTTPMovedPermanently as HTTPMovedPermanently,
|
| 51 |
+
HTTPMultipleChoices as HTTPMultipleChoices,
|
| 52 |
+
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
|
| 53 |
+
HTTPNoContent as HTTPNoContent,
|
| 54 |
+
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
|
| 55 |
+
HTTPNotAcceptable as HTTPNotAcceptable,
|
| 56 |
+
HTTPNotExtended as HTTPNotExtended,
|
| 57 |
+
HTTPNotFound as HTTPNotFound,
|
| 58 |
+
HTTPNotImplemented as HTTPNotImplemented,
|
| 59 |
+
HTTPNotModified as HTTPNotModified,
|
| 60 |
+
HTTPOk as HTTPOk,
|
| 61 |
+
HTTPPartialContent as HTTPPartialContent,
|
| 62 |
+
HTTPPaymentRequired as HTTPPaymentRequired,
|
| 63 |
+
HTTPPermanentRedirect as HTTPPermanentRedirect,
|
| 64 |
+
HTTPPreconditionFailed as HTTPPreconditionFailed,
|
| 65 |
+
HTTPPreconditionRequired as HTTPPreconditionRequired,
|
| 66 |
+
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
|
| 67 |
+
HTTPRedirection as HTTPRedirection,
|
| 68 |
+
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
|
| 69 |
+
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
|
| 70 |
+
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
|
| 71 |
+
HTTPRequestTimeout as HTTPRequestTimeout,
|
| 72 |
+
HTTPRequestURITooLong as HTTPRequestURITooLong,
|
| 73 |
+
HTTPResetContent as HTTPResetContent,
|
| 74 |
+
HTTPSeeOther as HTTPSeeOther,
|
| 75 |
+
HTTPServerError as HTTPServerError,
|
| 76 |
+
HTTPServiceUnavailable as HTTPServiceUnavailable,
|
| 77 |
+
HTTPSuccessful as HTTPSuccessful,
|
| 78 |
+
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
|
| 79 |
+
HTTPTooManyRequests as HTTPTooManyRequests,
|
| 80 |
+
HTTPUnauthorized as HTTPUnauthorized,
|
| 81 |
+
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
|
| 82 |
+
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
|
| 83 |
+
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
|
| 84 |
+
HTTPUpgradeRequired as HTTPUpgradeRequired,
|
| 85 |
+
HTTPUseProxy as HTTPUseProxy,
|
| 86 |
+
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
|
| 87 |
+
HTTPVersionNotSupported as HTTPVersionNotSupported,
|
| 88 |
+
NotAppKeyWarning as NotAppKeyWarning,
|
| 89 |
+
)
|
| 90 |
+
from .web_fileresponse import FileResponse as FileResponse
|
| 91 |
+
from .web_log import AccessLogger
|
| 92 |
+
from .web_middlewares import (
|
| 93 |
+
middleware as middleware,
|
| 94 |
+
normalize_path_middleware as normalize_path_middleware,
|
| 95 |
+
)
|
| 96 |
+
from .web_protocol import (
|
| 97 |
+
PayloadAccessError as PayloadAccessError,
|
| 98 |
+
RequestHandler as RequestHandler,
|
| 99 |
+
RequestPayloadError as RequestPayloadError,
|
| 100 |
+
)
|
| 101 |
+
from .web_request import (
|
| 102 |
+
BaseRequest as BaseRequest,
|
| 103 |
+
FileField as FileField,
|
| 104 |
+
Request as Request,
|
| 105 |
+
)
|
| 106 |
+
from .web_response import (
|
| 107 |
+
ContentCoding as ContentCoding,
|
| 108 |
+
Response as Response,
|
| 109 |
+
StreamResponse as StreamResponse,
|
| 110 |
+
json_response as json_response,
|
| 111 |
+
)
|
| 112 |
+
from .web_routedef import (
|
| 113 |
+
AbstractRouteDef as AbstractRouteDef,
|
| 114 |
+
RouteDef as RouteDef,
|
| 115 |
+
RouteTableDef as RouteTableDef,
|
| 116 |
+
StaticDef as StaticDef,
|
| 117 |
+
delete as delete,
|
| 118 |
+
get as get,
|
| 119 |
+
head as head,
|
| 120 |
+
options as options,
|
| 121 |
+
patch as patch,
|
| 122 |
+
post as post,
|
| 123 |
+
put as put,
|
| 124 |
+
route as route,
|
| 125 |
+
static as static,
|
| 126 |
+
view as view,
|
| 127 |
+
)
|
| 128 |
+
from .web_runner import (
|
| 129 |
+
AppRunner as AppRunner,
|
| 130 |
+
BaseRunner as BaseRunner,
|
| 131 |
+
BaseSite as BaseSite,
|
| 132 |
+
GracefulExit as GracefulExit,
|
| 133 |
+
NamedPipeSite as NamedPipeSite,
|
| 134 |
+
ServerRunner as ServerRunner,
|
| 135 |
+
SockSite as SockSite,
|
| 136 |
+
TCPSite as TCPSite,
|
| 137 |
+
UnixSite as UnixSite,
|
| 138 |
+
)
|
| 139 |
+
from .web_server import Server as Server
|
| 140 |
+
from .web_urldispatcher import (
|
| 141 |
+
AbstractResource as AbstractResource,
|
| 142 |
+
AbstractRoute as AbstractRoute,
|
| 143 |
+
DynamicResource as DynamicResource,
|
| 144 |
+
PlainResource as PlainResource,
|
| 145 |
+
PrefixedSubAppResource as PrefixedSubAppResource,
|
| 146 |
+
Resource as Resource,
|
| 147 |
+
ResourceRoute as ResourceRoute,
|
| 148 |
+
StaticResource as StaticResource,
|
| 149 |
+
UrlDispatcher as UrlDispatcher,
|
| 150 |
+
UrlMappingMatchInfo as UrlMappingMatchInfo,
|
| 151 |
+
View as View,
|
| 152 |
+
)
|
| 153 |
+
from .web_ws import (
|
| 154 |
+
WebSocketReady as WebSocketReady,
|
| 155 |
+
WebSocketResponse as WebSocketResponse,
|
| 156 |
+
WSMsgType as WSMsgType,
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
__all__ = (
|
| 160 |
+
# web_app
|
| 161 |
+
"AppKey",
|
| 162 |
+
"Application",
|
| 163 |
+
"CleanupError",
|
| 164 |
+
# web_exceptions
|
| 165 |
+
"NotAppKeyWarning",
|
| 166 |
+
"HTTPAccepted",
|
| 167 |
+
"HTTPBadGateway",
|
| 168 |
+
"HTTPBadRequest",
|
| 169 |
+
"HTTPClientError",
|
| 170 |
+
"HTTPConflict",
|
| 171 |
+
"HTTPCreated",
|
| 172 |
+
"HTTPError",
|
| 173 |
+
"HTTPException",
|
| 174 |
+
"HTTPExpectationFailed",
|
| 175 |
+
"HTTPFailedDependency",
|
| 176 |
+
"HTTPForbidden",
|
| 177 |
+
"HTTPFound",
|
| 178 |
+
"HTTPGatewayTimeout",
|
| 179 |
+
"HTTPGone",
|
| 180 |
+
"HTTPInsufficientStorage",
|
| 181 |
+
"HTTPInternalServerError",
|
| 182 |
+
"HTTPLengthRequired",
|
| 183 |
+
"HTTPMethodNotAllowed",
|
| 184 |
+
"HTTPMisdirectedRequest",
|
| 185 |
+
"HTTPMove",
|
| 186 |
+
"HTTPMovedPermanently",
|
| 187 |
+
"HTTPMultipleChoices",
|
| 188 |
+
"HTTPNetworkAuthenticationRequired",
|
| 189 |
+
"HTTPNoContent",
|
| 190 |
+
"HTTPNonAuthoritativeInformation",
|
| 191 |
+
"HTTPNotAcceptable",
|
| 192 |
+
"HTTPNotExtended",
|
| 193 |
+
"HTTPNotFound",
|
| 194 |
+
"HTTPNotImplemented",
|
| 195 |
+
"HTTPNotModified",
|
| 196 |
+
"HTTPOk",
|
| 197 |
+
"HTTPPartialContent",
|
| 198 |
+
"HTTPPaymentRequired",
|
| 199 |
+
"HTTPPermanentRedirect",
|
| 200 |
+
"HTTPPreconditionFailed",
|
| 201 |
+
"HTTPPreconditionRequired",
|
| 202 |
+
"HTTPProxyAuthenticationRequired",
|
| 203 |
+
"HTTPRedirection",
|
| 204 |
+
"HTTPRequestEntityTooLarge",
|
| 205 |
+
"HTTPRequestHeaderFieldsTooLarge",
|
| 206 |
+
"HTTPRequestRangeNotSatisfiable",
|
| 207 |
+
"HTTPRequestTimeout",
|
| 208 |
+
"HTTPRequestURITooLong",
|
| 209 |
+
"HTTPResetContent",
|
| 210 |
+
"HTTPSeeOther",
|
| 211 |
+
"HTTPServerError",
|
| 212 |
+
"HTTPServiceUnavailable",
|
| 213 |
+
"HTTPSuccessful",
|
| 214 |
+
"HTTPTemporaryRedirect",
|
| 215 |
+
"HTTPTooManyRequests",
|
| 216 |
+
"HTTPUnauthorized",
|
| 217 |
+
"HTTPUnavailableForLegalReasons",
|
| 218 |
+
"HTTPUnprocessableEntity",
|
| 219 |
+
"HTTPUnsupportedMediaType",
|
| 220 |
+
"HTTPUpgradeRequired",
|
| 221 |
+
"HTTPUseProxy",
|
| 222 |
+
"HTTPVariantAlsoNegotiates",
|
| 223 |
+
"HTTPVersionNotSupported",
|
| 224 |
+
# web_fileresponse
|
| 225 |
+
"FileResponse",
|
| 226 |
+
# web_middlewares
|
| 227 |
+
"middleware",
|
| 228 |
+
"normalize_path_middleware",
|
| 229 |
+
# web_protocol
|
| 230 |
+
"PayloadAccessError",
|
| 231 |
+
"RequestHandler",
|
| 232 |
+
"RequestPayloadError",
|
| 233 |
+
# web_request
|
| 234 |
+
"BaseRequest",
|
| 235 |
+
"FileField",
|
| 236 |
+
"Request",
|
| 237 |
+
# web_response
|
| 238 |
+
"ContentCoding",
|
| 239 |
+
"Response",
|
| 240 |
+
"StreamResponse",
|
| 241 |
+
"json_response",
|
| 242 |
+
# web_routedef
|
| 243 |
+
"AbstractRouteDef",
|
| 244 |
+
"RouteDef",
|
| 245 |
+
"RouteTableDef",
|
| 246 |
+
"StaticDef",
|
| 247 |
+
"delete",
|
| 248 |
+
"get",
|
| 249 |
+
"head",
|
| 250 |
+
"options",
|
| 251 |
+
"patch",
|
| 252 |
+
"post",
|
| 253 |
+
"put",
|
| 254 |
+
"route",
|
| 255 |
+
"static",
|
| 256 |
+
"view",
|
| 257 |
+
# web_runner
|
| 258 |
+
"AppRunner",
|
| 259 |
+
"BaseRunner",
|
| 260 |
+
"BaseSite",
|
| 261 |
+
"GracefulExit",
|
| 262 |
+
"ServerRunner",
|
| 263 |
+
"SockSite",
|
| 264 |
+
"TCPSite",
|
| 265 |
+
"UnixSite",
|
| 266 |
+
"NamedPipeSite",
|
| 267 |
+
# web_server
|
| 268 |
+
"Server",
|
| 269 |
+
# web_urldispatcher
|
| 270 |
+
"AbstractResource",
|
| 271 |
+
"AbstractRoute",
|
| 272 |
+
"DynamicResource",
|
| 273 |
+
"PlainResource",
|
| 274 |
+
"PrefixedSubAppResource",
|
| 275 |
+
"Resource",
|
| 276 |
+
"ResourceRoute",
|
| 277 |
+
"StaticResource",
|
| 278 |
+
"UrlDispatcher",
|
| 279 |
+
"UrlMappingMatchInfo",
|
| 280 |
+
"View",
|
| 281 |
+
# web_ws
|
| 282 |
+
"WebSocketReady",
|
| 283 |
+
"WebSocketResponse",
|
| 284 |
+
"WSMsgType",
|
| 285 |
+
# web
|
| 286 |
+
"run_app",
|
| 287 |
+
)
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
try:
|
| 291 |
+
from ssl import SSLContext
|
| 292 |
+
except ImportError: # pragma: no cover
|
| 293 |
+
SSLContext = Any # type: ignore[misc,assignment]
|
| 294 |
+
|
| 295 |
+
# Only display warning when using -Wdefault, -We, -X dev or similar.
|
| 296 |
+
warnings.filterwarnings("ignore", category=NotAppKeyWarning, append=True)
|
| 297 |
+
|
| 298 |
+
HostSequence = TypingIterable[str]
|
| 299 |
+
|
| 300 |
+
|
| 301 |
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
) -> None:
    """Set up an AppRunner, start every requested site, then sleep forever.

    An internal function to actually do all dirty job for application
    running: awaits *app* if it is a coroutine, builds TCP/Unix/raw-socket
    sites from *host*/*path*/*sock*, and always cleans the runner up on
    exit (including cancellation).

    NOTE: the *print* parameter deliberately shadows the builtin so
    callers can silence startup output by passing ``print=None``.
    """
    if asyncio.iscoroutine(app):
        app = await app

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
        shutdown_timeout=shutdown_timeout,
        handler_cancellation=handler_cancellation,
    )

    await runner.setup()

    sites: List[BaseSite] = []

    try:
        if host is not None:
            # A single host string gets one TCP site; an iterable of
            # hosts gets one site per host, all on the same port.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # Precedence: (path is None and sock is None) or port is not None.
        # I.e. default TCP site when nothing else was requested, and ALSO
        # when an explicit port accompanies a path/sock configuration.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # One Unix-domain site per path (single path or iterable).
            if isinstance(path, (str, os.PathLike)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # One site per pre-opened socket (single socket or iterable).
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        # the coroutine is expected to be cancelled to stop serving
        while True:
            await asyncio.sleep(3600)
    finally:
        await runner.cleanup()
|
| 437 |
+
|
| 438 |
+
|
| 439 |
+
def _cancel_tasks(
|
| 440 |
+
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
|
| 441 |
+
) -> None:
|
| 442 |
+
if not to_cancel:
|
| 443 |
+
return
|
| 444 |
+
|
| 445 |
+
for task in to_cancel:
|
| 446 |
+
task.cancel()
|
| 447 |
+
|
| 448 |
+
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
|
| 449 |
+
|
| 450 |
+
for task in to_cancel:
|
| 451 |
+
if task.cancelled():
|
| 452 |
+
continue
|
| 453 |
+
if task.exception() is not None:
|
| 454 |
+
loop.call_exception_handler(
|
| 455 |
+
{
|
| 456 |
+
"message": "unhandled exception during asyncio.run() shutdown",
|
| 457 |
+
"exception": task.exception(),
|
| 458 |
+
"task": task,
|
| 459 |
+
}
|
| 460 |
+
)
|
| 461 |
+
|
| 462 |
+
|
| 463 |
+
def run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Union[PathLike, TypingIterable[PathLike], None] = None,
    sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Optional[Callable[..., None]] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
    handler_cancellation: bool = False,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Run an app locally.

    Blocking entry point: drives :func:`_run_app` on *loop* (a fresh
    event loop is created if none is given) until KeyboardInterrupt /
    GracefulExit, then tears the loop down completely -- the main task,
    every remaining task, async generators, and finally the loop itself.
    """
    if loop is None:
        loop = asyncio.new_event_loop()

    # Configure if and only if in debugging mode and using the default logger
    if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
        if access_log.level == logging.NOTSET:
            access_log.setLevel(logging.DEBUG)
        if not access_log.hasHandlers():
            access_log.addHandler(logging.StreamHandler())

    main_task = loop.create_task(
        _run_app(
            app,
            host=host,
            port=port,
            path=path,
            sock=sock,
            shutdown_timeout=shutdown_timeout,
            keepalive_timeout=keepalive_timeout,
            ssl_context=ssl_context,
            print=print,
            backlog=backlog,
            access_log_class=access_log_class,
            access_log_format=access_log_format,
            access_log=access_log,
            handle_signals=handle_signals,
            reuse_address=reuse_address,
            reuse_port=reuse_port,
            handler_cancellation=handler_cancellation,
        )
    )

    try:
        asyncio.set_event_loop(loop)
        loop.run_until_complete(main_task)
    except (GracefulExit, KeyboardInterrupt):  # pragma: no cover
        pass
    finally:
        # Teardown order matters: cancel the main task first (its
        # finally-clause cleans the runner), then the stragglers, then
        # async generators, then close the loop.
        try:
            main_task.cancel()
            with suppress(asyncio.CancelledError):
                loop.run_until_complete(main_task)
        finally:
            _cancel_tasks(asyncio.all_tasks(loop), loop)
            loop.run_until_complete(loop.shutdown_asyncgens())
            loop.close()
|
| 531 |
+
|
| 532 |
+
|
| 533 |
+
def main(argv: List[str]) -> None:
    """Command-line entry point: import an app factory and serve it.

    *argv* must name an ``entry-func`` in ``module:function`` form;
    unrecognized arguments are forwarded to that factory.  Exits via
    ``parser.error`` (SystemExit) on any bad input.
    """
    parser = ArgumentParser(
        description="aiohttp.web Application server", prog="aiohttp.web"
    )
    parser.add_argument(
        "entry_func",
        help=(
            "Callable returning the `aiohttp.web.Application` instance to "
            "run. Should be specified in the 'module:function' syntax."
        ),
        metavar="entry-func",
    )
    parser.add_argument(
        "-H",
        "--hostname",
        help="TCP/IP hostname to serve on (default: %(default)r)",
        default="localhost",
    )
    parser.add_argument(
        "-P",
        "--port",
        help="TCP/IP port to serve on (default: %(default)r)",
        type=int,
        default="8080",
    )
    parser.add_argument(
        "-U",
        "--path",
        help="Unix file system path to serve on. Specifying a path will cause "
        "hostname and port arguments to be ignored.",
    )
    args, extra_argv = parser.parse_known_args(argv)

    # Resolve the "module:function" entry point.
    module_name, _, attr_name = args.entry_func.partition(":")
    if not attr_name or not module_name:
        parser.error("'entry-func' not in 'module:function' syntax")
    if module_name.startswith("."):
        parser.error("relative module names not supported")
    try:
        module = import_module(module_name)
    except ImportError as ex:
        parser.error(f"unable to import {module_name}: {ex}")
    try:
        entry = getattr(module, attr_name)
    except AttributeError:
        parser.error(f"module {module_name!r} has no attribute {attr_name!r}")

    # Unix-socket serving requires AF_UNIX support on this platform.
    if args.path is not None and not hasattr(socket, "AF_UNIX"):
        parser.error(
            "file system paths not supported by your operating environment"
        )

    logging.basicConfig(level=logging.DEBUG)

    app = entry(extra_argv)
    run_app(app, host=args.hostname, port=args.port, path=args.path)
    parser.exit(message="Stopped\n")
|
| 592 |
+
|
| 593 |
+
|
| 594 |
+
# Script entry point: ``python -m aiohttp.web module:factory ...``.
if __name__ == "__main__":  # pragma: no branch
    main(sys.argv[1:])  # pragma: no cover
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/web_response.py
ADDED
|
@@ -0,0 +1,820 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections.abc
|
| 3 |
+
import datetime
|
| 4 |
+
import enum
|
| 5 |
+
import json
|
| 6 |
+
import math
|
| 7 |
+
import time
|
| 8 |
+
import warnings
|
| 9 |
+
from concurrent.futures import Executor
|
| 10 |
+
from http import HTTPStatus
|
| 11 |
+
from http.cookies import SimpleCookie
|
| 12 |
+
from typing import (
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterator,
|
| 17 |
+
MutableMapping,
|
| 18 |
+
Optional,
|
| 19 |
+
Union,
|
| 20 |
+
cast,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from multidict import CIMultiDict, istr
|
| 24 |
+
|
| 25 |
+
from . import hdrs, payload
|
| 26 |
+
from .abc import AbstractStreamWriter
|
| 27 |
+
from .compression_utils import ZLibCompressor
|
| 28 |
+
from .helpers import (
|
| 29 |
+
ETAG_ANY,
|
| 30 |
+
QUOTED_ETAG_RE,
|
| 31 |
+
ETag,
|
| 32 |
+
HeadersMixin,
|
| 33 |
+
must_be_empty_body,
|
| 34 |
+
parse_http_date,
|
| 35 |
+
rfc822_formatted_time,
|
| 36 |
+
sentinel,
|
| 37 |
+
should_remove_content_length,
|
| 38 |
+
validate_etag_value,
|
| 39 |
+
)
|
| 40 |
+
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11
|
| 41 |
+
from .payload import Payload
|
| 42 |
+
from .typedefs import JSONEncoder, LooseHeaders
|
| 43 |
+
|
| 44 |
+
# Map of numeric status code -> standard reason phrase (e.g. 200 -> "OK"),
# built once from the stdlib HTTPStatus enum.
REASON_PHRASES = {http_status.value: http_status.phrase for http_status in HTTPStatus}

__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")


if TYPE_CHECKING:
    from .web_request import BaseRequest

    # For type checkers: responses behave as a str-keyed mutable mapping.
    BaseClass = MutableMapping[str, Any]
else:
    # At runtime use the non-generic ABC to avoid typing overhead.
    BaseClass = collections.abc.MutableMapping
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
# TODO(py311): Convert to StrEnum for wider use
class ContentCoding(enum.Enum):
    """Content codings supported for response compression.

    Member values are the literal tokens used in the
    ``Content-Encoding`` / ``Accept-Encoding`` headers.
    """

    # The content codings that we have support for.
    #
    # Additional registered codings are listed at:
    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
    deflate = "deflate"
    gzip = "gzip"
    identity = "identity"
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# Reverse lookup: coding token (str) -> ContentCoding member.
CONTENT_CODINGS = {coding.value: coding for coding in ContentCoding}
|
| 69 |
+
|
| 70 |
+
############################################################
|
| 71 |
+
# HTTP Response classes
|
| 72 |
+
############################################################
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class StreamResponse(BaseClass, HeadersMixin):
    """Low-level HTTP response whose payload is streamed to the client.

    The instance also acts as a ``MutableMapping[str, Any]`` so handlers and
    middlewares can attach per-response state (``resp["key"] = value``).
    Typical lifecycle: construct -> mutate headers/cookies ->
    ``await prepare(request)`` -> ``await write(...)`` -> ``await write_eof()``.
    """

    # Subclasses may set this to False to opt out of automatic
    # Content-Length / chunked-transfer handling in _prepare_headers().
    _length_check = True

    # Body storage shared with subclasses; None until a body is assigned.
    _body: Union[None, bytes, bytearray, Payload]

    def __init__(
        self,
        *,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self._body = None
        # None means "follow the request's keep-alive preference".
        self._keep_alive: Optional[bool] = None
        self._chunked = False
        self._compression = False
        self._compression_force: Optional[ContentCoding] = None
        self._cookies = SimpleCookie()

        self._req: Optional[BaseRequest] = None
        self._payload_writer: Optional[AbstractStreamWriter] = None
        self._eof_sent = False
        self._must_be_empty_body: Optional[bool] = None
        self._body_length = 0
        # Backing store for the MutableMapping interface.
        self._state: Dict[str, Any] = {}

        if headers is not None:
            self._headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            self._headers = CIMultiDict()

        self._set_status(status, reason)

    @property
    def prepared(self) -> bool:
        """True once prepare() has started (or the response already finished)."""
        return self._eof_sent or self._payload_writer is not None

    @property
    def task(self) -> "Optional[asyncio.Task[None]]":
        """The request-handler task, or None before prepare()/after write_eof()."""
        if self._req:
            return self._req.task
        else:
            return None

    @property
    def status(self) -> int:
        """HTTP status code (int)."""
        return self._status

    @property
    def chunked(self) -> bool:
        """Whether chunked transfer encoding was explicitly enabled."""
        return self._chunked

    @property
    def compression(self) -> bool:
        """Whether response compression was enabled."""
        return self._compression

    @property
    def reason(self) -> str:
        """HTTP reason phrase accompanying the status code."""
        return self._reason

    def set_status(
        self,
        status: int,
        reason: Optional[str] = None,
    ) -> None:
        """Set status code and optional reason phrase.

        Must be called before the headers are sent (i.e. before prepare()).
        """
        assert (
            not self.prepared
        ), "Cannot change the response status code after the headers have been sent"
        self._set_status(status, reason)

    def _set_status(self, status: int, reason: Optional[str]) -> None:
        # When no reason is given, fall back to the standard phrase for the
        # code ("" for unknown codes).
        self._status = int(status)
        if reason is None:
            reason = REASON_PHRASES.get(self._status, "")
        elif "\n" in reason:
            # Reject newlines to keep the status line well-formed.
            # NOTE(review): "\r" is not rejected here — presumably validated
            # elsewhere; confirm before relying on this for injection safety.
            raise ValueError("Reason cannot contain \\n")
        self._reason = reason

    @property
    def keep_alive(self) -> Optional[bool]:
        """Keep-alive override; None means inherit from the request."""
        return self._keep_alive

    def force_close(self) -> None:
        """Force the underlying connection to close after this response."""
        self._keep_alive = False

    @property
    def body_length(self) -> int:
        """Number of payload bytes written, available after write_eof()."""
        return self._body_length

    @property
    def output_length(self) -> int:
        """Deprecated: size of the writer's internal buffer."""
        warnings.warn("output_length is deprecated", DeprecationWarning)
        assert self._payload_writer
        return self._payload_writer.buffer_size

    def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
        """Enables automatic chunked transfer encoding.

        Incompatible with an explicit Content-Length header; the
        ``chunk_size`` argument is deprecated and ignored.
        """
        self._chunked = True

        if hdrs.CONTENT_LENGTH in self._headers:
            raise RuntimeError(
                "You can't enable chunked encoding when " "a content length is set"
            )
        if chunk_size is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

    def enable_compression(
        self, force: Optional[Union[bool, ContentCoding]] = None
    ) -> None:
        """Enables response compression encoding.

        ``force`` may be a ContentCoding to skip Accept-Encoding negotiation;
        passing a bool is a deprecated pre-0.17 compatibility shim.
        """
        # Backwards compatibility for when force was a bool <0.17.
        if isinstance(force, bool):
            force = ContentCoding.deflate if force else ContentCoding.identity
            warnings.warn(
                "Using boolean for force is deprecated #3318", DeprecationWarning
            )
        elif force is not None:
            assert isinstance(force, ContentCoding), (
                "force should one of " "None, bool or " "ContentEncoding"
            )

        self._compression = True
        self._compression_force = force

    @property
    def headers(self) -> "CIMultiDict[str]":
        """Case-insensitive multidict of response headers (mutable)."""
        return self._headers

    @property
    def cookies(self) -> SimpleCookie:
        """Response cookies as a SimpleCookie (mutable)."""
        return self._cookies

    def set_cookie(
        self,
        name: str,
        value: str,
        *,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        max_age: Optional[Union[int, str]] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        version: Optional[str] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == "":
            # deleted cookie (previously marked via del_cookie); drop the
            # deletion marker before setting a real value
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c["expires"] = expires
        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
            # Clear the epoch expiry left behind by del_cookie().
            del c["expires"]

        if domain is not None:
            c["domain"] = domain

        if max_age is not None:
            c["max-age"] = str(max_age)
        elif "max-age" in c:
            del c["max-age"]

        c["path"] = path

        if secure is not None:
            c["secure"] = secure
        if httponly is not None:
            c["httponly"] = httponly
        if version is not None:
            c["version"] = version
        if samesite is not None:
            c["samesite"] = samesite

    def del_cookie(
        self, name: str, *, domain: Optional[str] = None, path: str = "/"
    ) -> None:
        """Delete cookie.

        Creates new empty expired cookie.
        """
        # TODO: do we need domain/path here?
        self._cookies.pop(name, None)
        self.set_cookie(
            name,
            "",
            max_age=0,
            expires="Thu, 01 Jan 1970 00:00:00 GMT",
            domain=domain,
            path=path,
        )

    @property
    def content_length(self) -> Optional[int]:
        """Value of the Content-Length header, or None if absent."""
        # Just a placeholder for adding setter
        return super().content_length

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        # Setting None removes the header; any other value is stored as str.
        if value is not None:
            value = int(value)
            if self._chunked:
                raise RuntimeError(
                    "You can't set content length when " "chunked encoding is enable"
                )
            self._headers[hdrs.CONTENT_LENGTH] = str(value)
        else:
            self._headers.pop(hdrs.CONTENT_LENGTH, None)

    @property
    def content_type(self) -> str:
        """Content-Type value without parameters (e.g. "text/html")."""
        # Just a placeholder for adding setter
        return super().content_type

    @content_type.setter
    def content_type(self, value: str) -> None:
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()

    @property
    def charset(self) -> Optional[str]:
        """charset parameter of Content-Type, or None if unset."""
        # Just a placeholder for adding setter
        return super().charset

    @charset.setter
    def charset(self, value: Optional[str]) -> None:
        ctype = self.content_type  # read header values if needed
        if ctype == "application/octet-stream":
            raise RuntimeError(
                "Setting charset for application/octet-stream "
                "doesn't make sense, setup content_type first"
            )
        assert self._content_dict is not None
        if value is None:
            self._content_dict.pop("charset", None)
        else:
            # Charset tokens are case-insensitive; normalize to lowercase.
            self._content_dict["charset"] = str(value).lower()
        self._generate_content_type_header()

    @property
    def last_modified(self) -> Optional[datetime.datetime]:
        """The value of Last-Modified HTTP header, or None.

        This header is represented as a `datetime` object.
        """
        return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))

    @last_modified.setter
    def last_modified(
        self, value: Optional[Union[int, float, datetime.datetime, str]]
    ) -> None:
        # Accepts a Unix timestamp (int/float, rounded up), a datetime
        # (formatted per RFC 822), a preformatted string, or None to unset.
        if value is None:
            self._headers.pop(hdrs.LAST_MODIFIED, None)
        elif isinstance(value, (int, float)):
            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
            )
        elif isinstance(value, datetime.datetime):
            self._headers[hdrs.LAST_MODIFIED] = time.strftime(
                "%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
            )
        elif isinstance(value, str):
            self._headers[hdrs.LAST_MODIFIED] = value

    @property
    def etag(self) -> Optional[ETag]:
        """Parsed ETag header as an ETag tuple, or None if absent/malformed."""
        quoted_value = self._headers.get(hdrs.ETAG)
        if not quoted_value:
            return None
        elif quoted_value == ETAG_ANY:
            return ETag(value=ETAG_ANY)
        match = QUOTED_ETAG_RE.fullmatch(quoted_value)
        if not match:
            return None
        is_weak, value = match.group(1, 2)
        return ETag(
            is_weak=bool(is_weak),
            value=value,
        )

    @etag.setter
    def etag(self, value: Optional[Union[ETag, str]]) -> None:
        # None removes the header; "*" (ETAG_ANY) is stored verbatim; plain
        # strings are validated then quoted; ETag tuples honor the weak flag.
        if value is None:
            self._headers.pop(hdrs.ETAG, None)
        elif (isinstance(value, str) and value == ETAG_ANY) or (
            isinstance(value, ETag) and value.value == ETAG_ANY
        ):
            self._headers[hdrs.ETAG] = ETAG_ANY
        elif isinstance(value, str):
            validate_etag_value(value)
            self._headers[hdrs.ETAG] = f'"{value}"'
        elif isinstance(value, ETag) and isinstance(value.value, str):
            validate_etag_value(value.value)
            hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
            self._headers[hdrs.ETAG] = hdr_value
        else:
            raise ValueError(
                f"Unsupported etag type: {type(value)}. "
                f"etag must be str, ETag or None"
            )

    def _generate_content_type_header(
        self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
    ) -> None:
        # Rebuild the Content-Type header from the cached type + params.
        assert self._content_dict is not None
        assert self._content_type is not None
        params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
        if params:
            ctype = self._content_type + "; " + params
        else:
            ctype = self._content_type
        self._headers[CONTENT_TYPE] = ctype

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        # identity means "no compression": leave the response untouched.
        if coding != ContentCoding.identity:
            assert self._payload_writer is not None
            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._payload_writer.enable_compression(coding.value)
            # Compressed payload may have different content length,
            # remove the header
            self._headers.popall(hdrs.CONTENT_LENGTH, None)

    async def _start_compression(self, request: "BaseRequest") -> None:
        # Use the forced coding when set; otherwise pick the first supported
        # coding mentioned in the request's Accept-Encoding header.
        if self._compression_force:
            await self._do_start_compression(self._compression_force)
        else:
            # Encoding comparisons should be case-insensitive
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1
            accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
            for value, coding in CONTENT_CODINGS.items():
                if value in accept_encoding:
                    await self._do_start_compression(coding)
                    return

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        """Send the status line and headers; idempotent.

        Returns the payload writer, or None if the response already finished.
        """
        if self._eof_sent:
            return None
        if self._payload_writer is not None:
            # Already prepared — return the existing writer.
            return self._payload_writer
        self._must_be_empty_body = must_be_empty_body(request.method, self.status)
        return await self._start(request)

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        # Bind to the request, run signal hooks, then emit the headers.
        self._req = request
        writer = self._payload_writer = request._payload_writer

        await self._prepare_headers()
        await request._prepare_hook(self)
        await self._write_headers()

        return writer

    async def _prepare_headers(self) -> None:
        # Finalize cookies, compression, transfer-encoding/content-length
        # and connection headers before they are written to the wire.
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        for cookie in self._cookies.values():
            # cookie.output(header="") yields " <payload>"; strip the space.
            value = cookie.output(header="")[1:]
            headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            if not self._must_be_empty_body:
                writer.enable_chunking()
                headers[hdrs.TRANSFER_ENCODING] = "chunked"
                if hdrs.CONTENT_LENGTH in headers:
                    del headers[hdrs.CONTENT_LENGTH]
        elif self._length_check:
            # No explicit chunking: derive framing from Content-Length.
            writer.length = self.content_length
            if writer.length is None:
                if version >= HttpVersion11:
                    if not self._must_be_empty_body:
                        writer.enable_chunking()
                        headers[hdrs.TRANSFER_ENCODING] = "chunked"
                elif not self._must_be_empty_body:
                    # HTTP/1.0 has no chunking: signal end-of-body by close.
                    keep_alive = False

        # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
        # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
        if self._must_be_empty_body:
            if hdrs.CONTENT_LENGTH in headers and should_remove_content_length(
                request.method, self.status
            ):
                del headers[hdrs.CONTENT_LENGTH]
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-10
            # https://datatracker.ietf.org/doc/html/rfc9112#section-6.1-13
            if hdrs.TRANSFER_ENCODING in headers:
                del headers[hdrs.TRANSFER_ENCODING]
        elif self.content_length != 0:
            # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            else:
                if version == HttpVersion11:
                    headers[hdrs.CONNECTION] = "close"

    async def _write_headers(self) -> None:
        # Compose the status line and hand everything to the writer.
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # status line
        version = request.version
        status_line = f"HTTP/{version[0]}.{version[1]} {self._status} {self._reason}"
        await writer.write_headers(status_line, self._headers)

    async def write(self, data: bytes) -> None:
        """Write a chunk of payload; prepare() must have been awaited first."""
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            raise RuntimeError("Cannot call write() after write_eof()")
        if self._payload_writer is None:
            raise RuntimeError("Cannot call write() before prepare()")

        await self._payload_writer.write(data)

    async def drain(self) -> None:
        """Deprecated: flush the writer; use ``await resp.write()`` instead."""
        assert not self._eof_sent, "EOF has already been sent"
        assert self._payload_writer is not None, "Response has not been started"
        warnings.warn(
            "drain method is deprecated, use await resp.write()",
            DeprecationWarning,
            stacklevel=2,
        )
        await self._payload_writer.drain()

    async def write_eof(self, data: bytes = b"") -> None:
        """Finish the payload, optionally writing a final chunk; idempotent."""
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            return

        assert self._payload_writer is not None, "Response has not been started"

        await self._payload_writer.write_eof(data)
        self._eof_sent = True
        # Drop references so request/writer can be garbage collected.
        self._req = None
        self._body_length = self._payload_writer.output_size
        self._payload_writer = None

    def __repr__(self) -> str:
        if self._eof_sent:
            info = "eof"
        elif self.prepared:
            assert self._req is not None
            info = f"{self._req.method} {self._req.path} "
        else:
            info = "not prepared"
        return f"<{self.__class__.__name__} {self.reason} {info}>"

    # MutableMapping interface delegating to the per-response state dict.

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    # Responses compare and hash by identity only.

    def __hash__(self) -> int:
        return hash(id(self))

    def __eq__(self, other: object) -> bool:
        return self is other
|
| 583 |
+
|
| 584 |
+
|
| 585 |
+
class Response(StreamResponse):
|
| 586 |
+
def __init__(
    self,
    *,
    body: Any = None,
    status: int = 200,
    reason: Optional[str] = None,
    text: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: Optional[str] = None,
    charset: Optional[str] = None,
    zlib_executor_size: Optional[int] = None,
    zlib_executor: Optional[Executor] = None,
) -> None:
    """In-memory response: body/text, headers and Content-Type negotiation.

    ``body`` and ``text`` are mutually exclusive; ``content_type``/``charset``
    must not be combined with an explicit Content-Type header.
    """
    if body is not None and text is not None:
        raise ValueError("body and text are not allowed together")

    # Normalize headers to a CIMultiDict we can mutate.
    if headers is None:
        real_headers: CIMultiDict[str] = CIMultiDict()
    elif not isinstance(headers, CIMultiDict):
        real_headers = CIMultiDict(headers)
    else:
        real_headers = headers  # = cast('CIMultiDict[str]', headers)

    if content_type is not None and "charset" in content_type:
        raise ValueError("charset must not be in content_type " "argument")

    if text is not None:
        if hdrs.CONTENT_TYPE in real_headers:
            if content_type or charset:
                raise ValueError(
                    "passing both Content-Type header and "
                    "content_type or charset params "
                    "is forbidden"
                )
        else:
            # fast path for filling headers
            if not isinstance(text, str):
                raise TypeError("text argument must be str (%r)" % type(text))
            if content_type is None:
                content_type = "text/plain"
            if charset is None:
                charset = "utf-8"
            real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset

            # fast path: encode here and hand over as body, skipping the
            # text.setter work after super().__init__()
            body = text.encode(charset)
            text = None
    else:
        if hdrs.CONTENT_TYPE in real_headers:
            if content_type is not None or charset is not None:
                raise ValueError(
                    "passing both Content-Type header and "
                    "content_type or charset params "
                    "is forbidden"
                )
        else:
            if content_type is not None:
                if charset is not None:
                    content_type += "; charset=" + charset
                real_headers[hdrs.CONTENT_TYPE] = content_type

    super().__init__(status=status, reason=reason, headers=real_headers)

    if text is not None:
        self.text = text
    else:
        self.body = body

    # Cached compressed body plus executor knobs for offloading zlib work.
    self._compressed_body: Optional[bytes] = None
    self._zlib_executor_size = zlib_executor_size
    self._zlib_executor = zlib_executor
|
| 655 |
+
|
| 656 |
+
@property
def body(self) -> Optional[Union[bytes, Payload]]:
    """Response body as bytes/Payload, or None if unset."""
    return self._body
|
| 659 |
+
|
| 660 |
+
@body.setter
def body(self, body: Any) -> None:
    # Accept None, raw bytes/bytearray, or anything the payload registry
    # can wrap; reject unknown types.
    if body is None:
        self._body = None
    elif isinstance(body, (bytes, bytearray)):
        self._body = body
    else:
        try:
            self._body = body = payload.PAYLOAD_REGISTRY.get(body)
        except payload.LookupError:
            raise ValueError("Unsupported body type %r" % type(body))

        headers = self._headers

        # set content-type
        if hdrs.CONTENT_TYPE not in headers:
            headers[hdrs.CONTENT_TYPE] = body.content_type

        # copy payload headers
        if body.headers:
            for key, value in body.headers.items():
                if key not in headers:
                    headers[key] = value

    # Any cached compressed form is stale now.
    self._compressed_body = None
|
| 685 |
+
|
| 686 |
+
@property
def text(self) -> Optional[str]:
    """Return the body decoded as text, or None when no body is set."""
    body = self._body
    if body is None:
        return None
    encoding = self.charset or "utf-8"
    return body.decode(encoding)
|
| 691 |
+
|
| 692 |
+
@text.setter
def text(self, text: str) -> None:
    # Encode the text into _body using the response charset, defaulting
    # the content type to text/plain; charset=utf-8 when unset.
    assert text is None or isinstance(
        text, str
    ), "text argument must be str (%r)" % type(text)

    if self.content_type == "application/octet-stream":
        self.content_type = "text/plain"
    if self.charset is None:
        self.charset = "utf-8"

    self._body = text.encode(self.charset)
    # Any cached compressed form is stale now.
    self._compressed_body = None
|
| 705 |
+
|
| 706 |
+
@property
def content_length(self) -> Optional[int]:
    """Effective Content-Length, or None when it cannot be determined.

    Priority: chunked -> None; explicit header; cached compressed body;
    Payload (unknown) -> None; raw body length; otherwise 0.
    """
    if self._chunked:
        return None

    if hdrs.CONTENT_LENGTH in self._headers:
        return int(self._headers[hdrs.CONTENT_LENGTH])

    if self._compressed_body is not None:
        # Return length of the compressed body
        return len(self._compressed_body)
    elif isinstance(self._body, Payload):
        # A payload without content length, or a compressed payload
        return None
    elif self._body is not None:
        return len(self._body)
    else:
        return 0
|
| 724 |
+
|
| 725 |
+
@content_length.setter
|
| 726 |
+
def content_length(self, value: Optional[int]) -> None:
|
| 727 |
+
raise RuntimeError("Content length is set automatically")
|
| 728 |
+
|
| 729 |
+
async def write_eof(self, data: bytes = b"") -> None:
|
| 730 |
+
if self._eof_sent:
|
| 731 |
+
return
|
| 732 |
+
if self._compressed_body is None:
|
| 733 |
+
body: Optional[Union[bytes, Payload]] = self._body
|
| 734 |
+
else:
|
| 735 |
+
body = self._compressed_body
|
| 736 |
+
assert not data, f"data arg is not supported, got {data!r}"
|
| 737 |
+
assert self._req is not None
|
| 738 |
+
assert self._payload_writer is not None
|
| 739 |
+
if body is not None:
|
| 740 |
+
if self._must_be_empty_body:
|
| 741 |
+
await super().write_eof()
|
| 742 |
+
elif isinstance(self._body, Payload):
|
| 743 |
+
await self._body.write(self._payload_writer)
|
| 744 |
+
await super().write_eof()
|
| 745 |
+
else:
|
| 746 |
+
await super().write_eof(cast(bytes, body))
|
| 747 |
+
else:
|
| 748 |
+
await super().write_eof()
|
| 749 |
+
|
| 750 |
+
async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
|
| 751 |
+
if hdrs.CONTENT_LENGTH in self._headers:
|
| 752 |
+
if should_remove_content_length(request.method, self.status):
|
| 753 |
+
del self._headers[hdrs.CONTENT_LENGTH]
|
| 754 |
+
elif not self._chunked:
|
| 755 |
+
if isinstance(self._body, Payload):
|
| 756 |
+
if self._body.size is not None:
|
| 757 |
+
self._headers[hdrs.CONTENT_LENGTH] = str(self._body.size)
|
| 758 |
+
else:
|
| 759 |
+
body_len = len(self._body) if self._body else "0"
|
| 760 |
+
# https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-7
|
| 761 |
+
if body_len != "0" or (
|
| 762 |
+
self.status != 304 and request.method.upper() != hdrs.METH_HEAD
|
| 763 |
+
):
|
| 764 |
+
self._headers[hdrs.CONTENT_LENGTH] = str(body_len)
|
| 765 |
+
|
| 766 |
+
return await super()._start(request)
|
| 767 |
+
|
| 768 |
+
async def _do_start_compression(self, coding: ContentCoding) -> None:
|
| 769 |
+
if self._chunked or isinstance(self._body, Payload):
|
| 770 |
+
return await super()._do_start_compression(coding)
|
| 771 |
+
|
| 772 |
+
if coding != ContentCoding.identity:
|
| 773 |
+
# Instead of using _payload_writer.enable_compression,
|
| 774 |
+
# compress the whole body
|
| 775 |
+
compressor = ZLibCompressor(
|
| 776 |
+
encoding=str(coding.value),
|
| 777 |
+
max_sync_chunk_size=self._zlib_executor_size,
|
| 778 |
+
executor=self._zlib_executor,
|
| 779 |
+
)
|
| 780 |
+
assert self._body is not None
|
| 781 |
+
if self._zlib_executor_size is None and len(self._body) > 1024 * 1024:
|
| 782 |
+
warnings.warn(
|
| 783 |
+
"Synchronous compression of large response bodies "
|
| 784 |
+
f"({len(self._body)} bytes) might block the async event loop. "
|
| 785 |
+
"Consider providing a custom value to zlib_executor_size/"
|
| 786 |
+
"zlib_executor response properties or disabling compression on it."
|
| 787 |
+
)
|
| 788 |
+
self._compressed_body = (
|
| 789 |
+
await compressor.compress(self._body) + compressor.flush()
|
| 790 |
+
)
|
| 791 |
+
assert self._compressed_body is not None
|
| 792 |
+
|
| 793 |
+
self._headers[hdrs.CONTENT_ENCODING] = coding.value
|
| 794 |
+
self._headers[hdrs.CONTENT_LENGTH] = str(len(self._compressed_body))
|
| 795 |
+
|
| 796 |
+
|
| 797 |
+
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Return a :class:`Response` whose body is *data* serialized with *dumps*.

    *data* is mutually exclusive with the pre-serialized *text*/*body*
    arguments; passing both raises :class:`ValueError`.
    """
    if data is not sentinel:
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/web_routedef.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import os # noqa
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Callable,
|
| 7 |
+
Dict,
|
| 8 |
+
Iterator,
|
| 9 |
+
List,
|
| 10 |
+
Optional,
|
| 11 |
+
Sequence,
|
| 12 |
+
Type,
|
| 13 |
+
Union,
|
| 14 |
+
overload,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
import attr
|
| 18 |
+
|
| 19 |
+
from . import hdrs
|
| 20 |
+
from .abc import AbstractView
|
| 21 |
+
from .typedefs import Handler, PathLike
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:
|
| 24 |
+
from .web_request import Request
|
| 25 |
+
from .web_response import StreamResponse
|
| 26 |
+
from .web_urldispatcher import AbstractRoute, UrlDispatcher
|
| 27 |
+
else:
|
| 28 |
+
Request = StreamResponse = UrlDispatcher = AbstractRoute = None
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
__all__ = (
|
| 32 |
+
"AbstractRouteDef",
|
| 33 |
+
"RouteDef",
|
| 34 |
+
"StaticDef",
|
| 35 |
+
"RouteTableDef",
|
| 36 |
+
"head",
|
| 37 |
+
"options",
|
| 38 |
+
"get",
|
| 39 |
+
"post",
|
| 40 |
+
"patch",
|
| 41 |
+
"put",
|
| 42 |
+
"delete",
|
| 43 |
+
"route",
|
| 44 |
+
"view",
|
| 45 |
+
"static",
|
| 46 |
+
)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class AbstractRouteDef(abc.ABC):
    """Base class for declarative route definitions."""

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Register this definition on *router* and return the created routes."""
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
_HandlerType = Union[Type[AbstractView], Handler]
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """Immutable description of a single route: method, path and handler."""

    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extra = "".join(
            f", {name}={value!r}" for name, value in sorted(self.kwargs.items())
        )
        return "<RouteDef {method} {path} -> {handler.__name__!r}" "{info}>".format(
            method=self.method, path=self.path, handler=self.handler, info=extra
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add this route to *router*, preferring the dedicated add_* shortcut."""
        if self.method not in hdrs.METH_ALL:
            return [
                router.add_route(self.method, self.path, self.handler, **self.kwargs)
            ]
        # Known HTTP verbs have dedicated helpers, e.g. router.add_get().
        shortcut = getattr(router, "add_" + self.method.lower())
        return [shortcut(self.path, self.handler, **self.kwargs)]
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """Immutable description of a static-files mount point."""

    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extra = "".join(
            f", {name}={value!r}" for name, value in sorted(self.kwargs.items())
        )
        return "<StaticDef {prefix} -> {path}" "{info}>".format(
            prefix=self.prefix, path=self.path, info=extra
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Mount the static resource on *router* and return its routes."""
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        routes = resource.get_info().get("routes", {})
        return list(routes.values())
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Create a :class:`RouteDef` for *method* and *path*."""
    return RouteDef(method, path, handler, kwargs)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(METH_HEAD, ...)``."""
    return route(hdrs.METH_HEAD, path, handler, **kwargs)
|
| 109 |
+
|
| 110 |
+
|
| 111 |
+
def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(METH_OPTIONS, ...)``."""
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """Shortcut for ``route(METH_GET, ...)``.

    When *allow_head* is true the router also answers HEAD on the same path.
    """
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(METH_POST, ...)``."""
    return route(hdrs.METH_POST, path, handler, **kwargs)
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(METH_PUT, ...)``."""
    return route(hdrs.METH_PUT, path, handler, **kwargs)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(METH_PATCH, ...)``."""
    return route(hdrs.METH_PATCH, path, handler, **kwargs)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for ``route(METH_DELETE, ...)``."""
    return route(hdrs.METH_DELETE, path, handler, **kwargs)
|
| 142 |
+
|
| 143 |
+
|
| 144 |
+
def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Route a class-based view: any method (METH_ANY) on *path*."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)
|
| 146 |
+
|
| 147 |
+
|
| 148 |
+
def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Create a :class:`StaticDef` serving files under *path* at *prefix*."""
    return StaticDef(prefix, path, kwargs)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
_Deco = Callable[[_HandlerType], _HandlerType]
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class RouteTableDef(Sequence[AbstractRouteDef]):
    """A sequence of route definitions, filled via decorator methods."""

    def __init__(self) -> None:
        # Definitions are kept in registration order.
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        return f"<RouteTableDef count={len(self._items)}>"

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef: ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Return a decorator that registers its target for *method* *path*."""

        def wrapper(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return wrapper

    def head(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def options(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_OPTIONS, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Register a static-files mount; returns nothing (not a decorator)."""
        self._items.append(StaticDef(prefix, path, kwargs))
|
evalkit_tf437/lib/python3.10/site-packages/aiohttp/web_urldispatcher.py
ADDED
|
@@ -0,0 +1,1299 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import base64
|
| 4 |
+
import functools
|
| 5 |
+
import hashlib
|
| 6 |
+
import html
|
| 7 |
+
import inspect
|
| 8 |
+
import keyword
|
| 9 |
+
import os
|
| 10 |
+
import re
|
| 11 |
+
import sys
|
| 12 |
+
import warnings
|
| 13 |
+
from functools import wraps
|
| 14 |
+
from pathlib import Path
|
| 15 |
+
from types import MappingProxyType
|
| 16 |
+
from typing import (
|
| 17 |
+
TYPE_CHECKING,
|
| 18 |
+
Any,
|
| 19 |
+
Awaitable,
|
| 20 |
+
Callable,
|
| 21 |
+
Container,
|
| 22 |
+
Dict,
|
| 23 |
+
Final,
|
| 24 |
+
Generator,
|
| 25 |
+
Iterable,
|
| 26 |
+
Iterator,
|
| 27 |
+
List,
|
| 28 |
+
Mapping,
|
| 29 |
+
NoReturn,
|
| 30 |
+
Optional,
|
| 31 |
+
Pattern,
|
| 32 |
+
Set,
|
| 33 |
+
Sized,
|
| 34 |
+
Tuple,
|
| 35 |
+
Type,
|
| 36 |
+
TypedDict,
|
| 37 |
+
Union,
|
| 38 |
+
cast,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
from yarl import URL, __version__ as yarl_version
|
| 42 |
+
|
| 43 |
+
from . import hdrs
|
| 44 |
+
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
|
| 45 |
+
from .helpers import DEBUG
|
| 46 |
+
from .http import HttpVersion11
|
| 47 |
+
from .typedefs import Handler, PathLike
|
| 48 |
+
from .web_exceptions import (
|
| 49 |
+
HTTPException,
|
| 50 |
+
HTTPExpectationFailed,
|
| 51 |
+
HTTPForbidden,
|
| 52 |
+
HTTPMethodNotAllowed,
|
| 53 |
+
HTTPNotFound,
|
| 54 |
+
)
|
| 55 |
+
from .web_fileresponse import FileResponse
|
| 56 |
+
from .web_request import Request
|
| 57 |
+
from .web_response import Response, StreamResponse
|
| 58 |
+
from .web_routedef import AbstractRouteDef
|
| 59 |
+
|
| 60 |
+
__all__ = (
|
| 61 |
+
"UrlDispatcher",
|
| 62 |
+
"UrlMappingMatchInfo",
|
| 63 |
+
"AbstractResource",
|
| 64 |
+
"Resource",
|
| 65 |
+
"PlainResource",
|
| 66 |
+
"DynamicResource",
|
| 67 |
+
"AbstractRoute",
|
| 68 |
+
"ResourceRoute",
|
| 69 |
+
"StaticResource",
|
| 70 |
+
"View",
|
| 71 |
+
)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
if TYPE_CHECKING:
|
| 75 |
+
from .web_app import Application
|
| 76 |
+
|
| 77 |
+
BaseDict = Dict[str, str]
|
| 78 |
+
else:
|
| 79 |
+
BaseDict = dict
|
| 80 |
+
|
| 81 |
+
CIRCULAR_SYMLINK_ERROR = (
|
| 82 |
+
(OSError,)
|
| 83 |
+
if sys.version_info < (3, 10) and sys.platform.startswith("win32")
|
| 84 |
+
else (RuntimeError,) if sys.version_info < (3, 13) else ()
|
| 85 |
+
)
|
| 86 |
+
|
| 87 |
+
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))
|
| 88 |
+
|
| 89 |
+
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
|
| 90 |
+
r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
|
| 91 |
+
)
|
| 92 |
+
ROUTE_RE: Final[Pattern[str]] = re.compile(
|
| 93 |
+
r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
|
| 94 |
+
)
|
| 95 |
+
PATH_SEP: Final[str] = re.escape("/")
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]]
|
| 99 |
+
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]
|
| 100 |
+
|
| 101 |
+
html_escape = functools.partial(html.escape, quote=True)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class _InfoDict(TypedDict, total=False):
    # Keys returned by get_info(); which subset appears depends on the
    # concrete resource/route type (plain, dynamic, static, sub-app, ...).
    path: str

    formatter: str
    pattern: Pattern[str]

    directory: Path
    prefix: str
    routes: Mapping[str, "AbstractRoute"]

    app: "Application"

    domain: str

    rule: "AbstractRuleMatching"

    http_exception: HTTPException
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class AbstractResource(Sized, Iterable["AbstractRoute"]):
    """Common interface for everything the URL dispatcher can match against."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        self._name = name

    @property
    def name(self) -> Optional[str]:
        """Optional resource name, used for named URL building."""
        return self._name

    @property
    @abc.abstractmethod
    def canonical(self) -> str:
        """Exposes the resource's canonical path.

        For example '/foo/bar/{name}'

        """

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, **kwargs: str) -> URL:
        """Construct url for resource with additional params."""

    @abc.abstractmethod  # pragma: no branch
    async def resolve(self, request: Request) -> _Resolve:
        """Resolve resource.

        Return (UrlMappingMatchInfo, allowed_methods) pair.
        """

    @abc.abstractmethod
    def add_prefix(self, prefix: str) -> None:
        """Add a prefix to processed URLs.

        Required for subapplications support.
        """

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    def freeze(self) -> None:
        """Hook invoked on app freeze; the default implementation is a no-op."""

    @abc.abstractmethod
    def raw_match(self, path: str) -> bool:
        """Perform a raw match against path"""
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
class AbstractRoute(abc.ABC):
    """Base class binding an HTTP method to a handler callable."""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
        resource: Optional[AbstractResource] = None,
    ) -> None:

        if expect_handler is None:
            expect_handler = _default_expect_handler

        assert asyncio.iscoroutinefunction(
            expect_handler
        ), f"Coroutine is expected, got {expect_handler!r}"

        method = method.upper()
        if not HTTP_METHOD_RE.match(method):
            raise ValueError(f"{method} is not allowed HTTP method")

        assert callable(handler), handler
        if asyncio.iscoroutinefunction(handler):
            pass
        elif inspect.isgeneratorfunction(handler):
            warnings.warn(
                "Bare generators are deprecated, " "use @coroutine wrapper",
                DeprecationWarning,
            )
        elif isinstance(handler, type) and issubclass(handler, AbstractView):
            pass
        else:
            warnings.warn(
                "Bare functions are deprecated, " "use async ones", DeprecationWarning
            )

            # Wrap a legacy synchronous handler so callers always await
            # a coroutine returning a StreamResponse.
            @wraps(handler)
            async def handler_wrapper(request: Request) -> StreamResponse:
                result = old_handler(request)  # type: ignore[call-arg]
                if asyncio.iscoroutine(result):
                    result = await result
                assert isinstance(result, StreamResponse)
                return result

            old_handler = handler
            handler = handler_wrapper

        self._method = method
        self._handler = handler
        self._expect_handler = expect_handler
        self._resource = resource

    @property
    def method(self) -> str:
        """Uppercased HTTP method this route answers to."""
        return self._method

    @property
    def handler(self) -> Handler:
        return self._handler

    @property
    @abc.abstractmethod
    def name(self) -> Optional[str]:
        """Optional route's name, always equals to resource's name."""

    @property
    def resource(self) -> Optional[AbstractResource]:
        return self._resource

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""

    async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]:
        """Delegate Expect-header processing to the configured handler."""
        return await self._expect_handler(request)
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
    """Dict of matched path variables plus the route that matched."""

    def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
        super().__init__(match_dict)
        self._route = route
        # Application stack, outermost first; populated via add_app().
        self._apps: List[Application] = []
        self._current_app: Optional[Application] = None
        self._frozen = False

    @property
    def handler(self) -> Handler:
        return self._route.handler

    @property
    def route(self) -> AbstractRoute:
        return self._route

    @property
    def expect_handler(self) -> _ExpectHandler:
        return self._route.handle_expect_header

    @property
    def http_exception(self) -> Optional[HTTPException]:
        # A successful match carries no error.
        return None

    def get_info(self) -> _InfoDict:  # type: ignore[override]
        return self._route.get_info()

    @property
    def apps(self) -> Tuple["Application", ...]:
        return tuple(self._apps)

    def add_app(self, app: "Application") -> None:
        """Push *app* at the head of the application stack."""
        if self._frozen:
            raise RuntimeError("Cannot change apps stack after .freeze() call")
        if self._current_app is None:
            self._current_app = app
        self._apps.insert(0, app)

    @property
    def current_app(self) -> "Application":
        app = self._current_app
        assert app is not None
        return app

    @current_app.setter
    def current_app(self, app: "Application") -> None:
        if DEBUG:  # pragma: no cover
            if app not in self._apps:
                raise RuntimeError(
                    "Expected one of the following apps {!r}, got {!r}".format(
                        self._apps, app
                    )
                )
        self._current_app = app

    def freeze(self) -> None:
        self._frozen = True

    def __repr__(self) -> str:
        return f"<MatchInfo {super().__repr__()}: {self._route}>"
|
| 311 |
+
|
| 312 |
+
|
| 313 |
+
class MatchInfoError(UrlMappingMatchInfo):
    """Match info representing a failed resolution (e.g. 404/405)."""

    def __init__(self, http_exception: HTTPException) -> None:
        self._exception = http_exception
        super().__init__({}, SystemRoute(self._exception))

    @property
    def http_exception(self) -> HTTPException:
        return self._exception

    def __repr__(self) -> str:
        return f"<MatchInfoError {self._exception.status}: {self._exception.reason}>"
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
async def _default_expect_handler(request: Request) -> None:
    """Default handler for Expect header.

    Just send "100 Continue" to client.
    raise HTTPExpectationFailed if value of header is not "100-continue"
    """
    expect = request.headers.get(hdrs.EXPECT, "")
    # Expect is only meaningful on HTTP/1.1; ignore it otherwise.
    if request.version != HttpVersion11:
        return
    if expect.lower() != "100-continue":
        raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
    await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
    # Reset output_size as we haven't started the main body yet.
    request.writer.output_size = 0
|
| 342 |
+
|
| 343 |
+
|
| 344 |
+
class Resource(AbstractResource):
    """Base class for resources that keep an explicit list of routes."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._routes: List[ResourceRoute] = []

    def add_route(
        self,
        method: str,
        handler: Union[Type[AbstractView], Handler],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> "ResourceRoute":
        """Register *handler* for *method*, rejecting shadowed registrations."""
        for registered in self._routes:
            # A duplicate method, or an existing catch-all (METH_ANY),
            # would make the new route unreachable.
            if registered.method in (method, hdrs.METH_ANY):
                raise RuntimeError(
                    "Added route will never be executed, "
                    "method {route.method} is already "
                    "registered".format(route=registered)
                )

        route = ResourceRoute(method, handler, self, expect_handler=expect_handler)
        self.register_route(route)
        return route

    def register_route(self, route: "ResourceRoute") -> None:
        """Append a pre-built ResourceRoute to this resource."""
        assert isinstance(
            route, ResourceRoute
        ), f"Instance of Route class is required, got {route!r}"
        self._routes.append(route)

    async def resolve(self, request: Request) -> _Resolve:
        """Match the request path, then look for a route with its method."""
        allowed_methods: Set[str] = set()

        match_dict = self._match(request.rel_url.path_safe)
        if match_dict is None:
            # Path does not belong to this resource at all.
            return None, allowed_methods

        for route in self._routes:
            allowed_methods.add(route.method)
            if route.method in (request.method, hdrs.METH_ANY):
                return UrlMappingMatchInfo(match_dict, route), allowed_methods
        # Path matched but no route accepts the method; report what would.
        return None, allowed_methods

    @abc.abstractmethod
    def _match(self, path: str) -> Optional[Dict[str, str]]:
        """Return the match dict for *path*, or None when it does not match."""

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator["ResourceRoute"]:
        return iter(self._routes)

    # TODO: implement all abstract methods
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
class PlainResource(Resource):
    """Resource matching exactly one literal path."""

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        assert not path or path.startswith("/")
        self._path = path

    @property
    def canonical(self) -> str:
        return self._path

    def freeze(self) -> None:
        # An empty path canonicalizes to the root once frozen.
        if not self._path:
            self._path = "/"

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._path = prefix + self._path

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # string comparison is about 10 times faster than regexp matching
        return {} if path == self._path else None

    def raw_match(self, path: str) -> bool:
        return path == self._path

    def get_info(self) -> _InfoDict:
        return {"path": self._path}

    def url_for(self) -> URL:  # type: ignore[override]
        return URL.build(path=self._path, encoded=True)

    def __repr__(self) -> str:
        name = "" if self.name is None else f"'{self.name}' "
        return f"<PlainResource {name} {self._path}>"
|
| 442 |
+
|
| 443 |
+
|
| 444 |
+
class DynamicResource(Resource):
    """Resource whose path contains ``{variable}`` placeholders."""

    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
    GOOD = r"[^{}/]+"

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._orig_path = path
        pattern = ""
        formatter = ""
        for part in ROUTE_RE.split(path):
            dyn = self.DYN.fullmatch(part)
            if dyn is not None:
                # Bare {var}: match one path segment (no "/", "{", "}").
                pattern += "(?P<{}>{})".format(dyn.group("var"), self.GOOD)
                formatter += "{" + dyn.group("var") + "}"
                continue

            dyn = self.DYN_WITH_RE.fullmatch(part)
            if dyn is not None:
                # {var:regex}: use the user-supplied expression verbatim.
                pattern += "(?P<{var}>{re})".format(**dyn.groupdict())
                formatter += "{" + dyn.group("var") + "}"
                continue

            if "{" in part or "}" in part:
                raise ValueError(f"Invalid path '{path}'['{part}']")

            part = _requote_path(part)
            formatter += part
            pattern += re.escape(part)

        try:
            compiled = re.compile(pattern)
        except re.error as exc:
            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
        assert compiled.pattern.startswith(PATH_SEP)
        assert formatter.startswith("/")
        self._pattern = compiled
        self._formatter = formatter

    @property
    def canonical(self) -> str:
        return self._formatter

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
        self._formatter = prefix + self._formatter

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        match = self._pattern.fullmatch(path)
        if match is None:
            return None
        return {
            name: _unquote_path_safe(raw) for name, raw in match.groupdict().items()
        }

    def raw_match(self, path: str) -> bool:
        return path == self._orig_path

    def get_info(self) -> _InfoDict:
        return {"formatter": self._formatter, "pattern": self._pattern}

    def url_for(self, **parts: str) -> URL:
        quoted = {key: _quote_path(value) for key, value in parts.items()}
        return URL.build(path=self._formatter.format_map(quoted), encoded=True)

    def __repr__(self) -> str:
        name = "" if self.name is None else f"'{self.name}' "
        return f"<DynamicResource {name} {self._formatter}>"
|
| 518 |
+
|
| 519 |
+
|
| 520 |
+
class PrefixResource(AbstractResource):
    """Base class for resources that match everything under a URL prefix."""

    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
        assert not prefix or prefix.startswith("/"), prefix
        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
        super().__init__(name=name)
        self._prefix = _requote_path(prefix)
        # Pre-computed "prefix + /" for cheap startswith() checks.
        self._prefix2 = self._prefix + "/"

    @property
    def canonical(self) -> str:
        return self._prefix

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._prefix = prefix + self._prefix
        self._prefix2 = self._prefix + "/"

    def raw_match(self, prefix: str) -> bool:
        # Prefix resources are never reused via raw path equality.
        return False

    # TODO: impl missing abstract methods
|
| 543 |
+
|
| 544 |
+
|
| 545 |
+
class StaticResource(PrefixResource):
    """Resource serving files from a local directory under a URL prefix."""

    # Query-string key used for the cache-busting content-hash version.
    VERSION_KEY = "v"

    def __init__(
        self,
        prefix: str,
        directory: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> None:
        super().__init__(prefix, name=name)
        try:
            # strict=True raises if the directory does not exist.
            directory = Path(directory).expanduser().resolve(strict=True)
        except FileNotFoundError as error:
            raise ValueError(f"'{directory}' does not exist") from error
        if not directory.is_dir():
            raise ValueError(f"'{directory}' is not a directory")
        self._directory = directory
        self._show_index = show_index
        self._chunk_size = chunk_size
        self._follow_symlinks = follow_symlinks
        self._expect_handler = expect_handler
        self._append_version = append_version

        # Both GET and HEAD are served by the same handler.
        self._routes = {
            "GET": ResourceRoute(
                "GET", self._handle, self, expect_handler=expect_handler
            ),
            "HEAD": ResourceRoute(
                "HEAD", self._handle, self, expect_handler=expect_handler
            ),
        }

    def url_for(  # type: ignore[override]
        self,
        *,
        filename: PathLike,
        append_version: Optional[bool] = None,
    ) -> URL:
        """Build a URL for *filename*; optionally add a content-hash query."""
        if append_version is None:
            append_version = self._append_version
        filename = str(filename).lstrip("/")

        url = URL.build(path=self._prefix, encoded=True)
        # filename is not encoded
        if YARL_VERSION < (1, 6):
            # Older yarl double-decodes "%"; pre-escape it.
            url = url / filename.replace("%", "%25")
        else:
            url = url / filename

        if append_version:
            unresolved_path = self._directory.joinpath(filename)
            try:
                # Same containment check as _resolve_path_to_response().
                if self._follow_symlinks:
                    normalized_path = Path(os.path.normpath(unresolved_path))
                    normalized_path.relative_to(self._directory)
                    filepath = normalized_path.resolve()
                else:
                    filepath = unresolved_path.resolve()
                    filepath.relative_to(self._directory)
            except (ValueError, FileNotFoundError):
                # ValueError for case when path point to symlink
                # with follow_symlinks is False
                return url  # relatively safe
            if filepath.is_file():
                # TODO cache file content
                # with file watcher for cache invalidation
                with filepath.open("rb") as f:
                    file_bytes = f.read()
                h = self._get_file_hash(file_bytes)
                url = url.with_query({self.VERSION_KEY: h})
                return url
        return url

    @staticmethod
    def _get_file_hash(byte_array: bytes) -> str:
        """Return a URL-safe base64 SHA-256 digest of *byte_array*."""
        m = hashlib.sha256()  # todo sha256 can be configurable param
        m.update(byte_array)
        b64 = base64.urlsafe_b64encode(m.digest())
        return b64.decode("ascii")

    def get_info(self) -> _InfoDict:
        return {
            "directory": self._directory,
            "prefix": self._prefix,
            "routes": self._routes,
        }

    def set_options_route(self, handler: Handler) -> None:
        """Install a custom OPTIONS handler (e.g. for CORS preflight)."""
        if "OPTIONS" in self._routes:
            raise RuntimeError("OPTIONS route was set already")
        self._routes["OPTIONS"] = ResourceRoute(
            "OPTIONS", handler, self, expect_handler=self._expect_handler
        )

    async def resolve(self, request: Request) -> _Resolve:
        path = request.rel_url.path_safe
        method = request.method
        allowed_methods = set(self._routes)
        # Reject anything not under "<prefix>/" (or the prefix itself).
        if not path.startswith(self._prefix2) and path != self._prefix:
            return None, set()

        if method not in allowed_methods:
            return None, allowed_methods

        # Everything after "<prefix>/" is the file name, percent-decoded.
        match_dict = {"filename": _unquote_path_safe(path[len(self._prefix) + 1 :])}
        return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes.values())

    async def _handle(self, request: Request) -> StreamResponse:
        rel_url = request.match_info["filename"]
        filename = Path(rel_url)
        if filename.anchor:
            # rel_url is an absolute name like
            # /static/\\machine_name\c$ or /static/D:\path
            # where the static dir is totally different
            raise HTTPForbidden()

        unresolved_path = self._directory.joinpath(filename)
        loop = asyncio.get_running_loop()
        # File-system access is blocking; run it in the default executor.
        return await loop.run_in_executor(
            None, self._resolve_path_to_response, unresolved_path
        )

    def _resolve_path_to_response(self, unresolved_path: Path) -> StreamResponse:
        """Take the unresolved path and query the file system to form a response."""
        # Check for access outside the root directory. For follow symlinks, URI
        # cannot traverse out, but symlinks can. Otherwise, no access outside
        # root is permitted.
        try:
            if self._follow_symlinks:
                normalized_path = Path(os.path.normpath(unresolved_path))
                normalized_path.relative_to(self._directory)
                file_path = normalized_path.resolve()
            else:
                file_path = unresolved_path.resolve()
                file_path.relative_to(self._directory)
        except (ValueError, *CIRCULAR_SYMLINK_ERROR) as error:
            # ValueError is raised for the relative check. Circular symlinks
            # raise here on resolving for python < 3.13.
            raise HTTPNotFound() from error

        # if path is a directory, return the contents if permitted. Note the
        # directory check will raise if a segment is not readable.
        try:
            if file_path.is_dir():
                if self._show_index:
                    return Response(
                        text=self._directory_as_html(file_path),
                        content_type="text/html",
                    )
                else:
                    raise HTTPForbidden()
        except PermissionError as error:
            raise HTTPForbidden() from error

        # Return the file response, which handles all other checks.
        return FileResponse(file_path, chunk_size=self._chunk_size)

    def _directory_as_html(self, dir_path: Path) -> str:
        """returns directory's index as html."""
        assert dir_path.is_dir()

        relative_path_to_dir = dir_path.relative_to(self._directory).as_posix()
        index_of = f"Index of /{html_escape(relative_path_to_dir)}"
        h1 = f"<h1>{index_of}</h1>"

        index_list = []
        dir_index = dir_path.iterdir()
        for _file in sorted(dir_index):
            # show file url as relative to static path
            rel_path = _file.relative_to(self._directory).as_posix()
            quoted_file_url = _quote_path(f"{self._prefix}/{rel_path}")

            # if file is a directory, add '/' to the end of the name
            if _file.is_dir():
                file_name = f"{_file.name}/"
            else:
                file_name = _file.name

            index_list.append(
                f'<li><a href="{quoted_file_url}">{html_escape(file_name)}</a></li>'
            )
        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
        body = f"<body>\n{h1}\n{ul}\n</body>"

        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
        html = f"<html>\n{head_str}\n{body}\n</html>"

        return html

    def __repr__(self) -> str:
        name = "'" + self.name + "'" if self.name is not None else ""
        return "<StaticResource {name} {path} -> {directory!r}>".format(
            name=name, path=self._prefix, directory=self._directory
        )
|
| 751 |
+
|
| 752 |
+
|
| 753 |
+
class PrefixedSubAppResource(PrefixResource):
    """Mounts a sub-application under a URL prefix."""

    def __init__(self, prefix: str, app: "Application") -> None:
        super().__init__(prefix)
        self._app = app
        self._add_prefix_to_resources(prefix)

    def add_prefix(self, prefix: str) -> None:
        super().add_prefix(prefix)
        self._add_prefix_to_resources(prefix)

    def _add_prefix_to_resources(self, prefix: str) -> None:
        """Re-root every sub-app resource under *prefix*."""
        router = self._app.router
        for resource in router.resources():
            # Since the canonical path of a resource is about
            # to change, we need to unindex it and then reindex
            router.unindex_resource(resource)
            resource.add_prefix(prefix)
            router.index_resource(resource)

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not supported by sub-application root")

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "prefix": self._prefix}

    async def resolve(self, request: Request) -> _Resolve:
        """Delegate resolution to the sub-application's router."""
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        exc = match_info.http_exception
        if isinstance(exc, HTTPMethodNotAllowed):
            return match_info, exc.allowed_methods
        return match_info, set()

    def __len__(self) -> int:
        return len(self._app.router.routes())

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._app.router.routes())

    def __repr__(self) -> str:
        return f"<PrefixedSubAppResource {self._prefix} -> {self._app!r}>"
|
| 797 |
+
|
| 798 |
+
|
| 799 |
+
class AbstractRuleMatching(abc.ABC):
    # Interface for request-matching rules used by domain-based sub-app routing.

    @abc.abstractmethod  # pragma: no branch
    async def match(self, request: Request) -> bool:
        """Return bool if the request satisfies the criteria"""

    @abc.abstractmethod  # pragma: no branch
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @property
    @abc.abstractmethod  # pragma: no branch
    def canonical(self) -> str:
        """Return a str"""
|
| 812 |
+
|
| 813 |
+
|
| 814 |
+
class Domain(AbstractRuleMatching):
    """Rule matching an exact Host header value."""

    # One DNS label: 1-63 chars, no leading/trailing hyphen.
    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__()
        self._domain = self.validation(domain)

    @property
    def canonical(self) -> str:
        return self._domain

    def validation(self, domain: str) -> str:
        """Normalize and validate *domain*, returning ``host`` or ``host:port``."""
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        domain = domain.rstrip(".").lower()
        if not domain:
            raise ValueError("Domain cannot be empty")
        if "://" in domain:
            raise ValueError("Scheme not supported")
        url = URL("http://" + domain)
        assert url.raw_host is not None
        labels_ok = all(
            self.re_part.fullmatch(label) for label in url.raw_host.split(".")
        )
        if not labels_ok:
            raise ValueError("Domain not valid")
        if url.port == 80:
            # The default HTTP port is omitted from the canonical form.
            return url.raw_host
        return f"{url.raw_host}:{url.port}"

    async def match(self, request: Request) -> bool:
        host = request.headers.get(hdrs.HOST)
        return bool(host) and self.match_domain(host)

    def match_domain(self, host: str) -> bool:
        return host.lower() == self._domain

    def get_info(self) -> _InfoDict:
        return {"domain": self._domain}
|
| 852 |
+
|
| 853 |
+
|
| 854 |
+
class MaskDomain(Domain):
    """Rule matching a wildcard domain mask such as ``*.example.com``."""

    # Like Domain.re_part but additionally permits "*" inside a label.
    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__(domain)
        # Translate the mask to a regex: "." is literal, "*" becomes ".*".
        mask = self._domain.replace(".", r"\.").replace("*", ".*")
        self._mask = re.compile(mask)

    @property
    def canonical(self) -> str:
        return self._mask.pattern

    def match_domain(self, host: str) -> bool:
        matched = self._mask.fullmatch(host)
        return matched is not None
|
| 868 |
+
|
| 869 |
+
|
| 870 |
+
class MatchedSubAppResource(PrefixedSubAppResource):
    """Sub-app resource selected by a matching rule instead of a URL prefix."""

    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
        # Skip PrefixedSubAppResource.__init__: there is no prefix to apply.
        AbstractResource.__init__(self)
        self._prefix = ""
        self._app = app
        self._rule = rule

    @property
    def canonical(self) -> str:
        return self._rule.canonical

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "rule": self._rule}

    async def resolve(self, request: Request) -> _Resolve:
        """Resolve via the sub-app router once the rule accepts the request."""
        if not await self._rule.match(request):
            return None, set()
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        exc = match_info.http_exception
        if isinstance(exc, HTTPMethodNotAllowed):
            return match_info, exc.allowed_methods
        return match_info, set()

    def __repr__(self) -> str:
        return f"<MatchedSubAppResource -> {self._app!r}>"
|
| 897 |
+
|
| 898 |
+
|
| 899 |
+
class ResourceRoute(AbstractRoute):
    """A route with resource"""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        resource: AbstractResource,
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> None:
        """Bind *handler* to *method* on *resource*."""
        super().__init__(
            method, handler, expect_handler=expect_handler, resource=resource
        )

    def __repr__(self) -> str:
        # Bug fix: the format string was missing its closing ">",
        # producing a malformed repr like "<ResourceRoute [GET] ... -> h".
        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
            method=self.method, resource=self._resource, handler=self.handler
        )

    @property
    def name(self) -> Optional[str]:
        """Name of the owning resource, or None when detached."""
        if self._resource is None:
            return None
        return self._resource.name

    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""
        assert self._resource is not None
        return self._resource.url_for(*args, **kwargs)

    def get_info(self) -> _InfoDict:
        """Introspection info, delegated to the owning resource."""
        assert self._resource is not None
        return self._resource.get_info()
|
| 933 |
+
|
| 934 |
+
|
| 935 |
+
class SystemRoute(AbstractRoute):
    """Internal route whose handler raises a pre-built HTTP exception."""

    def __init__(self, http_exception: HTTPException) -> None:
        super().__init__(hdrs.METH_ANY, self._handle)
        self._http_exception = http_exception

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not allowed for SystemRoute")

    @property
    def name(self) -> Optional[str]:
        """System routes are never named."""
        return None

    def get_info(self) -> _InfoDict:
        return {"http_exception": self._http_exception}

    async def _handle(self, request: Request) -> StreamResponse:
        raise self._http_exception

    @property
    def status(self) -> int:
        return self._http_exception.status

    @property
    def reason(self) -> str:
        return self._http_exception.reason

    def __repr__(self) -> str:
        return f"<SystemRoute {self.status}: {self.reason}>"
|
| 963 |
+
|
| 964 |
+
|
| 965 |
+
class View(AbstractView):
    """Class-based handler dispatching on the lower-cased HTTP method name."""

    async def _iter(self) -> StreamResponse:
        if self.request.method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        method: Optional[Callable[[], Awaitable[StreamResponse]]] = getattr(
            self, self.request.method.lower(), None
        )
        if method is None:
            self._raise_allowed_methods()
        ret = await method()
        assert isinstance(ret, StreamResponse)
        return ret

    def __await__(self) -> Generator[Any, None, StreamResponse]:
        return self._iter().__await__()

    def _raise_allowed_methods(self) -> NoReturn:
        """Raise 405 advertising every method this view implements."""
        implemented = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
        raise HTTPMethodNotAllowed(self.request.method, implemented)
|
| 983 |
+
|
| 984 |
+
|
| 985 |
+
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
    """Read-only view over a router's registered resources."""

    def __init__(self, resources: List[AbstractResource]) -> None:
        self._resources = resources

    def __len__(self) -> int:
        return len(self._resources)

    def __iter__(self) -> Iterator[AbstractResource]:
        yield from self._resources

    def __contains__(self, resource: object) -> bool:
        return resource in self._resources
|
| 997 |
+
|
| 998 |
+
|
| 999 |
+
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
    """Read-only view flattening every route of every resource."""

    def __init__(self, resources: List[AbstractResource]):
        # Flatten the per-resource route lists, preserving order.
        self._routes: List[AbstractRoute] = [
            route for resource in resources for route in resource
        ]

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        yield from self._routes

    def __contains__(self, route: object) -> bool:
        return route in self._routes
|
| 1014 |
+
|
| 1015 |
+
|
| 1016 |
+
class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
|
| 1017 |
+
|
| 1018 |
+
NAME_SPLIT_RE = re.compile(r"[.:-]")
|
| 1019 |
+
|
| 1020 |
+
def __init__(self) -> None:
    """Create an empty dispatcher with no registered resources."""
    super().__init__()
    self._resources: List[AbstractResource] = []
    self._named_resources: Dict[str, AbstractResource] = {}
    # Resources bucketed by index key (see _get_resource_index_key)
    # for fast lookup in resolve().
    self._resource_index: dict[str, list[AbstractResource]] = {}
    # Rule-matched sub-apps (e.g. add_domain); cannot be path-indexed.
    self._matched_sub_app_resources: List[MatchedSubAppResource] = []
|
| 1026 |
+
|
| 1027 |
+
async def resolve(self, request: Request) -> UrlMappingMatchInfo:
    """Resolve *request* to match info, or a MatchInfoError (405/404)."""
    resource_index = self._resource_index
    allowed_methods: Set[str] = set()

    # Walk the url parts looking for candidates. We walk the url backwards
    # to ensure the most explicit match is found first. If there are multiple
    # candidates for a given url part because there are multiple resources
    # registered for the same canonical path, we resolve them in a linear
    # fashion to ensure registration order is respected.
    url_part = request.rel_url.path_safe
    while url_part:
        for candidate in resource_index.get(url_part, ()):
            match_dict, allowed = await candidate.resolve(request)
            if match_dict is not None:
                return match_dict
            else:
                allowed_methods |= allowed
        if url_part == "/":
            break
        # Drop the last path segment and try the shorter prefix.
        url_part = url_part.rpartition("/")[0] or "/"

    #
    # We didn't find any candidates, so we'll try the matched sub-app
    # resources which we have to walk in a linear fashion because they
    # have regex/wildcard match rules and we cannot index them.
    #
    # For most cases we do not expect there to be many of these since
    # currently they are only added by `add_domain`
    #
    for resource in self._matched_sub_app_resources:
        match_dict, allowed = await resource.resolve(request)
        if match_dict is not None:
            return match_dict
        else:
            allowed_methods |= allowed

    if allowed_methods:
        # Path exists but the method is wrong -> 405 advertising Allow.
        return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods))

    return MatchInfoError(HTTPNotFound())
|
| 1067 |
+
|
| 1068 |
+
def __iter__(self) -> Iterator[str]:
    """Iterate over the names of the named resources."""
    return iter(self._named_resources)
|
| 1070 |
+
|
| 1071 |
+
def __len__(self) -> int:
    """Number of named resources."""
    return len(self._named_resources)
|
| 1073 |
+
|
| 1074 |
+
def __contains__(self, resource: object) -> bool:
    """True when *resource* is a registered resource name."""
    return resource in self._named_resources
|
| 1076 |
+
|
| 1077 |
+
def __getitem__(self, name: str) -> AbstractResource:
    """Look up a registered resource by name."""
    return self._named_resources[name]
|
| 1079 |
+
|
| 1080 |
+
def resources(self) -> ResourcesView:
    """Read-only view of every registered resource."""
    return ResourcesView(self._resources)
|
| 1082 |
+
|
| 1083 |
+
def routes(self) -> RoutesView:
    """Read-only view of every route of every resource."""
    return RoutesView(self._resources)
|
| 1085 |
+
|
| 1086 |
+
def named_resources(self) -> Mapping[str, AbstractResource]:
    """Immutable mapping of resource name to resource."""
    return MappingProxyType(self._named_resources)
|
| 1088 |
+
|
| 1089 |
+
def register_resource(self, resource: AbstractResource) -> None:
    """Validate *resource* (and its optional name) and add it to the router.

    Raises ValueError for an invalid or duplicate name and RuntimeError
    when the router is already frozen.
    """
    assert isinstance(
        resource, AbstractResource
    ), f"Instance of AbstractResource class is required, got {resource!r}"
    if self.frozen:
        raise RuntimeError("Cannot register a resource into frozen router.")

    name = resource.name

    if name is not None:
        # A name must be dash/dot/colon separated Python identifiers,
        # none of which may be a keyword.
        parts = self.NAME_SPLIT_RE.split(name)
        for part in parts:
            if keyword.iskeyword(part):
                raise ValueError(
                    f"Incorrect route name {name!r}, "
                    "python keywords cannot be used "
                    "for route name"
                )
            if not part.isidentifier():
                raise ValueError(
                    "Incorrect route name {!r}, "
                    "the name should be a sequence of "
                    "python identifiers separated "
                    "by dash, dot or column".format(name)
                )
        if name in self._named_resources:
            raise ValueError(
                "Duplicate {!r}, "
                "already handled by {!r}".format(name, self._named_resources[name])
            )
        self._named_resources[name] = resource
    self._resources.append(resource)

    if isinstance(resource, MatchedSubAppResource):
        # We cannot index match sub-app resources because they have match rules
        self._matched_sub_app_resources.append(resource)
    else:
        self.index_resource(resource)
|
| 1127 |
+
|
| 1128 |
+
def _get_resource_index_key(self, resource: AbstractResource) -> str:
|
| 1129 |
+
"""Return a key to index the resource in the resource index."""
|
| 1130 |
+
if "{" in (index_key := resource.canonical):
|
| 1131 |
+
# strip at the first { to allow for variables, and than
|
| 1132 |
+
# rpartition at / to allow for variable parts in the path
|
| 1133 |
+
# For example if the canonical path is `/core/locations{tail:.*}`
|
| 1134 |
+
# the index key will be `/core` since index is based on the
|
| 1135 |
+
# url parts split by `/`
|
| 1136 |
+
index_key = index_key.partition("{")[0].rpartition("/")[0]
|
| 1137 |
+
return index_key.rstrip("/") or "/"
|
| 1138 |
+
|
| 1139 |
+
def index_resource(self, resource: AbstractResource) -> None:
|
| 1140 |
+
"""Add a resource to the resource index."""
|
| 1141 |
+
resource_key = self._get_resource_index_key(resource)
|
| 1142 |
+
# There may be multiple resources for a canonical path
|
| 1143 |
+
# so we keep them in a list to ensure that registration
|
| 1144 |
+
# order is respected.
|
| 1145 |
+
self._resource_index.setdefault(resource_key, []).append(resource)
|
| 1146 |
+
|
| 1147 |
+
def unindex_resource(self, resource: AbstractResource) -> None:
|
| 1148 |
+
"""Remove a resource from the resource index."""
|
| 1149 |
+
resource_key = self._get_resource_index_key(resource)
|
| 1150 |
+
self._resource_index[resource_key].remove(resource)
|
| 1151 |
+
|
| 1152 |
+
def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
|
| 1153 |
+
if path and not path.startswith("/"):
|
| 1154 |
+
raise ValueError("path should be started with / or be empty")
|
| 1155 |
+
# Reuse last added resource if path and name are the same
|
| 1156 |
+
if self._resources:
|
| 1157 |
+
resource = self._resources[-1]
|
| 1158 |
+
if resource.name == name and resource.raw_match(path):
|
| 1159 |
+
return cast(Resource, resource)
|
| 1160 |
+
if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
|
| 1161 |
+
resource = PlainResource(path, name=name)
|
| 1162 |
+
self.register_resource(resource)
|
| 1163 |
+
return resource
|
| 1164 |
+
resource = DynamicResource(path, name=name)
|
| 1165 |
+
self.register_resource(resource)
|
| 1166 |
+
return resource
|
| 1167 |
+
|
| 1168 |
+
def add_route(
|
| 1169 |
+
self,
|
| 1170 |
+
method: str,
|
| 1171 |
+
path: str,
|
| 1172 |
+
handler: Union[Handler, Type[AbstractView]],
|
| 1173 |
+
*,
|
| 1174 |
+
name: Optional[str] = None,
|
| 1175 |
+
expect_handler: Optional[_ExpectHandler] = None,
|
| 1176 |
+
) -> AbstractRoute:
|
| 1177 |
+
resource = self.add_resource(path, name=name)
|
| 1178 |
+
return resource.add_route(method, handler, expect_handler=expect_handler)
|
| 1179 |
+
|
| 1180 |
+
def add_static(
|
| 1181 |
+
self,
|
| 1182 |
+
prefix: str,
|
| 1183 |
+
path: PathLike,
|
| 1184 |
+
*,
|
| 1185 |
+
name: Optional[str] = None,
|
| 1186 |
+
expect_handler: Optional[_ExpectHandler] = None,
|
| 1187 |
+
chunk_size: int = 256 * 1024,
|
| 1188 |
+
show_index: bool = False,
|
| 1189 |
+
follow_symlinks: bool = False,
|
| 1190 |
+
append_version: bool = False,
|
| 1191 |
+
) -> AbstractResource:
|
| 1192 |
+
"""Add static files view.
|
| 1193 |
+
|
| 1194 |
+
prefix - url prefix
|
| 1195 |
+
path - folder with files
|
| 1196 |
+
|
| 1197 |
+
"""
|
| 1198 |
+
assert prefix.startswith("/")
|
| 1199 |
+
if prefix.endswith("/"):
|
| 1200 |
+
prefix = prefix[:-1]
|
| 1201 |
+
resource = StaticResource(
|
| 1202 |
+
prefix,
|
| 1203 |
+
path,
|
| 1204 |
+
name=name,
|
| 1205 |
+
expect_handler=expect_handler,
|
| 1206 |
+
chunk_size=chunk_size,
|
| 1207 |
+
show_index=show_index,
|
| 1208 |
+
follow_symlinks=follow_symlinks,
|
| 1209 |
+
append_version=append_version,
|
| 1210 |
+
)
|
| 1211 |
+
self.register_resource(resource)
|
| 1212 |
+
return resource
|
| 1213 |
+
|
| 1214 |
+
def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
|
| 1215 |
+
"""Shortcut for add_route with method HEAD."""
|
| 1216 |
+
return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
|
| 1217 |
+
|
| 1218 |
+
def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
|
| 1219 |
+
"""Shortcut for add_route with method OPTIONS."""
|
| 1220 |
+
return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)
|
| 1221 |
+
|
| 1222 |
+
def add_get(
|
| 1223 |
+
self,
|
| 1224 |
+
path: str,
|
| 1225 |
+
handler: Handler,
|
| 1226 |
+
*,
|
| 1227 |
+
name: Optional[str] = None,
|
| 1228 |
+
allow_head: bool = True,
|
| 1229 |
+
**kwargs: Any,
|
| 1230 |
+
) -> AbstractRoute:
|
| 1231 |
+
"""Shortcut for add_route with method GET.
|
| 1232 |
+
|
| 1233 |
+
If allow_head is true, another
|
| 1234 |
+
route is added allowing head requests to the same endpoint.
|
| 1235 |
+
"""
|
| 1236 |
+
resource = self.add_resource(path, name=name)
|
| 1237 |
+
if allow_head:
|
| 1238 |
+
resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
|
| 1239 |
+
return resource.add_route(hdrs.METH_GET, handler, **kwargs)
|
| 1240 |
+
|
| 1241 |
+
def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
|
| 1242 |
+
"""Shortcut for add_route with method POST."""
|
| 1243 |
+
return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
|
| 1244 |
+
|
| 1245 |
+
def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
|
| 1246 |
+
"""Shortcut for add_route with method PUT."""
|
| 1247 |
+
return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
|
| 1248 |
+
|
| 1249 |
+
def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
|
| 1250 |
+
"""Shortcut for add_route with method PATCH."""
|
| 1251 |
+
return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
|
| 1252 |
+
|
| 1253 |
+
def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
|
| 1254 |
+
"""Shortcut for add_route with method DELETE."""
|
| 1255 |
+
return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)
|
| 1256 |
+
|
| 1257 |
+
def add_view(
|
| 1258 |
+
self, path: str, handler: Type[AbstractView], **kwargs: Any
|
| 1259 |
+
) -> AbstractRoute:
|
| 1260 |
+
"""Shortcut for add_route with ANY methods for a class-based view."""
|
| 1261 |
+
return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)
|
| 1262 |
+
|
| 1263 |
+
def freeze(self) -> None:
|
| 1264 |
+
super().freeze()
|
| 1265 |
+
for resource in self._resources:
|
| 1266 |
+
resource.freeze()
|
| 1267 |
+
|
| 1268 |
+
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
|
| 1269 |
+
"""Append routes to route table.
|
| 1270 |
+
|
| 1271 |
+
Parameter should be a sequence of RouteDef objects.
|
| 1272 |
+
|
| 1273 |
+
Returns a list of registered AbstractRoute instances.
|
| 1274 |
+
"""
|
| 1275 |
+
registered_routes = []
|
| 1276 |
+
for route_def in routes:
|
| 1277 |
+
registered_routes.extend(route_def.register(self))
|
| 1278 |
+
return registered_routes
|
| 1279 |
+
|
| 1280 |
+
|
| 1281 |
+
def _quote_path(value: str) -> str:
|
| 1282 |
+
if YARL_VERSION < (1, 6):
|
| 1283 |
+
value = value.replace("%", "%25")
|
| 1284 |
+
return URL.build(path=value, encoded=False).raw_path
|
| 1285 |
+
|
| 1286 |
+
|
| 1287 |
+
def _unquote_path_safe(value: str) -> str:
|
| 1288 |
+
if "%" not in value:
|
| 1289 |
+
return value
|
| 1290 |
+
return value.replace("%2F", "/").replace("%25", "%")
|
| 1291 |
+
|
| 1292 |
+
|
| 1293 |
+
def _requote_path(value: str) -> str:
|
| 1294 |
+
# Quote non-ascii characters and other characters which must be quoted,
|
| 1295 |
+
# but preserve existing %-sequences.
|
| 1296 |
+
result = _quote_path(value)
|
| 1297 |
+
if "%" in value:
|
| 1298 |
+
result = result.replace("%25", "%")
|
| 1299 |
+
return result
|
evalkit_tf437/lib/python3.10/site-packages/pyasn1_modules/rfc5914.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# This file is being contributed to pyasn1-modules software.
|
| 2 |
+
#
|
| 3 |
+
# Created by Russ Housley with assistance from asn1ate v.0.6.0.
|
| 4 |
+
#
|
| 5 |
+
# Copyright (c) 2019, Vigil Security, LLC
|
| 6 |
+
# License: http://snmplabs.com/pyasn1/license.html
|
| 7 |
+
#
|
| 8 |
+
# Trust Anchor Format
|
| 9 |
+
#
|
| 10 |
+
# ASN.1 source from:
|
| 11 |
+
# https://www.rfc-editor.org/rfc/rfc5914.txt
|
| 12 |
+
|
| 13 |
+
from pyasn1.type import char
|
| 14 |
+
from pyasn1.type import constraint
|
| 15 |
+
from pyasn1.type import namedtype
|
| 16 |
+
from pyasn1.type import namedval
|
| 17 |
+
from pyasn1.type import tag
|
| 18 |
+
from pyasn1.type import univ
|
| 19 |
+
|
| 20 |
+
from pyasn1_modules import rfc5280
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
MAX = float('inf')
|
| 24 |
+
|
| 25 |
+
Certificate = rfc5280.Certificate
|
| 26 |
+
|
| 27 |
+
Name = rfc5280.Name
|
| 28 |
+
|
| 29 |
+
Extensions = rfc5280.Extensions
|
| 30 |
+
|
| 31 |
+
SubjectPublicKeyInfo = rfc5280.SubjectPublicKeyInfo
|
| 32 |
+
|
| 33 |
+
TBSCertificate = rfc5280.TBSCertificate
|
| 34 |
+
|
| 35 |
+
CertificatePolicies = rfc5280.CertificatePolicies
|
| 36 |
+
|
| 37 |
+
KeyIdentifier = rfc5280.KeyIdentifier
|
| 38 |
+
|
| 39 |
+
NameConstraints = rfc5280.NameConstraints
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
class CertPolicyFlags(univ.BitString):
|
| 43 |
+
pass
|
| 44 |
+
|
| 45 |
+
CertPolicyFlags.namedValues = namedval.NamedValues(
|
| 46 |
+
('inhibitPolicyMapping', 0),
|
| 47 |
+
('requireExplicitPolicy', 1),
|
| 48 |
+
('inhibitAnyPolicy', 2)
|
| 49 |
+
)
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
class CertPathControls(univ.Sequence):
|
| 53 |
+
pass
|
| 54 |
+
|
| 55 |
+
CertPathControls.componentType = namedtype.NamedTypes(
|
| 56 |
+
namedtype.NamedType('taName', Name()),
|
| 57 |
+
namedtype.OptionalNamedType('certificate', Certificate().subtype(
|
| 58 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
|
| 59 |
+
namedtype.OptionalNamedType('policySet', CertificatePolicies().subtype(
|
| 60 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 61 |
+
namedtype.OptionalNamedType('policyFlags', CertPolicyFlags().subtype(
|
| 62 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))),
|
| 63 |
+
namedtype.OptionalNamedType('nameConstr', NameConstraints().subtype(
|
| 64 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
|
| 65 |
+
namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(
|
| 66 |
+
subtypeSpec=constraint.ValueRangeConstraint(0, MAX)).subtype(
|
| 67 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4)))
|
| 68 |
+
)
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
class TrustAnchorTitle(char.UTF8String):
|
| 72 |
+
pass
|
| 73 |
+
|
| 74 |
+
TrustAnchorTitle.subtypeSpec = constraint.ValueSizeConstraint(1, 64)
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
class TrustAnchorInfoVersion(univ.Integer):
|
| 78 |
+
pass
|
| 79 |
+
|
| 80 |
+
TrustAnchorInfoVersion.namedValues = namedval.NamedValues(
|
| 81 |
+
('v1', 1)
|
| 82 |
+
)
|
| 83 |
+
|
| 84 |
+
|
| 85 |
+
class TrustAnchorInfo(univ.Sequence):
|
| 86 |
+
pass
|
| 87 |
+
|
| 88 |
+
TrustAnchorInfo.componentType = namedtype.NamedTypes(
|
| 89 |
+
namedtype.DefaultedNamedType('version', TrustAnchorInfoVersion().subtype(value='v1')),
|
| 90 |
+
namedtype.NamedType('pubKey', SubjectPublicKeyInfo()),
|
| 91 |
+
namedtype.NamedType('keyId', KeyIdentifier()),
|
| 92 |
+
namedtype.OptionalNamedType('taTitle', TrustAnchorTitle()),
|
| 93 |
+
namedtype.OptionalNamedType('certPath', CertPathControls()),
|
| 94 |
+
namedtype.OptionalNamedType('exts', Extensions().subtype(explicitTag=tag.Tag(
|
| 95 |
+
tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 96 |
+
namedtype.OptionalNamedType('taTitleLangTag', char.UTF8String().subtype(
|
| 97 |
+
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))
|
| 98 |
+
)
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
class TrustAnchorChoice(univ.Choice):
|
| 102 |
+
pass
|
| 103 |
+
|
| 104 |
+
TrustAnchorChoice.componentType = namedtype.NamedTypes(
|
| 105 |
+
namedtype.NamedType('certificate', Certificate()),
|
| 106 |
+
namedtype.NamedType('tbsCert', TBSCertificate().subtype(
|
| 107 |
+
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))),
|
| 108 |
+
namedtype.NamedType('taInfo', TrustAnchorInfo().subtype(
|
| 109 |
+
explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2)))
|
| 110 |
+
)
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
id_ct_trustAnchorList = univ.ObjectIdentifier('1.2.840.113549.1.9.16.1.34')
|
| 114 |
+
|
| 115 |
+
class TrustAnchorList(univ.SequenceOf):
|
| 116 |
+
pass
|
| 117 |
+
|
| 118 |
+
TrustAnchorList.componentType = TrustAnchorChoice()
|
| 119 |
+
TrustAnchorList.subtypeSpec=constraint.ValueSizeConstraint(1, MAX)
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/__init__.py
ADDED
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
backport of asyncio.TaskGroup, asyncio.Runner and asyncio.timeout
|
| 3 |
+
"""
|
| 4 |
+
|
| 5 |
+
__version__ = "0.0.0a4"
|
| 6 |
+
|
| 7 |
+
__all__ = ["run", "Runner", "TaskGroup", "Timeout", "timeout", "timeout_at"]
|
| 8 |
+
|
| 9 |
+
from .runners import run, Runner
|
| 10 |
+
from .taskgroups import TaskGroup
|
| 11 |
+
from .timeouts import Timeout, timeout, timeout_at
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (506 Bytes). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/runners.cpython-310.pyc
ADDED
|
Binary file (6.14 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/taskgroups.cpython-310.pyc
ADDED
|
Binary file (3.91 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/tasks.cpython-310.pyc
ADDED
|
Binary file (2.26 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/__pycache__/timeouts.cpython-310.pyc
ADDED
|
Binary file (4.64 kB). View file
|
|
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/runners.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# backported from cpython 3.12 bceb197947bbaebb11e01195bdce4f240fdf9332
|
| 2 |
+
# Copyright © 2001-2022 Python Software Foundation; All Rights Reserved
|
| 3 |
+
# modified to support working on 3.10, custom task_factory installed to
|
| 4 |
+
# support uncancel and contexts
|
| 5 |
+
|
| 6 |
+
__all__ = ('Runner', 'run')
|
| 7 |
+
|
| 8 |
+
import contextvars
|
| 9 |
+
import enum
|
| 10 |
+
import functools
|
| 11 |
+
import threading
|
| 12 |
+
import signal
|
| 13 |
+
from asyncio import coroutines
|
| 14 |
+
from asyncio import events
|
| 15 |
+
from asyncio import exceptions
|
| 16 |
+
from asyncio import tasks
|
| 17 |
+
from . tasks import task_factory as _task_factory
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class _State(enum.Enum):
|
| 21 |
+
CREATED = "created"
|
| 22 |
+
INITIALIZED = "initialized"
|
| 23 |
+
CLOSED = "closed"
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class Runner:
|
| 27 |
+
"""A context manager that controls event loop life cycle.
|
| 28 |
+
|
| 29 |
+
The context manager always creates a new event loop,
|
| 30 |
+
allows to run async functions inside it,
|
| 31 |
+
and properly finalizes the loop at the context manager exit.
|
| 32 |
+
|
| 33 |
+
If debug is True, the event loop will be run in debug mode.
|
| 34 |
+
If loop_factory is passed, it is used for new event loop creation.
|
| 35 |
+
|
| 36 |
+
asyncio.run(main(), debug=True)
|
| 37 |
+
|
| 38 |
+
is a shortcut for
|
| 39 |
+
|
| 40 |
+
with asyncio.Runner(debug=True) as runner:
|
| 41 |
+
runner.run(main())
|
| 42 |
+
|
| 43 |
+
The run() method can be called multiple times within the runner's context.
|
| 44 |
+
|
| 45 |
+
This can be useful for interactive console (e.g. IPython),
|
| 46 |
+
unittest runners, console tools, -- everywhere when async code
|
| 47 |
+
is called from existing sync framework and where the preferred single
|
| 48 |
+
asyncio.run() call doesn't work.
|
| 49 |
+
|
| 50 |
+
"""
|
| 51 |
+
|
| 52 |
+
# Note: the class is final, it is not intended for inheritance.
|
| 53 |
+
|
| 54 |
+
def __init__(self, *, debug=None, loop_factory=None):
|
| 55 |
+
self._state = _State.CREATED
|
| 56 |
+
self._debug = debug
|
| 57 |
+
self._loop_factory = loop_factory
|
| 58 |
+
self._loop = None
|
| 59 |
+
self._context = None
|
| 60 |
+
self._interrupt_count = 0
|
| 61 |
+
self._set_event_loop = False
|
| 62 |
+
|
| 63 |
+
def __enter__(self):
|
| 64 |
+
self._lazy_init()
|
| 65 |
+
return self
|
| 66 |
+
|
| 67 |
+
def __exit__(self, exc_type, exc_val, exc_tb):
|
| 68 |
+
self.close()
|
| 69 |
+
|
| 70 |
+
def close(self):
|
| 71 |
+
"""Shutdown and close event loop."""
|
| 72 |
+
if self._state is not _State.INITIALIZED:
|
| 73 |
+
return
|
| 74 |
+
try:
|
| 75 |
+
loop = self._loop
|
| 76 |
+
_cancel_all_tasks(loop)
|
| 77 |
+
loop.run_until_complete(loop.shutdown_asyncgens())
|
| 78 |
+
loop.run_until_complete(loop.shutdown_default_executor())
|
| 79 |
+
finally:
|
| 80 |
+
if self._set_event_loop:
|
| 81 |
+
events.set_event_loop(None)
|
| 82 |
+
loop.close()
|
| 83 |
+
self._loop = None
|
| 84 |
+
self._state = _State.CLOSED
|
| 85 |
+
|
| 86 |
+
def get_loop(self):
|
| 87 |
+
"""Return embedded event loop."""
|
| 88 |
+
self._lazy_init()
|
| 89 |
+
return self._loop
|
| 90 |
+
|
| 91 |
+
def run(self, coro, *, context=None):
|
| 92 |
+
"""Run a coroutine inside the embedded event loop."""
|
| 93 |
+
if not coroutines.iscoroutine(coro):
|
| 94 |
+
raise ValueError("a coroutine was expected, got {!r}".format(coro))
|
| 95 |
+
|
| 96 |
+
if events._get_running_loop() is not None:
|
| 97 |
+
# fail fast with short traceback
|
| 98 |
+
raise RuntimeError(
|
| 99 |
+
"Runner.run() cannot be called from a running event loop")
|
| 100 |
+
|
| 101 |
+
self._lazy_init()
|
| 102 |
+
|
| 103 |
+
if context is None:
|
| 104 |
+
context = self._context
|
| 105 |
+
task = _task_factory(self._loop, coro, context=context)
|
| 106 |
+
|
| 107 |
+
if (threading.current_thread() is threading.main_thread()
|
| 108 |
+
and signal.getsignal(signal.SIGINT) is signal.default_int_handler
|
| 109 |
+
):
|
| 110 |
+
sigint_handler = functools.partial(self._on_sigint, main_task=task)
|
| 111 |
+
try:
|
| 112 |
+
signal.signal(signal.SIGINT, sigint_handler)
|
| 113 |
+
except ValueError:
|
| 114 |
+
# `signal.signal` may throw if `threading.main_thread` does
|
| 115 |
+
# not support signals (e.g. embedded interpreter with signals
|
| 116 |
+
# not registered - see gh-91880)
|
| 117 |
+
sigint_handler = None
|
| 118 |
+
else:
|
| 119 |
+
sigint_handler = None
|
| 120 |
+
|
| 121 |
+
self._interrupt_count = 0
|
| 122 |
+
try:
|
| 123 |
+
if self._set_event_loop:
|
| 124 |
+
events.set_event_loop(self._loop)
|
| 125 |
+
return self._loop.run_until_complete(task)
|
| 126 |
+
except exceptions.CancelledError:
|
| 127 |
+
if self._interrupt_count > 0 and task.uncancel() == 0:
|
| 128 |
+
raise KeyboardInterrupt()
|
| 129 |
+
else:
|
| 130 |
+
raise # CancelledError
|
| 131 |
+
finally:
|
| 132 |
+
if (sigint_handler is not None
|
| 133 |
+
and signal.getsignal(signal.SIGINT) is sigint_handler
|
| 134 |
+
):
|
| 135 |
+
signal.signal(signal.SIGINT, signal.default_int_handler)
|
| 136 |
+
|
| 137 |
+
def _lazy_init(self):
|
| 138 |
+
if self._state is _State.CLOSED:
|
| 139 |
+
raise RuntimeError("Runner is closed")
|
| 140 |
+
if self._state is _State.INITIALIZED:
|
| 141 |
+
return
|
| 142 |
+
if self._loop_factory is None:
|
| 143 |
+
self._loop = events.new_event_loop()
|
| 144 |
+
self._set_event_loop = True
|
| 145 |
+
else:
|
| 146 |
+
self._loop = self._loop_factory()
|
| 147 |
+
if self._debug is not None:
|
| 148 |
+
self._loop.set_debug(self._debug)
|
| 149 |
+
self._loop.set_task_factory(_task_factory)
|
| 150 |
+
self._context = contextvars.copy_context()
|
| 151 |
+
self._state = _State.INITIALIZED
|
| 152 |
+
|
| 153 |
+
def _on_sigint(self, signum, frame, main_task):
|
| 154 |
+
self._interrupt_count += 1
|
| 155 |
+
if self._interrupt_count == 1 and not main_task.done():
|
| 156 |
+
main_task.cancel()
|
| 157 |
+
# wakeup loop if it is blocked by select() with long timeout
|
| 158 |
+
self._loop.call_soon_threadsafe(lambda: None)
|
| 159 |
+
return
|
| 160 |
+
raise KeyboardInterrupt()
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def run(main, *, debug=None):
|
| 164 |
+
"""Execute the coroutine and return the result.
|
| 165 |
+
|
| 166 |
+
This function runs the passed coroutine, taking care of
|
| 167 |
+
managing the asyncio event loop and finalizing asynchronous
|
| 168 |
+
generators.
|
| 169 |
+
|
| 170 |
+
This function cannot be called when another asyncio event loop is
|
| 171 |
+
running in the same thread.
|
| 172 |
+
|
| 173 |
+
If debug is True, the event loop will be run in debug mode.
|
| 174 |
+
|
| 175 |
+
This function always creates a new event loop and closes it at the end.
|
| 176 |
+
It should be used as a main entry point for asyncio programs, and should
|
| 177 |
+
ideally only be called once.
|
| 178 |
+
|
| 179 |
+
Example:
|
| 180 |
+
|
| 181 |
+
async def main():
|
| 182 |
+
await asyncio.sleep(1)
|
| 183 |
+
print('hello')
|
| 184 |
+
|
| 185 |
+
asyncio.run(main())
|
| 186 |
+
"""
|
| 187 |
+
if events._get_running_loop() is not None:
|
| 188 |
+
# fail fast with short traceback
|
| 189 |
+
raise RuntimeError(
|
| 190 |
+
"asyncio.run() cannot be called from a running event loop")
|
| 191 |
+
|
| 192 |
+
with Runner(debug=debug) as runner:
|
| 193 |
+
return runner.run(main)
|
| 194 |
+
|
| 195 |
+
|
| 196 |
+
def _cancel_all_tasks(loop):
|
| 197 |
+
to_cancel = tasks.all_tasks(loop)
|
| 198 |
+
if not to_cancel:
|
| 199 |
+
return
|
| 200 |
+
|
| 201 |
+
for task in to_cancel:
|
| 202 |
+
task.cancel()
|
| 203 |
+
|
| 204 |
+
loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True))
|
| 205 |
+
|
| 206 |
+
for task in to_cancel:
|
| 207 |
+
if task.cancelled():
|
| 208 |
+
continue
|
| 209 |
+
if task.exception() is not None:
|
| 210 |
+
loop.call_exception_handler({
|
| 211 |
+
'message': 'unhandled exception during asyncio.run() shutdown',
|
| 212 |
+
'exception': task.exception(),
|
| 213 |
+
'task': task,
|
| 214 |
+
})
|
evalkit_tf437/lib/python3.10/site-packages/taskgroup/timeouts.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# backported from cpython 3.12 bceb197947bbaebb11e01195bdce4f240fdf9332
|
| 2 |
+
# Copyright © 2001-2022 Python Software Foundation; All Rights Reserved
|
| 3 |
+
# modified to support working on 3.10 (basically just the imports changed here)
|
| 4 |
+
|
| 5 |
+
import enum
|
| 6 |
+
|
| 7 |
+
from types import TracebackType
|
| 8 |
+
from typing import final, Optional, Type
|
| 9 |
+
|
| 10 |
+
from asyncio import events
|
| 11 |
+
from asyncio import exceptions
|
| 12 |
+
from asyncio import tasks
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
__all__ = (
|
| 16 |
+
"Timeout",
|
| 17 |
+
"timeout",
|
| 18 |
+
"timeout_at",
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class _State(enum.Enum):
|
| 23 |
+
CREATED = "created"
|
| 24 |
+
ENTERED = "active"
|
| 25 |
+
EXPIRING = "expiring"
|
| 26 |
+
EXPIRED = "expired"
|
| 27 |
+
EXITED = "finished"
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
@final
|
| 31 |
+
class Timeout:
|
| 32 |
+
|
| 33 |
+
def __init__(self, when: Optional[float]) -> None:
|
| 34 |
+
self._state = _State.CREATED
|
| 35 |
+
|
| 36 |
+
self._timeout_handler: Optional[events.TimerHandle] = None
|
| 37 |
+
self._task: Optional[tasks.Task] = None
|
| 38 |
+
self._when = when
|
| 39 |
+
|
| 40 |
+
def when(self) -> Optional[float]:
|
| 41 |
+
return self._when
|
| 42 |
+
|
| 43 |
+
def reschedule(self, when: Optional[float]) -> None:
|
| 44 |
+
assert self._state is not _State.CREATED
|
| 45 |
+
if self._state is not _State.ENTERED:
|
| 46 |
+
raise RuntimeError(
|
| 47 |
+
f"Cannot change state of {self._state.value} Timeout",
|
| 48 |
+
)
|
| 49 |
+
|
| 50 |
+
self._when = when
|
| 51 |
+
|
| 52 |
+
if self._timeout_handler is not None:
|
| 53 |
+
self._timeout_handler.cancel()
|
| 54 |
+
|
| 55 |
+
if when is None:
|
| 56 |
+
self._timeout_handler = None
|
| 57 |
+
else:
|
| 58 |
+
loop = events.get_running_loop()
|
| 59 |
+
if when <= loop.time():
|
| 60 |
+
self._timeout_handler = loop.call_soon(self._on_timeout)
|
| 61 |
+
else:
|
| 62 |
+
self._timeout_handler = loop.call_at(when, self._on_timeout)
|
| 63 |
+
|
| 64 |
+
def expired(self) -> bool:
|
| 65 |
+
"""Is timeout expired during execution?"""
|
| 66 |
+
return self._state in (_State.EXPIRING, _State.EXPIRED)
|
| 67 |
+
|
| 68 |
+
def __repr__(self) -> str:
|
| 69 |
+
info = ['']
|
| 70 |
+
if self._state is _State.ENTERED:
|
| 71 |
+
when = round(self._when, 3) if self._when is not None else None
|
| 72 |
+
info.append(f"when={when}")
|
| 73 |
+
info_str = ' '.join(info)
|
| 74 |
+
return f"<Timeout [{self._state.value}]{info_str}>"
|
| 75 |
+
|
| 76 |
+
async def __aenter__(self) -> "Timeout":
|
| 77 |
+
self._state = _State.ENTERED
|
| 78 |
+
self._task = tasks.current_task()
|
| 79 |
+
if self._task is None:
|
| 80 |
+
raise RuntimeError("Timeout should be used inside a task")
|
| 81 |
+
self.reschedule(self._when)
|
| 82 |
+
return self
|
| 83 |
+
|
| 84 |
+
async def __aexit__(
|
| 85 |
+
self,
|
| 86 |
+
exc_type: Optional[Type[BaseException]],
|
| 87 |
+
exc_val: Optional[BaseException],
|
| 88 |
+
exc_tb: Optional[TracebackType],
|
| 89 |
+
) -> Optional[bool]:
|
| 90 |
+
assert self._state in (_State.ENTERED, _State.EXPIRING)
|
| 91 |
+
|
| 92 |
+
if self._timeout_handler is not None:
|
| 93 |
+
self._timeout_handler.cancel()
|
| 94 |
+
self._timeout_handler = None
|
| 95 |
+
|
| 96 |
+
if self._state is _State.EXPIRING:
|
| 97 |
+
self._state = _State.EXPIRED
|
| 98 |
+
|
| 99 |
+
if self._task.uncancel() == 0 and exc_type is exceptions.CancelledError:
|
| 100 |
+
# Since there are no outstanding cancel requests, we're
|
| 101 |
+
# handling this.
|
| 102 |
+
raise TimeoutError
|
| 103 |
+
elif self._state is _State.ENTERED:
|
| 104 |
+
self._state = _State.EXITED
|
| 105 |
+
|
| 106 |
+
return None
|
| 107 |
+
|
| 108 |
+
def _on_timeout(self) -> None:
|
| 109 |
+
assert self._state is _State.ENTERED
|
| 110 |
+
self._task.cancel()
|
| 111 |
+
self._state = _State.EXPIRING
|
| 112 |
+
# drop the reference early
|
| 113 |
+
self._timeout_handler = None
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def timeout(delay: Optional[float]) -> Timeout:
|
| 117 |
+
"""Timeout async context manager.
|
| 118 |
+
|
| 119 |
+
Useful in cases when you want to apply timeout logic around block
|
| 120 |
+
of code or in cases when asyncio.wait_for is not suitable. For example:
|
| 121 |
+
|
| 122 |
+
>>> async with asyncio.timeout(10): # 10 seconds timeout
|
| 123 |
+
... await long_running_task()
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
delay - value in seconds or None to disable timeout logic
|
| 127 |
+
|
| 128 |
+
long_running_task() is interrupted by raising asyncio.CancelledError,
|
| 129 |
+
the top-most affected timeout() context manager converts CancelledError
|
| 130 |
+
into TimeoutError.
|
| 131 |
+
"""
|
| 132 |
+
loop = events.get_running_loop()
|
| 133 |
+
return Timeout(loop.time() + delay if delay is not None else None)
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
def timeout_at(when: Optional[float]) -> Timeout:
|
| 137 |
+
"""Schedule the timeout at absolute time.
|
| 138 |
+
|
| 139 |
+
Like timeout() but argument gives absolute time in the same clock system
|
| 140 |
+
as loop.time().
|
| 141 |
+
|
| 142 |
+
Please note: it is not POSIX time but a time with
|
| 143 |
+
undefined starting base, e.g. the time of the system power on.
|
| 144 |
+
|
| 145 |
+
>>> async with asyncio.timeout_at(loop.time() + 10):
|
| 146 |
+
... await long_running_task()
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
when - a deadline when timeout occurs or None to disable timeout logic
|
| 150 |
+
|
| 151 |
+
long_running_task() is interrupted by raising asyncio.CancelledError,
|
| 152 |
+
the top-most affected timeout() context manager converts CancelledError
|
| 153 |
+
into TimeoutError.
|
| 154 |
+
"""
|
| 155 |
+
return Timeout(when)
|
evalkit_tf449/lib/python3.10/site-packages/scipy/sparse/__pycache__/_construct.cpython-310.pyc
ADDED
|
Binary file (42.8 kB). View file
|
|
|