Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +11 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/INSTALLER +1 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/LICENSE.rst +28 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/METADATA +128 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/RECORD +87 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/REQUESTED +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/WHEEL +5 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/top_level.txt +1 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/__init__.py +216 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_find_header.c +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_find_header.h +14 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_find_header.pxd +2 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_headers.pxi +83 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_helpers.c +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_helpers.pyx +35 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_parser.c +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_parser.pyx +818 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_writer.c +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_writer.pyx +163 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_websocket.pyx +56 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/abc.py +207 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_proto.py +251 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_reqrep.py +1133 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/connector.py +1449 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/formdata.py +172 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/hdrs.py +114 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_exceptions.py +105 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_websocket.py +701 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_writer.py +200 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/multipart.py +963 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/payload_streamer.py +75 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/pytest_plugin.py +391 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/tcp_helpers.py +38 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/test_utils.py +698 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web.py +586 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_app.py +557 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_exceptions.py +441 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_fileresponse.py +288 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_log.py +208 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_request.py +874 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_response.py +825 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py +1220 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/worker.py +269 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/brotli/__init__.py +6 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/brotli/brotli.py +466 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/brotli/build.py +224 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/LICENSE-3RD-PARTY.txt +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/LICENSE.txt +21 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/__init__.pyi +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/config-3.py +24 -0
.gitattributes
CHANGED
|
@@ -283,3 +283,14 @@ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_proxy
|
|
| 283 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torchaudio/_torchaudio_ffmpeg.so filter=lfs diff=lfs merge=lfs -text
|
| 284 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5z.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 285 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/_qhull.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 283 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torchaudio/_torchaudio_ffmpeg.so filter=lfs diff=lfs merge=lfs -text
|
| 284 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5z.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 285 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/_qhull.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 286 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/_tri.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 287 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_selector.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 288 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5o.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 289 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_objects.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 290 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5ac.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 291 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5g.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 292 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5ds.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 293 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/utils.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 294 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5fd.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 295 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/defs.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 296 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5d.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/LICENSE.rst
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Copyright 2007 Pallets
|
| 2 |
+
|
| 3 |
+
Redistribution and use in source and binary forms, with or without
|
| 4 |
+
modification, are permitted provided that the following conditions are
|
| 5 |
+
met:
|
| 6 |
+
|
| 7 |
+
1. Redistributions of source code must retain the above copyright
|
| 8 |
+
notice, this list of conditions and the following disclaimer.
|
| 9 |
+
|
| 10 |
+
2. Redistributions in binary form must reproduce the above copyright
|
| 11 |
+
notice, this list of conditions and the following disclaimer in the
|
| 12 |
+
documentation and/or other materials provided with the distribution.
|
| 13 |
+
|
| 14 |
+
3. Neither the name of the copyright holder nor the names of its
|
| 15 |
+
contributors may be used to endorse or promote products derived from
|
| 16 |
+
this software without specific prior written permission.
|
| 17 |
+
|
| 18 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
| 19 |
+
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
| 20 |
+
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
| 21 |
+
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
| 22 |
+
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
| 23 |
+
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
| 24 |
+
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
| 25 |
+
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
| 26 |
+
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
| 27 |
+
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
| 28 |
+
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/METADATA
ADDED
|
@@ -0,0 +1,128 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: Werkzeug
|
| 3 |
+
Version: 2.1.2
|
| 4 |
+
Summary: The comprehensive WSGI web application library.
|
| 5 |
+
Home-page: https://palletsprojects.com/p/werkzeug/
|
| 6 |
+
Author: Armin Ronacher
|
| 7 |
+
Author-email: armin.ronacher@active-4.com
|
| 8 |
+
Maintainer: Pallets
|
| 9 |
+
Maintainer-email: contact@palletsprojects.com
|
| 10 |
+
License: BSD-3-Clause
|
| 11 |
+
Project-URL: Donate, https://palletsprojects.com/donate
|
| 12 |
+
Project-URL: Documentation, https://werkzeug.palletsprojects.com/
|
| 13 |
+
Project-URL: Changes, https://werkzeug.palletsprojects.com/changes/
|
| 14 |
+
Project-URL: Source Code, https://github.com/pallets/werkzeug/
|
| 15 |
+
Project-URL: Issue Tracker, https://github.com/pallets/werkzeug/issues/
|
| 16 |
+
Project-URL: Twitter, https://twitter.com/PalletsTeam
|
| 17 |
+
Project-URL: Chat, https://discord.gg/pallets
|
| 18 |
+
Platform: UNKNOWN
|
| 19 |
+
Classifier: Development Status :: 5 - Production/Stable
|
| 20 |
+
Classifier: Environment :: Web Environment
|
| 21 |
+
Classifier: Intended Audience :: Developers
|
| 22 |
+
Classifier: License :: OSI Approved :: BSD License
|
| 23 |
+
Classifier: Operating System :: OS Independent
|
| 24 |
+
Classifier: Programming Language :: Python
|
| 25 |
+
Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
|
| 26 |
+
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI
|
| 27 |
+
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application
|
| 28 |
+
Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware
|
| 29 |
+
Classifier: Topic :: Software Development :: Libraries :: Application Frameworks
|
| 30 |
+
Requires-Python: >=3.7
|
| 31 |
+
Description-Content-Type: text/x-rst
|
| 32 |
+
License-File: LICENSE.rst
|
| 33 |
+
Provides-Extra: watchdog
|
| 34 |
+
Requires-Dist: watchdog ; extra == 'watchdog'
|
| 35 |
+
|
| 36 |
+
Werkzeug
|
| 37 |
+
========
|
| 38 |
+
|
| 39 |
+
*werkzeug* German noun: "tool". Etymology: *werk* ("work"), *zeug* ("stuff")
|
| 40 |
+
|
| 41 |
+
Werkzeug is a comprehensive `WSGI`_ web application library. It began as
|
| 42 |
+
a simple collection of various utilities for WSGI applications and has
|
| 43 |
+
become one of the most advanced WSGI utility libraries.
|
| 44 |
+
|
| 45 |
+
It includes:
|
| 46 |
+
|
| 47 |
+
- An interactive debugger that allows inspecting stack traces and
|
| 48 |
+
source code in the browser with an interactive interpreter for any
|
| 49 |
+
frame in the stack.
|
| 50 |
+
- A full-featured request object with objects to interact with
|
| 51 |
+
headers, query args, form data, files, and cookies.
|
| 52 |
+
- A response object that can wrap other WSGI applications and handle
|
| 53 |
+
streaming data.
|
| 54 |
+
- A routing system for matching URLs to endpoints and generating URLs
|
| 55 |
+
for endpoints, with an extensible system for capturing variables
|
| 56 |
+
from URLs.
|
| 57 |
+
- HTTP utilities to handle entity tags, cache control, dates, user
|
| 58 |
+
agents, cookies, files, and more.
|
| 59 |
+
- A threaded WSGI server for use while developing applications
|
| 60 |
+
locally.
|
| 61 |
+
- A test client for simulating HTTP requests during testing without
|
| 62 |
+
requiring running a server.
|
| 63 |
+
|
| 64 |
+
Werkzeug doesn't enforce any dependencies. It is up to the developer to
|
| 65 |
+
choose a template engine, database adapter, and even how to handle
|
| 66 |
+
requests. It can be used to build all sorts of end user applications
|
| 67 |
+
such as blogs, wikis, or bulletin boards.
|
| 68 |
+
|
| 69 |
+
`Flask`_ wraps Werkzeug, using it to handle the details of WSGI while
|
| 70 |
+
providing more structure and patterns for defining powerful
|
| 71 |
+
applications.
|
| 72 |
+
|
| 73 |
+
.. _WSGI: https://wsgi.readthedocs.io/en/latest/
|
| 74 |
+
.. _Flask: https://www.palletsprojects.com/p/flask/
|
| 75 |
+
|
| 76 |
+
|
| 77 |
+
Installing
|
| 78 |
+
----------
|
| 79 |
+
|
| 80 |
+
Install and update using `pip`_:
|
| 81 |
+
|
| 82 |
+
.. code-block:: text
|
| 83 |
+
|
| 84 |
+
pip install -U Werkzeug
|
| 85 |
+
|
| 86 |
+
.. _pip: https://pip.pypa.io/en/stable/getting-started/
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
A Simple Example
|
| 90 |
+
----------------
|
| 91 |
+
|
| 92 |
+
.. code-block:: python
|
| 93 |
+
|
| 94 |
+
from werkzeug.wrappers import Request, Response
|
| 95 |
+
|
| 96 |
+
@Request.application
|
| 97 |
+
def application(request):
|
| 98 |
+
return Response('Hello, World!')
|
| 99 |
+
|
| 100 |
+
if __name__ == '__main__':
|
| 101 |
+
from werkzeug.serving import run_simple
|
| 102 |
+
run_simple('localhost', 4000, application)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
Donate
|
| 106 |
+
------
|
| 107 |
+
|
| 108 |
+
The Pallets organization develops and supports Werkzeug and other
|
| 109 |
+
popular packages. In order to grow the community of contributors and
|
| 110 |
+
users, and allow the maintainers to devote more time to the projects,
|
| 111 |
+
`please donate today`_.
|
| 112 |
+
|
| 113 |
+
.. _please donate today: https://palletsprojects.com/donate
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
Links
|
| 117 |
+
-----
|
| 118 |
+
|
| 119 |
+
- Documentation: https://werkzeug.palletsprojects.com/
|
| 120 |
+
- Changes: https://werkzeug.palletsprojects.com/changes/
|
| 121 |
+
- PyPI Releases: https://pypi.org/project/Werkzeug/
|
| 122 |
+
- Source Code: https://github.com/pallets/werkzeug/
|
| 123 |
+
- Issue Tracker: https://github.com/pallets/werkzeug/issues/
|
| 124 |
+
- Website: https://palletsprojects.com/p/werkzeug/
|
| 125 |
+
- Twitter: https://twitter.com/PalletsTeam
|
| 126 |
+
- Chat: https://discord.gg/pallets
|
| 127 |
+
|
| 128 |
+
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/RECORD
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Werkzeug-2.1.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 2 |
+
Werkzeug-2.1.2.dist-info/LICENSE.rst,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475
|
| 3 |
+
Werkzeug-2.1.2.dist-info/METADATA,sha256=vWBYPD9d_Qzl4WAupfJ5Fy_ep7pqMPnGvkSLYiCi4B0,4400
|
| 4 |
+
Werkzeug-2.1.2.dist-info/RECORD,,
|
| 5 |
+
Werkzeug-2.1.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 6 |
+
Werkzeug-2.1.2.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
|
| 7 |
+
Werkzeug-2.1.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
|
| 8 |
+
werkzeug/__init__.py,sha256=BtM-3LM8iob4OwPY693-LqZ5RcnDS4iftOqBK28uZ2k,188
|
| 9 |
+
werkzeug/__pycache__/__init__.cpython-38.pyc,,
|
| 10 |
+
werkzeug/__pycache__/_internal.cpython-38.pyc,,
|
| 11 |
+
werkzeug/__pycache__/_reloader.cpython-38.pyc,,
|
| 12 |
+
werkzeug/__pycache__/datastructures.cpython-38.pyc,,
|
| 13 |
+
werkzeug/__pycache__/exceptions.cpython-38.pyc,,
|
| 14 |
+
werkzeug/__pycache__/formparser.cpython-38.pyc,,
|
| 15 |
+
werkzeug/__pycache__/http.cpython-38.pyc,,
|
| 16 |
+
werkzeug/__pycache__/local.cpython-38.pyc,,
|
| 17 |
+
werkzeug/__pycache__/routing.cpython-38.pyc,,
|
| 18 |
+
werkzeug/__pycache__/security.cpython-38.pyc,,
|
| 19 |
+
werkzeug/__pycache__/serving.cpython-38.pyc,,
|
| 20 |
+
werkzeug/__pycache__/test.cpython-38.pyc,,
|
| 21 |
+
werkzeug/__pycache__/testapp.cpython-38.pyc,,
|
| 22 |
+
werkzeug/__pycache__/urls.cpython-38.pyc,,
|
| 23 |
+
werkzeug/__pycache__/user_agent.cpython-38.pyc,,
|
| 24 |
+
werkzeug/__pycache__/utils.cpython-38.pyc,,
|
| 25 |
+
werkzeug/__pycache__/wsgi.cpython-38.pyc,,
|
| 26 |
+
werkzeug/_internal.py,sha256=g8PHJz2z39I3x0vwTvTKbXIg0eUQqGF9UtUzDMWT0Qw,16222
|
| 27 |
+
werkzeug/_reloader.py,sha256=lYStlIDduTxBOB8BSozy_44HQ7YT5fup-x3uuac1-2o,14331
|
| 28 |
+
werkzeug/datastructures.py,sha256=Sk5gYGJbgvwpM-5IursyEWwo815RB5NAs2wFcTjHG0M,97018
|
| 29 |
+
werkzeug/datastructures.pyi,sha256=L7MfJjHrEjKuAZ57w5d2eaiUIWYya52crapklFnKUz0,34493
|
| 30 |
+
werkzeug/debug/__init__.py,sha256=Qds7CmReDr13XUaKYvcwnGNBQp6d86ooGV_to2Uw0C0,17730
|
| 31 |
+
werkzeug/debug/__pycache__/__init__.cpython-38.pyc,,
|
| 32 |
+
werkzeug/debug/__pycache__/console.cpython-38.pyc,,
|
| 33 |
+
werkzeug/debug/__pycache__/repr.cpython-38.pyc,,
|
| 34 |
+
werkzeug/debug/__pycache__/tbtools.cpython-38.pyc,,
|
| 35 |
+
werkzeug/debug/console.py,sha256=08mKGZLMsrd2E-0qD82J5knUbI2DomHXUQ5z0550a_o,6082
|
| 36 |
+
werkzeug/debug/repr.py,sha256=Mp911LMRzZUoNvrCLQfKKpQZbNKdIM8VbjzJQjBkdsM,9481
|
| 37 |
+
werkzeug/debug/shared/ICON_LICENSE.md,sha256=DhA6Y1gUl5Jwfg0NFN9Rj4VWITt8tUx0IvdGf0ux9-s,222
|
| 38 |
+
werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
|
| 39 |
+
werkzeug/debug/shared/debugger.js,sha256=tg42SZs1SVmYWZ-_Fj5ELK5-FLHnGNQrei0K2By8Bw8,10521
|
| 40 |
+
werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
|
| 41 |
+
werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
|
| 42 |
+
werkzeug/debug/shared/style.css,sha256=-xSxzUEZGw_IqlDR5iZxitNl8LQUjBM-_Y4UAvXVH8g,6078
|
| 43 |
+
werkzeug/debug/tbtools.py,sha256=cBsPKCrB0FRT8i5EUdGo0A8MStWSj7O3Jk40r7Ll3ok,12633
|
| 44 |
+
werkzeug/exceptions.py,sha256=5nzjr4AN_J-jtkT2FgDIm8SUiC0tjzWcROXse06H6a8,26498
|
| 45 |
+
werkzeug/formparser.py,sha256=rLEu_ZwVpvqshZg6E4Qiv36QsmzmCytTijBeGX3dDGk,16056
|
| 46 |
+
werkzeug/http.py,sha256=RUwj0JM1Em3LHyqyXSJOkdtBOT24mJlGFbklqo3PWDY,44602
|
| 47 |
+
werkzeug/local.py,sha256=cj0M4BzMGdg_CD-H3osv9Zf9by4qY-BzAD68bxp979Q,18343
|
| 48 |
+
werkzeug/middleware/__init__.py,sha256=qfqgdT5npwG9ses3-FXQJf3aB95JYP1zchetH_T3PUw,500
|
| 49 |
+
werkzeug/middleware/__pycache__/__init__.cpython-38.pyc,,
|
| 50 |
+
werkzeug/middleware/__pycache__/dispatcher.cpython-38.pyc,,
|
| 51 |
+
werkzeug/middleware/__pycache__/http_proxy.cpython-38.pyc,,
|
| 52 |
+
werkzeug/middleware/__pycache__/lint.cpython-38.pyc,,
|
| 53 |
+
werkzeug/middleware/__pycache__/profiler.cpython-38.pyc,,
|
| 54 |
+
werkzeug/middleware/__pycache__/proxy_fix.cpython-38.pyc,,
|
| 55 |
+
werkzeug/middleware/__pycache__/shared_data.cpython-38.pyc,,
|
| 56 |
+
werkzeug/middleware/dispatcher.py,sha256=Fh_w-KyWnTSYF-Lfv5dimQ7THSS7afPAZMmvc4zF1gg,2580
|
| 57 |
+
werkzeug/middleware/http_proxy.py,sha256=HE8VyhS7CR-E1O6_9b68huv8FLgGGR1DLYqkS3Xcp3Q,7558
|
| 58 |
+
werkzeug/middleware/lint.py,sha256=L4ISeRPhFbrMWt8CFHHExyvuWxE3CyqbfD5hTQKkVjA,13966
|
| 59 |
+
werkzeug/middleware/profiler.py,sha256=QkXk7cqnaPnF8wQu-5SyPCIOT3_kdABUBorQOghVNOA,4899
|
| 60 |
+
werkzeug/middleware/proxy_fix.py,sha256=l7LC_LDu0Yd4SvUxS5SFigAJMzcIOGm6LNKl9IXJBSU,6974
|
| 61 |
+
werkzeug/middleware/shared_data.py,sha256=fXjrEkuqxUVLG1DLrOdQLc96QQdjftCBZ1oM5oK89h4,9528
|
| 62 |
+
werkzeug/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 63 |
+
werkzeug/routing.py,sha256=zopf1P3MG-atd33YdBwIO49AnJ7nem5SKQig5FIhKEI,84346
|
| 64 |
+
werkzeug/sansio/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 65 |
+
werkzeug/sansio/__pycache__/__init__.cpython-38.pyc,,
|
| 66 |
+
werkzeug/sansio/__pycache__/multipart.cpython-38.pyc,,
|
| 67 |
+
werkzeug/sansio/__pycache__/request.cpython-38.pyc,,
|
| 68 |
+
werkzeug/sansio/__pycache__/response.cpython-38.pyc,,
|
| 69 |
+
werkzeug/sansio/__pycache__/utils.cpython-38.pyc,,
|
| 70 |
+
werkzeug/sansio/multipart.py,sha256=BRjBk_mCPjSJzwNVvBgmrJGk3QxA9pYfsgzFki28bxc,8751
|
| 71 |
+
werkzeug/sansio/request.py,sha256=6xhrNJAqScdbBF5i7HN-Y_1XjJ04wQtBKOsZuCy0AYw,20176
|
| 72 |
+
werkzeug/sansio/response.py,sha256=zvCq9HSBBZGBd5Gg412BY9RZIwnKsJl5Kzfd3Kl9sSo,26098
|
| 73 |
+
werkzeug/sansio/utils.py,sha256=V5v-UUnX8pm4RehP9Tt_NiUSOJGJGUvKjlW0eOIQldM,4164
|
| 74 |
+
werkzeug/security.py,sha256=vrBofh4WZZoUo1eAdJ6F1DrzVRlYauGS2CUDYpbQKj8,4658
|
| 75 |
+
werkzeug/serving.py,sha256=aL-dIwzwO_-UuUs0cKwYFOynUWVmYcaDjz713Wy_BHE,38337
|
| 76 |
+
werkzeug/test.py,sha256=7Ur4IinGCk9k5WCNk6x-mr2JrnupvKRXt6n-qNfo9oE,47841
|
| 77 |
+
werkzeug/testapp.py,sha256=p-2lMyvaHXzP1lau0tUAJTbW4STogoMpXFyCkeRBkAI,9397
|
| 78 |
+
werkzeug/urls.py,sha256=Q9Si-eVh7yxk3rwkzrwGRm146FXVXgg9lBP3k0HUfVM,36600
|
| 79 |
+
werkzeug/user_agent.py,sha256=WclZhpvgLurMF45hsioSbS75H1Zb4iMQGKN3_yZ2oKo,1420
|
| 80 |
+
werkzeug/utils.py,sha256=5HGm_5WSKBTVVl8IgvA-b-jL7gjT-LHWXH0ZKzCCI0I,24932
|
| 81 |
+
werkzeug/wrappers/__init__.py,sha256=kGyK7rOud3qCxll_jFyW15YarJhj1xtdf3ocx9ZheB8,120
|
| 82 |
+
werkzeug/wrappers/__pycache__/__init__.cpython-38.pyc,,
|
| 83 |
+
werkzeug/wrappers/__pycache__/request.cpython-38.pyc,,
|
| 84 |
+
werkzeug/wrappers/__pycache__/response.cpython-38.pyc,,
|
| 85 |
+
werkzeug/wrappers/request.py,sha256=UQ559KkGS0Po6HTBgvKMlk1_AsNw5zstzm8o_dRrfdQ,23415
|
| 86 |
+
werkzeug/wrappers/response.py,sha256=c2HUXrrt5Sf8-XEB1fUXxm6jp7Lu80KR0A_tbQFvw1Q,34750
|
| 87 |
+
werkzeug/wsgi.py,sha256=L7s5-Rlt7BRVEZ1m81MaenGfMDP7yL3p1Kxt9Yssqzg,33727
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/REQUESTED
ADDED
|
File without changes
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.37.1)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/Werkzeug-2.1.2.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
werkzeug
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/__init__.py
ADDED
|
@@ -0,0 +1,216 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
__version__ = "3.8.1"
|
| 2 |
+
|
| 3 |
+
from typing import Tuple
|
| 4 |
+
|
| 5 |
+
from . import hdrs as hdrs
|
| 6 |
+
from .client import (
|
| 7 |
+
BaseConnector as BaseConnector,
|
| 8 |
+
ClientConnectionError as ClientConnectionError,
|
| 9 |
+
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
| 10 |
+
ClientConnectorError as ClientConnectorError,
|
| 11 |
+
ClientConnectorSSLError as ClientConnectorSSLError,
|
| 12 |
+
ClientError as ClientError,
|
| 13 |
+
ClientHttpProxyError as ClientHttpProxyError,
|
| 14 |
+
ClientOSError as ClientOSError,
|
| 15 |
+
ClientPayloadError as ClientPayloadError,
|
| 16 |
+
ClientProxyConnectionError as ClientProxyConnectionError,
|
| 17 |
+
ClientRequest as ClientRequest,
|
| 18 |
+
ClientResponse as ClientResponse,
|
| 19 |
+
ClientResponseError as ClientResponseError,
|
| 20 |
+
ClientSession as ClientSession,
|
| 21 |
+
ClientSSLError as ClientSSLError,
|
| 22 |
+
ClientTimeout as ClientTimeout,
|
| 23 |
+
ClientWebSocketResponse as ClientWebSocketResponse,
|
| 24 |
+
ContentTypeError as ContentTypeError,
|
| 25 |
+
Fingerprint as Fingerprint,
|
| 26 |
+
InvalidURL as InvalidURL,
|
| 27 |
+
NamedPipeConnector as NamedPipeConnector,
|
| 28 |
+
RequestInfo as RequestInfo,
|
| 29 |
+
ServerConnectionError as ServerConnectionError,
|
| 30 |
+
ServerDisconnectedError as ServerDisconnectedError,
|
| 31 |
+
ServerFingerprintMismatch as ServerFingerprintMismatch,
|
| 32 |
+
ServerTimeoutError as ServerTimeoutError,
|
| 33 |
+
TCPConnector as TCPConnector,
|
| 34 |
+
TooManyRedirects as TooManyRedirects,
|
| 35 |
+
UnixConnector as UnixConnector,
|
| 36 |
+
WSServerHandshakeError as WSServerHandshakeError,
|
| 37 |
+
request as request,
|
| 38 |
+
)
|
| 39 |
+
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
| 40 |
+
from .formdata import FormData as FormData
|
| 41 |
+
from .helpers import BasicAuth, ChainMapProxy, ETag
|
| 42 |
+
from .http import (
|
| 43 |
+
HttpVersion as HttpVersion,
|
| 44 |
+
HttpVersion10 as HttpVersion10,
|
| 45 |
+
HttpVersion11 as HttpVersion11,
|
| 46 |
+
WebSocketError as WebSocketError,
|
| 47 |
+
WSCloseCode as WSCloseCode,
|
| 48 |
+
WSMessage as WSMessage,
|
| 49 |
+
WSMsgType as WSMsgType,
|
| 50 |
+
)
|
| 51 |
+
from .multipart import (
|
| 52 |
+
BadContentDispositionHeader as BadContentDispositionHeader,
|
| 53 |
+
BadContentDispositionParam as BadContentDispositionParam,
|
| 54 |
+
BodyPartReader as BodyPartReader,
|
| 55 |
+
MultipartReader as MultipartReader,
|
| 56 |
+
MultipartWriter as MultipartWriter,
|
| 57 |
+
content_disposition_filename as content_disposition_filename,
|
| 58 |
+
parse_content_disposition as parse_content_disposition,
|
| 59 |
+
)
|
| 60 |
+
from .payload import (
|
| 61 |
+
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
| 62 |
+
AsyncIterablePayload as AsyncIterablePayload,
|
| 63 |
+
BufferedReaderPayload as BufferedReaderPayload,
|
| 64 |
+
BytesIOPayload as BytesIOPayload,
|
| 65 |
+
BytesPayload as BytesPayload,
|
| 66 |
+
IOBasePayload as IOBasePayload,
|
| 67 |
+
JsonPayload as JsonPayload,
|
| 68 |
+
Payload as Payload,
|
| 69 |
+
StringIOPayload as StringIOPayload,
|
| 70 |
+
StringPayload as StringPayload,
|
| 71 |
+
TextIOPayload as TextIOPayload,
|
| 72 |
+
get_payload as get_payload,
|
| 73 |
+
payload_type as payload_type,
|
| 74 |
+
)
|
| 75 |
+
from .payload_streamer import streamer as streamer
|
| 76 |
+
from .resolver import (
|
| 77 |
+
AsyncResolver as AsyncResolver,
|
| 78 |
+
DefaultResolver as DefaultResolver,
|
| 79 |
+
ThreadedResolver as ThreadedResolver,
|
| 80 |
+
)
|
| 81 |
+
from .streams import (
|
| 82 |
+
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
| 83 |
+
DataQueue as DataQueue,
|
| 84 |
+
EofStream as EofStream,
|
| 85 |
+
FlowControlDataQueue as FlowControlDataQueue,
|
| 86 |
+
StreamReader as StreamReader,
|
| 87 |
+
)
|
| 88 |
+
from .tracing import (
|
| 89 |
+
TraceConfig as TraceConfig,
|
| 90 |
+
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
| 91 |
+
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
| 92 |
+
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
| 93 |
+
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
| 94 |
+
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
| 95 |
+
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
| 96 |
+
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
| 97 |
+
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
| 98 |
+
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
| 99 |
+
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
| 100 |
+
TraceRequestEndParams as TraceRequestEndParams,
|
| 101 |
+
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
| 102 |
+
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
| 103 |
+
TraceRequestStartParams as TraceRequestStartParams,
|
| 104 |
+
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
| 105 |
+
)
|
| 106 |
+
|
| 107 |
+
__all__: Tuple[str, ...] = (
|
| 108 |
+
"hdrs",
|
| 109 |
+
# client
|
| 110 |
+
"BaseConnector",
|
| 111 |
+
"ClientConnectionError",
|
| 112 |
+
"ClientConnectorCertificateError",
|
| 113 |
+
"ClientConnectorError",
|
| 114 |
+
"ClientConnectorSSLError",
|
| 115 |
+
"ClientError",
|
| 116 |
+
"ClientHttpProxyError",
|
| 117 |
+
"ClientOSError",
|
| 118 |
+
"ClientPayloadError",
|
| 119 |
+
"ClientProxyConnectionError",
|
| 120 |
+
"ClientResponse",
|
| 121 |
+
"ClientRequest",
|
| 122 |
+
"ClientResponseError",
|
| 123 |
+
"ClientSSLError",
|
| 124 |
+
"ClientSession",
|
| 125 |
+
"ClientTimeout",
|
| 126 |
+
"ClientWebSocketResponse",
|
| 127 |
+
"ContentTypeError",
|
| 128 |
+
"Fingerprint",
|
| 129 |
+
"InvalidURL",
|
| 130 |
+
"RequestInfo",
|
| 131 |
+
"ServerConnectionError",
|
| 132 |
+
"ServerDisconnectedError",
|
| 133 |
+
"ServerFingerprintMismatch",
|
| 134 |
+
"ServerTimeoutError",
|
| 135 |
+
"TCPConnector",
|
| 136 |
+
"TooManyRedirects",
|
| 137 |
+
"UnixConnector",
|
| 138 |
+
"NamedPipeConnector",
|
| 139 |
+
"WSServerHandshakeError",
|
| 140 |
+
"request",
|
| 141 |
+
# cookiejar
|
| 142 |
+
"CookieJar",
|
| 143 |
+
"DummyCookieJar",
|
| 144 |
+
# formdata
|
| 145 |
+
"FormData",
|
| 146 |
+
# helpers
|
| 147 |
+
"BasicAuth",
|
| 148 |
+
"ChainMapProxy",
|
| 149 |
+
"ETag",
|
| 150 |
+
# http
|
| 151 |
+
"HttpVersion",
|
| 152 |
+
"HttpVersion10",
|
| 153 |
+
"HttpVersion11",
|
| 154 |
+
"WSMsgType",
|
| 155 |
+
"WSCloseCode",
|
| 156 |
+
"WSMessage",
|
| 157 |
+
"WebSocketError",
|
| 158 |
+
# multipart
|
| 159 |
+
"BadContentDispositionHeader",
|
| 160 |
+
"BadContentDispositionParam",
|
| 161 |
+
"BodyPartReader",
|
| 162 |
+
"MultipartReader",
|
| 163 |
+
"MultipartWriter",
|
| 164 |
+
"content_disposition_filename",
|
| 165 |
+
"parse_content_disposition",
|
| 166 |
+
# payload
|
| 167 |
+
"AsyncIterablePayload",
|
| 168 |
+
"BufferedReaderPayload",
|
| 169 |
+
"BytesIOPayload",
|
| 170 |
+
"BytesPayload",
|
| 171 |
+
"IOBasePayload",
|
| 172 |
+
"JsonPayload",
|
| 173 |
+
"PAYLOAD_REGISTRY",
|
| 174 |
+
"Payload",
|
| 175 |
+
"StringIOPayload",
|
| 176 |
+
"StringPayload",
|
| 177 |
+
"TextIOPayload",
|
| 178 |
+
"get_payload",
|
| 179 |
+
"payload_type",
|
| 180 |
+
# payload_streamer
|
| 181 |
+
"streamer",
|
| 182 |
+
# resolver
|
| 183 |
+
"AsyncResolver",
|
| 184 |
+
"DefaultResolver",
|
| 185 |
+
"ThreadedResolver",
|
| 186 |
+
# streams
|
| 187 |
+
"DataQueue",
|
| 188 |
+
"EMPTY_PAYLOAD",
|
| 189 |
+
"EofStream",
|
| 190 |
+
"FlowControlDataQueue",
|
| 191 |
+
"StreamReader",
|
| 192 |
+
# tracing
|
| 193 |
+
"TraceConfig",
|
| 194 |
+
"TraceConnectionCreateEndParams",
|
| 195 |
+
"TraceConnectionCreateStartParams",
|
| 196 |
+
"TraceConnectionQueuedEndParams",
|
| 197 |
+
"TraceConnectionQueuedStartParams",
|
| 198 |
+
"TraceConnectionReuseconnParams",
|
| 199 |
+
"TraceDnsCacheHitParams",
|
| 200 |
+
"TraceDnsCacheMissParams",
|
| 201 |
+
"TraceDnsResolveHostEndParams",
|
| 202 |
+
"TraceDnsResolveHostStartParams",
|
| 203 |
+
"TraceRequestChunkSentParams",
|
| 204 |
+
"TraceRequestEndParams",
|
| 205 |
+
"TraceRequestExceptionParams",
|
| 206 |
+
"TraceRequestRedirectParams",
|
| 207 |
+
"TraceRequestStartParams",
|
| 208 |
+
"TraceResponseChunkReceivedParams",
|
| 209 |
+
)
|
| 210 |
+
|
| 211 |
+
try:
|
| 212 |
+
from .worker import GunicornUVLoopWebWorker, GunicornWebWorker
|
| 213 |
+
|
| 214 |
+
__all__ += ("GunicornWebWorker", "GunicornUVLoopWebWorker")
|
| 215 |
+
except ImportError: # pragma: no cover
|
| 216 |
+
pass
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_find_header.c
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_find_header.h
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#ifndef _FIND_HEADERS_H
|
| 2 |
+
#define _FIND_HEADERS_H
|
| 3 |
+
|
| 4 |
+
#ifdef __cplusplus
|
| 5 |
+
extern "C" {
|
| 6 |
+
#endif
|
| 7 |
+
|
| 8 |
+
int find_header(const char *str, int size);
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
#ifdef __cplusplus
|
| 12 |
+
}
|
| 13 |
+
#endif
|
| 14 |
+
#endif
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_find_header.pxd
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef extern from "_find_header.h":
|
| 2 |
+
int find_header(char *, int)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_headers.pxi
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# The file is autogenerated from aiohttp/hdrs.py
|
| 2 |
+
# Run ./tools/gen.py to update it after the origin changing.
|
| 3 |
+
|
| 4 |
+
from . import hdrs
|
| 5 |
+
cdef tuple headers = (
|
| 6 |
+
hdrs.ACCEPT,
|
| 7 |
+
hdrs.ACCEPT_CHARSET,
|
| 8 |
+
hdrs.ACCEPT_ENCODING,
|
| 9 |
+
hdrs.ACCEPT_LANGUAGE,
|
| 10 |
+
hdrs.ACCEPT_RANGES,
|
| 11 |
+
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
| 12 |
+
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
| 13 |
+
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
| 14 |
+
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
| 15 |
+
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
| 16 |
+
hdrs.ACCESS_CONTROL_MAX_AGE,
|
| 17 |
+
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
| 18 |
+
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
| 19 |
+
hdrs.AGE,
|
| 20 |
+
hdrs.ALLOW,
|
| 21 |
+
hdrs.AUTHORIZATION,
|
| 22 |
+
hdrs.CACHE_CONTROL,
|
| 23 |
+
hdrs.CONNECTION,
|
| 24 |
+
hdrs.CONTENT_DISPOSITION,
|
| 25 |
+
hdrs.CONTENT_ENCODING,
|
| 26 |
+
hdrs.CONTENT_LANGUAGE,
|
| 27 |
+
hdrs.CONTENT_LENGTH,
|
| 28 |
+
hdrs.CONTENT_LOCATION,
|
| 29 |
+
hdrs.CONTENT_MD5,
|
| 30 |
+
hdrs.CONTENT_RANGE,
|
| 31 |
+
hdrs.CONTENT_TRANSFER_ENCODING,
|
| 32 |
+
hdrs.CONTENT_TYPE,
|
| 33 |
+
hdrs.COOKIE,
|
| 34 |
+
hdrs.DATE,
|
| 35 |
+
hdrs.DESTINATION,
|
| 36 |
+
hdrs.DIGEST,
|
| 37 |
+
hdrs.ETAG,
|
| 38 |
+
hdrs.EXPECT,
|
| 39 |
+
hdrs.EXPIRES,
|
| 40 |
+
hdrs.FORWARDED,
|
| 41 |
+
hdrs.FROM,
|
| 42 |
+
hdrs.HOST,
|
| 43 |
+
hdrs.IF_MATCH,
|
| 44 |
+
hdrs.IF_MODIFIED_SINCE,
|
| 45 |
+
hdrs.IF_NONE_MATCH,
|
| 46 |
+
hdrs.IF_RANGE,
|
| 47 |
+
hdrs.IF_UNMODIFIED_SINCE,
|
| 48 |
+
hdrs.KEEP_ALIVE,
|
| 49 |
+
hdrs.LAST_EVENT_ID,
|
| 50 |
+
hdrs.LAST_MODIFIED,
|
| 51 |
+
hdrs.LINK,
|
| 52 |
+
hdrs.LOCATION,
|
| 53 |
+
hdrs.MAX_FORWARDS,
|
| 54 |
+
hdrs.ORIGIN,
|
| 55 |
+
hdrs.PRAGMA,
|
| 56 |
+
hdrs.PROXY_AUTHENTICATE,
|
| 57 |
+
hdrs.PROXY_AUTHORIZATION,
|
| 58 |
+
hdrs.RANGE,
|
| 59 |
+
hdrs.REFERER,
|
| 60 |
+
hdrs.RETRY_AFTER,
|
| 61 |
+
hdrs.SEC_WEBSOCKET_ACCEPT,
|
| 62 |
+
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
| 63 |
+
hdrs.SEC_WEBSOCKET_KEY,
|
| 64 |
+
hdrs.SEC_WEBSOCKET_KEY1,
|
| 65 |
+
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
| 66 |
+
hdrs.SEC_WEBSOCKET_VERSION,
|
| 67 |
+
hdrs.SERVER,
|
| 68 |
+
hdrs.SET_COOKIE,
|
| 69 |
+
hdrs.TE,
|
| 70 |
+
hdrs.TRAILER,
|
| 71 |
+
hdrs.TRANSFER_ENCODING,
|
| 72 |
+
hdrs.URI,
|
| 73 |
+
hdrs.UPGRADE,
|
| 74 |
+
hdrs.USER_AGENT,
|
| 75 |
+
hdrs.VARY,
|
| 76 |
+
hdrs.VIA,
|
| 77 |
+
hdrs.WWW_AUTHENTICATE,
|
| 78 |
+
hdrs.WANT_DIGEST,
|
| 79 |
+
hdrs.WARNING,
|
| 80 |
+
hdrs.X_FORWARDED_FOR,
|
| 81 |
+
hdrs.X_FORWARDED_HOST,
|
| 82 |
+
hdrs.X_FORWARDED_PROTO,
|
| 83 |
+
)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_helpers.c
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_helpers.pyx
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
cdef class reify:
|
| 2 |
+
"""Use as a class method decorator. It operates almost exactly like
|
| 3 |
+
the Python `@property` decorator, but it puts the result of the
|
| 4 |
+
method it decorates into the instance dict after the first call,
|
| 5 |
+
effectively replacing the function it decorates with an instance
|
| 6 |
+
variable. It is, in Python parlance, a data descriptor.
|
| 7 |
+
|
| 8 |
+
"""
|
| 9 |
+
|
| 10 |
+
cdef object wrapped
|
| 11 |
+
cdef object name
|
| 12 |
+
|
| 13 |
+
def __init__(self, wrapped):
|
| 14 |
+
self.wrapped = wrapped
|
| 15 |
+
self.name = wrapped.__name__
|
| 16 |
+
|
| 17 |
+
@property
|
| 18 |
+
def __doc__(self):
|
| 19 |
+
return self.wrapped.__doc__
|
| 20 |
+
|
| 21 |
+
def __get__(self, inst, owner):
|
| 22 |
+
try:
|
| 23 |
+
try:
|
| 24 |
+
return inst._cache[self.name]
|
| 25 |
+
except KeyError:
|
| 26 |
+
val = self.wrapped(inst)
|
| 27 |
+
inst._cache[self.name] = val
|
| 28 |
+
return val
|
| 29 |
+
except AttributeError:
|
| 30 |
+
if inst is None:
|
| 31 |
+
return self
|
| 32 |
+
raise
|
| 33 |
+
|
| 34 |
+
def __set__(self, inst, value):
|
| 35 |
+
raise AttributeError("reified property is read-only")
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_parser.c
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_parser.pyx
ADDED
|
@@ -0,0 +1,818 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
#cython: language_level=3
|
| 2 |
+
#
|
| 3 |
+
# Based on https://github.com/MagicStack/httptools
|
| 4 |
+
#
|
| 5 |
+
from __future__ import absolute_import, print_function
|
| 6 |
+
|
| 7 |
+
from cpython cimport (
|
| 8 |
+
Py_buffer,
|
| 9 |
+
PyBUF_SIMPLE,
|
| 10 |
+
PyBuffer_Release,
|
| 11 |
+
PyBytes_AsString,
|
| 12 |
+
PyBytes_AsStringAndSize,
|
| 13 |
+
PyObject_GetBuffer,
|
| 14 |
+
)
|
| 15 |
+
from cpython.mem cimport PyMem_Free, PyMem_Malloc
|
| 16 |
+
from libc.limits cimport ULLONG_MAX
|
| 17 |
+
from libc.string cimport memcpy
|
| 18 |
+
|
| 19 |
+
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
|
| 20 |
+
from yarl import URL as _URL
|
| 21 |
+
|
| 22 |
+
from aiohttp import hdrs
|
| 23 |
+
|
| 24 |
+
from .http_exceptions import (
|
| 25 |
+
BadHttpMessage,
|
| 26 |
+
BadStatusLine,
|
| 27 |
+
ContentLengthError,
|
| 28 |
+
InvalidHeader,
|
| 29 |
+
InvalidURLError,
|
| 30 |
+
LineTooLong,
|
| 31 |
+
PayloadEncodingError,
|
| 32 |
+
TransferEncodingError,
|
| 33 |
+
)
|
| 34 |
+
from .http_parser import DeflateBuffer as _DeflateBuffer
|
| 35 |
+
from .http_writer import (
|
| 36 |
+
HttpVersion as _HttpVersion,
|
| 37 |
+
HttpVersion10 as _HttpVersion10,
|
| 38 |
+
HttpVersion11 as _HttpVersion11,
|
| 39 |
+
)
|
| 40 |
+
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
|
| 41 |
+
|
| 42 |
+
cimport cython
|
| 43 |
+
|
| 44 |
+
from aiohttp cimport _cparser as cparser
|
| 45 |
+
|
| 46 |
+
include "_headers.pxi"
|
| 47 |
+
|
| 48 |
+
from aiohttp cimport _find_header
|
| 49 |
+
|
| 50 |
+
DEF DEFAULT_FREELIST_SIZE = 250
|
| 51 |
+
|
| 52 |
+
cdef extern from "Python.h":
|
| 53 |
+
int PyByteArray_Resize(object, Py_ssize_t) except -1
|
| 54 |
+
Py_ssize_t PyByteArray_Size(object) except -1
|
| 55 |
+
char* PyByteArray_AsString(object)
|
| 56 |
+
|
| 57 |
+
__all__ = ('HttpRequestParser', 'HttpResponseParser',
|
| 58 |
+
'RawRequestMessage', 'RawResponseMessage')
|
| 59 |
+
|
| 60 |
+
cdef object URL = _URL
|
| 61 |
+
cdef object URL_build = URL.build
|
| 62 |
+
cdef object CIMultiDict = _CIMultiDict
|
| 63 |
+
cdef object CIMultiDictProxy = _CIMultiDictProxy
|
| 64 |
+
cdef object HttpVersion = _HttpVersion
|
| 65 |
+
cdef object HttpVersion10 = _HttpVersion10
|
| 66 |
+
cdef object HttpVersion11 = _HttpVersion11
|
| 67 |
+
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
|
| 68 |
+
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
|
| 69 |
+
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
|
| 70 |
+
cdef object StreamReader = _StreamReader
|
| 71 |
+
cdef object DeflateBuffer = _DeflateBuffer
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
cdef inline object extend(object buf, const char* at, size_t length):
|
| 75 |
+
cdef Py_ssize_t s
|
| 76 |
+
cdef char* ptr
|
| 77 |
+
s = PyByteArray_Size(buf)
|
| 78 |
+
PyByteArray_Resize(buf, s + length)
|
| 79 |
+
ptr = PyByteArray_AsString(buf)
|
| 80 |
+
memcpy(ptr + s, at, length)
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
DEF METHODS_COUNT = 46;
|
| 84 |
+
|
| 85 |
+
cdef list _http_method = []
|
| 86 |
+
|
| 87 |
+
for i in range(METHODS_COUNT):
|
| 88 |
+
_http_method.append(
|
| 89 |
+
cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
cdef inline str http_method_str(int i):
|
| 93 |
+
if i < METHODS_COUNT:
|
| 94 |
+
return <str>_http_method[i]
|
| 95 |
+
else:
|
| 96 |
+
return "<unknown>"
|
| 97 |
+
|
| 98 |
+
cdef inline object find_header(bytes raw_header):
|
| 99 |
+
cdef Py_ssize_t size
|
| 100 |
+
cdef char *buf
|
| 101 |
+
cdef int idx
|
| 102 |
+
PyBytes_AsStringAndSize(raw_header, &buf, &size)
|
| 103 |
+
idx = _find_header.find_header(buf, size)
|
| 104 |
+
if idx == -1:
|
| 105 |
+
return raw_header.decode('utf-8', 'surrogateescape')
|
| 106 |
+
return headers[idx]
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
| 110 |
+
cdef class RawRequestMessage:
|
| 111 |
+
cdef readonly str method
|
| 112 |
+
cdef readonly str path
|
| 113 |
+
cdef readonly object version # HttpVersion
|
| 114 |
+
cdef readonly object headers # CIMultiDict
|
| 115 |
+
cdef readonly object raw_headers # tuple
|
| 116 |
+
cdef readonly object should_close
|
| 117 |
+
cdef readonly object compression
|
| 118 |
+
cdef readonly object upgrade
|
| 119 |
+
cdef readonly object chunked
|
| 120 |
+
cdef readonly object url # yarl.URL
|
| 121 |
+
|
| 122 |
+
def __init__(self, method, path, version, headers, raw_headers,
|
| 123 |
+
should_close, compression, upgrade, chunked, url):
|
| 124 |
+
self.method = method
|
| 125 |
+
self.path = path
|
| 126 |
+
self.version = version
|
| 127 |
+
self.headers = headers
|
| 128 |
+
self.raw_headers = raw_headers
|
| 129 |
+
self.should_close = should_close
|
| 130 |
+
self.compression = compression
|
| 131 |
+
self.upgrade = upgrade
|
| 132 |
+
self.chunked = chunked
|
| 133 |
+
self.url = url
|
| 134 |
+
|
| 135 |
+
def __repr__(self):
|
| 136 |
+
info = []
|
| 137 |
+
info.append(("method", self.method))
|
| 138 |
+
info.append(("path", self.path))
|
| 139 |
+
info.append(("version", self.version))
|
| 140 |
+
info.append(("headers", self.headers))
|
| 141 |
+
info.append(("raw_headers", self.raw_headers))
|
| 142 |
+
info.append(("should_close", self.should_close))
|
| 143 |
+
info.append(("compression", self.compression))
|
| 144 |
+
info.append(("upgrade", self.upgrade))
|
| 145 |
+
info.append(("chunked", self.chunked))
|
| 146 |
+
info.append(("url", self.url))
|
| 147 |
+
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
| 148 |
+
return '<RawRequestMessage(' + sinfo + ')>'
|
| 149 |
+
|
| 150 |
+
def _replace(self, **dct):
|
| 151 |
+
cdef RawRequestMessage ret
|
| 152 |
+
ret = _new_request_message(self.method,
|
| 153 |
+
self.path,
|
| 154 |
+
self.version,
|
| 155 |
+
self.headers,
|
| 156 |
+
self.raw_headers,
|
| 157 |
+
self.should_close,
|
| 158 |
+
self.compression,
|
| 159 |
+
self.upgrade,
|
| 160 |
+
self.chunked,
|
| 161 |
+
self.url)
|
| 162 |
+
if "method" in dct:
|
| 163 |
+
ret.method = dct["method"]
|
| 164 |
+
if "path" in dct:
|
| 165 |
+
ret.path = dct["path"]
|
| 166 |
+
if "version" in dct:
|
| 167 |
+
ret.version = dct["version"]
|
| 168 |
+
if "headers" in dct:
|
| 169 |
+
ret.headers = dct["headers"]
|
| 170 |
+
if "raw_headers" in dct:
|
| 171 |
+
ret.raw_headers = dct["raw_headers"]
|
| 172 |
+
if "should_close" in dct:
|
| 173 |
+
ret.should_close = dct["should_close"]
|
| 174 |
+
if "compression" in dct:
|
| 175 |
+
ret.compression = dct["compression"]
|
| 176 |
+
if "upgrade" in dct:
|
| 177 |
+
ret.upgrade = dct["upgrade"]
|
| 178 |
+
if "chunked" in dct:
|
| 179 |
+
ret.chunked = dct["chunked"]
|
| 180 |
+
if "url" in dct:
|
| 181 |
+
ret.url = dct["url"]
|
| 182 |
+
return ret
|
| 183 |
+
|
| 184 |
+
cdef _new_request_message(str method,
|
| 185 |
+
str path,
|
| 186 |
+
object version,
|
| 187 |
+
object headers,
|
| 188 |
+
object raw_headers,
|
| 189 |
+
bint should_close,
|
| 190 |
+
object compression,
|
| 191 |
+
bint upgrade,
|
| 192 |
+
bint chunked,
|
| 193 |
+
object url):
|
| 194 |
+
cdef RawRequestMessage ret
|
| 195 |
+
ret = RawRequestMessage.__new__(RawRequestMessage)
|
| 196 |
+
ret.method = method
|
| 197 |
+
ret.path = path
|
| 198 |
+
ret.version = version
|
| 199 |
+
ret.headers = headers
|
| 200 |
+
ret.raw_headers = raw_headers
|
| 201 |
+
ret.should_close = should_close
|
| 202 |
+
ret.compression = compression
|
| 203 |
+
ret.upgrade = upgrade
|
| 204 |
+
ret.chunked = chunked
|
| 205 |
+
ret.url = url
|
| 206 |
+
return ret
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
@cython.freelist(DEFAULT_FREELIST_SIZE)
|
| 210 |
+
cdef class RawResponseMessage:
|
| 211 |
+
cdef readonly object version # HttpVersion
|
| 212 |
+
cdef readonly int code
|
| 213 |
+
cdef readonly str reason
|
| 214 |
+
cdef readonly object headers # CIMultiDict
|
| 215 |
+
cdef readonly object raw_headers # tuple
|
| 216 |
+
cdef readonly object should_close
|
| 217 |
+
cdef readonly object compression
|
| 218 |
+
cdef readonly object upgrade
|
| 219 |
+
cdef readonly object chunked
|
| 220 |
+
|
| 221 |
+
def __init__(self, version, code, reason, headers, raw_headers,
|
| 222 |
+
should_close, compression, upgrade, chunked):
|
| 223 |
+
self.version = version
|
| 224 |
+
self.code = code
|
| 225 |
+
self.reason = reason
|
| 226 |
+
self.headers = headers
|
| 227 |
+
self.raw_headers = raw_headers
|
| 228 |
+
self.should_close = should_close
|
| 229 |
+
self.compression = compression
|
| 230 |
+
self.upgrade = upgrade
|
| 231 |
+
self.chunked = chunked
|
| 232 |
+
|
| 233 |
+
def __repr__(self):
|
| 234 |
+
info = []
|
| 235 |
+
info.append(("version", self.version))
|
| 236 |
+
info.append(("code", self.code))
|
| 237 |
+
info.append(("reason", self.reason))
|
| 238 |
+
info.append(("headers", self.headers))
|
| 239 |
+
info.append(("raw_headers", self.raw_headers))
|
| 240 |
+
info.append(("should_close", self.should_close))
|
| 241 |
+
info.append(("compression", self.compression))
|
| 242 |
+
info.append(("upgrade", self.upgrade))
|
| 243 |
+
info.append(("chunked", self.chunked))
|
| 244 |
+
sinfo = ', '.join(name + '=' + repr(val) for name, val in info)
|
| 245 |
+
return '<RawResponseMessage(' + sinfo + ')>'
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
cdef _new_response_message(object version,
|
| 249 |
+
int code,
|
| 250 |
+
str reason,
|
| 251 |
+
object headers,
|
| 252 |
+
object raw_headers,
|
| 253 |
+
bint should_close,
|
| 254 |
+
object compression,
|
| 255 |
+
bint upgrade,
|
| 256 |
+
bint chunked):
|
| 257 |
+
cdef RawResponseMessage ret
|
| 258 |
+
ret = RawResponseMessage.__new__(RawResponseMessage)
|
| 259 |
+
ret.version = version
|
| 260 |
+
ret.code = code
|
| 261 |
+
ret.reason = reason
|
| 262 |
+
ret.headers = headers
|
| 263 |
+
ret.raw_headers = raw_headers
|
| 264 |
+
ret.should_close = should_close
|
| 265 |
+
ret.compression = compression
|
| 266 |
+
ret.upgrade = upgrade
|
| 267 |
+
ret.chunked = chunked
|
| 268 |
+
return ret
|
| 269 |
+
|
| 270 |
+
|
| 271 |
+
@cython.internal
|
| 272 |
+
cdef class HttpParser:
|
| 273 |
+
|
| 274 |
+
cdef:
|
| 275 |
+
cparser.llhttp_t* _cparser
|
| 276 |
+
cparser.llhttp_settings_t* _csettings
|
| 277 |
+
|
| 278 |
+
bytearray _raw_name
|
| 279 |
+
bytearray _raw_value
|
| 280 |
+
bint _has_value
|
| 281 |
+
|
| 282 |
+
object _protocol
|
| 283 |
+
object _loop
|
| 284 |
+
object _timer
|
| 285 |
+
|
| 286 |
+
size_t _max_line_size
|
| 287 |
+
size_t _max_field_size
|
| 288 |
+
size_t _max_headers
|
| 289 |
+
bint _response_with_body
|
| 290 |
+
bint _read_until_eof
|
| 291 |
+
|
| 292 |
+
bint _started
|
| 293 |
+
object _url
|
| 294 |
+
bytearray _buf
|
| 295 |
+
str _path
|
| 296 |
+
str _reason
|
| 297 |
+
object _headers
|
| 298 |
+
list _raw_headers
|
| 299 |
+
bint _upgraded
|
| 300 |
+
list _messages
|
| 301 |
+
object _payload
|
| 302 |
+
bint _payload_error
|
| 303 |
+
object _payload_exception
|
| 304 |
+
object _last_error
|
| 305 |
+
bint _auto_decompress
|
| 306 |
+
int _limit
|
| 307 |
+
|
| 308 |
+
str _content_encoding
|
| 309 |
+
|
| 310 |
+
Py_buffer py_buf
|
| 311 |
+
|
| 312 |
+
def __cinit__(self):
|
| 313 |
+
self._cparser = <cparser.llhttp_t*> \
|
| 314 |
+
PyMem_Malloc(sizeof(cparser.llhttp_t))
|
| 315 |
+
if self._cparser is NULL:
|
| 316 |
+
raise MemoryError()
|
| 317 |
+
|
| 318 |
+
self._csettings = <cparser.llhttp_settings_t*> \
|
| 319 |
+
PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
|
| 320 |
+
if self._csettings is NULL:
|
| 321 |
+
raise MemoryError()
|
| 322 |
+
|
| 323 |
+
def __dealloc__(self):
|
| 324 |
+
PyMem_Free(self._cparser)
|
| 325 |
+
PyMem_Free(self._csettings)
|
| 326 |
+
|
| 327 |
+
cdef _init(
|
| 328 |
+
self, cparser.llhttp_type mode,
|
| 329 |
+
object protocol, object loop, int limit,
|
| 330 |
+
object timer=None,
|
| 331 |
+
size_t max_line_size=8190, size_t max_headers=32768,
|
| 332 |
+
size_t max_field_size=8190, payload_exception=None,
|
| 333 |
+
bint response_with_body=True, bint read_until_eof=False,
|
| 334 |
+
bint auto_decompress=True,
|
| 335 |
+
):
|
| 336 |
+
cparser.llhttp_settings_init(self._csettings)
|
| 337 |
+
cparser.llhttp_init(self._cparser, mode, self._csettings)
|
| 338 |
+
self._cparser.data = <void*>self
|
| 339 |
+
self._cparser.content_length = 0
|
| 340 |
+
|
| 341 |
+
self._protocol = protocol
|
| 342 |
+
self._loop = loop
|
| 343 |
+
self._timer = timer
|
| 344 |
+
|
| 345 |
+
self._buf = bytearray()
|
| 346 |
+
self._payload = None
|
| 347 |
+
self._payload_error = 0
|
| 348 |
+
self._payload_exception = payload_exception
|
| 349 |
+
self._messages = []
|
| 350 |
+
|
| 351 |
+
self._raw_name = bytearray()
|
| 352 |
+
self._raw_value = bytearray()
|
| 353 |
+
self._has_value = False
|
| 354 |
+
|
| 355 |
+
self._max_line_size = max_line_size
|
| 356 |
+
self._max_headers = max_headers
|
| 357 |
+
self._max_field_size = max_field_size
|
| 358 |
+
self._response_with_body = response_with_body
|
| 359 |
+
self._read_until_eof = read_until_eof
|
| 360 |
+
self._upgraded = False
|
| 361 |
+
self._auto_decompress = auto_decompress
|
| 362 |
+
self._content_encoding = None
|
| 363 |
+
|
| 364 |
+
self._csettings.on_url = cb_on_url
|
| 365 |
+
self._csettings.on_status = cb_on_status
|
| 366 |
+
self._csettings.on_header_field = cb_on_header_field
|
| 367 |
+
self._csettings.on_header_value = cb_on_header_value
|
| 368 |
+
self._csettings.on_headers_complete = cb_on_headers_complete
|
| 369 |
+
self._csettings.on_body = cb_on_body
|
| 370 |
+
self._csettings.on_message_begin = cb_on_message_begin
|
| 371 |
+
self._csettings.on_message_complete = cb_on_message_complete
|
| 372 |
+
self._csettings.on_chunk_header = cb_on_chunk_header
|
| 373 |
+
self._csettings.on_chunk_complete = cb_on_chunk_complete
|
| 374 |
+
|
| 375 |
+
self._last_error = None
|
| 376 |
+
self._limit = limit
|
| 377 |
+
|
| 378 |
+
cdef _process_header(self):
|
| 379 |
+
if self._raw_name:
|
| 380 |
+
raw_name = bytes(self._raw_name)
|
| 381 |
+
raw_value = bytes(self._raw_value)
|
| 382 |
+
|
| 383 |
+
name = find_header(raw_name)
|
| 384 |
+
value = raw_value.decode('utf-8', 'surrogateescape')
|
| 385 |
+
|
| 386 |
+
self._headers.add(name, value)
|
| 387 |
+
|
| 388 |
+
if name is CONTENT_ENCODING:
|
| 389 |
+
self._content_encoding = value
|
| 390 |
+
|
| 391 |
+
PyByteArray_Resize(self._raw_name, 0)
|
| 392 |
+
PyByteArray_Resize(self._raw_value, 0)
|
| 393 |
+
self._has_value = False
|
| 394 |
+
self._raw_headers.append((raw_name, raw_value))
|
| 395 |
+
|
| 396 |
+
cdef _on_header_field(self, char* at, size_t length):
|
| 397 |
+
cdef Py_ssize_t size
|
| 398 |
+
cdef char *buf
|
| 399 |
+
if self._has_value:
|
| 400 |
+
self._process_header()
|
| 401 |
+
|
| 402 |
+
size = PyByteArray_Size(self._raw_name)
|
| 403 |
+
PyByteArray_Resize(self._raw_name, size + length)
|
| 404 |
+
buf = PyByteArray_AsString(self._raw_name)
|
| 405 |
+
memcpy(buf + size, at, length)
|
| 406 |
+
|
| 407 |
+
cdef _on_header_value(self, char* at, size_t length):
|
| 408 |
+
cdef Py_ssize_t size
|
| 409 |
+
cdef char *buf
|
| 410 |
+
|
| 411 |
+
size = PyByteArray_Size(self._raw_value)
|
| 412 |
+
PyByteArray_Resize(self._raw_value, size + length)
|
| 413 |
+
buf = PyByteArray_AsString(self._raw_value)
|
| 414 |
+
memcpy(buf + size, at, length)
|
| 415 |
+
self._has_value = True
|
| 416 |
+
|
| 417 |
+
cdef _on_headers_complete(self):
|
| 418 |
+
self._process_header()
|
| 419 |
+
|
| 420 |
+
method = http_method_str(self._cparser.method)
|
| 421 |
+
should_close = not cparser.llhttp_should_keep_alive(self._cparser)
|
| 422 |
+
upgrade = self._cparser.upgrade
|
| 423 |
+
chunked = self._cparser.flags & cparser.F_CHUNKED
|
| 424 |
+
|
| 425 |
+
raw_headers = tuple(self._raw_headers)
|
| 426 |
+
headers = CIMultiDictProxy(self._headers)
|
| 427 |
+
|
| 428 |
+
if upgrade or self._cparser.method == 5: # cparser.CONNECT:
|
| 429 |
+
self._upgraded = True
|
| 430 |
+
|
| 431 |
+
# do not support old websocket spec
|
| 432 |
+
if SEC_WEBSOCKET_KEY1 in headers:
|
| 433 |
+
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
|
| 434 |
+
|
| 435 |
+
encoding = None
|
| 436 |
+
enc = self._content_encoding
|
| 437 |
+
if enc is not None:
|
| 438 |
+
self._content_encoding = None
|
| 439 |
+
enc = enc.lower()
|
| 440 |
+
if enc in ('gzip', 'deflate', 'br'):
|
| 441 |
+
encoding = enc
|
| 442 |
+
|
| 443 |
+
if self._cparser.type == cparser.HTTP_REQUEST:
|
| 444 |
+
msg = _new_request_message(
|
| 445 |
+
method, self._path,
|
| 446 |
+
self.http_version(), headers, raw_headers,
|
| 447 |
+
should_close, encoding, upgrade, chunked, self._url)
|
| 448 |
+
else:
|
| 449 |
+
msg = _new_response_message(
|
| 450 |
+
self.http_version(), self._cparser.status_code, self._reason,
|
| 451 |
+
headers, raw_headers, should_close, encoding,
|
| 452 |
+
upgrade, chunked)
|
| 453 |
+
|
| 454 |
+
if (
|
| 455 |
+
ULLONG_MAX > self._cparser.content_length > 0 or chunked or
|
| 456 |
+
self._cparser.method == 5 or # CONNECT: 5
|
| 457 |
+
(self._cparser.status_code >= 199 and
|
| 458 |
+
self._cparser.content_length == 0 and
|
| 459 |
+
self._read_until_eof)
|
| 460 |
+
):
|
| 461 |
+
payload = StreamReader(
|
| 462 |
+
self._protocol, timer=self._timer, loop=self._loop,
|
| 463 |
+
limit=self._limit)
|
| 464 |
+
else:
|
| 465 |
+
payload = EMPTY_PAYLOAD
|
| 466 |
+
|
| 467 |
+
self._payload = payload
|
| 468 |
+
if encoding is not None and self._auto_decompress:
|
| 469 |
+
self._payload = DeflateBuffer(payload, encoding)
|
| 470 |
+
|
| 471 |
+
if not self._response_with_body:
|
| 472 |
+
payload = EMPTY_PAYLOAD
|
| 473 |
+
|
| 474 |
+
self._messages.append((msg, payload))
|
| 475 |
+
|
| 476 |
+
cdef _on_message_complete(self):
|
| 477 |
+
self._payload.feed_eof()
|
| 478 |
+
self._payload = None
|
| 479 |
+
|
| 480 |
+
cdef _on_chunk_header(self):
|
| 481 |
+
self._payload.begin_http_chunk_receiving()
|
| 482 |
+
|
| 483 |
+
cdef _on_chunk_complete(self):
|
| 484 |
+
self._payload.end_http_chunk_receiving()
|
| 485 |
+
|
| 486 |
+
cdef object _on_status_complete(self):
|
| 487 |
+
pass
|
| 488 |
+
|
| 489 |
+
cdef inline http_version(self):
|
| 490 |
+
cdef cparser.llhttp_t* parser = self._cparser
|
| 491 |
+
|
| 492 |
+
if parser.http_major == 1:
|
| 493 |
+
if parser.http_minor == 0:
|
| 494 |
+
return HttpVersion10
|
| 495 |
+
elif parser.http_minor == 1:
|
| 496 |
+
return HttpVersion11
|
| 497 |
+
|
| 498 |
+
return HttpVersion(parser.http_major, parser.http_minor)
|
| 499 |
+
|
| 500 |
+
### Public API ###
|
| 501 |
+
|
| 502 |
+
def feed_eof(self):
    """Signal end-of-stream to the parser.

    If a payload is being read, either finish it or raise the matching
    error when the wire promised more data (chunked trailer or
    Content-Length not satisfied). If headers were started but never
    completed, force completion and return the partial message.
    """
    cdef bytes reason

    if self._payload is None:
        # No payload in flight; surface a partially parsed message, if any.
        if self._started:
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]
        return

    flags = self._cparser.flags
    if flags & cparser.F_CHUNKED:
        raise TransferEncodingError(
            "Not enough data for satisfy transfer length header.")
    if flags & cparser.F_CONTENT_LENGTH:
        raise ContentLengthError(
            "Not enough data for satisfy content length header.")
    if cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
        reason = cparser.llhttp_get_error_reason(self._cparser)
        raise PayloadEncodingError(reason.decode('latin-1'))
    self._payload.feed_eof()
|
| 521 |
+
|
| 522 |
+
def feed_data(self, data):
|
| 523 |
+
cdef:
|
| 524 |
+
size_t data_len
|
| 525 |
+
size_t nb
|
| 526 |
+
cdef cparser.llhttp_errno_t errno
|
| 527 |
+
|
| 528 |
+
PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
|
| 529 |
+
data_len = <size_t>self.py_buf.len
|
| 530 |
+
|
| 531 |
+
errno = cparser.llhttp_execute(
|
| 532 |
+
self._cparser,
|
| 533 |
+
<char*>self.py_buf.buf,
|
| 534 |
+
data_len)
|
| 535 |
+
|
| 536 |
+
if errno is cparser.HPE_PAUSED_UPGRADE:
|
| 537 |
+
cparser.llhttp_resume_after_upgrade(self._cparser)
|
| 538 |
+
|
| 539 |
+
nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf
|
| 540 |
+
|
| 541 |
+
PyBuffer_Release(&self.py_buf)
|
| 542 |
+
|
| 543 |
+
if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
|
| 544 |
+
if self._payload_error == 0:
|
| 545 |
+
if self._last_error is not None:
|
| 546 |
+
ex = self._last_error
|
| 547 |
+
self._last_error = None
|
| 548 |
+
else:
|
| 549 |
+
ex = parser_error_from_errno(self._cparser)
|
| 550 |
+
self._payload = None
|
| 551 |
+
raise ex
|
| 552 |
+
|
| 553 |
+
if self._messages:
|
| 554 |
+
messages = self._messages
|
| 555 |
+
self._messages = []
|
| 556 |
+
else:
|
| 557 |
+
messages = ()
|
| 558 |
+
|
| 559 |
+
if self._upgraded:
|
| 560 |
+
return messages, True, data[nb:]
|
| 561 |
+
else:
|
| 562 |
+
return messages, False, b''
|
| 563 |
+
|
| 564 |
+
def set_upgraded(self, val):
|
| 565 |
+
self._upgraded = val
|
| 566 |
+
|
| 567 |
+
|
| 568 |
+
cdef class HttpRequestParser(HttpParser):
    # HTTP request-side specialization: accumulates the request target in
    # self._buf and turns it into a yarl.URL once the status line ends.

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        # Split the accumulated request target into path / query / fragment
        # and build self._url. Always resets the accumulation buffer.
        cdef int idx1, idx2
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx1 = self._path.find("?")
            if idx1 == -1:
                query = ""
                idx2 = self._path.find("#")
                if idx2 == -1:
                    path = self._path
                    fragment = ""
                else:
                    path = self._path[0:idx2]
                    fragment = self._path[idx2 + 1:]
            else:
                path = self._path[0:idx1]
                idx1 += 1
                # BUGFIX: search from idx1 (first char after "?"), not
                # idx1 + 1 — the old code skipped one character and missed
                # a "#" placed directly after "?" (e.g. "/p?#frag").
                idx2 = self._path.find("#", idx1)
                if idx2 == -1:
                    query = self._path[idx1:]
                    fragment = ""
                else:
                    query = self._path[idx1:idx2]
                    fragment = self._path[idx2 + 1:]
            # (Removed unused local: idx3 = len(self._path).)

            self._url = URL.build(
                path=path,
                query_string=query,
                fragment=fragment,
                encoded=True,
            )
        finally:
            PyByteArray_Resize(self._buf, 0)
|
| 619 |
+
|
| 620 |
+
|
| 621 |
+
cdef class HttpResponseParser(HttpParser):
|
| 622 |
+
|
| 623 |
+
def __init__(
|
| 624 |
+
self, protocol, loop, int limit, timer=None,
|
| 625 |
+
size_t max_line_size=8190, size_t max_headers=32768,
|
| 626 |
+
size_t max_field_size=8190, payload_exception=None,
|
| 627 |
+
bint response_with_body=True, bint read_until_eof=False,
|
| 628 |
+
bint auto_decompress=True
|
| 629 |
+
):
|
| 630 |
+
self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
|
| 631 |
+
max_line_size, max_headers, max_field_size,
|
| 632 |
+
payload_exception, response_with_body, read_until_eof,
|
| 633 |
+
auto_decompress)
|
| 634 |
+
|
| 635 |
+
cdef object _on_status_complete(self):
|
| 636 |
+
if self._buf:
|
| 637 |
+
self._reason = self._buf.decode('utf-8', 'surrogateescape')
|
| 638 |
+
PyByteArray_Resize(self._buf, 0)
|
| 639 |
+
else:
|
| 640 |
+
self._reason = self._reason or ''
|
| 641 |
+
|
| 642 |
+
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
|
| 643 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 644 |
+
|
| 645 |
+
pyparser._started = True
|
| 646 |
+
pyparser._headers = CIMultiDict()
|
| 647 |
+
pyparser._raw_headers = []
|
| 648 |
+
PyByteArray_Resize(pyparser._buf, 0)
|
| 649 |
+
pyparser._path = None
|
| 650 |
+
pyparser._reason = None
|
| 651 |
+
return 0
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
cdef int cb_on_url(cparser.llhttp_t* parser,
|
| 655 |
+
const char *at, size_t length) except -1:
|
| 656 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 657 |
+
try:
|
| 658 |
+
if length > pyparser._max_line_size:
|
| 659 |
+
raise LineTooLong(
|
| 660 |
+
'Status line is too long', pyparser._max_line_size, length)
|
| 661 |
+
extend(pyparser._buf, at, length)
|
| 662 |
+
except BaseException as ex:
|
| 663 |
+
pyparser._last_error = ex
|
| 664 |
+
return -1
|
| 665 |
+
else:
|
| 666 |
+
return 0
|
| 667 |
+
|
| 668 |
+
|
| 669 |
+
cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    """llhttp on_status callback: accumulate reason-phrase bytes.

    Returns 0 on success; on failure stores the exception on the Python
    parser and returns -1 so llhttp aborts with a callback error.
    (Removed the unused local ``cdef str reason``.)
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
| 683 |
+
|
| 684 |
+
|
| 685 |
+
cdef int cb_on_header_field(cparser.llhttp_t* parser,
|
| 686 |
+
const char *at, size_t length) except -1:
|
| 687 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 688 |
+
cdef Py_ssize_t size
|
| 689 |
+
try:
|
| 690 |
+
pyparser._on_status_complete()
|
| 691 |
+
size = len(pyparser._raw_name) + length
|
| 692 |
+
if size > pyparser._max_field_size:
|
| 693 |
+
raise LineTooLong(
|
| 694 |
+
'Header name is too long', pyparser._max_field_size, size)
|
| 695 |
+
pyparser._on_header_field(at, length)
|
| 696 |
+
except BaseException as ex:
|
| 697 |
+
pyparser._last_error = ex
|
| 698 |
+
return -1
|
| 699 |
+
else:
|
| 700 |
+
return 0
|
| 701 |
+
|
| 702 |
+
|
| 703 |
+
cdef int cb_on_header_value(cparser.llhttp_t* parser,
|
| 704 |
+
const char *at, size_t length) except -1:
|
| 705 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 706 |
+
cdef Py_ssize_t size
|
| 707 |
+
try:
|
| 708 |
+
size = len(pyparser._raw_value) + length
|
| 709 |
+
if size > pyparser._max_field_size:
|
| 710 |
+
raise LineTooLong(
|
| 711 |
+
'Header value is too long', pyparser._max_field_size, size)
|
| 712 |
+
pyparser._on_header_value(at, length)
|
| 713 |
+
except BaseException as ex:
|
| 714 |
+
pyparser._last_error = ex
|
| 715 |
+
return -1
|
| 716 |
+
else:
|
| 717 |
+
return 0
|
| 718 |
+
|
| 719 |
+
|
| 720 |
+
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
|
| 721 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 722 |
+
try:
|
| 723 |
+
pyparser._on_status_complete()
|
| 724 |
+
pyparser._on_headers_complete()
|
| 725 |
+
except BaseException as exc:
|
| 726 |
+
pyparser._last_error = exc
|
| 727 |
+
return -1
|
| 728 |
+
else:
|
| 729 |
+
if pyparser._cparser.upgrade or pyparser._cparser.method == 5: # CONNECT
|
| 730 |
+
return 2
|
| 731 |
+
else:
|
| 732 |
+
return 0
|
| 733 |
+
|
| 734 |
+
|
| 735 |
+
cdef int cb_on_body(cparser.llhttp_t* parser,
|
| 736 |
+
const char *at, size_t length) except -1:
|
| 737 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 738 |
+
cdef bytes body = at[:length]
|
| 739 |
+
try:
|
| 740 |
+
pyparser._payload.feed_data(body, length)
|
| 741 |
+
except BaseException as exc:
|
| 742 |
+
if pyparser._payload_exception is not None:
|
| 743 |
+
pyparser._payload.set_exception(pyparser._payload_exception(str(exc)))
|
| 744 |
+
else:
|
| 745 |
+
pyparser._payload.set_exception(exc)
|
| 746 |
+
pyparser._payload_error = 1
|
| 747 |
+
return -1
|
| 748 |
+
else:
|
| 749 |
+
return 0
|
| 750 |
+
|
| 751 |
+
|
| 752 |
+
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
|
| 753 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 754 |
+
try:
|
| 755 |
+
pyparser._started = False
|
| 756 |
+
pyparser._on_message_complete()
|
| 757 |
+
except BaseException as exc:
|
| 758 |
+
pyparser._last_error = exc
|
| 759 |
+
return -1
|
| 760 |
+
else:
|
| 761 |
+
return 0
|
| 762 |
+
|
| 763 |
+
|
| 764 |
+
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
|
| 765 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 766 |
+
try:
|
| 767 |
+
pyparser._on_chunk_header()
|
| 768 |
+
except BaseException as exc:
|
| 769 |
+
pyparser._last_error = exc
|
| 770 |
+
return -1
|
| 771 |
+
else:
|
| 772 |
+
return 0
|
| 773 |
+
|
| 774 |
+
|
| 775 |
+
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
|
| 776 |
+
cdef HttpParser pyparser = <HttpParser>parser.data
|
| 777 |
+
try:
|
| 778 |
+
pyparser._on_chunk_complete()
|
| 779 |
+
except BaseException as exc:
|
| 780 |
+
pyparser._last_error = exc
|
| 781 |
+
return -1
|
| 782 |
+
else:
|
| 783 |
+
return 0
|
| 784 |
+
|
| 785 |
+
|
| 786 |
+
cdef parser_error_from_errno(cparser.llhttp_t* parser):
    """Map the parser's current llhttp errno onto the matching aiohttp
    exception, carrying llhttp's human-readable reason as the message."""
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    if errno in (cparser.HPE_INVALID_STATUS,
                 cparser.HPE_INVALID_METHOD,
                 cparser.HPE_INVALID_VERSION):
        # Problems with the request/status line itself.
        cls = BadStatusLine
    elif errno == cparser.HPE_INVALID_URL:
        cls = InvalidURLError
    else:
        # Generic malformed-message bucket: the explicit callback errors
        # (HPE_CB_*), invalid constants/tokens/lengths/chunk sizes/EOF
        # state/transfer encodings, and any errno not special-cased above
        # all resolved to BadHttpMessage in the original table.
        cls = BadHttpMessage

    return cls(desc.decode('latin-1'))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_writer.c
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_http_writer.pyx
ADDED
|
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from cpython.bytes cimport PyBytes_FromStringAndSize
|
| 2 |
+
from cpython.exc cimport PyErr_NoMemory
|
| 3 |
+
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
|
| 4 |
+
from cpython.object cimport PyObject_Str
|
| 5 |
+
from libc.stdint cimport uint8_t, uint64_t
|
| 6 |
+
from libc.string cimport memcpy
|
| 7 |
+
|
| 8 |
+
from multidict import istr
|
| 9 |
+
|
| 10 |
+
DEF BUF_SIZE = 16 * 1024 # 16KiB
|
| 11 |
+
cdef char BUFFER[BUF_SIZE]
|
| 12 |
+
|
| 13 |
+
cdef object _istr = istr
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
# ----------------- writer ---------------------------
|
| 17 |
+
|
| 18 |
+
cdef struct Writer:
|
| 19 |
+
char *buf
|
| 20 |
+
Py_ssize_t size
|
| 21 |
+
Py_ssize_t pos
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
cdef inline void _init_writer(Writer* writer):
|
| 25 |
+
writer.buf = &BUFFER[0]
|
| 26 |
+
writer.size = BUF_SIZE
|
| 27 |
+
writer.pos = 0
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
cdef inline void _release_writer(Writer* writer):
|
| 31 |
+
if writer.buf != BUFFER:
|
| 32 |
+
PyMem_Free(writer.buf)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
cdef inline int _write_byte(Writer* writer, uint8_t ch):
|
| 36 |
+
cdef char * buf
|
| 37 |
+
cdef Py_ssize_t size
|
| 38 |
+
|
| 39 |
+
if writer.pos == writer.size:
|
| 40 |
+
# reallocate
|
| 41 |
+
size = writer.size + BUF_SIZE
|
| 42 |
+
if writer.buf == BUFFER:
|
| 43 |
+
buf = <char*>PyMem_Malloc(size)
|
| 44 |
+
if buf == NULL:
|
| 45 |
+
PyErr_NoMemory()
|
| 46 |
+
return -1
|
| 47 |
+
memcpy(buf, writer.buf, writer.size)
|
| 48 |
+
else:
|
| 49 |
+
buf = <char*>PyMem_Realloc(writer.buf, size)
|
| 50 |
+
if buf == NULL:
|
| 51 |
+
PyErr_NoMemory()
|
| 52 |
+
return -1
|
| 53 |
+
writer.buf = buf
|
| 54 |
+
writer.size = size
|
| 55 |
+
writer.buf[writer.pos] = <char>ch
|
| 56 |
+
writer.pos += 1
|
| 57 |
+
return 0
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
|
| 61 |
+
cdef uint64_t utf = <uint64_t> symbol
|
| 62 |
+
|
| 63 |
+
if utf < 0x80:
|
| 64 |
+
return _write_byte(writer, <uint8_t>utf)
|
| 65 |
+
elif utf < 0x800:
|
| 66 |
+
if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
|
| 67 |
+
return -1
|
| 68 |
+
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
| 69 |
+
elif 0xD800 <= utf <= 0xDFFF:
|
| 70 |
+
# surogate pair, ignored
|
| 71 |
+
return 0
|
| 72 |
+
elif utf < 0x10000:
|
| 73 |
+
if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
|
| 74 |
+
return -1
|
| 75 |
+
if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
| 76 |
+
return -1
|
| 77 |
+
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
| 78 |
+
elif utf > 0x10FFFF:
|
| 79 |
+
# symbol is too large
|
| 80 |
+
return 0
|
| 81 |
+
else:
|
| 82 |
+
if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
|
| 83 |
+
return -1
|
| 84 |
+
if _write_byte(writer,
|
| 85 |
+
<uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
|
| 86 |
+
return -1
|
| 87 |
+
if _write_byte(writer,
|
| 88 |
+
<uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
|
| 89 |
+
return -1
|
| 90 |
+
return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
cdef inline int _write_str(Writer* writer, str s):
    """UTF-8-encode *s* into the writer buffer, one code point at a time.

    Returns 0 on success and -1 on allocation failure (the error
    indicator is already set by the failing _write_byte).
    """
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1
    return 0  # explicit success; previously relied on Cython's implicit 0
|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
# --------------- _serialize_headers ----------------------
|
| 101 |
+
|
| 102 |
+
cdef str to_str(object s):
    """Coerce a header key/value to ``str``; reject non-string objects."""
    if type(s) is str:
        # Exact str: return as-is (fast path).
        return <str>s
    if type(s) is _istr:
        # multidict.istr: stringify via the C-level PyObject_Str.
        return PyObject_Str(s)
    if isinstance(s, str):
        # str subclass: normalize down to a plain str.
        return str(s)
    raise TypeError("Cannot serialize non-str key {!r}".format(s))
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
cdef void _safe_header(str string) except *:
    # Reject CR/LF so attacker-controlled header text cannot perform
    # header injection / HTTP response splitting.
    if "\r" in string or "\n" in string:
        raise ValueError(
            "Newline or carriage return character detected in HTTP status message or "
            "header. This is a potential security issue."
        )
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
def _serialize_headers(str status_line, headers):
    """Render ``status_line`` plus ``headers`` into one ``bytes`` HTTP head.

    All names and values are CR/LF-validated *before* any byte is
    written, so a rejected header never leaves a half-serialized buffer
    behind.
    """
    cdef Writer writer
    cdef object name
    cdef object value
    cdef bytes ret

    _init_writer(&writer)

    # Validate everything up front (see docstring).
    for name, value in headers.items():
        _safe_header(to_str(name))
        _safe_header(to_str(value))

    try:
        # NOTE(review): the bare `raise` statements presumably surface the
        # pending error set by the failing _write_* call (PyErr_NoMemory)
        # — confirm against the generated C before changing this pattern.
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for name, value in headers.items():
            if _write_str(&writer, to_str(name)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(value)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        # Blank line terminating the header block.
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_websocket.pyx
ADDED
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from cpython cimport PyBytes_AsString
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
#from cpython cimport PyByteArray_AsString # cython still not exports that
|
| 5 |
+
cdef extern from "Python.h":
|
| 6 |
+
char* PyByteArray_AsString(bytearray ba) except NULL
|
| 7 |
+
|
| 8 |
+
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def _websocket_mask_cython(object mask, object data):
    """XOR-mask *data* in place with the 4-byte WebSocket *mask*.

    Note: this mutates its ``data`` argument when it is a ``bytearray``;
    other input types are copied into a fresh bytearray first (matching
    the original behavior).
    """
    cdef:
        Py_ssize_t remaining, idx
        # bit operations on signed integers are implementation-specific,
        # hence the unsigned views below
        unsigned char * out
        const unsigned char * key
        uint32_t key32
        uint64_t key64

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    remaining = len(data)
    out = <unsigned char*>PyByteArray_AsString(data)
    key = <const unsigned char*>PyBytes_AsString(mask)
    key32 = (<uint32_t*>key)[0]

    if sizeof(size_t) >= 8:
        # 64-bit platforms: replicate the mask and XOR 8 bytes per step.
        key64 = key32
        key64 = (key64 << 32) | key32
        while remaining >= 8:
            (<uint64_t*>out)[0] ^= key64
            out += 8
            remaining -= 8

    while remaining >= 4:
        (<uint32_t*>out)[0] ^= key32
        out += 4
        remaining -= 4

    # Byte-at-a-time tail (fewer than 4 bytes left).
    for idx in range(0, remaining):
        out[idx] ^= key[idx]
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/abc.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
from abc import ABC, abstractmethod
|
| 4 |
+
from collections.abc import Sized
|
| 5 |
+
from http.cookies import BaseCookie, Morsel
|
| 6 |
+
from typing import (
|
| 7 |
+
TYPE_CHECKING,
|
| 8 |
+
Any,
|
| 9 |
+
Awaitable,
|
| 10 |
+
Callable,
|
| 11 |
+
Dict,
|
| 12 |
+
Generator,
|
| 13 |
+
Iterable,
|
| 14 |
+
List,
|
| 15 |
+
Optional,
|
| 16 |
+
Tuple,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
from multidict import CIMultiDict
|
| 20 |
+
from yarl import URL
|
| 21 |
+
|
| 22 |
+
from .helpers import get_running_loop
|
| 23 |
+
from .typedefs import LooseCookies
|
| 24 |
+
|
| 25 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 26 |
+
from .web_app import Application
|
| 27 |
+
from .web_exceptions import HTTPException
|
| 28 |
+
from .web_request import BaseRequest, Request
|
| 29 |
+
from .web_response import StreamResponse
|
| 30 |
+
else:
|
| 31 |
+
BaseRequest = Request = Application = StreamResponse = None
|
| 32 |
+
HTTPException = None
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class AbstractRouter(ABC):
|
| 36 |
+
def __init__(self) -> None:
|
| 37 |
+
self._frozen = False
|
| 38 |
+
|
| 39 |
+
def post_init(self, app: Application) -> None:
|
| 40 |
+
"""Post init stage.
|
| 41 |
+
|
| 42 |
+
Not an abstract method for sake of backward compatibility,
|
| 43 |
+
but if the router wants to be aware of the application
|
| 44 |
+
it can override this.
|
| 45 |
+
"""
|
| 46 |
+
|
| 47 |
+
@property
|
| 48 |
+
def frozen(self) -> bool:
|
| 49 |
+
return self._frozen
|
| 50 |
+
|
| 51 |
+
def freeze(self) -> None:
|
| 52 |
+
"""Freeze router."""
|
| 53 |
+
self._frozen = True
|
| 54 |
+
|
| 55 |
+
@abstractmethod
|
| 56 |
+
async def resolve(self, request: Request) -> "AbstractMatchInfo":
|
| 57 |
+
"""Return MATCH_INFO for given request"""
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class AbstractMatchInfo(ABC):
|
| 61 |
+
@property # pragma: no branch
|
| 62 |
+
@abstractmethod
|
| 63 |
+
def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]:
|
| 64 |
+
"""Execute matched request handler"""
|
| 65 |
+
|
| 66 |
+
@property
|
| 67 |
+
@abstractmethod
|
| 68 |
+
def expect_handler(self) -> Callable[[Request], Awaitable[None]]:
|
| 69 |
+
"""Expect handler for 100-continue processing"""
|
| 70 |
+
|
| 71 |
+
@property # pragma: no branch
|
| 72 |
+
@abstractmethod
|
| 73 |
+
def http_exception(self) -> Optional[HTTPException]:
|
| 74 |
+
"""HTTPException instance raised on router's resolving, or None"""
|
| 75 |
+
|
| 76 |
+
@abstractmethod # pragma: no branch
|
| 77 |
+
def get_info(self) -> Dict[str, Any]:
|
| 78 |
+
"""Return a dict with additional info useful for introspection"""
|
| 79 |
+
|
| 80 |
+
@property # pragma: no branch
|
| 81 |
+
@abstractmethod
|
| 82 |
+
def apps(self) -> Tuple[Application, ...]:
|
| 83 |
+
"""Stack of nested applications.
|
| 84 |
+
|
| 85 |
+
Top level application is left-most element.
|
| 86 |
+
|
| 87 |
+
"""
|
| 88 |
+
|
| 89 |
+
@abstractmethod
|
| 90 |
+
def add_app(self, app: Application) -> None:
|
| 91 |
+
"""Add application to the nested apps stack."""
|
| 92 |
+
|
| 93 |
+
@abstractmethod
|
| 94 |
+
def freeze(self) -> None:
|
| 95 |
+
"""Freeze the match info.
|
| 96 |
+
|
| 97 |
+
The method is called after route resolution.
|
| 98 |
+
|
| 99 |
+
After the call .add_app() is forbidden.
|
| 100 |
+
|
| 101 |
+
"""
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class AbstractView(ABC):
|
| 105 |
+
"""Abstract class based view."""
|
| 106 |
+
|
| 107 |
+
def __init__(self, request: Request) -> None:
|
| 108 |
+
self._request = request
|
| 109 |
+
|
| 110 |
+
@property
|
| 111 |
+
def request(self) -> Request:
|
| 112 |
+
"""Request instance."""
|
| 113 |
+
return self._request
|
| 114 |
+
|
| 115 |
+
@abstractmethod
|
| 116 |
+
def __await__(self) -> Generator[Any, None, StreamResponse]:
|
| 117 |
+
"""Execute the view handler."""
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
class AbstractResolver(ABC):
|
| 121 |
+
"""Abstract DNS resolver."""
|
| 122 |
+
|
| 123 |
+
@abstractmethod
|
| 124 |
+
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]:
|
| 125 |
+
"""Return IP address for given hostname"""
|
| 126 |
+
|
| 127 |
+
@abstractmethod
|
| 128 |
+
async def close(self) -> None:
|
| 129 |
+
"""Release resolver"""
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 133 |
+
IterableBase = Iterable[Morsel[str]]
|
| 134 |
+
else:
|
| 135 |
+
IterableBase = Iterable
|
| 136 |
+
|
| 137 |
+
|
| 138 |
+
ClearCookiePredicate = Callable[["Morsel[str]"], bool]
|
| 139 |
+
|
| 140 |
+
|
| 141 |
+
class AbstractCookieJar(Sized, IterableBase):
|
| 142 |
+
"""Abstract Cookie Jar."""
|
| 143 |
+
|
| 144 |
+
def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 145 |
+
self._loop = get_running_loop(loop)
|
| 146 |
+
|
| 147 |
+
@abstractmethod
|
| 148 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
|
| 149 |
+
"""Clear all cookies if no predicate is passed."""
|
| 150 |
+
|
| 151 |
+
@abstractmethod
|
| 152 |
+
def clear_domain(self, domain: str) -> None:
|
| 153 |
+
"""Clear all cookies for domain and all subdomains."""
|
| 154 |
+
|
| 155 |
+
@abstractmethod
|
| 156 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
|
| 157 |
+
"""Update cookies."""
|
| 158 |
+
|
| 159 |
+
@abstractmethod
|
| 160 |
+
def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
|
| 161 |
+
"""Return the jar's cookies filtered by their attributes."""
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class AbstractStreamWriter(ABC):
|
| 165 |
+
"""Abstract stream writer."""
|
| 166 |
+
|
| 167 |
+
buffer_size = 0
|
| 168 |
+
output_size = 0
|
| 169 |
+
length = 0 # type: Optional[int]
|
| 170 |
+
|
| 171 |
+
@abstractmethod
|
| 172 |
+
async def write(self, chunk: bytes) -> None:
|
| 173 |
+
"""Write chunk into stream."""
|
| 174 |
+
|
| 175 |
+
@abstractmethod
|
| 176 |
+
async def write_eof(self, chunk: bytes = b"") -> None:
|
| 177 |
+
"""Write last chunk."""
|
| 178 |
+
|
| 179 |
+
@abstractmethod
|
| 180 |
+
async def drain(self) -> None:
|
| 181 |
+
"""Flush the write buffer."""
|
| 182 |
+
|
| 183 |
+
@abstractmethod
|
| 184 |
+
def enable_compression(self, encoding: str = "deflate") -> None:
|
| 185 |
+
"""Enable HTTP body compression"""
|
| 186 |
+
|
| 187 |
+
@abstractmethod
|
| 188 |
+
def enable_chunking(self) -> None:
|
| 189 |
+
"""Enable HTTP chunked mode"""
|
| 190 |
+
|
| 191 |
+
@abstractmethod
|
| 192 |
+
async def write_headers(
|
| 193 |
+
self, status_line: str, headers: "CIMultiDict[str]"
|
| 194 |
+
) -> None:
|
| 195 |
+
"""Write HTTP headers"""
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
class AbstractAccessLogger(ABC):
    """Abstract writer to access log."""

    def __init__(self, logger: logging.Logger, log_format: str) -> None:
        # Both attributes are public contract: concrete implementations
        # render `log_format` and emit the result through `logger`.
        self.log_format = log_format
        self.logger = logger

    @abstractmethod
    def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
        """Emit log to logger."""
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_proto.py
ADDED
|
@@ -0,0 +1,251 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from contextlib import suppress
|
| 3 |
+
from typing import Any, Optional, Tuple
|
| 4 |
+
|
| 5 |
+
from .base_protocol import BaseProtocol
|
| 6 |
+
from .client_exceptions import (
|
| 7 |
+
ClientOSError,
|
| 8 |
+
ClientPayloadError,
|
| 9 |
+
ServerDisconnectedError,
|
| 10 |
+
ServerTimeoutError,
|
| 11 |
+
)
|
| 12 |
+
from .helpers import BaseTimerContext
|
| 13 |
+
from .http import HttpResponseParser, RawResponseMessage
|
| 14 |
+
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
    """Helper class to adapt between Protocol and StreamReader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        BaseProtocol.__init__(self, loop=loop)
        DataQueue.__init__(self, loop)

        # Set when the connection must not be returned to the pool.
        self._should_close = False

        # Payload of the response currently being read; None between responses.
        self._payload: Optional[StreamReader] = None
        self._skip_payload = False
        # Custom parser (e.g. for websocket frames) installed via set_parser().
        self._payload_parser = None

        self._timer = None

        # Bytes that arrived before a parser was available (e.g. right after
        # an upgrade); replayed once set_parser()/set_response_params() runs.
        self._tail = b""
        self._upgraded = False
        self._parser = None  # type: Optional[HttpResponseParser]

        self._read_timeout = None  # type: Optional[float]
        self._read_timeout_handle = None  # type: Optional[asyncio.TimerHandle]

    @property
    def upgraded(self) -> bool:
        return self._upgraded

    @property
    def should_close(self) -> bool:
        # A partially-read payload means the wire is mid-response, so the
        # connection cannot be reused safely.
        if self._payload is not None and not self._payload.is_eof() or self._upgraded:
            return True

        return (
            self._should_close
            or self._upgraded
            or self.exception() is not None
            or self._payload_parser is not None
            or len(self) > 0
            or bool(self._tail)
        )

    def force_close(self) -> None:
        """Mark the connection as non-reusable."""
        self._should_close = True

    def close(self) -> None:
        transport = self.transport
        if transport is not None:
            transport.close()
            self.transport = None
            self._payload = None
            self._drop_timeout()

    def is_connected(self) -> bool:
        return self.transport is not None and not self.transport.is_closing()

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        self._drop_timeout()

        if self._payload_parser is not None:
            with suppress(Exception):
                self._payload_parser.feed_eof()

        uncompleted = None
        if self._parser is not None:
            try:
                uncompleted = self._parser.feed_eof()
            except Exception:
                if self._payload is not None:
                    self._payload.set_exception(
                        ClientPayloadError("Response payload is not completed")
                    )

        if not self.is_eof():
            if isinstance(exc, OSError):
                exc = ClientOSError(*exc.args)
            if exc is None:
                exc = ServerDisconnectedError(uncompleted)
            # assigns self._should_close to True as side effect,
            # we do it anyway below
            self.set_exception(exc)

        self._should_close = True
        self._parser = None
        self._payload = None
        self._payload_parser = None
        self._reading_paused = False

        super().connection_lost(exc)

    def eof_received(self) -> None:
        # should call parser.feed_eof() most likely
        self._drop_timeout()

    def pause_reading(self) -> None:
        super().pause_reading()
        self._drop_timeout()

    def resume_reading(self) -> None:
        super().resume_reading()
        self._reschedule_timeout()

    def set_exception(self, exc: BaseException) -> None:
        self._should_close = True
        self._drop_timeout()
        super().set_exception(exc)

    def set_parser(self, parser: Any, payload: Any) -> None:
        # TODO: actual types are:
        #   parser: WebSocketReader
        #   payload: FlowControlDataQueue
        # but they are not generic enough
        # Need an ABC for both types
        self._payload = payload
        self._payload_parser = parser

        self._drop_timeout()

        # Replay bytes buffered before the parser existed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def set_response_params(
        self,
        *,
        timer: Optional[BaseTimerContext] = None,
        skip_payload: bool = False,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
        read_timeout: Optional[float] = None,
        read_bufsize: int = 2 ** 16,
    ) -> None:
        """Configure the HTTP response parser for the next response."""
        self._skip_payload = skip_payload

        self._read_timeout = read_timeout
        self._reschedule_timeout()

        self._parser = HttpResponseParser(
            self,
            self._loop,
            read_bufsize,
            timer=timer,
            payload_exception=ClientPayloadError,
            response_with_body=not skip_payload,
            read_until_eof=read_until_eof,
            auto_decompress=auto_decompress,
        )

        # Replay bytes buffered before the parser existed.
        if self._tail:
            data, self._tail = self._tail, b""
            self.data_received(data)

    def _drop_timeout(self) -> None:
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()
            self._read_timeout_handle = None

    def _reschedule_timeout(self) -> None:
        timeout = self._read_timeout
        if self._read_timeout_handle is not None:
            self._read_timeout_handle.cancel()

        if timeout:
            self._read_timeout_handle = self._loop.call_later(
                timeout, self._on_read_timeout
            )
        else:
            self._read_timeout_handle = None

    def _on_read_timeout(self) -> None:
        exc = ServerTimeoutError("Timeout on reading data from socket")
        self.set_exception(exc)
        if self._payload is not None:
            self._payload.set_exception(exc)

    def data_received(self, data: bytes) -> None:
        self._reschedule_timeout()

        if not data:
            return

        # custom payload parser
        if self._payload_parser is not None:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self._payload = None
                self._payload_parser = None

                if tail:
                    self.data_received(tail)
            return
        else:
            if self._upgraded or self._parser is None:
                # i.e. websocket connection, websocket parser is not set yet
                self._tail += data
            else:
                # parse http messages
                try:
                    messages, upgraded, tail = self._parser.feed_data(data)
                except BaseException as exc:
                    if self.transport is not None:
                        # connection.release() could be called BEFORE
                        # data_received(), the transport is already
                        # closed in this case
                        self.transport.close()
                    # should_close is True after the call
                    self.set_exception(exc)
                    return

                self._upgraded = upgraded

                payload: Optional[StreamReader] = None
                for message, payload in messages:
                    if message.should_close:
                        self._should_close = True

                    self._payload = payload

                    # 204/304 responses carry no body by definition.
                    if self._skip_payload or message.code in (204, 304):
                        self.feed_data((message, EMPTY_PAYLOAD), 0)
                    else:
                        self.feed_data((message, payload), 0)
                if payload is not None:
                    # new message(s) was processed
                    # register timeout handler unsubscribing
                    # either on end-of-stream or immediately for
                    # EMPTY_PAYLOAD
                    if payload is not EMPTY_PAYLOAD:
                        payload.on_eof(self._drop_timeout)
                    else:
                        self._drop_timeout()

                if tail:
                    if upgraded:
                        self.data_received(tail)
                    else:
                        self._tail = tail
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_reqrep.py
ADDED
|
@@ -0,0 +1,1133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import codecs
|
| 3 |
+
import functools
|
| 4 |
+
import io
|
| 5 |
+
import re
|
| 6 |
+
import sys
|
| 7 |
+
import traceback
|
| 8 |
+
import warnings
|
| 9 |
+
from hashlib import md5, sha1, sha256
|
| 10 |
+
from http.cookies import CookieError, Morsel, SimpleCookie
|
| 11 |
+
from types import MappingProxyType, TracebackType
|
| 12 |
+
from typing import (
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterable,
|
| 17 |
+
List,
|
| 18 |
+
Mapping,
|
| 19 |
+
Optional,
|
| 20 |
+
Tuple,
|
| 21 |
+
Type,
|
| 22 |
+
Union,
|
| 23 |
+
cast,
|
| 24 |
+
)
|
| 25 |
+
|
| 26 |
+
import attr
|
| 27 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
|
| 28 |
+
from yarl import URL
|
| 29 |
+
|
| 30 |
+
from . import hdrs, helpers, http, multipart, payload
|
| 31 |
+
from .abc import AbstractStreamWriter
|
| 32 |
+
from .client_exceptions import (
|
| 33 |
+
ClientConnectionError,
|
| 34 |
+
ClientOSError,
|
| 35 |
+
ClientResponseError,
|
| 36 |
+
ContentTypeError,
|
| 37 |
+
InvalidURL,
|
| 38 |
+
ServerFingerprintMismatch,
|
| 39 |
+
)
|
| 40 |
+
from .formdata import FormData
|
| 41 |
+
from .helpers import (
|
| 42 |
+
PY_36,
|
| 43 |
+
BaseTimerContext,
|
| 44 |
+
BasicAuth,
|
| 45 |
+
HeadersMixin,
|
| 46 |
+
TimerNoop,
|
| 47 |
+
noop,
|
| 48 |
+
reify,
|
| 49 |
+
set_result,
|
| 50 |
+
)
|
| 51 |
+
from .http import SERVER_SOFTWARE, HttpVersion10, HttpVersion11, StreamWriter
|
| 52 |
+
from .log import client_logger
|
| 53 |
+
from .streams import StreamReader
|
| 54 |
+
from .typedefs import (
|
| 55 |
+
DEFAULT_JSON_DECODER,
|
| 56 |
+
JSONDecoder,
|
| 57 |
+
LooseCookies,
|
| 58 |
+
LooseHeaders,
|
| 59 |
+
RawHeaders,
|
| 60 |
+
)
|
| 61 |
+
|
| 62 |
+
try:
|
| 63 |
+
import ssl
|
| 64 |
+
from ssl import SSLContext
|
| 65 |
+
except ImportError: # pragma: no cover
|
| 66 |
+
ssl = None # type: ignore[assignment]
|
| 67 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 68 |
+
|
| 69 |
+
try:
|
| 70 |
+
import cchardet as chardet
|
| 71 |
+
except ImportError: # pragma: no cover
|
| 72 |
+
import charset_normalizer as chardet # type: ignore[no-redef]
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 79 |
+
from .client import ClientSession
|
| 80 |
+
from .connector import Connection
|
| 81 |
+
from .tracing import Trace
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    """Parsed Content-Disposition header: type, parameters and filename."""

    type: Optional[str]
    parameters: "MappingProxyType[str, str]"
    filename: Optional[str]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
    """Immutable description of an outgoing request."""

    url: URL
    method: str
    headers: "CIMultiDictProxy[str]"
    real_url: URL = attr.ib()

    @real_url.default
    def real_url_default(self) -> URL:
        # By default the "real" URL is the request URL itself; a different
        # value may be supplied when the URL was rewritten before sending.
        return self.url
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
class Fingerprint:
    """Pin a server's TLS certificate by its digest.

    Only 32-byte (SHA-256) fingerprints are accepted; 16- and 20-byte
    digests are recognized as MD5/SHA-1 and rejected as insecure.
    """

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if not hashfunc:
            raise ValueError("fingerprint has invalid length")
        elif hashfunc is md5 or hashfunc is sha1:
            raise ValueError("md5 and sha1 are insecure and not supported. Use sha256.")
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        """The pinned digest, exactly as passed to the constructor."""
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Verify the peer certificate of *transport* against the pin."""
        if not transport.get_extra_info("sslcontext"):
            # Plain (non-TLS) transport: nothing to verify.
            return
        ssl_object = transport.get_extra_info("ssl_object")
        peer_cert = ssl_object.getpeercert(binary_form=True)
        actual = self._hashfunc(peer_cert).digest()
        if actual != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, actual, host, port)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
# Types accepted for the ``ssl`` request argument; when the ssl module is
# unavailable only None may be passed.
if ssl is not None:
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = type(None)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint, None],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint, None]:
    """Fold the deprecated TLS parameters into the single ``ssl`` value.

    ``verify_ssl=False`` maps to ``ssl=False``, ``ssl_context`` maps to
    ``ssl=context`` and ``fingerprint`` maps to ``ssl=Fingerprint(...)``.
    Each deprecated parameter emits a DeprecationWarning; combining any of
    them with an explicit ``ssl`` raises ValueError, and a value outside
    SSL_ALLOWED_TYPES raises TypeError.

    NOTE: the three branches are deliberately kept inline (not factored
    into a helper) so that ``stacklevel=3`` keeps attributing the warning
    to the caller of the public API.
    """
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not None:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not None:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not None:
            raise ValueError(
                "verify_ssl, ssl_context, fingerprint and ssl "
                "parameters are mutually exclusive"
            )
        else:
            ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl
|
| 197 |
+
|
| 198 |
+
|
| 199 |
+
@attr.s(auto_attribs=True, slots=True, frozen=True)
class ConnectionKey:
    """Hashable pool key identifying interchangeable connections."""

    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, None, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def _is_expected_content_type(
    response_content_type: str, expected_content_type: str
) -> bool:
    """Return True if the response content type satisfies the expected one."""
    if expected_content_type != "application/json":
        # Plain substring containment for non-JSON expectations.
        return expected_content_type in response_content_type
    # JSON also matches structured-suffix types such as application/foo+json.
    return json_re.match(response_content_type) is not None
|
| 218 |
+
|
| 219 |
+
|
| 220 |
+
class ClientRequest:
    """Prepared outgoing HTTP request: URL, headers, body and proxy/TLS state."""

    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: "gzip, deflate",
    }

    # Class-level defaults, overridden per instance when needed.
    body = b""
    auth = None
    response = None

    _writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.
| 247 |
+
    def __init__(
        self,
        method: str,
        url: URL,
        *,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Iterable[str] = frozenset(),
        data: Any = None,
        cookies: Optional[LooseCookies] = None,
        auth: Optional[BasicAuth] = None,
        version: http.HttpVersion = http.HttpVersion11,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        response_class: Optional[Type["ClientResponse"]] = None,
        proxy: Optional[URL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timer: Optional[BaseTimerContext] = None,
        session: Optional["ClientSession"] = None,
        ssl: Union[SSLContext, bool, Fingerprint, None] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        traces: Optional[List["Trace"]] = None,
    ):
        """Prepare request state (headers, cookies, body, proxy/TLS).

        Nothing is sent on the wire by the constructor itself.
        """
        if loop is None:
            loop = asyncio.get_event_loop()

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast("ClientSession", session)
        if params:
            # Merge explicit ``params`` into any query already on the URL.
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        # The fragment is never sent on the wire.
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class = real_response_class  # type: Type[ClientResponse]
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Order matters: headers must exist before auto headers / cookies
        # are merged, and the body must be set before the transfer encoding
        # is decided.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data is not None or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces
|
| 320 |
+
|
| 321 |
+
def is_ssl(self) -> bool:
|
| 322 |
+
return self.url.scheme in ("https", "wss")
|
| 323 |
+
|
| 324 |
+
    @property
    def ssl(self) -> Union["SSLContext", None, bool, Fingerprint]:
        """TLS setting given at construction time (context, bool, pin or None)."""
        return self._ssl
|
| 327 |
+
|
| 328 |
+
    @property
    def connection_key(self) -> ConnectionKey:
        """Key used by the connector pool to decide whether a pooled
        connection may serve this request."""
        proxy_headers = self.proxy_headers
        if proxy_headers:
            # Hash the headers so the frozen key itself stays hashable.
            h = hash(
                tuple((k, v) for k, v in proxy_headers.items())
            )  # type: Optional[int]
        else:
            h = None
        return ConnectionKey(
            self.host,
            self.port,
            self.is_ssl(),
            self.ssl,
            self.proxy,
            self.proxy_auth,
            h,
        )
|
| 346 |
+
|
| 347 |
+
@property
|
| 348 |
+
def host(self) -> str:
|
| 349 |
+
ret = self.url.raw_host
|
| 350 |
+
assert ret is not None
|
| 351 |
+
return ret
|
| 352 |
+
|
| 353 |
+
    @property
    def port(self) -> Optional[int]:
        """Port of the target URL (explicit or scheme default, per yarl)."""
        return self.url.port
|
| 356 |
+
|
| 357 |
+
    @property
    def request_info(self) -> RequestInfo:
        """Immutable snapshot of this request (url, method, headers)."""
        headers = CIMultiDictProxy(self.headers)  # type: CIMultiDictProxy[str]
        return RequestInfo(self.url, self.method, headers, self.original_url)
|
| 361 |
+
|
| 362 |
+
def update_host(self, url: URL) -> None:
|
| 363 |
+
"""Update destination host, port and connection type (ssl)."""
|
| 364 |
+
# get host/port
|
| 365 |
+
if not url.raw_host:
|
| 366 |
+
raise InvalidURL(url)
|
| 367 |
+
|
| 368 |
+
# basic auth info
|
| 369 |
+
username, password = url.user, url.password
|
| 370 |
+
if username:
|
| 371 |
+
self.auth = helpers.BasicAuth(username, password or "")
|
| 372 |
+
|
| 373 |
+
def update_version(self, version: Union[http.HttpVersion, str]) -> None:
|
| 374 |
+
"""Convert request version to two elements tuple.
|
| 375 |
+
|
| 376 |
+
parser HTTP version '1.1' => (1, 1)
|
| 377 |
+
"""
|
| 378 |
+
if isinstance(version, str):
|
| 379 |
+
v = [part.strip() for part in version.split(".", 1)]
|
| 380 |
+
try:
|
| 381 |
+
version = http.HttpVersion(int(v[0]), int(v[1]))
|
| 382 |
+
except ValueError:
|
| 383 |
+
raise ValueError(
|
| 384 |
+
f"Can not parse http version number: {version}"
|
| 385 |
+
) from None
|
| 386 |
+
self.version = version
|
| 387 |
+
|
| 388 |
+
    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers."""
        self.headers = CIMultiDict()  # type: CIMultiDict[str]

        # add host
        netloc = cast(str, self.url.raw_host)
        if helpers.is_ipv6_address(netloc):
            # IPv6 literals must be bracketed in the Host header.
            netloc = f"[{netloc}]"
        if self.url.port is not None and not self.url.is_default_port():
            netloc += ":" + str(self.url.port)
        self.headers[hdrs.HOST] = netloc

        if headers:
            if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
                headers = headers.items()  # type: ignore[assignment]

            for key, value in headers:  # type: ignore[misc]
                # A special case for Host header: assignment replaces the
                # computed value instead of adding a second Host line.
                if key.lower() == "host":
                    self.headers[key] = value
                else:
                    self.headers.add(key, value)
|
| 410 |
+
|
| 411 |
+
    def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
        """Add default headers (Accept, Accept-Encoding, User-Agent) unless
        already present or explicitly listed in ``skip_auto_headers``."""
        self.skip_auto_headers = CIMultiDict(
            (hdr, None) for hdr in sorted(skip_auto_headers)
        )
        # Treat both existing and skipped headers as "already used".
        used_headers = self.headers.copy()
        used_headers.extend(self.skip_auto_headers)  # type: ignore[arg-type]

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
|
| 424 |
+
|
| 425 |
+
    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()  # type: SimpleCookie[str]
        if hdrs.COOKIE in self.headers:
            # Merge with cookies already present in the headers, then drop
            # the old header so it can be rewritten as a whole.
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
|
| 449 |
+
|
| 450 |
+
    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if data is None:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
        if enc:
            # A caller-supplied Content-Encoding and the ``compress``
            # argument are mutually exclusive.
            if self.compress:
                raise ValueError(
                    "compress can not be set " "if Content-Encoding header is set"
                )
        elif self.compress:
            # Truthy non-string values mean "use the default codec".
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length
|
| 466 |
+
|
| 467 |
+
    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            # Explicit Transfer-Encoding header conflicts with chunked=True.
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            # Chunked framing and Content-Length are mutually exclusive.
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set " "if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
|
| 488 |
+
|
| 489 |
+
def update_auth(self, auth: Optional[BasicAuth]) -> None:
    """Set basic auth.

    Falls back to the session-level ``self.auth`` when *auth* is None;
    does nothing when neither is configured.
    """
    effective = auth if auth is not None else self.auth
    if effective is None:
        return

    if not isinstance(effective, helpers.BasicAuth):
        raise TypeError("BasicAuth() tuple is required instead")

    self.headers[hdrs.AUTHORIZATION] = effective.encode()
def update_body_from_data(self, body: Any) -> None:
    """Convert *body* into a Payload and sync the related headers.

    Accepts ``None`` (no body), ``FormData``, or any object known to the
    payload registry; anything else is wrapped via ``FormData(body)()``.
    Enables chunked transfer when the payload size is unknown, otherwise
    sets Content-Length, and copies payload-provided headers that the
    caller has not already set or suppressed.
    """
    if body is None:
        return

    # FormData instances are called to produce their payload object.
    if isinstance(body, FormData):
        body = body()

    try:
        body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
    except payload.LookupError:
        # Unknown type: fall back to form-encoding it.
        body = FormData(body)()

    self.body = body

    # enable chunked encoding if needed
    # (the original code re-tested CONTENT_LENGTH inside the else branch,
    # which the outer guard already established — the duplicate is dropped)
    if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
        size = body.size
        if size is None:
            self.chunked = True
        else:
            self.headers[hdrs.CONTENT_LENGTH] = str(size)

    # copy payload headers unless already present or explicitly skipped
    assert body.headers
    for key, value in body.headers.items():
        if key in self.headers or key in self.skip_auto_headers:
            continue
        self.headers[key] = value
def update_expect_continue(self, expect: bool = False) -> None:
    """Arm the 100-continue handshake when requested or already in headers."""
    # Either the caller asked for it, or an Expect header was supplied.
    wait_for_continue = (
        expect or self.headers.get(hdrs.EXPECT, "").lower() == "100-continue"
    )
    if expect:
        self.headers[hdrs.EXPECT] = "100-continue"

    if wait_for_continue:
        # The body writer awaits this future before sending the payload.
        self._continue = self.loop.create_future()
def update_proxy(
    self,
    proxy: Optional[URL],
    proxy_auth: Optional[BasicAuth],
    proxy_headers: Optional[LooseHeaders],
) -> None:
    """Validate and store the proxy configuration for this request."""
    if proxy_auth:
        if not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
    self.proxy_headers = proxy_headers
    self.proxy_auth = proxy_auth
    self.proxy = proxy
def keep_alive(self) -> bool:
    """Return True if the connection may be reused after this request."""
    connection_hdr = self.headers.get(hdrs.CONNECTION)

    if self.version < HttpVersion10:
        # Pre-1.0 HTTP has no persistent connections at all.
        return False
    if self.version == HttpVersion10:
        # HTTP/1.0 closes by default; reuse only on explicit opt-in.
        return connection_hdr == "keep-alive"
    # HTTP/1.1+ keeps the connection by default; "close" opts out.
    return connection_hdr != "close"
async def write_bytes(
    self, writer: AbstractStreamWriter, conn: "Connection"
) -> None:
    """Write the request body to *writer* (runs as a background task).

    Supports Payload objects, bytes/bytearray, and iterables of bytes.
    Errors are not raised here: they are forwarded to the connection's
    protocol via ``set_exception`` so the response reader surfaces them.
    """
    # 100-continue handshake: wait for the server's interim response
    # (the future is resolved by ClientResponse.start) before sending.
    if self._continue is not None:
        await writer.drain()
        await self._continue

    protocol = conn.protocol
    assert protocol is not None
    try:
        if isinstance(self.body, payload.Payload):
            await self.body.write(writer)
        else:
            # Normalize a single bytes-like body to a one-element tuple
            # so the write loop below handles both cases uniformly.
            if isinstance(self.body, (bytes, bytearray)):
                self.body = (self.body,)  # type: ignore[assignment]

            for chunk in self.body:
                await writer.write(chunk)  # type: ignore[arg-type]

        await writer.write_eof()
    except OSError as exc:
        # Wrap transport-level errors with the request URL for context.
        new_exc = ClientOSError(
            exc.errno, "Can not write request body for %s" % self.url
        )
        new_exc.__context__ = exc
        new_exc.__cause__ = exc
        protocol.set_exception(new_exc)
    except asyncio.CancelledError as exc:
        # Only propagate cancellation while the connection is still live.
        if not conn.closed:
            protocol.set_exception(exc)
    except Exception as exc:
        protocol.set_exception(exc)
    finally:
        # Mark the writer task finished regardless of outcome.
        self._writer = None
async def send(self, conn: "Connection") -> "ClientResponse":
    """Serialize and send the request over *conn*; return the response shell.

    Writes the status line and headers, schedules the body writer as a
    background task, and constructs (but does not start) the
    ClientResponse object.
    """
    # Specify request target:
    # - CONNECT request must send authority form URI
    # - not CONNECT proxy must send absolute form URI
    # - most common is origin form URI
    if self.method == hdrs.METH_CONNECT:
        connect_host = self.url.raw_host
        assert connect_host is not None
        if helpers.is_ipv6_address(connect_host):
            # IPv6 literals must be bracketed in the authority form.
            connect_host = f"[{connect_host}]"
        path = f"{connect_host}:{self.url.port}"
    elif self.proxy and not self.is_ssl():
        path = str(self.url)
    else:
        path = self.url.raw_path
        if self.url.raw_query_string:
            path += "?" + self.url.raw_query_string

    protocol = conn.protocol
    assert protocol is not None
    writer = StreamWriter(
        protocol,
        self.loop,
        # Tracing hooks fire for each body chunk / the header block.
        on_chunk_sent=functools.partial(
            self._on_chunk_request_sent, self.method, self.url
        ),
        on_headers_sent=functools.partial(
            self._on_headers_request_sent, self.method, self.url
        ),
    )

    if self.compress:
        writer.enable_compression(self.compress)

    if self.chunked is not None:
        writer.enable_chunking()

    # set default content-type for body-carrying methods
    if (
        self.method in self.POST_METHODS
        and hdrs.CONTENT_TYPE not in self.skip_auto_headers
        and hdrs.CONTENT_TYPE not in self.headers
    ):
        self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

    # set the connection header: only the non-default policy needs to be
    # stated explicitly for the negotiated HTTP version
    connection = self.headers.get(hdrs.CONNECTION)
    if not connection:
        if self.keep_alive():
            if self.version == HttpVersion10:
                connection = "keep-alive"
        else:
            if self.version == HttpVersion11:
                connection = "close"

    if connection is not None:
        self.headers[hdrs.CONNECTION] = connection

    # status + headers
    status_line = "{0} {1} HTTP/{2[0]}.{2[1]}".format(
        self.method, path, self.version
    )
    await writer.write_headers(status_line, self.headers)

    # Body is written concurrently; errors surface through the protocol.
    self._writer = self.loop.create_task(self.write_bytes(writer, conn))

    response_class = self.response_class
    assert response_class is not None
    self.response = response_class(
        self.method,
        self.original_url,
        writer=self._writer,
        continue100=self._continue,
        timer=self._timer,
        request_info=self.request_info,
        traces=self._traces,
        loop=self.loop,
        session=self._session,
    )
    return self.response
async def close(self) -> None:
    """Wait for the background body-writer task to complete."""
    writer = self._writer
    if writer is None:
        return
    try:
        await writer
    finally:
        self._writer = None
def terminate(self) -> None:
    """Cancel the body-writer task without waiting for it."""
    writer = self._writer
    if writer is None:
        return
    # Cancelling on a closed loop would raise; just drop the reference.
    if not self.loop.is_closed():
        writer.cancel()
    self._writer = None
async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
    """Notify all registered tracers that a body chunk was sent."""
    for tracer in self._traces:
        await tracer.send_request_chunk_sent(method, url, chunk)
async def _on_headers_request_sent(
    self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
    """Notify all registered tracers that the request headers were sent."""
    for tracer in self._traces:
        await tracer.send_request_headers(method, url, headers)
class ClientResponse(HeadersMixin):
    """HTTP response object produced by a ClientRequest.

    Created by ``ClientRequest.send`` and populated by ``start`` once the
    status line and headers have been read from the wire.
    """

    # from the Status-Line of the response
    version = None  # HTTP-Version
    status = None  # type: int  # Status-Code
    reason = None  # Reason-Phrase

    content = None  # type: StreamReader  # Payload stream
    _headers = None  # type: CIMultiDictProxy[str]  # Response headers
    _raw_headers = None  # type: RawHeaders  # Response raw headers

    _connection = None  # current connection
    _source_traceback = None
    # set up by ClientRequest after ClientResponse object creation;
    # the post-init stage allows not changing the ctor signature
    _closed = True  # to allow __del__ for non-properly-initialized response
    _released = False

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        writer: "asyncio.Task[None]",
        continue100: Optional["asyncio.Future[bool]"],
        timer: BaseTimerContext,
        request_info: RequestInfo,
        traces: List["Trace"],
        loop: asyncio.AbstractEventLoop,
        session: "ClientSession",
    ) -> None:
        assert isinstance(url, URL)

        self.method = method
        self.cookies = SimpleCookie()  # type: SimpleCookie[str]

        # _real_url keeps the fragment; _url is what requests are made against
        self._real_url = url
        self._url = url.with_fragment(None)
        self._body = None  # type: Any
        self._writer = writer  # type: Optional[asyncio.Task[None]]
        self._continue = continue100  # None by default
        self._closed = True
        self._history = ()  # type: Tuple[ClientResponse, ...]
        self._request_info = request_info
        self._timer = timer if timer is not None else TimerNoop()
        self._cache = {}  # type: Dict[str, Any]
        self._traces = traces
        self._loop = loop
        # store a reference to session #1985
        self._session = session  # type: Optional[ClientSession]
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    @reify
    def url(self) -> URL:
        """Request URL with the fragment stripped."""
        return self._url

    @reify
    def url_obj(self) -> URL:
        warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
        return self._url

    @reify
    def real_url(self) -> URL:
        """Original request URL, fragment included."""
        return self._real_url

    @reify
    def host(self) -> str:
        """Host part of the request URL."""
        assert self._url.host is not None
        return self._url.host

    @reify
    def headers(self) -> "CIMultiDictProxy[str]":
        """Case-insensitive, read-only view of the response headers."""
        return self._headers

    @reify
    def raw_headers(self) -> RawHeaders:
        """Headers as received on the wire: a tuple of (name, value) bytes pairs."""
        return self._raw_headers

    @reify
    def request_info(self) -> RequestInfo:
        """Information about the request that produced this response."""
        return self._request_info

    @reify
    def content_disposition(self) -> Optional[ContentDisposition]:
        """Parsed Content-Disposition header, or None if absent."""
        raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
        if raw is None:
            return None
        disposition_type, params_dct = multipart.parse_content_disposition(raw)
        params = MappingProxyType(params_dct)
        filename = multipart.content_disposition_filename(params)
        return ContentDisposition(disposition_type, params, filename)

    def __del__(self, _warnings: Any = warnings) -> None:
        # Best-effort cleanup for responses that were never closed/released.
        if self._closed:
            return

        if self._connection is not None:
            self._connection.release()
            self._cleanup_writer()

            if self._loop.get_debug():
                # ResourceWarning gained the ``source`` kwarg in Python 3.6
                if PY_36:
                    kwargs = {"source": self}
                else:
                    kwargs = {}
                _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
                context = {"client_response": self, "message": "Unclosed response"}
                if self._source_traceback:
                    context["source_traceback"] = self._source_traceback
                self._loop.call_exception_handler(context)

    def __repr__(self) -> str:
        out = io.StringIO()
        ascii_encodable_url = str(self.url)
        if self.reason:
            # Keep the repr printable even for non-ASCII reason phrases.
            ascii_encodable_reason = self.reason.encode(
                "ascii", "backslashreplace"
            ).decode("ascii")
        else:
            ascii_encodable_reason = self.reason
        print(
            "<ClientResponse({}) [{} {}]>".format(
                ascii_encodable_url, self.status, ascii_encodable_reason
            ),
            file=out,
        )
        print(self.headers, file=out)
        return out.getvalue()

    @property
    def connection(self) -> Optional["Connection"]:
        """Underlying connection, or None once released/detached."""
        return self._connection

    @reify
    def history(self) -> Tuple["ClientResponse", ...]:
        """A sequence of responses, if redirects occurred."""
        return self._history

    @reify
    def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
        """Link headers parsed per RFC 8288, keyed by rel (or by URL)."""
        links_str = ", ".join(self.headers.getall("link", []))

        if not links_str:
            return MultiDictProxy(MultiDict())

        links = MultiDict()  # type: MultiDict[MultiDictProxy[Union[str, URL]]]

        # Split on commas that precede a "<...>" target to tolerate
        # commas inside parameter values.
        for val in re.split(r",(?=\s*<)", links_str):
            match = re.match(r"\s*<(.*)>(.*)", val)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            url, params_str = match.groups()
            params = params_str.split(";")[1:]

            link = MultiDict()  # type: MultiDict[Union[str, URL]]

            for param in params:
                # key = "value" | 'value' | value  (quotes optional)
                match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
                if match is None:  # pragma: no cover
                    # the check exists to suppress mypy error
                    continue
                key, _, value, _ = match.groups()

                link.add(key, value)

            key = link.get("rel", url)  # type: ignore[assignment]

            # Resolve the link target relative to the response URL.
            link.add("url", self.url.join(URL(url)))

            links.add(key, MultiDictProxy(link))

        return MultiDictProxy(links)

    async def start(self, connection: "Connection") -> "ClientResponse":
        """Start response processing."""
        self._closed = False
        self._protocol = connection.protocol
        self._connection = connection

        with self._timer:
            while True:
                # read response; loop past 1xx interim responses
                try:
                    protocol = self._protocol
                    message, payload = await protocol.read()  # type: ignore[union-attr]
                except http.HttpProcessingError as exc:
                    raise ClientResponseError(
                        self.request_info,
                        self.history,
                        status=exc.code,
                        message=exc.message,
                        headers=exc.headers,
                    ) from exc

                # Stop on any final response; 101 (Switching Protocols)
                # is also final despite being 1xx.
                if message.code < 100 or message.code > 199 or message.code == 101:
                    break

                # An interim response unblocks the 100-continue body writer.
                if self._continue is not None:
                    set_result(self._continue, True)
                    self._continue = None

        # payload eof handler
        payload.on_eof(self._response_eof)

        # response status
        self.version = message.version
        self.status = message.code
        self.reason = message.reason

        # headers
        self._headers = message.headers  # type is CIMultiDictProxy
        self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

        # payload
        self.content = payload

        # cookies: parse every Set-Cookie header, ignoring malformed ones
        for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
            try:
                self.cookies.load(hdr)
            except CookieError as exc:
                client_logger.warning("Can not load response cookies: %s", exc)
        return self

    def _response_eof(self) -> None:
        # Called when the payload stream hits EOF: release the connection.
        if self._closed:
            return

        if self._connection is not None:
            # websocket, protocol could be None because
            # connection could be detached
            if (
                self._connection.protocol is not None
                and self._connection.protocol.upgraded
            ):
                return

            self._connection.release()
            self._connection = None

        self._closed = True
        self._cleanup_writer()

    @property
    def closed(self) -> bool:
        """True once the response has been closed or fully released."""
        return self._closed

    def close(self) -> None:
        """Forcibly close the response and its connection."""
        if not self._released:
            self._notify_content()
        if self._closed:
            return

        self._closed = True
        if self._loop is None or self._loop.is_closed():
            return

        if self._connection is not None:
            # close() (unlike release()) discards the connection entirely
            self._connection.close()
            self._connection = None
        self._cleanup_writer()

    def release(self) -> Any:
        """Release the connection back to the pool; returns an awaitable no-op."""
        if not self._released:
            self._notify_content()
        if self._closed:
            return noop()

        self._closed = True
        if self._connection is not None:
            self._connection.release()
            self._connection = None

        self._cleanup_writer()
        return noop()

    @property
    def ok(self) -> bool:
        """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

        This is **not** a check for ``200 OK`` but a check that the response
        status is under 400.
        """
        return 400 > self.status

    def raise_for_status(self) -> None:
        """Raise ClientResponseError for 4xx/5xx responses."""
        if not self.ok:
            # reason should always be not None for a started response
            assert self.reason is not None
            self.release()
            raise ClientResponseError(
                self.request_info,
                self.history,
                status=self.status,
                message=self.reason,
                headers=self.headers,
            )

    def _cleanup_writer(self) -> None:
        # Cancel the request body writer and drop the session reference.
        if self._writer is not None:
            self._writer.cancel()
        self._writer = None
        self._session = None

    def _notify_content(self) -> None:
        # Fail any pending content readers, then mark released.
        content = self.content
        if content and content.exception() is None:
            content.set_exception(ClientConnectionError("Connection closed"))
        self._released = True

    async def wait_for_close(self) -> None:
        """Wait for the body writer to finish, then release the response."""
        if self._writer is not None:
            try:
                await self._writer
            finally:
                self._writer = None
        self.release()

    async def read(self) -> bytes:
        """Read response payload."""
        if self._body is None:
            try:
                self._body = await self.content.read()
                for trace in self._traces:
                    await trace.send_response_chunk_received(
                        self.method, self.url, self._body
                    )
            except BaseException:
                # A failed read leaves the connection unusable.
                self.close()
                raise
        elif self._released:
            raise ClientConnectionError("Connection closed")

        return self._body  # type: ignore[no-any-return]

    def get_encoding(self) -> str:
        """Determine the text encoding: charset param, JSON default, or chardet."""
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        mimetype = helpers.parse_mimetype(ctype)

        encoding = mimetype.parameters.get("charset")
        if encoding:
            try:
                # Reject charsets Python does not know about.
                codecs.lookup(encoding)
            except LookupError:
                encoding = None
        if not encoding:
            if mimetype.type == "application" and (
                mimetype.subtype == "json" or mimetype.subtype == "rdap"
            ):
                # RFC 7159 states that the default encoding is UTF-8.
                # RFC 7483 defines application/rdap+json
                encoding = "utf-8"
            elif self._body is None:
                raise RuntimeError(
                    "Cannot guess the encoding of " "a not yet read body"
                )
            else:
                # Fall back to statistical detection on the raw body.
                encoding = chardet.detect(self._body)["encoding"]
        if not encoding:
            encoding = "utf-8"

        return encoding

    async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
        """Read response payload and decode."""
        if self._body is None:
            await self.read()

        if encoding is None:
            encoding = self.get_encoding()

        return self._body.decode(  # type: ignore[no-any-return,union-attr]
            encoding, errors=errors
        )

    async def json(
        self,
        *,
        encoding: Optional[str] = None,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        content_type: Optional[str] = "application/json",
    ) -> Any:
        """Read and decodes JSON response."""
        if self._body is None:
            await self.read()

        if content_type:
            # Guard against decoding non-JSON payloads by mistake;
            # pass content_type=None to skip the check.
            ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
            if not _is_expected_content_type(ctype, content_type):
                raise ContentTypeError(
                    self.request_info,
                    self.history,
                    message=(
                        "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                    ),
                    headers=self.headers,
                )

        stripped = self._body.strip()  # type: ignore[union-attr]
        if not stripped:
            return None

        if encoding is None:
            encoding = self.get_encoding()

        return loads(stripped.decode(encoding))

    async def __aenter__(self) -> "ClientResponse":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # similar to _RequestContextManager, we do not need to check
        # for exceptions, response object can close connection
        # if state is broken
        self.release()
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/connector.py
ADDED
|
@@ -0,0 +1,1449 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import functools
|
| 3 |
+
import random
|
| 4 |
+
import sys
|
| 5 |
+
import traceback
|
| 6 |
+
import warnings
|
| 7 |
+
from collections import defaultdict, deque
|
| 8 |
+
from contextlib import suppress
|
| 9 |
+
from http.cookies import SimpleCookie
|
| 10 |
+
from itertools import cycle, islice
|
| 11 |
+
from time import monotonic
|
| 12 |
+
from types import TracebackType
|
| 13 |
+
from typing import (
|
| 14 |
+
TYPE_CHECKING,
|
| 15 |
+
Any,
|
| 16 |
+
Awaitable,
|
| 17 |
+
Callable,
|
| 18 |
+
DefaultDict,
|
| 19 |
+
Dict,
|
| 20 |
+
Iterator,
|
| 21 |
+
List,
|
| 22 |
+
Optional,
|
| 23 |
+
Set,
|
| 24 |
+
Tuple,
|
| 25 |
+
Type,
|
| 26 |
+
Union,
|
| 27 |
+
cast,
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
import attr
|
| 31 |
+
|
| 32 |
+
from . import hdrs, helpers
|
| 33 |
+
from .abc import AbstractResolver
|
| 34 |
+
from .client_exceptions import (
|
| 35 |
+
ClientConnectionError,
|
| 36 |
+
ClientConnectorCertificateError,
|
| 37 |
+
ClientConnectorError,
|
| 38 |
+
ClientConnectorSSLError,
|
| 39 |
+
ClientHttpProxyError,
|
| 40 |
+
ClientProxyConnectionError,
|
| 41 |
+
ServerFingerprintMismatch,
|
| 42 |
+
UnixClientConnectorError,
|
| 43 |
+
cert_errors,
|
| 44 |
+
ssl_errors,
|
| 45 |
+
)
|
| 46 |
+
from .client_proto import ResponseHandler
|
| 47 |
+
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
|
| 48 |
+
from .helpers import (
|
| 49 |
+
PY_36,
|
| 50 |
+
ceil_timeout,
|
| 51 |
+
get_running_loop,
|
| 52 |
+
is_ip_address,
|
| 53 |
+
noop,
|
| 54 |
+
sentinel,
|
| 55 |
+
)
|
| 56 |
+
from .http import RESPONSES
|
| 57 |
+
from .locks import EventResultOrError
|
| 58 |
+
from .resolver import DefaultResolver
|
| 59 |
+
|
| 60 |
+
try:
|
| 61 |
+
import ssl
|
| 62 |
+
|
| 63 |
+
SSLContext = ssl.SSLContext
|
| 64 |
+
except ImportError: # pragma: no cover
|
| 65 |
+
ssl = None # type: ignore[assignment]
|
| 66 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 73 |
+
from .client import ClientTimeout
|
| 74 |
+
from .client_reqrep import ConnectionKey
|
| 75 |
+
from .tracing import Trace
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class _DeprecationWaiter:
    """Awaitable proxy that warns when the wrapped awaitable is never awaited.

    ``BaseConnector.close()`` returns one of these so legacy callers that
    invoke it synchronously keep working, while a ``DeprecationWarning`` is
    emitted if the result is garbage-collected without being awaited.
    """

    __slots__ = ("_awaitable", "_awaited")

    def __init__(self, awaitable: Awaitable[Any]) -> None:
        self._awaitable = awaitable
        self._awaited = False

    def __await__(self) -> Any:
        # Flag consumption first so __del__ stays silent afterwards.
        self._awaited = True
        return self._awaitable.__await__()

    def __del__(self) -> None:
        if self._awaited:
            return
        warnings.warn(
            "Connector.close() is a coroutine, "
            "please use await connector.close()",
            DeprecationWarning,
        )
|
| 96 |
+
|
| 97 |
+
|
| 98 |
+
class Connection:
    """Handle to a single pooled connection.

    Wraps a ``ResponseHandler`` protocol acquired from a ``BaseConnector``
    and returns it to (or removes it from) the pool via ``release()`` /
    ``close()``.
    """

    # Filled in __init__ only when the loop runs in debug mode.
    _source_traceback = None
    _transport = None

    def __init__(
        self,
        connector: "BaseConnector",
        key: "ConnectionKey",
        protocol: ResponseHandler,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._key = key
        self._connector = connector
        self._loop = loop
        self._protocol = protocol  # type: Optional[ResponseHandler]
        self._callbacks = []  # type: List[Callable[[], None]]

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __repr__(self) -> str:
        return f"Connection<{self._key}>"

    def __del__(self, _warnings: Any = warnings) -> None:
        # A still-set protocol at GC time means the connection was never
        # released/closed: warn, force-close it back into the connector,
        # and report through the loop's exception handler.
        if self._protocol is not None:
            if PY_36:
                # ResourceWarning grew the ``source`` kwarg in Python 3.6.
                kwargs = {"source": self}
            else:
                kwargs = {}
            _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
            if self._loop.is_closed():
                return

            self._connector._release(self._key, self._protocol, should_close=True)

            context = {"client_connection": self, "message": "Unclosed connection"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Deprecated access to the event loop this connection uses."""
        warnings.warn(
            "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Underlying transport, or None once the connection is released."""
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def protocol(self) -> Optional[ResponseHandler]:
        """The wrapped protocol, or None once released/closed."""
        return self._protocol

    def add_callback(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run when the connection is released."""
        if callback is not None:
            self._callbacks.append(callback)

    def _notify_release(self) -> None:
        # Swap the list out first so callbacks registered during
        # notification are kept for the next release cycle.
        callbacks, self._callbacks = self._callbacks[:], []

        for cb in callbacks:
            # Callback failures must not prevent releasing the connection.
            with suppress(Exception):
                cb()

    def close(self) -> None:
        """Return the connection to the connector, forcing it closed."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(self._key, self._protocol, should_close=True)
            self._protocol = None

    def release(self) -> None:
        """Return the connection to the connector for possible reuse.

        Whether the underlying protocol is closed is decided by
        ``protocol.should_close``.
        """
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(
                self._key, self._protocol, should_close=self._protocol.should_close
            )
            self._protocol = None

    @property
    def closed(self) -> bool:
        """True when released or when the protocol lost its transport."""
        return self._protocol is None or not self._protocol.is_connected()
|
| 186 |
+
|
| 187 |
+
|
| 188 |
+
class _TransportPlaceholder:
    """placeholder for BaseConnector.connect function"""

    def close(self) -> None:
        """No-op: a placeholder has no real transport to tear down."""
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
class BaseConnector:
    """Base connector class.

    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    enable_cleanup_closed - Enables clean-up closed ssl transports.
                            Disabled by default.
    loop - Optional event loop.
    """

    _closed = True  # prevent AttributeError in __del__ if ctor was failed
    _source_traceback = None

    # abort transport after 2 seconds (cleanup broken connections)
    _cleanup_closed_period = 2.0

    def __init__(
        self,
        *,
        keepalive_timeout: Union[object, None, float] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:

        if force_close:
            if keepalive_timeout is not None and keepalive_timeout is not sentinel:
                raise ValueError(
                    "keepalive_timeout cannot be set if force_close is True"
                )
        else:
            if keepalive_timeout is sentinel:
                keepalive_timeout = 15.0

        loop = get_running_loop(loop)

        self._closed = False
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Idle (released, keep-alive) connections with their release time.
        self._conns = (
            {}
        )  # type: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]]
        self._limit = limit
        self._limit_per_host = limit_per_host
        self._acquired = set()  # type: Set[ResponseHandler]
        self._acquired_per_host = defaultdict(
            set
        )  # type: DefaultDict[ConnectionKey, Set[ResponseHandler]]
        self._keepalive_timeout = cast(float, keepalive_timeout)
        self._force_close = force_close

        # {host_key: FIFO list of waiters}
        self._waiters = defaultdict(deque)  # type: ignore[var-annotated]

        self._loop = loop
        self._factory = functools.partial(ResponseHandler, loop=loop)

        self.cookies = SimpleCookie()  # type: SimpleCookie[str]

        # start keep-alive connection cleanup task
        self._cleanup_handle: Optional[asyncio.TimerHandle] = None

        # start cleanup closed transports task
        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
        self._cleanup_closed_disabled = not enable_cleanup_closed
        self._cleanup_closed_transports = []  # type: List[Optional[asyncio.Transport]]
        self._cleanup_closed()

    def __del__(self, _warnings: Any = warnings) -> None:
        if self._closed:
            return
        if not self._conns:
            return

        # Capture reprs before _close() clears the pool.
        conns = [repr(c) for c in self._conns.values()]

        self._close()

        if PY_36:
            kwargs = {"source": self}
        else:
            kwargs = {}
        _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
        context = {
            "connector": self,
            "connections": conns,
            "message": "Unclosed connector",
        }
        if self._source_traceback is not None:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __enter__(self) -> "BaseConnector":
        # FIX: the warning message used to read '"witn Connector():"'.
        warnings.warn(
            '"with Connector():" is deprecated, '
            'use "async with Connector():" instead',
            DeprecationWarning,
        )
        return self

    def __exit__(self, *exc: Any) -> None:
        self.close()

    async def __aenter__(self) -> "BaseConnector":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        exc_traceback: Optional[TracebackType] = None,
    ) -> None:
        await self.close()

    @property
    def force_close(self) -> bool:
        """Ultimately close connection on releasing if True."""
        return self._force_close

    @property
    def limit(self) -> int:
        """The total number for simultaneous connections.

        If limit is 0 the connector has no limit.
        The default limit size is 100.
        """
        return self._limit

    @property
    def limit_per_host(self) -> int:
        """The limit for simultaneous connections to the same endpoint.

        Endpoints are the same if they have an equal
        (host, port, is_ssl) triple.
        """
        return self._limit_per_host

    def _cleanup(self) -> None:
        """Cleanup unused transports."""
        if self._cleanup_handle:
            self._cleanup_handle.cancel()
            # _cleanup_handle should be unset, otherwise _release() will not
            # recreate it ever!
            self._cleanup_handle = None

        now = self._loop.time()
        timeout = self._keepalive_timeout

        if self._conns:
            connections = {}
            deadline = now - timeout
            for key, conns in self._conns.items():
                alive = []
                for proto, use_time in conns:
                    if proto.is_connected():
                        if use_time - deadline < 0:
                            # Idle for longer than keep-alive: drop it.
                            transport = proto.transport
                            proto.close()
                            if key.is_ssl and not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(transport)
                        else:
                            alive.append((proto, use_time))
                    else:
                        # Peer already disconnected: discard.
                        transport = proto.transport
                        proto.close()
                        if key.is_ssl and not self._cleanup_closed_disabled:
                            self._cleanup_closed_transports.append(transport)

                if alive:
                    connections[key] = alive

            self._conns = connections

        if self._conns:
            # Re-arm only while there is something left to expire.
            self._cleanup_handle = helpers.weakref_handle(
                self, "_cleanup", timeout, self._loop
            )

    def _drop_acquired_per_host(
        self, key: "ConnectionKey", val: ResponseHandler
    ) -> None:
        """Remove *val* from the per-host acquired set, pruning empty keys."""
        acquired_per_host = self._acquired_per_host
        if key not in acquired_per_host:
            return
        conns = acquired_per_host[key]
        conns.remove(val)
        if not conns:
            del self._acquired_per_host[key]

    def _cleanup_closed(self) -> None:
        """Double confirmation for transport close.

        Some broken ssl servers may leave socket open without proper close.
        """
        if self._cleanup_closed_handle:
            self._cleanup_closed_handle.cancel()

        for transport in self._cleanup_closed_transports:
            if transport is not None:
                transport.abort()

        self._cleanup_closed_transports = []

        if not self._cleanup_closed_disabled:
            self._cleanup_closed_handle = helpers.weakref_handle(
                self, "_cleanup_closed", self._cleanup_closed_period, self._loop
            )

    def close(self) -> Awaitable[None]:
        """Close all opened transports."""
        self._close()
        # Returned waiter keeps ``await connector.close()`` working while
        # warning legacy synchronous callers via _DeprecationWaiter.__del__.
        return _DeprecationWaiter(noop())

    def _close(self) -> None:
        if self._closed:
            return

        self._closed = True

        try:
            if self._loop.is_closed():
                return

            # cancel cleanup task
            if self._cleanup_handle:
                self._cleanup_handle.cancel()

            # cancel cleanup close task
            if self._cleanup_closed_handle:
                self._cleanup_closed_handle.cancel()

            for data in self._conns.values():
                for proto, t0 in data:
                    proto.close()

            for proto in self._acquired:
                proto.close()

            for transport in self._cleanup_closed_transports:
                if transport is not None:
                    transport.abort()

        finally:
            self._conns.clear()
            self._acquired.clear()
            self._waiters.clear()
            self._cleanup_handle = None
            self._cleanup_closed_transports.clear()
            self._cleanup_closed_handle = None

    @property
    def closed(self) -> bool:
        """Is connector closed.

        A readonly property.
        """
        return self._closed

    def _available_connections(self, key: "ConnectionKey") -> int:
        """
        Return number of available connections.

        The limit, limit_per_host and the connection key are taken into account.

        If it returns less than 1 means that there are no connections
        available.
        """
        if self._limit:
            # total calc available connections
            available = self._limit - len(self._acquired)

            # check limit per host
            if (
                self._limit_per_host
                and available > 0
                and key in self._acquired_per_host
            ):
                acquired = self._acquired_per_host.get(key)
                assert acquired is not None
                available = self._limit_per_host - len(acquired)

        elif self._limit_per_host and key in self._acquired_per_host:
            # check limit per host
            acquired = self._acquired_per_host.get(key)
            assert acquired is not None
            available = self._limit_per_host - len(acquired)
        else:
            available = 1

        return available

    async def connect(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Connection:
        """Get from pool or create new connection."""
        key = req.connection_key
        available = self._available_connections(key)

        # Wait if there are no available connections or if there are/were
        # waiters (i.e. don't steal connection from a waiter about to wake up)
        if available <= 0 or key in self._waiters:
            fut = self._loop.create_future()

            # This connection will now count towards the limit.
            self._waiters[key].append(fut)

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_start()

            try:
                await fut
            except BaseException as e:
                if key in self._waiters:
                    # remove a waiter even if it was cancelled, normally it's
                    # removed when it's notified
                    try:
                        self._waiters[key].remove(fut)
                    except ValueError:  # fut may no longer be in list
                        pass

                raise e
            finally:
                if key in self._waiters and not self._waiters[key]:
                    del self._waiters[key]

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_end()

        proto = self._get(key)
        if proto is None:
            # Reserve a slot with a placeholder while the real connection is
            # being established, so concurrent connect() calls see the limit.
            placeholder = cast(ResponseHandler, _TransportPlaceholder())
            self._acquired.add(placeholder)
            self._acquired_per_host[key].add(placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_start()

            try:
                proto = await self._create_connection(req, traces, timeout)
                if self._closed:
                    proto.close()
                    raise ClientConnectionError("Connector is closed.")
            except BaseException:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)
                    self._release_waiter()
                raise
            else:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_end()
        else:
            if traces:
                # Acquire the connection to prevent race conditions with limits
                placeholder = cast(ResponseHandler, _TransportPlaceholder())
                self._acquired.add(placeholder)
                self._acquired_per_host[key].add(placeholder)
                for trace in traces:
                    await trace.send_connection_reuseconn()
                self._acquired.remove(placeholder)
                self._drop_acquired_per_host(key, placeholder)

        self._acquired.add(proto)
        self._acquired_per_host[key].add(proto)
        return Connection(self, key, proto, self._loop)

    def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
        """Pop a reusable idle connection for *key*, or None."""
        try:
            conns = self._conns[key]
        except KeyError:
            return None

        t1 = self._loop.time()
        while conns:
            proto, t0 = conns.pop()
            if proto.is_connected():
                if t1 - t0 > self._keepalive_timeout:
                    # Expired while idle: close and keep searching.
                    transport = proto.transport
                    proto.close()
                    # only for SSL transports
                    if key.is_ssl and not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transport)
                else:
                    if not conns:
                        # The very last connection was reclaimed: drop the key
                        del self._conns[key]
                    return proto
            else:
                transport = proto.transport
                proto.close()
                if key.is_ssl and not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transport)

        # No more connections: drop the key
        del self._conns[key]
        return None

    def _release_waiter(self) -> None:
        """
        Iterates over all waiters until one to be released is found.

        The one to be released is not finished and
        belongs to a host that has available connections.
        """
        if not self._waiters:
            return

        # Shuffle the keys so repeated calls do not iterate
        # in the same order every time.
        queues = list(self._waiters.keys())
        random.shuffle(queues)

        for key in queues:
            if self._available_connections(key) < 1:
                continue

            waiters = self._waiters[key]
            while waiters:
                waiter = waiters.popleft()
                if not waiter.done():
                    waiter.set_result(None)
                    return

    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
        """Forget *proto* as acquired and wake one waiter if possible."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        try:
            self._acquired.remove(proto)
            self._drop_acquired_per_host(key, proto)
        except KeyError:  # pragma: no cover
            # this may be a result of non-deterministic order of objects
            # finalization due to garbage collection.
            pass
        else:
            self._release_waiter()

    def _release(
        self,
        key: "ConnectionKey",
        protocol: ResponseHandler,
        *,
        should_close: bool = False,
    ) -> None:
        """Return *protocol* to the idle pool, or close it."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._release_acquired(key, protocol)

        if self._force_close:
            should_close = True

        if should_close or protocol.should_close:
            transport = protocol.transport
            protocol.close()

            if key.is_ssl and not self._cleanup_closed_disabled:
                self._cleanup_closed_transports.append(transport)
        else:
            conns = self._conns.get(key)
            if conns is None:
                conns = self._conns[key] = []
            conns.append((protocol, self._loop.time()))

            if self._cleanup_handle is None:
                # Arm the keep-alive expiry timer lazily on first idle conn.
                self._cleanup_handle = helpers.weakref_handle(
                    self, "_cleanup", self._keepalive_timeout, self._loop
                )

    async def _create_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Establish a new connection; implemented by subclasses."""
        raise NotImplementedError()
|
| 684 |
+
|
| 685 |
+
|
| 686 |
+
class _DNSCacheTable:
    """Round-robin DNS cache with an optional per-entry TTL."""

    def __init__(self, ttl: Optional[float] = None) -> None:
        # host key -> (endless round-robin iterator, number of addresses)
        self._addrs_rr = (
            {}
        )  # type: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]]
        # host key -> monotonic timestamp of the add() call (TTL mode only)
        self._timestamps = {}  # type: Dict[Tuple[str, int], float]
        self._ttl = ttl

    def __contains__(self, host: object) -> bool:
        return host in self._addrs_rr

    def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
        """Store the resolved *addrs* for *key*, resetting its TTL clock."""
        self._addrs_rr[key] = (cycle(addrs), len(addrs))

        if self._ttl:
            self._timestamps[key] = monotonic()

    def remove(self, key: Tuple[str, int]) -> None:
        """Forget the entry for *key*, if present."""
        self._addrs_rr.pop(key, None)

        if self._ttl:
            self._timestamps.pop(key, None)

    def clear(self) -> None:
        """Drop every cached entry."""
        self._addrs_rr.clear()
        self._timestamps.clear()

    def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
        """Return all addresses for *key*, rotated by one on each call."""
        rr, count = self._addrs_rr[key]
        result = list(islice(rr, count))
        # Consume one more element to shift internal state of `cycle`
        next(rr)
        return result

    def expired(self, key: Tuple[str, int]) -> bool:
        """True when *key*'s entry is older than the configured TTL."""
        ttl = self._ttl
        if ttl is None:
            return False

        return self._timestamps[key] + ttl < monotonic()
|
| 725 |
+
|
| 726 |
+
|
| 727 |
+
class TCPConnector(BaseConnector):
|
| 728 |
+
"""TCP connector.
|
| 729 |
+
|
| 730 |
+
verify_ssl - Set to True to check ssl certifications.
|
| 731 |
+
fingerprint - Pass the binary sha256
|
| 732 |
+
digest of the expected certificate in DER format to verify
|
| 733 |
+
that the certificate the server presents matches. See also
|
| 734 |
+
https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
|
| 735 |
+
resolver - Enable DNS lookups and use this
|
| 736 |
+
resolver
|
| 737 |
+
use_dns_cache - Use memory cache for DNS lookups.
|
| 738 |
+
ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
|
| 739 |
+
family - socket address family
|
| 740 |
+
local_addr - local tuple of (host, port) to bind socket to
|
| 741 |
+
|
| 742 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
| 743 |
+
force_close - Set to True to force close and do reconnect
|
| 744 |
+
after each request (and between redirects).
|
| 745 |
+
limit - The total number of simultaneous connections.
|
| 746 |
+
limit_per_host - Number of simultaneous connections to one host.
|
| 747 |
+
enable_cleanup_closed - Enables clean-up closed ssl transports.
|
| 748 |
+
Disabled by default.
|
| 749 |
+
loop - Optional event loop.
|
| 750 |
+
"""
|
| 751 |
+
|
| 752 |
+
def __init__(
|
| 753 |
+
self,
|
| 754 |
+
*,
|
| 755 |
+
verify_ssl: bool = True,
|
| 756 |
+
fingerprint: Optional[bytes] = None,
|
| 757 |
+
use_dns_cache: bool = True,
|
| 758 |
+
ttl_dns_cache: Optional[int] = 10,
|
| 759 |
+
family: int = 0,
|
| 760 |
+
ssl_context: Optional[SSLContext] = None,
|
| 761 |
+
ssl: Union[None, bool, Fingerprint, SSLContext] = None,
|
| 762 |
+
local_addr: Optional[Tuple[str, int]] = None,
|
| 763 |
+
resolver: Optional[AbstractResolver] = None,
|
| 764 |
+
keepalive_timeout: Union[None, float, object] = sentinel,
|
| 765 |
+
force_close: bool = False,
|
| 766 |
+
limit: int = 100,
|
| 767 |
+
limit_per_host: int = 0,
|
| 768 |
+
enable_cleanup_closed: bool = False,
|
| 769 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 770 |
+
):
|
| 771 |
+
super().__init__(
|
| 772 |
+
keepalive_timeout=keepalive_timeout,
|
| 773 |
+
force_close=force_close,
|
| 774 |
+
limit=limit,
|
| 775 |
+
limit_per_host=limit_per_host,
|
| 776 |
+
enable_cleanup_closed=enable_cleanup_closed,
|
| 777 |
+
loop=loop,
|
| 778 |
+
)
|
| 779 |
+
|
| 780 |
+
self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
|
| 781 |
+
if resolver is None:
|
| 782 |
+
resolver = DefaultResolver(loop=self._loop)
|
| 783 |
+
self._resolver = resolver
|
| 784 |
+
|
| 785 |
+
self._use_dns_cache = use_dns_cache
|
| 786 |
+
self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
|
| 787 |
+
self._throttle_dns_events = (
|
| 788 |
+
{}
|
| 789 |
+
) # type: Dict[Tuple[str, int], EventResultOrError]
|
| 790 |
+
self._family = family
|
| 791 |
+
self._local_addr = local_addr
|
| 792 |
+
|
| 793 |
+
def close(self) -> Awaitable[None]:
|
| 794 |
+
"""Close all ongoing DNS calls."""
|
| 795 |
+
for ev in self._throttle_dns_events.values():
|
| 796 |
+
ev.cancel()
|
| 797 |
+
|
| 798 |
+
return super().close()
|
| 799 |
+
|
| 800 |
+
@property
|
| 801 |
+
def family(self) -> int:
|
| 802 |
+
"""Socket family like AF_INET."""
|
| 803 |
+
return self._family
|
| 804 |
+
|
| 805 |
+
@property
|
| 806 |
+
def use_dns_cache(self) -> bool:
|
| 807 |
+
"""True if local DNS caching is enabled."""
|
| 808 |
+
return self._use_dns_cache
|
| 809 |
+
|
| 810 |
+
def clear_dns_cache(
|
| 811 |
+
self, host: Optional[str] = None, port: Optional[int] = None
|
| 812 |
+
) -> None:
|
| 813 |
+
"""Remove specified host/port or clear all dns local cache."""
|
| 814 |
+
if host is not None and port is not None:
|
| 815 |
+
self._cached_hosts.remove((host, port))
|
| 816 |
+
elif host is not None or port is not None:
|
| 817 |
+
raise ValueError("either both host and port " "or none of them are allowed")
|
| 818 |
+
else:
|
| 819 |
+
self._cached_hosts.clear()
|
| 820 |
+
|
| 821 |
+
async def _resolve_host(
|
| 822 |
+
self, host: str, port: int, traces: Optional[List["Trace"]] = None
|
| 823 |
+
) -> List[Dict[str, Any]]:
|
| 824 |
+
if is_ip_address(host):
|
| 825 |
+
return [
|
| 826 |
+
{
|
| 827 |
+
"hostname": host,
|
| 828 |
+
"host": host,
|
| 829 |
+
"port": port,
|
| 830 |
+
"family": self._family,
|
| 831 |
+
"proto": 0,
|
| 832 |
+
"flags": 0,
|
| 833 |
+
}
|
| 834 |
+
]
|
| 835 |
+
|
| 836 |
+
if not self._use_dns_cache:
|
| 837 |
+
|
| 838 |
+
if traces:
|
| 839 |
+
for trace in traces:
|
| 840 |
+
await trace.send_dns_resolvehost_start(host)
|
| 841 |
+
|
| 842 |
+
res = await self._resolver.resolve(host, port, family=self._family)
|
| 843 |
+
|
| 844 |
+
if traces:
|
| 845 |
+
for trace in traces:
|
| 846 |
+
await trace.send_dns_resolvehost_end(host)
|
| 847 |
+
|
| 848 |
+
return res
|
| 849 |
+
|
| 850 |
+
key = (host, port)
|
| 851 |
+
|
| 852 |
+
if (key in self._cached_hosts) and (not self._cached_hosts.expired(key)):
|
| 853 |
+
# get result early, before any await (#4014)
|
| 854 |
+
result = self._cached_hosts.next_addrs(key)
|
| 855 |
+
|
| 856 |
+
if traces:
|
| 857 |
+
for trace in traces:
|
| 858 |
+
await trace.send_dns_cache_hit(host)
|
| 859 |
+
return result
|
| 860 |
+
|
| 861 |
+
if key in self._throttle_dns_events:
|
| 862 |
+
# get event early, before any await (#4014)
|
| 863 |
+
event = self._throttle_dns_events[key]
|
| 864 |
+
if traces:
|
| 865 |
+
for trace in traces:
|
| 866 |
+
await trace.send_dns_cache_hit(host)
|
| 867 |
+
await event.wait()
|
| 868 |
+
else:
|
| 869 |
+
# update dict early, before any await (#4014)
|
| 870 |
+
self._throttle_dns_events[key] = EventResultOrError(self._loop)
|
| 871 |
+
if traces:
|
| 872 |
+
for trace in traces:
|
| 873 |
+
await trace.send_dns_cache_miss(host)
|
| 874 |
+
try:
|
| 875 |
+
|
| 876 |
+
if traces:
|
| 877 |
+
for trace in traces:
|
| 878 |
+
await trace.send_dns_resolvehost_start(host)
|
| 879 |
+
|
| 880 |
+
addrs = await self._resolver.resolve(host, port, family=self._family)
|
| 881 |
+
if traces:
|
| 882 |
+
for trace in traces:
|
| 883 |
+
await trace.send_dns_resolvehost_end(host)
|
| 884 |
+
|
| 885 |
+
self._cached_hosts.add(key, addrs)
|
| 886 |
+
self._throttle_dns_events[key].set()
|
| 887 |
+
except BaseException as e:
|
| 888 |
+
# any DNS exception, independently of the implementation
|
| 889 |
+
# is set for the waiters to raise the same exception.
|
| 890 |
+
self._throttle_dns_events[key].set(exc=e)
|
| 891 |
+
raise
|
| 892 |
+
finally:
|
| 893 |
+
self._throttle_dns_events.pop(key)
|
| 894 |
+
|
| 895 |
+
return self._cached_hosts.next_addrs(key)
|
| 896 |
+
|
| 897 |
+
async def _create_connection(
    self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
    """Create connection.

    Has same keyword arguments as BaseEventLoop.create_connection.
    Dispatches to the proxy path when ``req.proxy`` is set, otherwise
    to the direct path; only the protocol is returned, the transport
    produced by the helper is discarded here.
    """
    connect = (
        self._create_proxy_connection
        if req.proxy
        else self._create_direct_connection
    )
    _, proto = await connect(req, traces, timeout)
    return proto
|
| 910 |
+
|
| 911 |
+
@staticmethod
|
| 912 |
+
@functools.lru_cache(None)
|
| 913 |
+
def _make_ssl_context(verified: bool) -> SSLContext:
|
| 914 |
+
if verified:
|
| 915 |
+
return ssl.create_default_context()
|
| 916 |
+
else:
|
| 917 |
+
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
| 918 |
+
sslcontext.options |= ssl.OP_NO_SSLv2
|
| 919 |
+
sslcontext.options |= ssl.OP_NO_SSLv3
|
| 920 |
+
sslcontext.check_hostname = False
|
| 921 |
+
sslcontext.verify_mode = ssl.CERT_NONE
|
| 922 |
+
try:
|
| 923 |
+
sslcontext.options |= ssl.OP_NO_COMPRESSION
|
| 924 |
+
except AttributeError as attr_err:
|
| 925 |
+
warnings.warn(
|
| 926 |
+
"{!s}: The Python interpreter is compiled "
|
| 927 |
+
"against OpenSSL < 1.0.0. Ref: "
|
| 928 |
+
"https://docs.python.org/3/library/ssl.html"
|
| 929 |
+
"#ssl.OP_NO_COMPRESSION".format(attr_err),
|
| 930 |
+
)
|
| 931 |
+
sslcontext.set_default_verify_paths()
|
| 932 |
+
return sslcontext
|
| 933 |
+
|
| 934 |
+
def _get_ssl_context(self, req: "ClientRequest") -> Optional[SSLContext]:
|
| 935 |
+
"""Logic to get the correct SSL context
|
| 936 |
+
|
| 937 |
+
0. if req.ssl is false, return None
|
| 938 |
+
|
| 939 |
+
1. if ssl_context is specified in req, use it
|
| 940 |
+
2. if _ssl_context is specified in self, use it
|
| 941 |
+
3. otherwise:
|
| 942 |
+
1. if verify_ssl is not specified in req, use self.ssl_context
|
| 943 |
+
(will generate a default context according to self.verify_ssl)
|
| 944 |
+
2. if verify_ssl is True in req, generate a default SSL context
|
| 945 |
+
3. if verify_ssl is False in req, generate a SSL context that
|
| 946 |
+
won't verify
|
| 947 |
+
"""
|
| 948 |
+
if req.is_ssl():
|
| 949 |
+
if ssl is None: # pragma: no cover
|
| 950 |
+
raise RuntimeError("SSL is not supported.")
|
| 951 |
+
sslcontext = req.ssl
|
| 952 |
+
if isinstance(sslcontext, ssl.SSLContext):
|
| 953 |
+
return sslcontext
|
| 954 |
+
if sslcontext is not None:
|
| 955 |
+
# not verified or fingerprinted
|
| 956 |
+
return self._make_ssl_context(False)
|
| 957 |
+
sslcontext = self._ssl
|
| 958 |
+
if isinstance(sslcontext, ssl.SSLContext):
|
| 959 |
+
return sslcontext
|
| 960 |
+
if sslcontext is not None:
|
| 961 |
+
# not verified or fingerprinted
|
| 962 |
+
return self._make_ssl_context(False)
|
| 963 |
+
return self._make_ssl_context(True)
|
| 964 |
+
else:
|
| 965 |
+
return None
|
| 966 |
+
|
| 967 |
+
def _get_fingerprint(self, req: "ClientRequest") -> Optional["Fingerprint"]:
    """Return the fingerprint to verify the peer against, if any.

    The request-level ``ssl`` setting wins over the connector-level
    one; ``None`` means no fingerprint pinning is configured.
    """
    for candidate in (req.ssl, self._ssl):
        if isinstance(candidate, Fingerprint):
            return candidate
    return None
|
| 975 |
+
|
| 976 |
+
async def _wrap_create_connection(
    self,
    *args: Any,
    req: "ClientRequest",
    timeout: "ClientTimeout",
    client_error: Type[Exception] = ClientConnectorError,
    **kwargs: Any,
) -> Tuple[asyncio.Transport, ResponseHandler]:
    """Call ``loop.create_connection`` and translate failures.

    Positional/keyword arguments are forwarded verbatim to
    ``create_connection``; the connect is bounded by
    ``timeout.sock_connect`` via ``ceil_timeout``.  Certificate
    errors, SSL errors and plain ``OSError`` are re-raised as the
    corresponding aiohttp client exceptions carrying
    ``req.connection_key``; ``client_error`` lets callers substitute
    a more specific error type (e.g. proxy errors).
    """
    try:
        async with ceil_timeout(timeout.sock_connect):
            return await self._loop.create_connection(*args, **kwargs)  # type: ignore[return-value]  # noqa
    except cert_errors as exc:
        raise ClientConnectorCertificateError(req.connection_key, exc) from exc
    except ssl_errors as exc:
        raise ClientConnectorSSLError(req.connection_key, exc) from exc
    except OSError as exc:
        # Generic socket-level failure (refused, unreachable, timeout...).
        raise client_error(req.connection_key, exc) from exc
|
| 993 |
+
|
| 994 |
+
def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
|
| 995 |
+
"""Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
|
| 996 |
+
|
| 997 |
+
One case is that :py:meth:`asyncio.loop.start_tls` is not yet
|
| 998 |
+
implemented under Python 3.6. It is necessary for TLS-in-TLS so
|
| 999 |
+
that it is possible to send HTTPS queries through HTTPS proxies.
|
| 1000 |
+
|
| 1001 |
+
This doesn't affect regular HTTP requests, though.
|
| 1002 |
+
"""
|
| 1003 |
+
if not req.is_ssl():
|
| 1004 |
+
return
|
| 1005 |
+
|
| 1006 |
+
proxy_url = req.proxy
|
| 1007 |
+
assert proxy_url is not None
|
| 1008 |
+
if proxy_url.scheme != "https":
|
| 1009 |
+
return
|
| 1010 |
+
|
| 1011 |
+
self._check_loop_for_start_tls()
|
| 1012 |
+
|
| 1013 |
+
def _check_loop_for_start_tls(self) -> None:
|
| 1014 |
+
try:
|
| 1015 |
+
self._loop.start_tls
|
| 1016 |
+
except AttributeError as attr_exc:
|
| 1017 |
+
raise RuntimeError(
|
| 1018 |
+
"An HTTPS request is being sent through an HTTPS proxy. "
|
| 1019 |
+
"This needs support for TLS in TLS but it is not implemented "
|
| 1020 |
+
"in your runtime for the stdlib asyncio.\n\n"
|
| 1021 |
+
"Please upgrade to Python 3.7 or higher. For more details, "
|
| 1022 |
+
"please see:\n"
|
| 1023 |
+
"* https://bugs.python.org/issue37179\n"
|
| 1024 |
+
"* https://github.com/python/cpython/pull/28073\n"
|
| 1025 |
+
"* https://docs.aiohttp.org/en/stable/"
|
| 1026 |
+
"client_advanced.html#proxy-support\n"
|
| 1027 |
+
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
|
| 1028 |
+
) from attr_exc
|
| 1029 |
+
|
| 1030 |
+
def _loop_supports_start_tls(self) -> bool:
|
| 1031 |
+
try:
|
| 1032 |
+
self._check_loop_for_start_tls()
|
| 1033 |
+
except RuntimeError:
|
| 1034 |
+
return False
|
| 1035 |
+
else:
|
| 1036 |
+
return True
|
| 1037 |
+
|
| 1038 |
+
def _warn_about_tls_in_tls(
|
| 1039 |
+
self,
|
| 1040 |
+
underlying_transport: asyncio.Transport,
|
| 1041 |
+
req: "ClientRequest",
|
| 1042 |
+
) -> None:
|
| 1043 |
+
"""Issue a warning if the requested URL has HTTPS scheme."""
|
| 1044 |
+
if req.request_info.url.scheme != "https":
|
| 1045 |
+
return
|
| 1046 |
+
|
| 1047 |
+
asyncio_supports_tls_in_tls = getattr(
|
| 1048 |
+
underlying_transport,
|
| 1049 |
+
"_start_tls_compatible",
|
| 1050 |
+
False,
|
| 1051 |
+
)
|
| 1052 |
+
|
| 1053 |
+
if asyncio_supports_tls_in_tls:
|
| 1054 |
+
return
|
| 1055 |
+
|
| 1056 |
+
warnings.warn(
|
| 1057 |
+
"An HTTPS request is being sent through an HTTPS proxy. "
|
| 1058 |
+
"This support for TLS in TLS is known to be disabled "
|
| 1059 |
+
"in the stdlib asyncio. This is why you'll probably see "
|
| 1060 |
+
"an error in the log below.\n\n"
|
| 1061 |
+
"It is possible to enable it via monkeypatching under "
|
| 1062 |
+
"Python 3.7 or higher. For more details, see:\n"
|
| 1063 |
+
"* https://bugs.python.org/issue37179\n"
|
| 1064 |
+
"* https://github.com/python/cpython/pull/28073\n\n"
|
| 1065 |
+
"You can temporarily patch this as follows:\n"
|
| 1066 |
+
"* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
|
| 1067 |
+
"* https://github.com/aio-libs/aiohttp/discussions/6044\n",
|
| 1068 |
+
RuntimeWarning,
|
| 1069 |
+
source=self,
|
| 1070 |
+
# Why `4`? At least 3 of the calls in the stack originate
|
| 1071 |
+
# from the methods in this class.
|
| 1072 |
+
stacklevel=3,
|
| 1073 |
+
)
|
| 1074 |
+
|
| 1075 |
+
async def _start_tls_connection(
    self,
    underlying_transport: asyncio.Transport,
    req: "ClientRequest",
    timeout: "ClientTimeout",
    client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
    """Wrap the raw TCP transport with TLS.

    Used for TLS-in-TLS (HTTPS request through an HTTPS proxy): a new
    protocol instance is created, the existing transport is upgraded
    via ``loop.start_tls()``, and (transport, protocol) for the TLS
    layer is returned.  SSL/cert/OS errors are translated into the
    corresponding aiohttp client exceptions; a ``TypeError`` from
    ``start_tls()`` (transport not supported) becomes a
    ``ClientConnectionError``.
    """
    tls_proto = self._factory()  # Create a brand new proto for TLS

    # Safety of the `cast()` call here is based on the fact that
    # internally `_get_ssl_context()` only returns `None` when
    # `req.is_ssl()` evaluates to `False` which is never gonna happen
    # in this code path. Of course, it's rather fragile
    # maintainability-wise but this is to be solved separately.
    sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))

    try:
        # Bound the whole TLS upgrade by the socket-connect timeout.
        async with ceil_timeout(timeout.sock_connect):
            try:
                tls_transport = await self._loop.start_tls(
                    underlying_transport,
                    tls_proto,
                    sslcontext,
                    server_hostname=req.host,
                    ssl_handshake_timeout=timeout.total,
                )
            except BaseException:
                # We need to close the underlying transport since
                # `start_tls()` probably failed before it had a
                # chance to do this:
                underlying_transport.close()
                raise
    except cert_errors as exc:
        raise ClientConnectorCertificateError(req.connection_key, exc) from exc
    except ssl_errors as exc:
        raise ClientConnectorSSLError(req.connection_key, exc) from exc
    except OSError as exc:
        raise client_error(req.connection_key, exc) from exc
    except TypeError as type_err:
        # Example cause looks like this:
        # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
        # object at 0x7f760615e460> is not supported by start_tls()

        raise ClientConnectionError(
            "Cannot initialize a TLS-in-TLS connection to host "
            f"{req.host!s}:{req.port:d} through an underlying connection "
            f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
            f"[{type_err!s}]"
        ) from type_err
    else:
        tls_proto.connection_made(
            tls_transport
        )  # Kick the state machine of the new TLS protocol

    return tls_transport, tls_proto
|
| 1131 |
+
|
| 1132 |
+
async def _create_direct_connection(
    self,
    req: "ClientRequest",
    traces: List["Trace"],
    timeout: "ClientTimeout",
    *,
    client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.Transport, ResponseHandler]:
    """Open a TCP connection directly to the request's host.

    Resolves the host (shielded so cancelling one waiter does not
    cancel the shared DNS lookup), then tries each resolved address
    in order until one connects; after connecting, the TLS
    fingerprint (if configured) is checked.  If every candidate
    fails, the last exception is re-raised.
    """
    sslcontext = self._get_ssl_context(req)
    fingerprint = self._get_fingerprint(req)

    host = req.url.raw_host
    assert host is not None
    port = req.port
    assert port is not None
    host_resolved = asyncio.ensure_future(
        self._resolve_host(host, port, traces=traces), loop=self._loop
    )
    try:
        # Cancelling this lookup should not cancel the underlying lookup
        # or else the cancel event will get broadcast to all the waiters
        # across all connections.
        hosts = await asyncio.shield(host_resolved)
    except asyncio.CancelledError:

        def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
            # Consume the eventual result/exception of the shielded
            # future so it is never reported as "never retrieved".
            with suppress(Exception, asyncio.CancelledError):
                fut.result()

        host_resolved.add_done_callback(drop_exception)
        raise
    except OSError as exc:
        # in case of proxy it is not ClientProxyConnectionError
        # it is problem of resolving proxy ip itself
        raise ClientConnectorError(req.connection_key, exc) from exc

    last_exc = None  # type: Optional[Exception]

    for hinfo in hosts:
        host = hinfo["host"]
        port = hinfo["port"]

        try:
            transp, proto = await self._wrap_create_connection(
                self._factory,
                host,
                port,
                timeout=timeout,
                ssl=sslcontext,
                family=hinfo["family"],
                proto=hinfo["proto"],
                flags=hinfo["flags"],
                server_hostname=hinfo["hostname"] if sslcontext else None,
                local_addr=self._local_addr,
                req=req,
                client_error=client_error,
            )
        except ClientConnectorError as exc:
            # Remember the failure and fall through to the next
            # resolved address.
            last_exc = exc
            continue

        if req.is_ssl() and fingerprint:
            try:
                fingerprint.check(transp)
            except ServerFingerprintMismatch as exc:
                transp.close()
                if not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transp)
                last_exc = exc
                continue

        return transp, proto
    else:
        # Loop exhausted without a successful connection.
        assert last_exc is not None
        raise last_exc
|
| 1207 |
+
|
| 1208 |
+
async def _create_proxy_connection(
    self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
    """Open a connection through the proxy configured on *req*.

    Plain HTTP requests are sent over the proxy connection directly.
    HTTPS requests first issue a CONNECT to establish a tunnel, then
    upgrade to TLS — via ``loop.start_tls()`` when the runtime
    supports it, otherwise by duplicating the raw socket and opening
    a new wrapped connection over it.
    """
    self._fail_on_no_start_tls(req)
    runtime_has_start_tls = self._loop_supports_start_tls()

    headers = {}  # type: Dict[str, str]
    if req.proxy_headers is not None:
        headers = req.proxy_headers  # type: ignore[assignment]
    headers[hdrs.HOST] = req.headers[hdrs.HOST]

    url = req.proxy
    assert url is not None
    proxy_req = ClientRequest(
        hdrs.METH_GET,
        url,
        headers=headers,
        auth=req.proxy_auth,
        loop=self._loop,
        ssl=req.ssl,
    )

    # create connection to proxy server
    transport, proto = await self._create_direct_connection(
        proxy_req, [], timeout, client_error=ClientProxyConnectionError
    )

    # Many HTTP proxies has buggy keepalive support. Let's not
    # reuse connection but close it after processing every
    # response.
    proto.force_close()

    # Move the Authorization header built for the proxy URL onto the
    # right request as Proxy-Authorization.
    auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
    if auth is not None:
        if not req.is_ssl():
            req.headers[hdrs.PROXY_AUTHORIZATION] = auth
        else:
            proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

    if req.is_ssl():
        if runtime_has_start_tls:
            self._warn_about_tls_in_tls(transport, req)

        # For HTTPS requests over HTTP proxy
        # we must notify proxy to tunnel connection
        # so we send CONNECT command:
        # CONNECT www.python.org:443 HTTP/1.1
        # Host: www.python.org
        #
        # next we must do TLS handshake and so on
        # to do this we must wrap raw socket into secure one
        # asyncio handles this perfectly
        proxy_req.method = hdrs.METH_CONNECT
        proxy_req.url = req.url
        key = attr.evolve(
            req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
        )
        conn = Connection(self, key, proto, self._loop)
        proxy_resp = await proxy_req.send(conn)
        try:
            protocol = conn._protocol
            assert protocol is not None

            # read_until_eof=True will ensure the connection isn't closed
            # once the response is received and processed allowing
            # START_TLS to work on the connection below.
            protocol.set_response_params(read_until_eof=runtime_has_start_tls)
            resp = await proxy_resp.start(conn)
        except BaseException:
            proxy_resp.close()
            conn.close()
            raise
        else:
            # Detach protocol/transport from the temporary Connection so
            # closing it later cannot tear down the tunnel.
            conn._protocol = None
            conn._transport = None
            try:
                if resp.status != 200:
                    message = resp.reason
                    if message is None:
                        message = RESPONSES[resp.status][0]
                    raise ClientHttpProxyError(
                        proxy_resp.request_info,
                        resp.history,
                        status=resp.status,
                        message=message,
                        headers=resp.headers,
                    )
                if not runtime_has_start_tls:
                    rawsock = transport.get_extra_info("socket", default=None)
                    if rawsock is None:
                        raise RuntimeError(
                            "Transport does not expose socket instance"
                        )
                    # Duplicate the socket, so now we can close proxy transport
                    rawsock = rawsock.dup()
            except BaseException:
                # It shouldn't be closed in `finally` because it's fed to
                # `loop.start_tls()` and the docs say not to touch it after
                # passing there.
                transport.close()
                raise
            finally:
                if not runtime_has_start_tls:
                    transport.close()

            if not runtime_has_start_tls:
                # HTTP proxy with support for upgrade to HTTPS
                sslcontext = self._get_ssl_context(req)
                return await self._wrap_create_connection(
                    self._factory,
                    timeout=timeout,
                    ssl=sslcontext,
                    sock=rawsock,
                    server_hostname=req.host,
                    req=req,
                )

            return await self._start_tls_connection(
                # Access the old transport for the last time before it's
                # closed and forgotten forever:
                transport,
                req=req,
                timeout=timeout,
            )
        finally:
            proxy_resp.close()

    return transport, proto
|
| 1336 |
+
|
| 1337 |
+
|
| 1338 |
+
class UnixConnector(BaseConnector):
    """Unix socket connector.

    path - Unix socket path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # Filesystem path of the AF_UNIX socket to connect to.
        self._path = path

    @property
    def path(self) -> str:
        """Path to unix socket."""
        return self._path

    async def _create_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        # Connect within the socket-connect timeout; any OS-level
        # failure is re-raised as a connector error carrying both the
        # socket path and the connection key.
        try:
            async with ceil_timeout(timeout.sock_connect):
                _, proto = await self._loop.create_unix_connection(
                    self._factory, self._path
                )
        except OSError as exc:
            raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc

        return cast(ResponseHandler, proto)
|
| 1385 |
+
|
| 1386 |
+
|
| 1387 |
+
class NamedPipeConnector(BaseConnector):
    """Named pipe connector.

    Only supported by the proactor event loop.
    See also: https://docs.python.org/3.7/library/asyncio-eventloop.html

    path - Windows named pipe path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # Named pipes require the Windows proactor event loop; fail
        # fast at construction time instead of on first connect.
        if not isinstance(
            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor " "loop under windows"
            )
        # Windows named pipe path (e.g. r"\\.\pipe\name").
        self._path = path

    @property
    def path(self) -> str:
        """Path to the named pipe."""
        return self._path

    async def _create_connection(
        self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        try:
            async with ceil_timeout(timeout.sock_connect):
                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]  # noqa: E501
                    self._factory, self._path
                )
            # the drain is required so that the connection_made is called
            # and transport is set otherwise it is not set before the
            # `assert conn.transport is not None`
            # in client.py's _request method
            await asyncio.sleep(0)
            # other option is to manually set transport like
            # `proto.transport = trans`
        except OSError as exc:
            raise ClientConnectorError(req.connection_key, exc) from exc

        return cast(ResponseHandler, proto)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/formdata.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import io
|
| 2 |
+
from typing import Any, Iterable, List, Optional
|
| 3 |
+
from urllib.parse import urlencode
|
| 4 |
+
|
| 5 |
+
from multidict import MultiDict, MultiDictProxy
|
| 6 |
+
|
| 7 |
+
from . import hdrs, multipart, payload
|
| 8 |
+
from .helpers import guess_filename
|
| 9 |
+
from .payload import Payload
|
| 10 |
+
|
| 11 |
+
__all__ = ("FormData",)
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class FormData:
    """Helper class for form body generation.

    Supports multipart/form-data and application/x-www-form-urlencoded.
    """

    def __init__(
        self,
        fields: Iterable[Any] = (),
        quote_fields: bool = True,
        charset: Optional[str] = None,
    ) -> None:
        # Writer used only when the multipart representation is needed.
        self._writer = multipart.MultipartWriter("form-data")
        # Accumulated (disposition-params, headers, value) triples.
        self._fields = []  # type: List[Any]
        # Flips to True once any field forces multipart encoding.
        self._is_multipart = False
        # Guards against serializing the multipart body twice.
        self._is_processed = False
        self._quote_fields = quote_fields
        self._charset = charset

        # Normalize the initial fields into a flat sequence.
        if isinstance(fields, dict):
            fields = list(fields.items())
        elif not isinstance(fields, (list, tuple)):
            fields = (fields,)
        self.add_fields(*fields)

    @property
    def is_multipart(self) -> bool:
        """Whether the form will be encoded as multipart/form-data."""
        return self._is_multipart

    def add_field(
        self,
        name: str,
        value: Any,
        *,
        content_type: Optional[str] = None,
        filename: Optional[str] = None,
        content_transfer_encoding: Optional[str] = None,
    ) -> None:
        """Add a single form field.

        File-like values, an explicit filename, content type or
        transfer encoding all switch the form to multipart encoding.
        Raises ``TypeError`` when the optional string parameters are
        not ``str``.
        """
        if isinstance(value, io.IOBase):
            self._is_multipart = True
        elif isinstance(value, (bytes, bytearray, memoryview)):
            # Raw bytes default to being sent as a file named after
            # the field unless the caller specified otherwise.
            if filename is None and content_transfer_encoding is None:
                filename = name

        type_options = MultiDict({"name": name})  # type: MultiDict[str]
        if filename is not None and not isinstance(filename, str):
            raise TypeError(
                "filename must be an instance of str. " "Got: %s" % filename
            )
        if filename is None and isinstance(value, io.IOBase):
            filename = guess_filename(value, name)
        if filename is not None:
            type_options["filename"] = filename
            self._is_multipart = True

        headers = {}
        if content_type is not None:
            if not isinstance(content_type, str):
                raise TypeError(
                    "content_type must be an instance of str. " "Got: %s" % content_type
                )
            headers[hdrs.CONTENT_TYPE] = content_type
            self._is_multipart = True
        if content_transfer_encoding is not None:
            if not isinstance(content_transfer_encoding, str):
                raise TypeError(
                    "content_transfer_encoding must be an instance"
                    " of str. Got: %s" % content_transfer_encoding
                )
            headers[hdrs.CONTENT_TRANSFER_ENCODING] = content_transfer_encoding
            self._is_multipart = True

        self._fields.append((type_options, headers, value))

    def add_fields(self, *fields: Any) -> None:
        """Add several fields at once.

        Accepts file-like objects, multidicts (expanded to their
        items) and ``(name, value)`` pairs; anything else raises
        ``TypeError``.
        """
        to_add = list(fields)

        while to_add:
            rec = to_add.pop(0)

            if isinstance(rec, io.IOBase):
                k = guess_filename(rec, "unknown")
                self.add_field(k, rec)  # type: ignore[arg-type]

            elif isinstance(rec, (MultiDictProxy, MultiDict)):
                # Flatten multidicts into individual (name, value) pairs.
                to_add.extend(rec.items())

            elif isinstance(rec, (list, tuple)) and len(rec) == 2:
                k, fp = rec
                self.add_field(k, fp)  # type: ignore[arg-type]

            else:
                raise TypeError(
                    "Only io.IOBase, multidict and (name, file) "
                    "pairs allowed, use .add_field() for passing "
                    "more complex parameters, got {!r}".format(rec)
                )

    def _gen_form_urlencoded(self) -> payload.BytesPayload:
        # form data (x-www-form-urlencoded)
        data = []
        for type_options, _, value in self._fields:
            data.append((type_options["name"], value))

        charset = self._charset if self._charset is not None else "utf-8"

        if charset == "utf-8":
            content_type = "application/x-www-form-urlencoded"
        else:
            content_type = "application/x-www-form-urlencoded; " "charset=%s" % charset

        return payload.BytesPayload(
            urlencode(data, doseq=True, encoding=charset).encode(),
            content_type=content_type,
        )

    def _gen_form_data(self) -> multipart.MultipartWriter:
        """Encode a list of fields using the multipart/form-data MIME format"""
        if self._is_processed:
            raise RuntimeError("Form data has been processed already")
        for dispparams, headers, value in self._fields:
            try:
                if hdrs.CONTENT_TYPE in headers:
                    part = payload.get_payload(
                        value,
                        content_type=headers[hdrs.CONTENT_TYPE],
                        headers=headers,
                        encoding=self._charset,
                    )
                else:
                    part = payload.get_payload(
                        value, headers=headers, encoding=self._charset
                    )
            except Exception as exc:
                raise TypeError(
                    "Can not serialize value type: %r\n "
                    "headers: %r\n value: %r" % (type(value), headers, value)
                ) from exc

            if dispparams:
                part.set_content_disposition(
                    "form-data", quote_fields=self._quote_fields, **dispparams
                )
                # FIXME cgi.FieldStorage doesn't likes body parts with
                # Content-Length which were sent via chunked transfer encoding
                assert part.headers is not None
                part.headers.popall(hdrs.CONTENT_LENGTH, None)

            self._writer.append_payload(part)

        self._is_processed = True
        return self._writer

    def __call__(self) -> Payload:
        """Serialize the form into a payload of the appropriate type."""
        if self._is_multipart:
            return self._gen_form_data()
        else:
            return self._gen_form_urlencoded()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/hdrs.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP Headers constants."""
|
| 2 |
+
|
| 3 |
+
# After changing the file content call ./tools/gen.py
|
| 4 |
+
# to regenerate the headers parser
|
| 5 |
+
import sys
|
| 6 |
+
from typing import Set
|
| 7 |
+
|
| 8 |
+
from multidict import istr
|
| 9 |
+
|
| 10 |
+
if sys.version_info >= (3, 8):
|
| 11 |
+
from typing import Final
|
| 12 |
+
else:
|
| 13 |
+
from typing_extensions import Final
|
| 14 |
+
|
| 15 |
+
METH_ANY: Final[str] = "*"
|
| 16 |
+
METH_CONNECT: Final[str] = "CONNECT"
|
| 17 |
+
METH_HEAD: Final[str] = "HEAD"
|
| 18 |
+
METH_GET: Final[str] = "GET"
|
| 19 |
+
METH_DELETE: Final[str] = "DELETE"
|
| 20 |
+
METH_OPTIONS: Final[str] = "OPTIONS"
|
| 21 |
+
METH_PATCH: Final[str] = "PATCH"
|
| 22 |
+
METH_POST: Final[str] = "POST"
|
| 23 |
+
METH_PUT: Final[str] = "PUT"
|
| 24 |
+
METH_TRACE: Final[str] = "TRACE"
|
| 25 |
+
|
| 26 |
+
METH_ALL: Final[Set[str]] = {
|
| 27 |
+
METH_CONNECT,
|
| 28 |
+
METH_HEAD,
|
| 29 |
+
METH_GET,
|
| 30 |
+
METH_DELETE,
|
| 31 |
+
METH_OPTIONS,
|
| 32 |
+
METH_PATCH,
|
| 33 |
+
METH_POST,
|
| 34 |
+
METH_PUT,
|
| 35 |
+
METH_TRACE,
|
| 36 |
+
}
|
| 37 |
+
|
| 38 |
+
ACCEPT: Final[istr] = istr("Accept")
|
| 39 |
+
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
|
| 40 |
+
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
|
| 41 |
+
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
|
| 42 |
+
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
|
| 43 |
+
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
|
| 44 |
+
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
|
| 45 |
+
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
|
| 46 |
+
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
|
| 47 |
+
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
|
| 48 |
+
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
|
| 49 |
+
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
|
| 50 |
+
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
|
| 51 |
+
AGE: Final[istr] = istr("Age")
|
| 52 |
+
ALLOW: Final[istr] = istr("Allow")
|
| 53 |
+
AUTHORIZATION: Final[istr] = istr("Authorization")
|
| 54 |
+
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
|
| 55 |
+
CONNECTION: Final[istr] = istr("Connection")
|
| 56 |
+
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
|
| 57 |
+
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
|
| 58 |
+
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
|
| 59 |
+
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
|
| 60 |
+
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
|
| 61 |
+
CONTENT_MD5: Final[istr] = istr("Content-MD5")
|
| 62 |
+
CONTENT_RANGE: Final[istr] = istr("Content-Range")
|
| 63 |
+
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
|
| 64 |
+
CONTENT_TYPE: Final[istr] = istr("Content-Type")
|
| 65 |
+
COOKIE: Final[istr] = istr("Cookie")
|
| 66 |
+
DATE: Final[istr] = istr("Date")
|
| 67 |
+
DESTINATION: Final[istr] = istr("Destination")
|
| 68 |
+
DIGEST: Final[istr] = istr("Digest")
|
| 69 |
+
ETAG: Final[istr] = istr("Etag")
|
| 70 |
+
EXPECT: Final[istr] = istr("Expect")
|
| 71 |
+
EXPIRES: Final[istr] = istr("Expires")
|
| 72 |
+
FORWARDED: Final[istr] = istr("Forwarded")
|
| 73 |
+
FROM: Final[istr] = istr("From")
|
| 74 |
+
HOST: Final[istr] = istr("Host")
|
| 75 |
+
IF_MATCH: Final[istr] = istr("If-Match")
|
| 76 |
+
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
|
| 77 |
+
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
|
| 78 |
+
IF_RANGE: Final[istr] = istr("If-Range")
|
| 79 |
+
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
|
| 80 |
+
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
|
| 81 |
+
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
|
| 82 |
+
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
|
| 83 |
+
LINK: Final[istr] = istr("Link")
|
| 84 |
+
LOCATION: Final[istr] = istr("Location")
|
| 85 |
+
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
|
| 86 |
+
ORIGIN: Final[istr] = istr("Origin")
|
| 87 |
+
PRAGMA: Final[istr] = istr("Pragma")
|
| 88 |
+
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
|
| 89 |
+
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
|
| 90 |
+
RANGE: Final[istr] = istr("Range")
|
| 91 |
+
REFERER: Final[istr] = istr("Referer")
|
| 92 |
+
RETRY_AFTER: Final[istr] = istr("Retry-After")
|
| 93 |
+
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
|
| 94 |
+
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
|
| 95 |
+
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
|
| 96 |
+
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
|
| 97 |
+
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
|
| 98 |
+
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
|
| 99 |
+
SERVER: Final[istr] = istr("Server")
|
| 100 |
+
SET_COOKIE: Final[istr] = istr("Set-Cookie")
|
| 101 |
+
TE: Final[istr] = istr("TE")
|
| 102 |
+
TRAILER: Final[istr] = istr("Trailer")
|
| 103 |
+
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
|
| 104 |
+
UPGRADE: Final[istr] = istr("Upgrade")
|
| 105 |
+
URI: Final[istr] = istr("URI")
|
| 106 |
+
USER_AGENT: Final[istr] = istr("User-Agent")
|
| 107 |
+
VARY: Final[istr] = istr("Vary")
|
| 108 |
+
VIA: Final[istr] = istr("Via")
|
| 109 |
+
WANT_DIGEST: Final[istr] = istr("Want-Digest")
|
| 110 |
+
WARNING: Final[istr] = istr("Warning")
|
| 111 |
+
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
|
| 112 |
+
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
|
| 113 |
+
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
|
| 114 |
+
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_exceptions.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low-level http related exceptions."""
|
| 2 |
+
|
| 3 |
+
|
| 4 |
+
from typing import Optional, Union
|
| 5 |
+
|
| 6 |
+
from .typedefs import _CIMultiDict
|
| 7 |
+
|
| 8 |
+
__all__ = ("HttpProcessingError",)
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults, overridable by subclasses or per instance.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        # Only shadow the class-level ``code`` when an explicit one is
        # supplied, so subclasses keep their declared status by default.
        if code is not None:
            self.code = code
        self.message = message
        self.headers = headers

    def __str__(self) -> str:
        return f"{self.code}, message={self.message!r}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self}>"
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class BadHttpMessage(HttpProcessingError):
    """Malformed HTTP message; reported as 400 Bad Request."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Expose the message through ``args`` like a plain Exception
        # constructed with a single positional argument.
        self.args = (message,)
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
class HttpBadRequest(BadHttpMessage):
    """HTTP 400 Bad Request error."""

    # Re-declares the same status/message that BadHttpMessage already
    # defaults to.
    code = 400
    message = "Bad Request"
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload encoding errors."""
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""
|
| 70 |
+
|
| 71 |
+
|
| 72 |
+
class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class LineTooLong(BadHttpMessage):
    """A protocol line exceeded the configured read limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        detail = f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        super().__init__(detail)
        # Keep the structured values available to callers via ``args``.
        self.args = (line, limit, actual_size)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
class InvalidHeader(BadHttpMessage):
    """A malformed HTTP header was encountered."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        # Accept raw bytes off the wire; decode permissively so the
        # offending header can always be included in the message.
        text = hdr.decode("utf-8", "surrogateescape") if isinstance(hdr, bytes) else hdr
        super().__init__(f"Invalid HTTP Header: {text}")
        self.hdr = text
        self.args = (text,)
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
class BadStatusLine(BadHttpMessage):
    """A malformed HTTP status line was received."""

    def __init__(self, line: str = "") -> None:
        # Defensive: tolerate non-str input by falling back to repr().
        if not isinstance(line, str):
            line = repr(line)
        super().__init__(f"Bad status line {line!r}")
        self.line = line
        self.args = (line,)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
class InvalidURLError(BadHttpMessage):
    # NOTE(review): presumably raised for malformed request URLs/targets;
    # no raising site is visible in this file — confirm against callers.
    pass
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_websocket.py
ADDED
|
@@ -0,0 +1,701 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""WebSocket protocol versions 13 and 8."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import collections
|
| 5 |
+
import json
|
| 6 |
+
import random
|
| 7 |
+
import re
|
| 8 |
+
import sys
|
| 9 |
+
import zlib
|
| 10 |
+
from enum import IntEnum
|
| 11 |
+
from struct import Struct
|
| 12 |
+
from typing import Any, Callable, List, Optional, Pattern, Set, Tuple, Union, cast
|
| 13 |
+
|
| 14 |
+
from .base_protocol import BaseProtocol
|
| 15 |
+
from .helpers import NO_EXTENSIONS
|
| 16 |
+
from .streams import DataQueue
|
| 17 |
+
from .typedefs import Final
|
| 18 |
+
|
| 19 |
+
__all__ = (
|
| 20 |
+
"WS_CLOSED_MESSAGE",
|
| 21 |
+
"WS_CLOSING_MESSAGE",
|
| 22 |
+
"WS_KEY",
|
| 23 |
+
"WebSocketReader",
|
| 24 |
+
"WebSocketWriter",
|
| 25 |
+
"WSMessage",
|
| 26 |
+
"WebSocketError",
|
| 27 |
+
"WSMsgType",
|
| 28 |
+
"WSCloseCode",
|
| 29 |
+
)
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class WSCloseCode(IntEnum):
    """WebSocket close status codes (values from the close-code registry)."""

    OK = 1000
    GOING_AWAY = 1001
    PROTOCOL_ERROR = 1002
    UNSUPPORTED_DATA = 1003
    ABNORMAL_CLOSURE = 1006
    INVALID_TEXT = 1007
    POLICY_VIOLATION = 1008
    MESSAGE_TOO_BIG = 1009
    MANDATORY_EXTENSION = 1010
    INTERNAL_ERROR = 1011
    SERVICE_RESTART = 1012
    TRY_AGAIN_LATER = 1013
    BAD_GATEWAY = 1014
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
class WSMsgType(IntEnum):
    """Message/opcode types carried in WSMessage.type."""

    # websocket spec types
    CONTINUATION = 0x0
    TEXT = 0x1
    BINARY = 0x2
    PING = 0x9
    PONG = 0xA
    CLOSE = 0x8

    # aiohttp specific types
    CLOSING = 0x100
    CLOSED = 0x101
    ERROR = 0x102

    # Lower-case aliases (same members under an alternate spelling).
    text = TEXT
    binary = BINARY
    ping = PING
    pong = PONG
    close = CLOSE
    closing = CLOSING
    closed = CLOSED
    error = ERROR
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
UNPACK_LEN2 = Struct("!H").unpack_from
|
| 79 |
+
UNPACK_LEN3 = Struct("!Q").unpack_from
|
| 80 |
+
UNPACK_CLOSE_CODE = Struct("!H").unpack
|
| 81 |
+
PACK_LEN1 = Struct("!BB").pack
|
| 82 |
+
PACK_LEN2 = Struct("!BBH").pack
|
| 83 |
+
PACK_LEN3 = Struct("!BBQ").pack
|
| 84 |
+
PACK_CLOSE_CODE = Struct("!H").pack
|
| 85 |
+
MSG_SIZE: Final[int] = 2 ** 14
|
| 86 |
+
DEFAULT_LIMIT: Final[int] = 2 ** 16
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
_WSMessageBase = collections.namedtuple("_WSMessageBase", ["type", "data", "extra"])
|
| 90 |
+
|
| 91 |
+
|
| 92 |
+
class WSMessage(_WSMessageBase):
    # Named-tuple message with fields ``type``, ``data`` and ``extra``.
    def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
        """Return parsed JSON data.

        ``loads`` may be overridden to use a custom JSON decoder; it is
        applied to ``self.data`` as-is.

        .. versionadded:: 0.22
        """
        return loads(self.data)
|
| 99 |
+
|
| 100 |
+
|
| 101 |
+
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
|
| 102 |
+
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
| 103 |
+
|
| 104 |
+
|
| 105 |
+
class WebSocketError(Exception):
    """WebSocket protocol parser error."""

    def __init__(self, code: int, message: str) -> None:
        # Store both values in ``args`` (Exception convention) and also
        # expose the numeric close code as an attribute.
        super().__init__(code, message)
        self.code = code

    def __str__(self) -> str:
        # args[1] is the human-readable message passed to __init__.
        return cast(str, self.args[1])
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class WSHandshakeError(Exception):
    """WebSocket protocol handshake error.

    Raised during extension negotiation (see ``ws_ext_parse``).
    """
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
native_byteorder: Final[str] = sys.byteorder
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
# Used by _websocket_mask_python
|
| 124 |
+
_XOR_TABLE: Final[List[bytes]] = [bytes(a ^ b for a in range(256)) for b in range(256)]
|
| 125 |
+
|
| 126 |
+
|
| 127 |
+
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
    """Websocket masking function.

    XORs ``data`` in place with the 4-byte ``mask``, as specified in
    section 5.3 of RFC 6455.  ``data`` must be a ``bytearray``; it is
    mutated and nothing is returned.

    This pure-python implementation may be replaced by an optimized
    version when available.
    """
    assert isinstance(data, bytearray), data
    assert len(mask) == 4, mask

    if not data:
        return
    # One precomputed XOR translation table per mask byte: every 4th byte
    # of the payload shares the same key, so each stride is translated in
    # a single C-level pass.
    for offset, key in enumerate(mask):
        data[offset::4] = data[offset::4].translate(_XOR_TABLE[key])
|
| 149 |
+
|
| 150 |
+
|
| 151 |
+
if NO_EXTENSIONS: # pragma: no cover
|
| 152 |
+
_websocket_mask = _websocket_mask_python
|
| 153 |
+
else:
|
| 154 |
+
try:
|
| 155 |
+
from ._websocket import _websocket_mask_cython # type: ignore[import]
|
| 156 |
+
|
| 157 |
+
_websocket_mask = _websocket_mask_cython
|
| 158 |
+
except ImportError: # pragma: no cover
|
| 159 |
+
_websocket_mask = _websocket_mask_python
|
| 160 |
+
|
| 161 |
+
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
|
| 165 |
+
r"^(?:;\s*(?:"
|
| 166 |
+
r"(server_no_context_takeover)|"
|
| 167 |
+
r"(client_no_context_takeover)|"
|
| 168 |
+
r"(server_max_window_bits(?:=(\d+))?)|"
|
| 169 |
+
r"(client_max_window_bits(?:=(\d+))?)))*$"
|
| 170 |
+
)
|
| 171 |
+
|
| 172 |
+
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    """Parse a Sec-WebSocket-Extensions value for permessage-deflate.

    Returns ``(compress, notakeover)``: ``compress`` is the negotiated
    window size (0 when the extension is absent or unusable, 15 for a bare
    ``permessage-deflate`` token) and ``notakeover`` mirrors the
    ``*_no_context_takeover`` parameter.

    Raises WSHandshakeError on an offer the client side cannot accept.
    """
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # A bare `permessage-deflate` token means the default window (15).
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # Server never fails to detect a compress handshake.
                # Server does not need to send max wbit to client
                if match.group(4):
                    compress = int(match.group(4))
                    # Group3 must match if group4 matches
                    # Compress wbit 8 is not supported in zlib
                    # If compress level not supported,
                    # CONTINUE to next extension
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group5 must match if group6 matches
                    # Compress wbit 8 is not supported in zlib
                    # If compress level not supported,
                    # FAIL the parse progress
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
        # Return Fail if client side and not match
        elif not isserver:
            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))

    return compress, notakeover
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    """Format a Sec-WebSocket-Extensions value for permessage-deflate.

    ``compress`` is the zlib window size to advertise (9..15); clients
    additionally offer ``client_max_window_bits``.  Raises ValueError for
    window sizes zlib cannot handle.
    """
    # client_notakeover=False not used for server
    # zlib cannot operate with wbits=8, so only 9..15 are acceptable.
    if not 9 <= compress <= 15:
        raise ValueError(
            "Compress wbits must between 9 and 15, " "zlib does not support wbits=8"
        )
    parts = ["permessage-deflate"]
    if not isserver:
        parts.append("client_max_window_bits")
    if compress < 15:
        parts.append("server_max_window_bits=" + str(compress))
    if server_notakeover:
        parts.append("server_no_context_takeover")
    # if client_notakeover:
    #     parts.append('client_no_context_takeover')
    return "; ".join(parts)
|
| 246 |
+
|
| 247 |
+
|
| 248 |
+
class WSParserState(IntEnum):
    """States of the incremental frame parser (WebSocketReader.parse_frame)."""

    READ_HEADER = 1
    READ_PAYLOAD_LENGTH = 2
    READ_PAYLOAD_MASK = 3
    READ_PAYLOAD = 4
|
| 253 |
+
|
| 254 |
+
|
| 255 |
+
class WebSocketReader:
    """Incremental WebSocket frame parser.

    Raw bytes are fed via ``feed_data``; complete messages are pushed into
    ``queue`` as ``WSMessage`` instances.  Parser state survives across
    calls so frames may be split arbitrarily over the wire.
    """

    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        # Destination queue for fully parsed messages.
        self.queue = queue
        # 0 disables the size limit; otherwise messages >= this size raise.
        self._max_msg_size = max_msg_size

        self._exc = None  # type: Optional[BaseException]
        # Accumulated payload of a fragmented (non-fin) message.
        self._partial = bytearray()
        self._state = WSParserState.READ_HEADER

        # Opcode of the first fragment, remembered for CONTINUATION frames.
        self._opcode = None  # type: Optional[int]
        self._frame_fin = False
        self._frame_opcode = None  # type: Optional[int]
        self._frame_payload = bytearray()

        # Bytes left over from the previous feed that did not form a frame.
        self._tail = b""
        self._has_mask = False
        self._frame_mask = None  # type: Optional[bytes]
        self._payload_length = 0
        self._payload_length_flag = 0
        # None until the first fragment decides the message's compression.
        self._compressed = None  # type: Optional[bool]
        self._decompressobj = None  # type: Any  # zlib.decompressobj actually
        # Whether permessage-deflate was negotiated for this connection.
        self._compress = compress

    def feed_eof(self) -> None:
        """Signal end of stream to the message queue."""
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Feed raw bytes; return ``(done, unconsumed_tail)``.

        After the first parse error all further data is rejected and
        returned unconsumed.
        """
        if self._exc:
            return True, data

        try:
            return self._feed_data(data)
        except Exception as exc:
            # Remember the failure and propagate it to queue consumers.
            self._exc = exc
            self.queue.set_exception(exc)
            return True, b""

    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        # Dispatch every complete frame parsed from ``data``.
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if compressed and not self._decompressobj:
                # Raw deflate stream (negative wbits: no zlib header).
                self._decompressobj = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    # A 1-byte close payload is malformed: the code needs 2.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # previous frame was non finished
                    # we should get continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompression must be done after all fragments of the
                    # message have been received.
                    if compressed:
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket."""
        frames = []
        if self._tail:
            # Prepend bytes left over from the previous call.
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    #           / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    #    1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be " "larger than 125 bytes",
                        )

                    # Set compress status if last package is FIN
                    # OR set compress status if this is first fragment
                    # Raise error if not first fragment with rsv1 = 0x1
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    # Extended 16-bit payload length.
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    # Extended 64-bit payload length.
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    # Frame continues beyond this buffer; take what we have.
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames
|
| 571 |
+
|
| 572 |
+
|
| 573 |
+
class WebSocketWriter:
    """Serializes outgoing WebSocket frames and writes them to a transport.

    Handles optional client-side masking and optional per-message deflate
    compression (RFC 7692 style: the deflate trailing bytes are stripped and
    the RSV1 bit is set on compressed frames).
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: Any = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        # protocol: provides _drain_helper() for write flow control.
        # transport: the asyncio transport frames are written to.
        # use_mask: mask payloads (required for the client side of a ws).
        # limit: bytes buffered before awaiting a drain.
        # compress: deflate window bits; 0 disables permessage-deflate.
        # notakeover: when True, reset compressor context after each message.
        self.protocol = protocol
        self.transport = transport
        self.use_mask = use_mask
        # Bind the RNG's randrange once; used to generate 32-bit mask keys.
        self.randrange = random.randrange
        self.compress = compress
        self.notakeover = notakeover
        self._closing = False
        self._limit = limit
        # Running count of bytes written since the last drain.
        self._output_size = 0
        self._compressobj = None  # type: Any  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        # Once closing, only CLOSE frames may still go out.
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        # Compression only applies to data frames (opcode < 8, i.e. not
        # control frames like PING/PONG/CLOSE).
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = zlib.compressobj(level=zlib.Z_BEST_SPEED, wbits=-compress)
            else:  # self.compress
                # Lazily create the shared (context-takeover) compressor.
                if not self._compressobj:
                    self._compressobj = zlib.compressobj(
                        level=zlib.Z_BEST_SPEED, wbits=-self.compress
                    )
                compressobj = self._compressobj

            message = compressobj.compress(message)
            # Z_FULL_FLUSH discards compression context between messages
            # (no-context-takeover); Z_SYNC_FLUSH keeps it.
            message = message + compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            # Strip the 4-byte deflate sync trailing block, per RFC 7692.
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            # 0x40 sets the RSV1 bit, flagging the frame as compressed.
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        # 0x80 in the second header byte advertises a masked payload.
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Choose the header layout based on payload size, per RFC 6455:
        # <126 inline, <2**16 via a 16-bit extended length, else 64-bit.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            # Generate a fresh 4-byte masking key and XOR it over the payload.
            mask = self.randrange(0, 0xFFFFFFFF)
            mask = mask.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + len(message)
        else:
            # Avoid copying very large payloads into a combined buffer.
            if len(message) > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + len(message)

        # Flow control: once enough bytes are buffered, wait for the
        # transport to drain before accepting more frames.
        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _write(self, data: bytes) -> None:
        # Low-level write; refuses to write once the transport is gone.
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: bytes = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: bytes = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: bytes = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if sending the CLOSE frame failed, so no
            # further data frames are accepted.
            self._closing = True
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_writer.py
ADDED
|
@@ -0,0 +1,200 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Http related parsers and protocol."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import zlib
|
| 5 |
+
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
|
| 6 |
+
|
| 7 |
+
from multidict import CIMultiDict
|
| 8 |
+
|
| 9 |
+
from .abc import AbstractStreamWriter
|
| 10 |
+
from .base_protocol import BaseProtocol
|
| 11 |
+
from .helpers import NO_EXTENSIONS
|
| 12 |
+
|
| 13 |
+
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class HttpVersion(NamedTuple):
    """HTTP protocol version as an immutable (major, minor) pair."""

    # Major version component, e.g. 1 in HTTP/1.1.
    major: int
    # Minor version component, e.g. 1 in HTTP/1.1.
    minor: int


# Shared singletons for the two common HTTP/1.x versions.
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
|
| 26 |
+
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class StreamWriter(AbstractStreamWriter):
    """Writes an HTTP message (status line, headers, body) to a transport.

    Supports optional chunked transfer encoding, optional deflate/gzip
    compression, and Content-Length truncation via the ``length`` attribute.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        # protocol: owns the transport and provides _drain_helper().
        # on_chunk_sent / on_headers_sent: optional async tracing hooks
        # awaited before each body chunk / the header block is written.
        self._protocol = protocol
        self._transport = protocol.transport

        self.loop = loop
        # Remaining Content-Length budget; None means unlimited.
        self.length = None
        # Whether chunked transfer encoding is active.
        self.chunked = False
        # Bytes written since the last drain (reset when drained).
        self.buffer_size = 0
        # Total bytes ever written through this writer.
        self.output_size = 0

        self._eof = False
        self._compress = None  # type: Any
        self._drain_waiter = None

        self._on_chunk_sent = on_chunk_sent  # type: _T_OnChunkSent
        self._on_headers_sent = on_headers_sent  # type: _T_OnHeadersSent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """The underlying transport, or None after write_eof()."""
        return self._transport

    @property
    def protocol(self) -> BaseProtocol:
        """The protocol that owns this writer."""
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the body to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Enable on-the-fly body compression ("deflate" or "gzip")."""
        # wbits of 16 + MAX_WBITS selects a gzip container in zlib.
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        # Account for the bytes, then push them to the transport.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size

        if self._transport is None or self._transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self._transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = self._compress.compress(chunk)
            # Compressor may buffer input and emit nothing yet.
            if not chunk:
                return

        if self.length is not None:
            # Enforce the Content-Length budget, truncating any excess.
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # Prefix with the hex chunk size and terminate with CRLF.
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            # Flow control: await the transport once LIMIT bytes buffered.
            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Finish the body, flushing compressor state and the final chunk.

        Idempotent; detaches the transport so further writes fail.
        """
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = self._compress.compress(chunk)

            # Flush whatever the compressor still holds.
            chunk = chunk + self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                # Final data chunk plus the zero-length terminating chunk.
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    # No trailing data: just the terminating zero chunk.
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True
        self._transport = None

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()
|
| 174 |
+
|
| 175 |
+
|
| 176 |
+
def _safe_header(string: str) -> str:
|
| 177 |
+
if "\r" in string or "\n" in string:
|
| 178 |
+
raise ValueError(
|
| 179 |
+
"Newline or carriage return detected in headers. "
|
| 180 |
+
"Potential header injection attack."
|
| 181 |
+
)
|
| 182 |
+
return string
|
| 183 |
+
|
| 184 |
+
|
| 185 |
+
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
    """Render the status line and headers as a UTF-8 encoded header block.

    Each header name and value is passed through ``_safe_header`` to reject
    CR/LF injection; the block ends with the blank line that terminates an
    HTTP header section.
    """
    joined = "\r\n".join(
        _safe_header(name) + ": " + _safe_header(value)
        for name, value in headers.items()
    )
    serialized = status_line + "\r\n" + joined + "\r\n\r\n"
    return serialized.encode("utf-8")
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
# Default to the pure-Python serializer; swap in the C-accelerated one
# below when the extension is importable and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    # C extension not built/available; keep the pure-Python fallback.
    pass
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/multipart.py
ADDED
|
@@ -0,0 +1,963 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import base64
|
| 2 |
+
import binascii
|
| 3 |
+
import json
|
| 4 |
+
import re
|
| 5 |
+
import uuid
|
| 6 |
+
import warnings
|
| 7 |
+
import zlib
|
| 8 |
+
from collections import deque
|
| 9 |
+
from types import TracebackType
|
| 10 |
+
from typing import (
|
| 11 |
+
TYPE_CHECKING,
|
| 12 |
+
Any,
|
| 13 |
+
AsyncIterator,
|
| 14 |
+
Deque,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterator,
|
| 17 |
+
List,
|
| 18 |
+
Mapping,
|
| 19 |
+
Optional,
|
| 20 |
+
Sequence,
|
| 21 |
+
Tuple,
|
| 22 |
+
Type,
|
| 23 |
+
Union,
|
| 24 |
+
cast,
|
| 25 |
+
)
|
| 26 |
+
from urllib.parse import parse_qsl, unquote, urlencode
|
| 27 |
+
|
| 28 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiMapping
|
| 29 |
+
|
| 30 |
+
from .hdrs import (
|
| 31 |
+
CONTENT_DISPOSITION,
|
| 32 |
+
CONTENT_ENCODING,
|
| 33 |
+
CONTENT_LENGTH,
|
| 34 |
+
CONTENT_TRANSFER_ENCODING,
|
| 35 |
+
CONTENT_TYPE,
|
| 36 |
+
)
|
| 37 |
+
from .helpers import CHAR, TOKEN, parse_mimetype, reify
|
| 38 |
+
from .http import HeadersParser
|
| 39 |
+
from .payload import (
|
| 40 |
+
JsonPayload,
|
| 41 |
+
LookupError,
|
| 42 |
+
Order,
|
| 43 |
+
Payload,
|
| 44 |
+
StringPayload,
|
| 45 |
+
get_payload,
|
| 46 |
+
payload_type,
|
| 47 |
+
)
|
| 48 |
+
from .streams import StreamReader
|
| 49 |
+
|
| 50 |
+
__all__ = (
|
| 51 |
+
"MultipartReader",
|
| 52 |
+
"MultipartWriter",
|
| 53 |
+
"BodyPartReader",
|
| 54 |
+
"BadContentDispositionHeader",
|
| 55 |
+
"BadContentDispositionParam",
|
| 56 |
+
"parse_content_disposition",
|
| 57 |
+
"content_disposition_filename",
|
| 58 |
+
)
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 62 |
+
from .client_reqrep import ClientResponse
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
class BadContentDispositionHeader(RuntimeWarning):
    """Warning issued when a Content-Disposition header is unparseable."""

    pass
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
class BadContentDispositionParam(RuntimeWarning):
    """Warning issued when a single Content-Disposition parameter is invalid."""

    pass
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
def parse_content_disposition(
    header: Optional[str],
) -> Tuple[Optional[str], Dict[str, str]]:
    """Parse a Content-Disposition header into (disposition-type, params).

    Returns ``(None, {})`` — after emitting a ``BadContentDispositionHeader``
    warning — when the header as a whole is malformed; individual bad
    parameters only produce a ``BadContentDispositionParam`` warning and are
    skipped.
    """

    # -- small local predicates over header tokens -------------------------

    def is_token(string: str) -> bool:
        # Non-empty and composed only of RFC 2616 token characters.
        return bool(string) and TOKEN >= set(string)

    def is_quoted(string: str) -> bool:
        # Surrounded by double quotes.
        return string[0] == string[-1] == '"'

    def is_rfc5987(string: str) -> bool:
        # ext-value form: charset'language'value-chars (exactly two quotes).
        return is_token(string) and string.count("'") == 2

    def is_extended_param(string: str) -> bool:
        # Extended parameter name, e.g. filename*
        return string.endswith("*")

    def is_continuous_param(string: str) -> bool:
        # RFC 2231 continuation, e.g. filename*0 or filename*1*
        pos = string.find("*") + 1
        if not pos:
            return False
        substring = string[pos:-1] if string.endswith("*") else string[pos:]
        return substring.isdigit()

    def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
        # Undo backslash escaping inside quoted-string values.
        return re.sub(f"\\\\([{chars}])", "\\1", text)

    if not header:
        return None, {}

    disptype, *parts = header.split(";")
    if not is_token(disptype):
        warnings.warn(BadContentDispositionHeader(header))
        return None, {}

    params = {}  # type: Dict[str, str]
    while parts:
        item = parts.pop(0)

        if "=" not in item:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        key, value = item.split("=", 1)
        key = key.lower().strip()
        value = value.lstrip()

        # Duplicate parameter names invalidate the whole header.
        if key in params:
            warnings.warn(BadContentDispositionHeader(header))
            return None, {}

        if not is_token(key):
            warnings.warn(BadContentDispositionParam(item))
            continue

        elif is_continuous_param(key):
            # Continuation segment: store quoted/token value as-is; it is
            # reassembled later by content_disposition_filename().
            if is_quoted(value):
                value = unescape(value[1:-1])
            elif not is_token(value):
                warnings.warn(BadContentDispositionParam(item))
                continue

        elif is_extended_param(key):
            # Extended (RFC 5987) value: charset'lang'percent-encoded.
            if is_rfc5987(value):
                encoding, _, value = value.split("'", 2)
                encoding = encoding or "utf-8"
            else:
                warnings.warn(BadContentDispositionParam(item))
                continue

            try:
                value = unquote(value, encoding, "strict")
            except UnicodeDecodeError:  # pragma: nocover
                warnings.warn(BadContentDispositionParam(item))
                continue

        else:
            # Regular parameter: must be a quoted-string or a plain token.
            failed = True
            if is_quoted(value):
                failed = False
                value = unescape(value[1:-1].lstrip("\\/"))
            elif is_token(value):
                failed = False
            elif parts:
                # maybe just ; in filename, in any case this is just
                # one case fix, for proper fix we need to redesign parser
                _value = f"{value};{parts[0]}"
                if is_quoted(_value):
                    parts.pop(0)
                    value = unescape(_value[1:-1].lstrip("\\/"))
                    failed = False

            if failed:
                warnings.warn(BadContentDispositionHeader(header))
                return None, {}

        params[key] = value

    return disptype.lower(), params
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
def content_disposition_filename(
    params: Mapping[str, str], name: str = "filename"
) -> Optional[str]:
    """Extract a file name from parsed Content-Disposition parameters.

    Lookup order: the extended ``name*`` key, the plain ``name`` key, then
    RFC 2231 continuation segments (``name*0``, ``name*1*``, ...). Returns
    None when no usable value exists.
    """
    extended_key = "%s*" % name
    if not params:
        return None
    if extended_key in params:
        return params[extended_key]
    if name in params:
        return params[name]

    # Reassemble RFC 2231 continuations: take numbered segments in order,
    # stopping at the first gap in the numbering.
    segments = []
    candidates = sorted(
        (key, value)
        for key, value in params.items()
        if key.startswith(extended_key)
    )
    for index, (key, value) in enumerate(candidates):
        _, tail = key.split("*", 1)
        if tail.endswith("*"):
            tail = tail[:-1]
        if tail != str(index):
            break
        segments.append(value)
    if not segments:
        return None

    joined = "".join(segments)
    if "'" not in joined:
        return joined
    # charset'language'percent-encoded-value form: decode it.
    encoding, _, remainder = joined.split("'", 2)
    return unquote(remainder, encoding or "utf-8", "strict")
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
class MultipartResponseWrapper:
    """Wrapper around the MultipartReader.

    It takes care about
    underlying connection and close it when it needs in.
    """

    def __init__(
        self,
        resp: "ClientResponse",
        stream: "MultipartReader",
    ) -> None:
        # resp: the client response whose connection is released at EOF.
        # stream: the multipart reader yielding body parts.
        self.resp = resp
        self.stream = stream

    def __aiter__(self) -> "MultipartResponseWrapper":
        return self

    async def __anext__(
        self,
    ) -> Union["MultipartReader", "BodyPartReader"]:
        # Async-iterator protocol: stop when next() yields no more parts.
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    def at_eof(self) -> bool:
        """Returns True when all response data had been read."""
        return self.resp.content.at_eof()

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", "BodyPartReader"]]:
        """Emits next multipart reader object."""
        item = await self.stream.next()
        # Once the underlying stream is exhausted, release the connection.
        if self.stream.at_eof():
            await self.release()
        return item

    async def release(self) -> None:
        """Release the connection gracefully.

        All remaining content is read to the void.
        """
        await self.resp.release()
|
| 250 |
+
|
| 251 |
+
|
| 252 |
+
class BodyPartReader:
|
| 253 |
+
"""Multipart reader for single body part."""
|
| 254 |
+
|
| 255 |
+
chunk_size = 8192
|
| 256 |
+
|
| 257 |
+
def __init__(
|
| 258 |
+
self, boundary: bytes, headers: "CIMultiDictProxy[str]", content: StreamReader
|
| 259 |
+
) -> None:
|
| 260 |
+
self.headers = headers
|
| 261 |
+
self._boundary = boundary
|
| 262 |
+
self._content = content
|
| 263 |
+
self._at_eof = False
|
| 264 |
+
length = self.headers.get(CONTENT_LENGTH, None)
|
| 265 |
+
self._length = int(length) if length is not None else None
|
| 266 |
+
self._read_bytes = 0
|
| 267 |
+
# TODO: typeing.Deque is not supported by Python 3.5
|
| 268 |
+
self._unread: Deque[bytes] = deque()
|
| 269 |
+
self._prev_chunk = None # type: Optional[bytes]
|
| 270 |
+
self._content_eof = 0
|
| 271 |
+
self._cache = {} # type: Dict[str, Any]
|
| 272 |
+
|
| 273 |
+
def __aiter__(self) -> AsyncIterator["BodyPartReader"]:
|
| 274 |
+
return self # type: ignore[return-value]
|
| 275 |
+
|
| 276 |
+
async def __anext__(self) -> bytes:
|
| 277 |
+
part = await self.next()
|
| 278 |
+
if part is None:
|
| 279 |
+
raise StopAsyncIteration
|
| 280 |
+
return part
|
| 281 |
+
|
| 282 |
+
async def next(self) -> Optional[bytes]:
|
| 283 |
+
item = await self.read()
|
| 284 |
+
if not item:
|
| 285 |
+
return None
|
| 286 |
+
return item
|
| 287 |
+
|
| 288 |
+
async def read(self, *, decode: bool = False) -> bytes:
|
| 289 |
+
"""Reads body part data.
|
| 290 |
+
|
| 291 |
+
decode: Decodes data following by encoding
|
| 292 |
+
method from Content-Encoding header. If it missed
|
| 293 |
+
data remains untouched
|
| 294 |
+
"""
|
| 295 |
+
if self._at_eof:
|
| 296 |
+
return b""
|
| 297 |
+
data = bytearray()
|
| 298 |
+
while not self._at_eof:
|
| 299 |
+
data.extend(await self.read_chunk(self.chunk_size))
|
| 300 |
+
if decode:
|
| 301 |
+
return self.decode(data)
|
| 302 |
+
return data
|
| 303 |
+
|
| 304 |
+
async def read_chunk(self, size: int = chunk_size) -> bytes:
|
| 305 |
+
"""Reads body part content chunk of the specified size.
|
| 306 |
+
|
| 307 |
+
size: chunk size
|
| 308 |
+
"""
|
| 309 |
+
if self._at_eof:
|
| 310 |
+
return b""
|
| 311 |
+
if self._length:
|
| 312 |
+
chunk = await self._read_chunk_from_length(size)
|
| 313 |
+
else:
|
| 314 |
+
chunk = await self._read_chunk_from_stream(size)
|
| 315 |
+
|
| 316 |
+
self._read_bytes += len(chunk)
|
| 317 |
+
if self._read_bytes == self._length:
|
| 318 |
+
self._at_eof = True
|
| 319 |
+
if self._at_eof:
|
| 320 |
+
clrf = await self._content.readline()
|
| 321 |
+
assert (
|
| 322 |
+
b"\r\n" == clrf
|
| 323 |
+
), "reader did not read all the data or it is malformed"
|
| 324 |
+
return chunk
|
| 325 |
+
|
| 326 |
+
async def _read_chunk_from_length(self, size: int) -> bytes:
|
| 327 |
+
# Reads body part content chunk of the specified size.
|
| 328 |
+
# The body part must has Content-Length header with proper value.
|
| 329 |
+
assert self._length is not None, "Content-Length required for chunked read"
|
| 330 |
+
chunk_size = min(size, self._length - self._read_bytes)
|
| 331 |
+
chunk = await self._content.read(chunk_size)
|
| 332 |
+
return chunk
|
| 333 |
+
|
| 334 |
+
async def _read_chunk_from_stream(self, size: int) -> bytes:
|
| 335 |
+
# Reads content chunk of body part with unknown length.
|
| 336 |
+
# The Content-Length header for body part is not necessary.
|
| 337 |
+
assert (
|
| 338 |
+
size >= len(self._boundary) + 2
|
| 339 |
+
), "Chunk size must be greater or equal than boundary length + 2"
|
| 340 |
+
first_chunk = self._prev_chunk is None
|
| 341 |
+
if first_chunk:
|
| 342 |
+
self._prev_chunk = await self._content.read(size)
|
| 343 |
+
|
| 344 |
+
chunk = await self._content.read(size)
|
| 345 |
+
self._content_eof += int(self._content.at_eof())
|
| 346 |
+
assert self._content_eof < 3, "Reading after EOF"
|
| 347 |
+
assert self._prev_chunk is not None
|
| 348 |
+
window = self._prev_chunk + chunk
|
| 349 |
+
sub = b"\r\n" + self._boundary
|
| 350 |
+
if first_chunk:
|
| 351 |
+
idx = window.find(sub)
|
| 352 |
+
else:
|
| 353 |
+
idx = window.find(sub, max(0, len(self._prev_chunk) - len(sub)))
|
| 354 |
+
if idx >= 0:
|
| 355 |
+
# pushing boundary back to content
|
| 356 |
+
with warnings.catch_warnings():
|
| 357 |
+
warnings.filterwarnings("ignore", category=DeprecationWarning)
|
| 358 |
+
self._content.unread_data(window[idx:])
|
| 359 |
+
if size > idx:
|
| 360 |
+
self._prev_chunk = self._prev_chunk[:idx]
|
| 361 |
+
chunk = window[len(self._prev_chunk) : idx]
|
| 362 |
+
if not chunk:
|
| 363 |
+
self._at_eof = True
|
| 364 |
+
result = self._prev_chunk
|
| 365 |
+
self._prev_chunk = chunk
|
| 366 |
+
return result
|
| 367 |
+
|
| 368 |
+
    async def readline(self) -> bytes:
        """Reads body part by line by line."""
        if self._at_eof:
            return b""

        # Lines pushed back earlier take precedence over the stream.
        if self._unread:
            line = self._unread.popleft()
        else:
            line = await self._content.readline()

        if line.startswith(self._boundary):
            # the very last boundary may not come with \r\n,
            # so set single rules for everyone
            sline = line.rstrip(b"\r\n")
            boundary = self._boundary
            last_boundary = self._boundary + b"--"
            # ensure that we read exactly the boundary, not something alike
            if sline == boundary or sline == last_boundary:
                self._at_eof = True
                # Keep the boundary line queued for the enclosing reader.
                self._unread.append(line)
                return b""
        else:
            # Peek one line ahead: if the boundary follows, the trailing CRLF
            # of the current line belongs to the boundary, not to the content.
            next_line = await self._content.readline()
            if next_line.startswith(self._boundary):
                line = line[:-2]  # strip CRLF but only once
            self._unread.append(next_line)

        return line
|
| 396 |
+
|
| 397 |
+
async def release(self) -> None:
|
| 398 |
+
"""Like read(), but reads all the data to the void."""
|
| 399 |
+
if self._at_eof:
|
| 400 |
+
return
|
| 401 |
+
while not self._at_eof:
|
| 402 |
+
await self.read_chunk(self.chunk_size)
|
| 403 |
+
|
| 404 |
+
async def text(self, *, encoding: Optional[str] = None) -> str:
|
| 405 |
+
"""Like read(), but assumes that body part contains text data."""
|
| 406 |
+
data = await self.read(decode=True)
|
| 407 |
+
# see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm # NOQA
|
| 408 |
+
# and https://dvcs.w3.org/hg/xhr/raw-file/tip/Overview.html#dom-xmlhttprequest-send # NOQA
|
| 409 |
+
encoding = encoding or self.get_charset(default="utf-8")
|
| 410 |
+
return data.decode(encoding)
|
| 411 |
+
|
| 412 |
+
async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
| 413 |
+
"""Like read(), but assumes that body parts contains JSON data."""
|
| 414 |
+
data = await self.read(decode=True)
|
| 415 |
+
if not data:
|
| 416 |
+
return None
|
| 417 |
+
encoding = encoding or self.get_charset(default="utf-8")
|
| 418 |
+
return cast(Dict[str, Any], json.loads(data.decode(encoding)))
|
| 419 |
+
|
| 420 |
+
async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
|
| 421 |
+
"""Like read(), but assumes that body parts contain form urlencoded data."""
|
| 422 |
+
data = await self.read(decode=True)
|
| 423 |
+
if not data:
|
| 424 |
+
return []
|
| 425 |
+
if encoding is not None:
|
| 426 |
+
real_encoding = encoding
|
| 427 |
+
else:
|
| 428 |
+
real_encoding = self.get_charset(default="utf-8")
|
| 429 |
+
return parse_qsl(
|
| 430 |
+
data.rstrip().decode(real_encoding),
|
| 431 |
+
keep_blank_values=True,
|
| 432 |
+
encoding=real_encoding,
|
| 433 |
+
)
|
| 434 |
+
|
| 435 |
+
def at_eof(self) -> bool:
|
| 436 |
+
"""Returns True if the boundary was reached or False otherwise."""
|
| 437 |
+
return self._at_eof
|
| 438 |
+
|
| 439 |
+
def decode(self, data: bytes) -> bytes:
|
| 440 |
+
"""Decodes data.
|
| 441 |
+
|
| 442 |
+
Decoding is done according the specified Content-Encoding
|
| 443 |
+
or Content-Transfer-Encoding headers value.
|
| 444 |
+
"""
|
| 445 |
+
if CONTENT_TRANSFER_ENCODING in self.headers:
|
| 446 |
+
data = self._decode_content_transfer(data)
|
| 447 |
+
if CONTENT_ENCODING in self.headers:
|
| 448 |
+
return self._decode_content(data)
|
| 449 |
+
return data
|
| 450 |
+
|
| 451 |
+
def _decode_content(self, data: bytes) -> bytes:
|
| 452 |
+
encoding = self.headers.get(CONTENT_ENCODING, "").lower()
|
| 453 |
+
|
| 454 |
+
if encoding == "deflate":
|
| 455 |
+
return zlib.decompress(data, -zlib.MAX_WBITS)
|
| 456 |
+
elif encoding == "gzip":
|
| 457 |
+
return zlib.decompress(data, 16 + zlib.MAX_WBITS)
|
| 458 |
+
elif encoding == "identity":
|
| 459 |
+
return data
|
| 460 |
+
else:
|
| 461 |
+
raise RuntimeError(f"unknown content encoding: {encoding}")
|
| 462 |
+
|
| 463 |
+
def _decode_content_transfer(self, data: bytes) -> bytes:
|
| 464 |
+
encoding = self.headers.get(CONTENT_TRANSFER_ENCODING, "").lower()
|
| 465 |
+
|
| 466 |
+
if encoding == "base64":
|
| 467 |
+
return base64.b64decode(data)
|
| 468 |
+
elif encoding == "quoted-printable":
|
| 469 |
+
return binascii.a2b_qp(data)
|
| 470 |
+
elif encoding in ("binary", "8bit", "7bit"):
|
| 471 |
+
return data
|
| 472 |
+
else:
|
| 473 |
+
raise RuntimeError(
|
| 474 |
+
"unknown content transfer encoding: {}" "".format(encoding)
|
| 475 |
+
)
|
| 476 |
+
|
| 477 |
+
def get_charset(self, default: str) -> str:
|
| 478 |
+
"""Returns charset parameter from Content-Type header or default."""
|
| 479 |
+
ctype = self.headers.get(CONTENT_TYPE, "")
|
| 480 |
+
mimetype = parse_mimetype(ctype)
|
| 481 |
+
return mimetype.parameters.get("charset", default)
|
| 482 |
+
|
| 483 |
+
    @reify
    def name(self) -> Optional[str]:
        """Returns name specified in Content-Disposition header.

        If the header is missing or malformed, returns None.
        """
        # NOTE(review): @reify presumably caches the parsed result on first
        # access — confirm against the helpers module.
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "name")
|
| 491 |
+
|
| 492 |
+
    @reify
    def filename(self) -> Optional[str]:
        """Returns filename specified in Content-Disposition header.

        Returns None if the header is missing or malformed.
        """
        # Same parsing path as `name`, but extracts the 'filename' parameter.
        _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
        return content_disposition_filename(params, "filename")
|
| 500 |
+
|
| 501 |
+
|
| 502 |
+
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
    """Payload adapter that streams a BodyPartReader as outgoing data."""

    def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
        super().__init__(value, *args, **kwargs)

        params = {}  # type: Dict[str, str]
        # Propagate the part's identity into the disposition parameters.
        for key in ("name", "filename"):
            attr = getattr(value, key)
            if attr is not None:
                params[key] = attr

        if params:
            self.set_content_disposition("attachment", True, **params)

    async def write(self, writer: Any) -> None:
        field = self._value
        # Pump 64 KiB chunks through the part's decoder until exhausted.
        while True:
            chunk = await field.read_chunk(size=2 ** 16)
            if not chunk:
                break
            await writer.write(field.decode(chunk))
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
class MultipartReader:
    """Multipart body reader."""

    #: Response wrapper, used when multipart readers constructs from response.
    response_wrapper_cls = MultipartResponseWrapper
    #: Multipart reader class, used to handle multipart/* body parts.
    #: None points to type(self)
    multipart_reader_cls = None
    #: Body part reader class for non multipart/* content types.
    part_reader_cls = BodyPartReader

    def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None:
        self.headers = headers
        # Delimiter line as bytes: two dashes plus the declared boundary token.
        self._boundary = ("--" + self._get_boundary()).encode()
        self._content = content
        self._last_part = (
            None
        )  # type: Optional[Union['MultipartReader', BodyPartReader]]
        self._at_eof = False
        # True until the first boundary line has been consumed.
        self._at_bof = True
        # Lines read ahead of their consumer (e.g. handed up by a child part).
        self._unread = []  # type: List[bytes]

    def __aiter__(
        self,
    ) -> AsyncIterator["BodyPartReader"]:
        return self  # type: ignore[return-value]

    async def __anext__(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        part = await self.next()
        if part is None:
            raise StopAsyncIteration
        return part

    @classmethod
    def from_response(
        cls,
        response: "ClientResponse",
    ) -> MultipartResponseWrapper:
        """Constructs reader instance from HTTP response.

        :param response: :class:`~aiohttp.client.ClientResponse` instance
        """
        obj = cls.response_wrapper_cls(
            response, cls(response.headers, response.content)
        )
        return obj

    def at_eof(self) -> bool:
        """Returns True if the final boundary was reached, false otherwise."""
        return self._at_eof

    async def next(
        self,
    ) -> Optional[Union["MultipartReader", BodyPartReader]]:
        """Emits the next multipart body part."""
        # So, if we're at BOF, we need to skip till the boundary.
        if self._at_eof:
            return None
        # Drain any partially-read previous part before moving on.
        await self._maybe_release_last_part()
        if self._at_bof:
            await self._read_until_first_boundary()
            self._at_bof = False
        else:
            await self._read_boundary()
        if self._at_eof:  # we just read the last boundary, nothing to do there
            return None
        self._last_part = await self.fetch_next_part()
        return self._last_part

    async def release(self) -> None:
        """Reads all the body parts to the void till the final boundary."""
        while not self._at_eof:
            item = await self.next()
            if item is None:
                break
            await item.release()

    async def fetch_next_part(
        self,
    ) -> Union["MultipartReader", BodyPartReader]:
        """Returns the next body part reader."""
        headers = await self._read_headers()
        return self._get_part_reader(headers)

    def _get_part_reader(
        self,
        headers: "CIMultiDictProxy[str]",
    ) -> Union["MultipartReader", BodyPartReader]:
        """Dispatches the response by the `Content-Type` header.

        Returns a suitable reader instance.

        :param dict headers: Response headers
        """
        ctype = headers.get(CONTENT_TYPE, "")
        mimetype = parse_mimetype(ctype)

        if mimetype.type == "multipart":
            # Nested multipart body: recurse with another multipart reader.
            if self.multipart_reader_cls is None:
                return type(self)(headers, self._content)
            return self.multipart_reader_cls(headers, self._content)
        else:
            return self.part_reader_cls(self._boundary, headers, self._content)

    def _get_boundary(self) -> str:
        mimetype = parse_mimetype(self.headers[CONTENT_TYPE])

        assert mimetype.type == "multipart", "multipart/* content type expected"

        if "boundary" not in mimetype.parameters:
            raise ValueError(
                "boundary missed for Content-Type: %s" % self.headers[CONTENT_TYPE]
            )

        boundary = mimetype.parameters["boundary"]
        # RFC 2046 caps boundary tokens at 70 characters.
        if len(boundary) > 70:
            raise ValueError("boundary %r is too long (70 chars max)" % boundary)

        return boundary

    async def _readline(self) -> bytes:
        # Serve pushed-back lines before touching the underlying stream.
        if self._unread:
            return self._unread.pop()
        return await self._content.readline()

    async def _read_until_first_boundary(self) -> None:
        # Skip the preamble: everything before the first boundary line.
        while True:
            chunk = await self._readline()
            if chunk == b"":
                raise ValueError(
                    "Could not find starting boundary %r" % (self._boundary)
                )
            chunk = chunk.rstrip()
            if chunk == self._boundary:
                return
            elif chunk == self._boundary + b"--":
                # Final boundary came first: the body has no parts at all.
                self._at_eof = True
                return

    async def _read_boundary(self) -> None:
        chunk = (await self._readline()).rstrip()
        if chunk == self._boundary:
            pass
        elif chunk == self._boundary + b"--":
            self._at_eof = True
            epilogue = await self._readline()
            next_line = await self._readline()

            # the epilogue is expected and then either the end of input or the
            # parent multipart boundary, if the parent boundary is found then
            # it should be marked as unread and handed to the parent for
            # processing
            if next_line[:2] == b"--":
                self._unread.append(next_line)
            # otherwise the request is likely missing an epilogue and both
            # lines should be passed to the parent for processing
            # (this handles the old behavior gracefully)
            else:
                self._unread.extend([next_line, epilogue])
        else:
            raise ValueError(f"Invalid boundary {chunk!r}, expected {self._boundary!r}")

    async def _read_headers(self) -> "CIMultiDictProxy[str]":
        # Collect header lines up to (and including) the blank separator line.
        lines = [b""]
        while True:
            chunk = await self._content.readline()
            chunk = chunk.strip()
            lines.append(chunk)
            if not chunk:
                break
        parser = HeadersParser()
        headers, raw_headers = parser.parse_headers(lines)
        return headers

    async def _maybe_release_last_part(self) -> None:
        """Ensures that the last read body part is read completely."""
        if self._last_part is not None:
            if not self._last_part.at_eof():
                await self._last_part.release()
            # Adopt any lookahead lines the part buffered past its own end.
            self._unread.extend(self._last_part._unread)
            self._last_part = None
|
| 707 |
+
|
| 708 |
+
|
| 709 |
+
_Part = Tuple[Payload, str, str]
|
| 710 |
+
|
| 711 |
+
|
| 712 |
+
class MultipartWriter(Payload):
    """Multipart body writer."""

    def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
        boundary = boundary if boundary is not None else uuid.uuid4().hex
        # The underlying Payload API demands a str (utf-8), not bytes,
        # so we need to ensure we don't lose anything during conversion.
        # As a result, require the boundary to be ASCII only.
        # In both situations.

        try:
            self._boundary = boundary.encode("ascii")
        except UnicodeEncodeError:
            raise ValueError("boundary should contain ASCII only chars") from None
        ctype = f"multipart/{subtype}; boundary={self._boundary_value}"

        super().__init__(None, content_type=ctype)

        # Each entry is (payload, content-encoding, transfer-encoding).
        self._parts = []  # type: List[_Part]

    def __enter__(self) -> "MultipartWriter":
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        pass

    def __iter__(self) -> Iterator[_Part]:
        return iter(self._parts)

    def __len__(self) -> int:
        return len(self._parts)

    def __bool__(self) -> bool:
        # Always truthy, even with zero parts appended.
        return True

    # Characters allowed in an unquoted HTTP parameter token (see below).
    _valid_tchar_regex = re.compile(br"\A[!#$%&'*+\-.^_`|~\w]+\Z")
    # Control characters that may not appear even inside a quoted string.
    _invalid_qdtext_char_regex = re.compile(br"[\x00-\x08\x0A-\x1F\x7F]")

    @property
    def _boundary_value(self) -> str:
        """Wrap boundary parameter value in quotes, if necessary.

        Reads self.boundary and returns a unicode string.
        """
        # Refer to RFCs 7231, 7230, 5234.
        #
        # parameter      = token "=" ( token / quoted-string )
        # token          = 1*tchar
        # quoted-string  = DQUOTE *( qdtext / quoted-pair ) DQUOTE
        # qdtext         = HTAB / SP / %x21 / %x23-5B / %x5D-7E / obs-text
        # obs-text       = %x80-FF
        # quoted-pair    = "\" ( HTAB / SP / VCHAR / obs-text )
        # tchar          = "!" / "#" / "$" / "%" / "&" / "'" / "*"
        #                  / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
        #                  / DIGIT / ALPHA
        #                  ; any VCHAR, except delimiters
        # VCHAR          = %x21-7E
        value = self._boundary
        if re.match(self._valid_tchar_regex, value):
            return value.decode("ascii")  # cannot fail

        if re.search(self._invalid_qdtext_char_regex, value):
            raise ValueError("boundary value contains invalid characters")

        # escape %x5C and %x22
        quoted_value_content = value.replace(b"\\", b"\\\\")
        quoted_value_content = quoted_value_content.replace(b'"', b'\\"')

        return '"' + quoted_value_content.decode("ascii") + '"'

    @property
    def boundary(self) -> str:
        return self._boundary.decode("ascii")

    def append(self, obj: Any, headers: Optional[MultiMapping[str]] = None) -> Payload:
        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Payload):
            obj.headers.update(headers)
            return self.append_payload(obj)
        else:
            try:
                payload = get_payload(obj, headers=headers)
            except LookupError:
                raise TypeError("Cannot create payload from %r" % obj)
            else:
                return self.append_payload(payload)

    def append_payload(self, payload: Payload) -> Payload:
        """Adds a new body part to multipart writer."""
        # compression
        encoding = payload.headers.get(
            CONTENT_ENCODING,
            "",
        ).lower()  # type: Optional[str]
        if encoding and encoding not in ("deflate", "gzip", "identity"):
            raise RuntimeError(f"unknown content encoding: {encoding}")
        if encoding == "identity":
            # identity means no transformation; normalize to None.
            encoding = None

        # te encoding
        te_encoding = payload.headers.get(
            CONTENT_TRANSFER_ENCODING,
            "",
        ).lower()  # type: Optional[str]
        if te_encoding not in ("", "base64", "quoted-printable", "binary"):
            raise RuntimeError(
                "unknown content transfer encoding: {}" "".format(te_encoding)
            )
        if te_encoding == "binary":
            # binary also means pass-through; normalize to None.
            te_encoding = None

        # size
        # Only advertise Content-Length when the bytes are written verbatim.
        size = payload.size
        if size is not None and not (encoding or te_encoding):
            payload.headers[CONTENT_LENGTH] = str(size)

        self._parts.append((payload, encoding, te_encoding))  # type: ignore[arg-type]
        return payload

    def append_json(
        self, obj: Any, headers: Optional[MultiMapping[str]] = None
    ) -> Payload:
        """Helper to append JSON part."""
        if headers is None:
            headers = CIMultiDict()

        return self.append_payload(JsonPayload(obj, headers=headers))

    def append_form(
        self,
        obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]],
        headers: Optional[MultiMapping[str]] = None,
    ) -> Payload:
        """Helper to append form urlencoded part."""
        assert isinstance(obj, (Sequence, Mapping))

        if headers is None:
            headers = CIMultiDict()

        if isinstance(obj, Mapping):
            obj = list(obj.items())
        data = urlencode(obj, doseq=True)

        return self.append_payload(
            StringPayload(
                data, headers=headers, content_type="application/x-www-form-urlencoded"
            )
        )

    @property
    def size(self) -> Optional[int]:
        """Size of the payload."""
        total = 0
        for part, encoding, te_encoding in self._parts:
            # Encoded parts have unpredictable length; report "unknown".
            if encoding or te_encoding or part.size is None:
                return None

            total += int(
                2
                + len(self._boundary)
                + 2
                + part.size  # b'--'+self._boundary+b'\r\n'
                + len(part._binary_headers)
                + 2  # b'\r\n'
            )

        total += 2 + len(self._boundary) + 4  # b'--'+self._boundary+b'--\r\n'
        return total

    async def write(self, writer: Any, close_boundary: bool = True) -> None:
        """Write body."""
        for part, encoding, te_encoding in self._parts:
            await writer.write(b"--" + self._boundary + b"\r\n")
            await writer.write(part._binary_headers)

            if encoding or te_encoding:
                # Route the part through an encoding/compressing shim writer.
                w = MultipartPayloadWriter(writer)
                if encoding:
                    w.enable_compression(encoding)
                if te_encoding:
                    w.enable_encoding(te_encoding)
                await part.write(w)  # type: ignore[arg-type]
                await w.write_eof()
            else:
                await part.write(writer)

            await writer.write(b"\r\n")

        if close_boundary:
            await writer.write(b"--" + self._boundary + b"--\r\n")
|
| 909 |
+
|
| 910 |
+
|
| 911 |
+
class MultipartPayloadWriter:
    """Write-through shim that applies compression and/or a
    content-transfer-encoding before forwarding bytes to ``writer``.
    """

    def __init__(self, writer: Any) -> None:
        self._writer = writer
        self._encoding = None  # type: Optional[str]
        self._compress = None  # type: Any
        # Pending bytes for base64, which must be emitted in 3-byte groups.
        self._encoding_buffer = None  # type: Optional[bytearray]

    def enable_encoding(self, encoding: str) -> None:
        """Select a content-transfer-encoding: 'base64' or 'quoted-printable'.

        Other values are silently ignored (validated by the caller).
        """
        if encoding == "base64":
            self._encoding = encoding
            self._encoding_buffer = bytearray()
        elif encoding == "quoted-printable":
            self._encoding = "quoted-printable"

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Enable 'gzip' or raw-'deflate' compression of written data."""
        # gzip needs the 16+ wbits offset; raw deflate uses negative wbits.
        zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS
        self._compress = zlib.compressobj(wbits=zlib_mode, strategy=strategy)

    async def write_eof(self) -> None:
        """Flush the compressor, then any buffered base64 remainder."""
        if self._compress is not None:
            chunk = self._compress.flush()
            if chunk:
                # Clear the compressor first so the recursive write() below
                # passes the flushed bytes straight to the encoding stage.
                self._compress = None
                await self.write(chunk)

        if self._encoding == "base64":
            if self._encoding_buffer:
                # Encode the final (possibly padded) group.
                await self._writer.write(base64.b64encode(self._encoding_buffer))

    async def write(self, chunk: bytes) -> None:
        """Compress/encode *chunk* as configured and forward it downstream."""
        if self._compress is not None:
            if chunk:
                chunk = self._compress.compress(chunk)
                if not chunk:
                    # Compressor swallowed the input; nothing to emit yet.
                    return

        if self._encoding == "base64":
            buf = self._encoding_buffer
            assert buf is not None
            buf.extend(chunk)

            if buf:
                # Emit only whole 3-byte groups so no '=' padding appears
                # mid-stream; keep the remainder buffered for later.
                # (Fixed: the unused `mod` from divmod() was dropped.)
                div = len(buf) // 3
                enc_chunk, self._encoding_buffer = (buf[: div * 3], buf[div * 3 :])
                if enc_chunk:
                    b64chunk = base64.b64encode(enc_chunk)
                    await self._writer.write(b64chunk)
        elif self._encoding == "quoted-printable":
            await self._writer.write(binascii.b2a_qp(chunk))
        else:
            await self._writer.write(chunk)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/payload_streamer.py
ADDED
|
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Payload implementation for coroutines as a data provider.
|
| 3 |
+
|
| 4 |
+
As a simple case, you can upload data from file::
|
| 5 |
+
|
| 6 |
+
@aiohttp.streamer
|
| 7 |
+
async def file_sender(writer, file_name=None):
|
| 8 |
+
with open(file_name, 'rb') as f:
|
| 9 |
+
chunk = f.read(2**16)
|
| 10 |
+
while chunk:
|
| 11 |
+
await writer.write(chunk)
|
| 12 |
+
|
| 13 |
+
chunk = f.read(2**16)
|
| 14 |
+
|
| 15 |
+
Then you can use `file_sender` like this:
|
| 16 |
+
|
| 17 |
+
async with session.post('http://httpbin.org/post',
|
| 18 |
+
data=file_sender(file_name='huge_file')) as resp:
|
| 19 |
+
print(await resp.text())
|
| 20 |
+
|
| 21 |
+
.. note:: The coroutine must accept ``writer`` as its first argument
|
| 22 |
+
|
| 23 |
+
"""
|
| 24 |
+
|
| 25 |
+
import types
|
| 26 |
+
import warnings
|
| 27 |
+
from typing import Any, Awaitable, Callable, Dict, Tuple
|
| 28 |
+
|
| 29 |
+
from .abc import AbstractStreamWriter
|
| 30 |
+
from .payload import Payload, payload_type
|
| 31 |
+
|
| 32 |
+
__all__ = ("streamer",)
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class _stream_wrapper:
    """Bind a streaming coroutine to the args/kwargs it will be called with.

    Instances are awaitable callables invoked later with only the stream
    writer; the remaining arguments were captured at construction time.
    """

    def __init__(
        self,
        coro: Callable[..., Awaitable[None]],
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> None:
        # types.coroutine also accepts generator-based callables, making
        # them awaitable alongside native coroutines.
        self.coro = types.coroutine(coro)
        self.args = args
        self.kwargs = kwargs

    async def __call__(self, writer: AbstractStreamWriter) -> None:
        # Forward the writer plus the pre-bound arguments to the coroutine.
        await self.coro(writer, *self.args, **self.kwargs)  # type: ignore[operator]
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class streamer:
    """Deprecated decorator turning a coroutine into a payload factory.

    Calling the decorated function does not run the coroutine; it returns a
    :class:`_stream_wrapper` capturing the call arguments for later use.
    """

    def __init__(self, coro: Callable[..., Awaitable[None]]) -> None:
        warnings.warn(
            "@streamer is deprecated, use async generators instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.coro = coro

    def __call__(self, *args: Any, **kwargs: Any) -> _stream_wrapper:
        # Defer execution: bundle the coroutine with its call arguments.
        return _stream_wrapper(self.coro, args, kwargs)
|
| 61 |
+
|
| 62 |
+
|
| 63 |
+
@payload_type(_stream_wrapper)
class StreamWrapperPayload(Payload):
    """Payload adapter for :class:`_stream_wrapper` values."""

    async def write(self, writer: AbstractStreamWriter) -> None:
        # The wrapped value is itself an async callable that drives `writer`.
        await self._value(writer)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
@payload_type(streamer)
class StreamPayload(StreamWrapperPayload):
    """Payload adapter for ``@streamer``-decorated callables.

    The streamer is invoked with no arguments up front to obtain the
    underlying :class:`_stream_wrapper`; ``write`` is inherited from
    :class:`StreamWrapperPayload` (the previous override here duplicated
    the base implementation verbatim and was removed).
    """

    def __init__(self, value: Any, *args: Any, **kwargs: Any) -> None:
        # `value` is a `streamer`; calling it yields the _stream_wrapper
        # that the base class already knows how to write.
        super().__init__(value(), *args, **kwargs)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/pytest_plugin.py
ADDED
|
@@ -0,0 +1,391 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import contextlib
|
| 3 |
+
import warnings
|
| 4 |
+
from collections.abc import Callable
|
| 5 |
+
from typing import Any, Awaitable, Callable, Dict, Generator, Optional, Union
|
| 6 |
+
|
| 7 |
+
import pytest
|
| 8 |
+
|
| 9 |
+
from aiohttp.helpers import PY_37, isasyncgenfunction
|
| 10 |
+
from aiohttp.web import Application
|
| 11 |
+
|
| 12 |
+
from .test_utils import (
|
| 13 |
+
BaseTestServer,
|
| 14 |
+
RawTestServer,
|
| 15 |
+
TestClient,
|
| 16 |
+
TestServer,
|
| 17 |
+
loop_context,
|
| 18 |
+
setup_test_loop,
|
| 19 |
+
teardown_test_loop,
|
| 20 |
+
unused_port as _unused_port,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
try:
|
| 24 |
+
import uvloop
|
| 25 |
+
except ImportError: # pragma: no cover
|
| 26 |
+
uvloop = None
|
| 27 |
+
|
| 28 |
+
try:
|
| 29 |
+
import tokio
|
| 30 |
+
except ImportError: # pragma: no cover
|
| 31 |
+
tokio = None
|
| 32 |
+
|
| 33 |
+
AiohttpClient = Callable[[Union[Application, BaseTestServer]], Awaitable[TestClient]]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
def pytest_addoption(parser):  # type: ignore[no-untyped-def]
    """Register the aiohttp-specific command line options with pytest."""
    # Table-driven registration; order matters for --help output.
    options = (
        (
            "--aiohttp-fast",
            dict(
                action="store_true",
                default=False,
                help="run tests faster by disabling extra checks",
            ),
        ),
        (
            "--aiohttp-loop",
            dict(
                action="store",
                default="pyloop",
                help="run tests with specific loop: pyloop, uvloop, tokio or all",
            ),
        ),
        (
            "--aiohttp-enable-loop-debug",
            dict(
                action="store_true",
                default=False,
                help="enable event loop debug mode",
            ),
        ),
    )
    for name, kwargs in options:
        parser.addoption(name, **kwargs)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
    """Set up pytest fixture.

    Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
    """
    func = fixturedef.func

    if isasyncgenfunction(func):
        # async generator fixture
        is_async_gen = True
    elif asyncio.iscoroutinefunction(func):
        # regular async fixture
        is_async_gen = False
    else:
        # not an async fixture, nothing to do
        return

    # The wrapper below needs the 'request' fixture to reach the event loop;
    # inject it into the fixture signature if the fixture didn't ask for it.
    strip_request = False
    if "request" not in fixturedef.argnames:
        fixturedef.argnames += ("request",)
        strip_request = True

    def wrapper(*args, **kwargs):  # type: ignore[no-untyped-def]
        request = kwargs["request"]
        if strip_request:
            # Remove the injected argument before calling the real fixture.
            del kwargs["request"]

        # if neither the fixture nor the test use the 'loop' fixture,
        # 'getfixturevalue' will fail because the test is not parameterized
        # (this can be removed someday if 'loop' is no longer parameterized)
        if "loop" not in request.fixturenames:
            raise Exception(
                "Asynchronous fixtures must depend on the 'loop' fixture or "
                "be used in tests depending from it."
            )

        _loop = request.getfixturevalue("loop")

        if is_async_gen:
            # for async generators, we need to advance the generator once,
            # then advance it again in a finalizer
            gen = func(*args, **kwargs)

            def finalizer():  # type: ignore[no-untyped-def]
                try:
                    return _loop.run_until_complete(gen.__anext__())
                except StopAsyncIteration:
                    # Generator finished cleanly after its single yield.
                    pass

            request.addfinalizer(finalizer)
            return _loop.run_until_complete(gen.__anext__())
        else:
            return _loop.run_until_complete(func(*args, **kwargs))

    # Replace the fixture function so pytest calls the sync wrapper.
    fixturedef.func = wrapper
|
| 112 |
+
|
| 113 |
+
|
| 114 |
+
@pytest.fixture
def fast(request):  # type: ignore[no-untyped-def]
    """--fast config option"""
    enabled = request.config.getoption("--aiohttp-fast")
    return enabled
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@pytest.fixture
def loop_debug(request):  # type: ignore[no-untyped-def]
    """--enable-loop-debug config option"""
    enabled = request.config.getoption("--aiohttp-enable-loop-debug")
    return enabled
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
@contextlib.contextmanager
|
| 127 |
+
def _runtime_warning_context(): # type: ignore[no-untyped-def]
|
| 128 |
+
"""Context manager which checks for RuntimeWarnings.
|
| 129 |
+
|
| 130 |
+
This exists specifically to
|
| 131 |
+
avoid "coroutine 'X' was never awaited" warnings being missed.
|
| 132 |
+
|
| 133 |
+
If RuntimeWarnings occur in the context a RuntimeError is raised.
|
| 134 |
+
"""
|
| 135 |
+
with warnings.catch_warnings(record=True) as _warnings:
|
| 136 |
+
yield
|
| 137 |
+
rw = [
|
| 138 |
+
"{w.filename}:{w.lineno}:{w.message}".format(w=w)
|
| 139 |
+
for w in _warnings
|
| 140 |
+
if w.category == RuntimeWarning
|
| 141 |
+
]
|
| 142 |
+
if rw:
|
| 143 |
+
raise RuntimeError(
|
| 144 |
+
"{} Runtime Warning{},\n{}".format(
|
| 145 |
+
len(rw), "" if len(rw) == 1 else "s", "\n".join(rw)
|
| 146 |
+
)
|
| 147 |
+
)
|
| 148 |
+
|
| 149 |
+
|
| 150 |
+
@contextlib.contextmanager
|
| 151 |
+
def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
|
| 152 |
+
"""Passthrough loop context.
|
| 153 |
+
|
| 154 |
+
Sets up and tears down a loop unless one is passed in via the loop
|
| 155 |
+
argument when it's passed straight through.
|
| 156 |
+
"""
|
| 157 |
+
if loop:
|
| 158 |
+
# loop already exists, pass it straight through
|
| 159 |
+
yield loop
|
| 160 |
+
else:
|
| 161 |
+
# this shadows loop_context's standard behavior
|
| 162 |
+
loop = setup_test_loop()
|
| 163 |
+
yield loop
|
| 164 |
+
teardown_test_loop(loop, fast=fast)
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
    """Fix pytest collecting for coroutines."""
    if not collector.funcnamefilter(name):
        return None
    if not asyncio.iscoroutinefunction(obj):
        return None
    return list(collector._genfunctions(name, obj))
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
    """Run coroutines in an event loop instead of a normal function call."""
    fast = pyfuncitem.config.getoption("--aiohttp-fast")
    if not asyncio.iscoroutinefunction(pyfuncitem.function):
        return None  # let pytest invoke plain functions itself

    # Prefer an explicitly requested proactor loop, then the standard one.
    existing_loop = pyfuncitem.funcargs.get(
        "proactor_loop"
    ) or pyfuncitem.funcargs.get("loop", None)
    with _runtime_warning_context(), _passthrough_loop_context(
        existing_loop, fast=fast
    ) as _loop:
        call_kwargs = {
            name: pyfuncitem.funcargs[name]
            for name in pyfuncitem._fixtureinfo.argnames
        }
        _loop.run_until_complete(pyfuncitem.obj(**call_kwargs))

    return True
|
| 189 |
+
|
| 190 |
+
|
| 191 |
+
def pytest_generate_tests(metafunc):  # type: ignore[no-untyped-def]
    """Parametrize the ``loop_factory`` fixture from ``--aiohttp-loop``.

    The option is a comma separated list of loop names; a trailing ``?``
    marks a loop as optional, skipped silently when its package is absent.
    """
    if "loop_factory" not in metafunc.fixturenames:
        return

    loops = metafunc.config.option.aiohttp_loop
    avail_factories = {"pyloop": asyncio.DefaultEventLoopPolicy}

    if uvloop is not None:  # pragma: no cover
        avail_factories["uvloop"] = uvloop.EventLoopPolicy

    if tokio is not None:  # pragma: no cover
        avail_factories["tokio"] = tokio.EventLoopPolicy

    if loops == "all":
        loops = "pyloop,uvloop?,tokio?"

    factories = {}  # type: ignore[var-annotated]
    for name in loops.split(","):
        required = not name.endswith("?")
        name = name.strip(" ?")
        if name not in avail_factories:  # pragma: no cover
            if required:
                # Bug fix: report the loops actually available, not the
                # (partial) selection accumulated so far in ``factories``.
                raise ValueError(
                    "Unknown loop '%s', available loops: %s"
                    % (name, list(avail_factories.keys()))
                )
            else:
                continue
        factories[name] = avail_factories[name]
    metafunc.parametrize(
        "loop_factory", list(factories.values()), ids=list(factories.keys())
    )
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
@pytest.fixture
def loop(loop_factory, fast, loop_debug):  # type: ignore[no-untyped-def]
    """Return an instance of the event loop."""
    asyncio.set_event_loop_policy(loop_factory())
    with loop_context(fast=fast) as _loop:
        if loop_debug:
            _loop.set_debug(True)  # pragma: no cover
        asyncio.set_event_loop(_loop)
        yield _loop
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
@pytest.fixture
def proactor_loop():  # type: ignore[no-untyped-def]
    """Event loop fixture backed by the proactor implementation (Windows)."""
    if PY_37:
        policy = asyncio.WindowsProactorEventLoopPolicy()  # type: ignore[attr-defined]
    else:
        # Older Pythons: patch the default policy's loop factory instead.
        policy = asyncio.get_event_loop_policy()
        policy._loop_factory = asyncio.ProactorEventLoop  # type: ignore[attr-defined]
    asyncio.set_event_loop_policy(policy)

    with loop_context(policy.new_event_loop) as _loop:
        asyncio.set_event_loop(_loop)
        yield _loop
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
@pytest.fixture
def unused_port(aiohttp_unused_port):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the ``aiohttp_unused_port`` fixture."""
    message = "Deprecated, use aiohttp_unused_port fixture instead"
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return aiohttp_unused_port
|
| 259 |
+
|
| 260 |
+
|
| 261 |
+
@pytest.fixture
def aiohttp_unused_port():  # type: ignore[no-untyped-def]
    """Return a port that is unused on the current host."""
    # Expose the helper function itself; tests call ``aiohttp_unused_port()``.
    return _unused_port
|
| 265 |
+
|
| 266 |
+
|
| 267 |
+
@pytest.fixture
def aiohttp_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a TestServer instance, given an app.

    aiohttp_server(app, **kwargs)
    """
    started = []

    async def go(app, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = TestServer(app, port=port)
        await server.start_server(loop=loop, **kwargs)
        started.append(server)
        return server

    yield go

    async def _cleanup() -> None:
        # Shut down every server the test created, most recent first.
        while started:
            await started.pop().close()

    loop.run_until_complete(_cleanup())
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
@pytest.fixture
def test_server(aiohttp_server):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the ``aiohttp_server`` fixture."""
    message = "Deprecated, use aiohttp_server fixture instead"
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return aiohttp_server
|
| 298 |
+
|
| 299 |
+
|
| 300 |
+
@pytest.fixture
def aiohttp_raw_server(loop):  # type: ignore[no-untyped-def]
    """Factory to create a RawTestServer instance, given a web handler.

    aiohttp_raw_server(handler, **kwargs)
    """
    started = []

    async def go(handler, *, port=None, **kwargs):  # type: ignore[no-untyped-def]
        server = RawTestServer(handler, port=port)
        await server.start_server(loop=loop, **kwargs)
        started.append(server)
        return server

    yield go

    async def _cleanup() -> None:
        # Shut down every raw server the test created, most recent first.
        while started:
            await started.pop().close()

    loop.run_until_complete(_cleanup())
|
| 321 |
+
|
| 322 |
+
|
| 323 |
+
@pytest.fixture
def raw_test_server(  # type: ignore[no-untyped-def]  # pragma: no cover
    aiohttp_raw_server,
):
    """Deprecated alias for the ``aiohttp_raw_server`` fixture."""
    message = "Deprecated, use aiohttp_raw_server fixture instead"
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return aiohttp_raw_server
|
| 333 |
+
|
| 334 |
+
|
| 335 |
+
@pytest.fixture
def aiohttp_client(
    loop: asyncio.AbstractEventLoop,
) -> Generator[AiohttpClient, None, None]:
    """Factory to create a TestClient instance.

    aiohttp_client(app, **kwargs)
    aiohttp_client(server, **kwargs)
    aiohttp_client(raw_server, **kwargs)
    """
    clients = []

    async def go(
        __param: Union[Application, BaseTestServer],
        *args: Any,
        server_kwargs: Optional[Dict[str, Any]] = None,
        **kwargs: Any
    ) -> TestClient:

        # Legacy path: a bare callable builds the app from (loop, *args).
        if isinstance(__param, Callable) and not isinstance(  # type: ignore[arg-type]
            __param, (Application, BaseTestServer)
        ):
            __param = __param(loop, *args, **kwargs)
            kwargs = {}
        else:
            assert not args, "args should be empty"

        if isinstance(__param, Application):
            server = TestServer(__param, loop=loop, **(server_kwargs or {}))
            client = TestClient(server, loop=loop, **kwargs)
        elif isinstance(__param, BaseTestServer):
            client = TestClient(__param, loop=loop, **kwargs)
        else:
            raise ValueError("Unknown argument type: %r" % type(__param))

        await client.start_server()
        clients.append(client)
        return client

    yield go

    async def finalize() -> None:
        # Close every client the test created, most recent first.
        while clients:
            await clients.pop().close()

    loop.run_until_complete(finalize())
|
| 382 |
+
|
| 383 |
+
|
| 384 |
+
@pytest.fixture
def test_client(aiohttp_client):  # type: ignore[no-untyped-def]  # pragma: no cover
    """Deprecated alias for the ``aiohttp_client`` fixture."""
    message = "Deprecated, use aiohttp_client fixture instead"
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return aiohttp_client
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/tcp_helpers.py
ADDED
|
@@ -0,0 +1,38 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Helper methods to tune a TCP connection"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import socket
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from typing import Optional # noqa
|
| 7 |
+
|
| 8 |
+
__all__ = ("tcp_keepalive", "tcp_nodelay")
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
if hasattr(socket, "SO_KEEPALIVE"):

    def tcp_keepalive(transport: asyncio.Transport) -> None:
        """Enable SO_KEEPALIVE on the transport's underlying socket."""
        sock = transport.get_extra_info("socket")
        if sock is None:
            return
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)


else:

    def tcp_keepalive(transport: asyncio.Transport) -> None:  # pragma: no cover
        """No-op fallback for platforms without SO_KEEPALIVE."""
        pass
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def tcp_nodelay(transport: asyncio.Transport, value: bool) -> None:
    """Set or clear TCP_NODELAY on the transport's underlying socket.

    Silently does nothing when there is no socket or it is not an
    IPv4/IPv6 stream socket.
    """
    sock = transport.get_extra_info("socket")
    if sock is None or sock.family not in (socket.AF_INET, socket.AF_INET6):
        return

    # socket may be closed already, on windows OSError get raised
    with suppress(OSError):
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, bool(value))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/test_utils.py
ADDED
|
@@ -0,0 +1,698 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Utilities shared by tests."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import contextlib
|
| 5 |
+
import gc
|
| 6 |
+
import inspect
|
| 7 |
+
import ipaddress
|
| 8 |
+
import os
|
| 9 |
+
import socket
|
| 10 |
+
import sys
|
| 11 |
+
import warnings
|
| 12 |
+
from abc import ABC, abstractmethod
|
| 13 |
+
from types import TracebackType
|
| 14 |
+
from typing import (
|
| 15 |
+
TYPE_CHECKING,
|
| 16 |
+
Any,
|
| 17 |
+
Callable,
|
| 18 |
+
Iterator,
|
| 19 |
+
List,
|
| 20 |
+
Optional,
|
| 21 |
+
Type,
|
| 22 |
+
Union,
|
| 23 |
+
cast,
|
| 24 |
+
)
|
| 25 |
+
from unittest import mock
|
| 26 |
+
|
| 27 |
+
from aiosignal import Signal
|
| 28 |
+
from multidict import CIMultiDict, CIMultiDictProxy
|
| 29 |
+
from yarl import URL
|
| 30 |
+
|
| 31 |
+
import aiohttp
|
| 32 |
+
from aiohttp.client import _RequestContextManager, _WSRequestContextManager
|
| 33 |
+
|
| 34 |
+
from . import ClientSession, hdrs
|
| 35 |
+
from .abc import AbstractCookieJar
|
| 36 |
+
from .client_reqrep import ClientResponse
|
| 37 |
+
from .client_ws import ClientWebSocketResponse
|
| 38 |
+
from .helpers import PY_38, sentinel
|
| 39 |
+
from .http import HttpVersion, RawRequestMessage
|
| 40 |
+
from .web import (
|
| 41 |
+
Application,
|
| 42 |
+
AppRunner,
|
| 43 |
+
BaseRunner,
|
| 44 |
+
Request,
|
| 45 |
+
Server,
|
| 46 |
+
ServerRunner,
|
| 47 |
+
SockSite,
|
| 48 |
+
UrlMappingMatchInfo,
|
| 49 |
+
)
|
| 50 |
+
from .web_protocol import _RequestHandler
|
| 51 |
+
|
| 52 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 53 |
+
from ssl import SSLContext
|
| 54 |
+
else:
|
| 55 |
+
SSLContext = None
|
| 56 |
+
|
| 57 |
+
if PY_38:
|
| 58 |
+
from unittest import IsolatedAsyncioTestCase as TestCase
|
| 59 |
+
else:
|
| 60 |
+
from asynctest import TestCase # type: ignore[no-redef]
|
| 61 |
+
|
| 62 |
+
# SO_REUSEADDR is only safe on POSIX platforms (excluding cygwin); see the
# comment in get_port_socket below for the Windows caveat.
REUSE_ADDRESS = os.name == "posix" and sys.platform != "cygwin"


def get_unused_port_socket(
    host: str, family: socket.AddressFamily = socket.AF_INET
) -> socket.socket:
    """Return a TCP socket bound to an OS-assigned free port on *host*."""
    return get_port_socket(host, 0, family)


def get_port_socket(
    host: str, port: int, family: socket.AddressFamily
) -> socket.socket:
    """Create a TCP socket bound to ``(host, port)``."""
    s = socket.socket(family, socket.SOCK_STREAM)
    if REUSE_ADDRESS:
        # Windows has different semantics for SO_REUSEADDR,
        # so don't set it. Ref:
        # https://docs.microsoft.com/en-us/windows/win32/winsock/using-so-reuseaddr-and-so-exclusiveaddruse
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind((host, port))
    return s
|
| 82 |
+
|
| 83 |
+
|
| 84 |
+
def unused_port() -> int:
    """Return a port that is unused on the current host."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as probe:
        # Binding to port 0 makes the OS pick a currently free port.
        probe.bind(("127.0.0.1", 0))
        port = probe.getsockname()[1]
    return cast(int, port)
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class BaseTestServer(ABC):
    """Base class for test servers: socket setup, URL building, teardown.

    Subclasses supply :meth:`_make_runner` to build the concrete runner.
    """

    # Prevent pytest from collecting this class as a test case.
    __test__ = False

    def __init__(
        self,
        *,
        scheme: Union[str, object] = sentinel,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        skip_url_asserts: bool = False,
        socket_factory: Callable[
            [str, int, socket.AddressFamily], socket.socket
        ] = get_port_socket,
        **kwargs: Any,
    ) -> None:
        self._loop = loop
        self.runner = None  # type: Optional[BaseRunner]
        self._root = None  # type: Optional[URL]
        self.host = host
        self.port = port
        self._closed = False
        self.scheme = scheme
        self.skip_url_asserts = skip_url_asserts
        self.socket_factory = socket_factory

    async def start_server(
        self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any
    ) -> None:
        """Bind a socket and start serving; a no-op once already started."""
        if self.runner:
            # Already started.
            return
        self._loop = loop
        self._ssl = kwargs.pop("ssl", None)
        self.runner = await self._make_runner(**kwargs)
        await self.runner.setup()
        if not self.port:
            # Port 0 lets the OS assign a free port below.
            self.port = 0
        try:
            version = ipaddress.ip_address(self.host).version
        except ValueError:
            # Host is not an IP literal (e.g. a hostname): assume IPv4.
            version = 4
        family = socket.AF_INET6 if version == 6 else socket.AF_INET
        _sock = self.socket_factory(self.host, self.port, family)
        self.host, self.port = _sock.getsockname()[:2]
        site = SockSite(self.runner, sock=_sock, ssl_context=self._ssl)
        await site.start()
        # Re-read the port, which the OS may have assigned when binding to 0.
        server = site._server
        assert server is not None
        sockets = server.sockets
        assert sockets is not None
        self.port = sockets[0].getsockname()[1]
        if self.scheme is sentinel:
            if self._ssl:
                scheme = "https"
            else:
                scheme = "http"
            self.scheme = scheme
        self._root = URL(f"{self.scheme}://{self.host}:{self.port}")

    @abstractmethod  # pragma: no cover
    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        """Build the runner serving this test server (subclass hook)."""
        pass

    def make_url(self, path: str) -> URL:
        """Return an absolute URL for *path* on this server."""
        assert self._root is not None
        url = URL(path)
        if not self.skip_url_asserts:
            assert not url.is_absolute()
            return self._root.join(url)
        else:
            return URL(str(self._root) + path)

    @property
    def started(self) -> bool:
        # The runner only exists after start_server() succeeded.
        return self.runner is not None

    @property
    def closed(self) -> bool:
        return self._closed

    @property
    def handler(self) -> Server:
        # for backward compatibility
        # web.Server instance
        runner = self.runner
        assert runner is not None
        assert runner.server is not None
        return runner.server

    async def close(self) -> None:
        """Close all fixtures created by the test client.

        After that point, the TestClient is no longer usable.

        This is an idempotent function: running close multiple times
        will not have any additional effects.

        close is also run when the object is garbage collected, and on
        exit when used as a context manager.

        """
        if self.started and not self.closed:
            assert self.runner is not None
            await self.runner.cleanup()
            self._root = None
            self.port = None
            self._closed = True

    def __enter__(self) -> None:
        # Synchronous use is a programming error; require ``async with``.
        raise TypeError("Use async with instead")

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        pass  # pragma: no cover

    async def __aenter__(self) -> "BaseTestServer":
        await self.start_server(loop=self._loop)
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:
        await self.close()
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
class TestServer(BaseTestServer):
    """Test server wrapping a :class:`~aiohttp.web.Application`."""

    def __init__(
        self,
        app: Application,
        *,
        scheme: Union[str, object] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ):
        self.app = app
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, **kwargs: Any) -> BaseRunner:
        # Serve the wrapped application through a standard AppRunner.
        return AppRunner(self.app, **kwargs)
|
| 239 |
+
|
| 240 |
+
|
| 241 |
+
class RawTestServer(BaseTestServer):
    """Test server serving a bare low-level request handler."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        scheme: Union[str, object] = sentinel,
        host: str = "127.0.0.1",
        port: Optional[int] = None,
        **kwargs: Any,
    ) -> None:
        self._handler = handler
        super().__init__(scheme=scheme, host=host, port=port, **kwargs)

    async def _make_runner(self, debug: bool = True, **kwargs: Any) -> ServerRunner:
        # Wrap the raw handler in a low-level web.Server and serve it.
        srv = Server(self._handler, loop=self._loop, debug=debug, **kwargs)
        return ServerRunner(srv, debug=debug, **kwargs)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
class TestClient:
|
| 260 |
+
"""
|
| 261 |
+
A test client implementation.
|
| 262 |
+
|
| 263 |
+
To write functional tests for aiohttp based servers.
|
| 264 |
+
|
| 265 |
+
"""
|
| 266 |
+
|
| 267 |
+
__test__ = False
|
| 268 |
+
|
| 269 |
+
def __init__(
|
| 270 |
+
self,
|
| 271 |
+
server: BaseTestServer,
|
| 272 |
+
*,
|
| 273 |
+
cookie_jar: Optional[AbstractCookieJar] = None,
|
| 274 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 275 |
+
**kwargs: Any,
|
| 276 |
+
) -> None:
|
| 277 |
+
if not isinstance(server, BaseTestServer):
|
| 278 |
+
raise TypeError(
|
| 279 |
+
"server must be TestServer " "instance, found type: %r" % type(server)
|
| 280 |
+
)
|
| 281 |
+
self._server = server
|
| 282 |
+
self._loop = loop
|
| 283 |
+
if cookie_jar is None:
|
| 284 |
+
cookie_jar = aiohttp.CookieJar(unsafe=True, loop=loop)
|
| 285 |
+
self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs)
|
| 286 |
+
self._closed = False
|
| 287 |
+
self._responses = [] # type: List[ClientResponse]
|
| 288 |
+
self._websockets = [] # type: List[ClientWebSocketResponse]
|
| 289 |
+
|
| 290 |
+
async def start_server(self) -> None:
|
| 291 |
+
await self._server.start_server(loop=self._loop)
|
| 292 |
+
|
| 293 |
+
@property
|
| 294 |
+
def host(self) -> str:
|
| 295 |
+
return self._server.host
|
| 296 |
+
|
| 297 |
+
@property
|
| 298 |
+
def port(self) -> Optional[int]:
|
| 299 |
+
return self._server.port
|
| 300 |
+
|
| 301 |
+
@property
|
| 302 |
+
def server(self) -> BaseTestServer:
|
| 303 |
+
return self._server
|
| 304 |
+
|
| 305 |
+
@property
|
| 306 |
+
def app(self) -> Optional[Application]:
|
| 307 |
+
return cast(Optional[Application], getattr(self._server, "app", None))
|
| 308 |
+
|
| 309 |
+
@property
|
| 310 |
+
def session(self) -> ClientSession:
|
| 311 |
+
"""An internal aiohttp.ClientSession.
|
| 312 |
+
|
| 313 |
+
Unlike the methods on the TestClient, client session requests
|
| 314 |
+
do not automatically include the host in the url queried, and
|
| 315 |
+
will require an absolute path to the resource.
|
| 316 |
+
|
| 317 |
+
"""
|
| 318 |
+
return self._session
|
| 319 |
+
|
| 320 |
+
def make_url(self, path: str) -> URL:
|
| 321 |
+
return self._server.make_url(path)
|
| 322 |
+
|
| 323 |
+
async def _request(self, method: str, path: str, **kwargs: Any) -> ClientResponse:
|
| 324 |
+
resp = await self._session.request(method, self.make_url(path), **kwargs)
|
| 325 |
+
# save it to close later
|
| 326 |
+
self._responses.append(resp)
|
| 327 |
+
return resp
|
| 328 |
+
|
| 329 |
+
def request(self, method: str, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 330 |
+
"""Routes a request to tested http server.
|
| 331 |
+
|
| 332 |
+
The interface is identical to aiohttp.ClientSession.request,
|
| 333 |
+
except the loop kwarg is overridden by the instance used by the
|
| 334 |
+
test server.
|
| 335 |
+
|
| 336 |
+
"""
|
| 337 |
+
return _RequestContextManager(self._request(method, path, **kwargs))
|
| 338 |
+
|
| 339 |
+
def get(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 340 |
+
"""Perform an HTTP GET request."""
|
| 341 |
+
return _RequestContextManager(self._request(hdrs.METH_GET, path, **kwargs))
|
| 342 |
+
|
| 343 |
+
def post(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 344 |
+
"""Perform an HTTP POST request."""
|
| 345 |
+
return _RequestContextManager(self._request(hdrs.METH_POST, path, **kwargs))
|
| 346 |
+
|
| 347 |
+
def options(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 348 |
+
"""Perform an HTTP OPTIONS request."""
|
| 349 |
+
return _RequestContextManager(self._request(hdrs.METH_OPTIONS, path, **kwargs))
|
| 350 |
+
|
| 351 |
+
def head(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 352 |
+
"""Perform an HTTP HEAD request."""
|
| 353 |
+
return _RequestContextManager(self._request(hdrs.METH_HEAD, path, **kwargs))
|
| 354 |
+
|
| 355 |
+
def put(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 356 |
+
"""Perform an HTTP PUT request."""
|
| 357 |
+
return _RequestContextManager(self._request(hdrs.METH_PUT, path, **kwargs))
|
| 358 |
+
|
| 359 |
+
def patch(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 360 |
+
"""Perform an HTTP PATCH request."""
|
| 361 |
+
return _RequestContextManager(self._request(hdrs.METH_PATCH, path, **kwargs))
|
| 362 |
+
|
| 363 |
+
def delete(self, path: str, **kwargs: Any) -> _RequestContextManager:
|
| 364 |
+
"""Perform an HTTP PATCH request."""
|
| 365 |
+
return _RequestContextManager(self._request(hdrs.METH_DELETE, path, **kwargs))
|
| 366 |
+
|
| 367 |
+
def ws_connect(self, path: str, **kwargs: Any) -> _WSRequestContextManager:
|
| 368 |
+
"""Initiate websocket connection.
|
| 369 |
+
|
| 370 |
+
The api corresponds to aiohttp.ClientSession.ws_connect.
|
| 371 |
+
|
| 372 |
+
"""
|
| 373 |
+
return _WSRequestContextManager(self._ws_connect(path, **kwargs))
|
| 374 |
+
|
| 375 |
+
async def _ws_connect(self, path: str, **kwargs: Any) -> ClientWebSocketResponse:
|
| 376 |
+
ws = await self._session.ws_connect(self.make_url(path), **kwargs)
|
| 377 |
+
self._websockets.append(ws)
|
| 378 |
+
return ws
|
| 379 |
+
|
| 380 |
+
async def close(self) -> None:
    """Close all fixtures created by the test client.

    After that point, the TestClient is no longer usable.

    This is an idempotent function: running close multiple times
    will not have any additional effects.

    close is also run on exit when used as a(n) (asynchronous)
    context manager.
    """
    if self._closed:
        # Already torn down; nothing to do (idempotency guarantee).
        return
    for resp in self._responses:
        resp.close()
    for ws in self._websockets:
        await ws.close()
    await self._session.close()
    await self._server.close()
    self._closed = True
|
| 400 |
+
|
| 401 |
+
def __enter__(self) -> None:
    # Synchronous `with` is not supported: the client needs a running
    # event loop, so only `async with` is allowed.
    raise TypeError("Use async with instead")
|
| 403 |
+
|
| 404 |
+
def __exit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc: Optional[BaseException],
    tb: Optional[TracebackType],
) -> None:
    # __exit__ should exist in pair with __enter__ but never executed
    # (__enter__ always raises before a `with` body can run).
    pass  # pragma: no cover
|
| 412 |
+
|
| 413 |
+
async def __aenter__(self) -> "TestClient":
    # Entering the async context starts the underlying test server so
    # requests can be issued inside the `async with` body.
    await self.start_server()
    return self
|
| 416 |
+
|
| 417 |
+
async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc: Optional[BaseException],
    tb: Optional[TracebackType],
) -> None:
    # Always close on exit, whether or not the body raised.
    await self.close()
|
| 424 |
+
|
| 425 |
+
|
| 426 |
+
class AioHTTPTestCase(TestCase):
    """A base class to allow for unittest web applications using aiohttp.

    Provides the following:

    * self.client (aiohttp.test_utils.TestClient): an aiohttp test client.
    * self.loop (asyncio.BaseEventLoop): the event loop in which the
        application and server are running.
    * self.app (aiohttp.web.Application): the application returned by
        self.get_application()

    Note that the TestClient's methods are asynchronous: you have to
    execute function on the test client using asynchronous methods.
    """

    async def get_application(self) -> Application:
        """Get application.

        This method should be overridden
        to return the aiohttp.web.Application
        object to test.
        """
        # Default delegates to the legacy synchronous hook.
        return self.get_app()

    def get_app(self) -> Application:
        """Obsolete method used to constructing web application.

        Use .get_application() coroutine instead.
        """
        raise RuntimeError("Did you forget to define get_application()?")

    def setUp(self) -> None:
        # Prefer the currently running loop if one exists; fall back to the
        # policy's loop otherwise.
        try:
            self.loop = asyncio.get_running_loop()
        except (AttributeError, RuntimeError):  # AttributeError->py36
            self.loop = asyncio.get_event_loop_policy().get_event_loop()

        self.loop.run_until_complete(self.setUpAsync())

    async def setUpAsync(self) -> None:
        # Build the app, its server, and a client bound to that server.
        self.app = await self.get_application()
        self.server = await self.get_server(self.app)
        self.client = await self.get_client(self.server)

        await self.client.start_server()

    def tearDown(self) -> None:
        self.loop.run_until_complete(self.tearDownAsync())

    async def tearDownAsync(self) -> None:
        # Closing the client also shuts down its server and session.
        await self.client.close()

    async def get_server(self, app: Application) -> TestServer:
        """Return a TestServer instance."""
        return TestServer(app, loop=self.loop)

    async def get_client(self, server: TestServer) -> TestClient:
        """Return a TestClient instance."""
        return TestClient(server, loop=self.loop)
|
| 485 |
+
|
| 486 |
+
|
| 487 |
+
def unittest_run_loop(func: Any, *args: Any, **kwargs: Any) -> Any:
    """No-op decorator kept for backward compatibility.

    Historically required on asynchronous AioHTTPTestCase test
    methods; in aiohttp 3.8+ it does nothing except warn.
    """
    message = "Decorator `@unittest_run_loop` is no longer needed in aiohttp 3.8+"
    warnings.warn(message, DeprecationWarning, stacklevel=2)
    return func
|
| 499 |
+
|
| 500 |
+
|
| 501 |
+
# Type alias: a zero-argument callable producing a fresh event loop.
_LOOP_FACTORY = Callable[[], asyncio.AbstractEventLoop]
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
@contextlib.contextmanager
def loop_context(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop, fast: bool = False
) -> Iterator[asyncio.AbstractEventLoop]:
    """A contextmanager that creates an event_loop, for test purposes.

    Handles the creation and cleanup of a test loop.  ``fast`` skips
    the extra gc pass during teardown (see teardown_test_loop).

    Note: teardown only runs on normal exit; an exception in the
    ``with`` body propagates without closing the loop.
    """
    loop = setup_test_loop(loop_factory)
    yield loop
    teardown_test_loop(loop, fast=fast)
|
| 515 |
+
|
| 516 |
+
|
| 517 |
+
def setup_test_loop(
    loop_factory: _LOOP_FACTORY = asyncio.new_event_loop,
) -> asyncio.AbstractEventLoop:
    """Create and return an asyncio.BaseEventLoop instance.

    The caller should also call teardown_test_loop,
    once they are done with the loop.
    """
    loop = loop_factory()
    try:
        # Detect uvloop by module name; presumably its loops do not use the
        # stdlib child-watcher machinery — TODO confirm.
        module = loop.__class__.__module__
        skip_watcher = "uvloop" in module
    except AttributeError:  # pragma: no cover
        # Just in case
        skip_watcher = True
    asyncio.set_event_loop(loop)
    # Child watchers only exist on POSIX; skip on Windows and uvloop.
    if sys.platform != "win32" and not skip_watcher:
        policy = asyncio.get_event_loop_policy()
        watcher: asyncio.AbstractChildWatcher
        try:  # Python >= 3.8
            # Refs:
            # * https://github.com/pytest-dev/pytest-xdist/issues/620
            # * https://stackoverflow.com/a/58614689/595220
            # * https://bugs.python.org/issue35621
            # * https://github.com/python/cpython/pull/14344
            watcher = asyncio.ThreadedChildWatcher()
        except AttributeError:  # Python < 3.8
            watcher = asyncio.SafeChildWatcher()
        watcher.attach_loop(loop)
        with contextlib.suppress(NotImplementedError):
            policy.set_child_watcher(watcher)
    return loop
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
def teardown_test_loop(loop: asyncio.AbstractEventLoop, fast: bool = False) -> None:
    """Teardown and cleanup an event_loop created by setup_test_loop.

    ``fast`` skips the forced garbage collection pass.
    """
    if not loop.is_closed():
        # Let the loop run once more so the stop callback is processed,
        # then close it for good.
        loop.call_soon(loop.stop)
        loop.run_forever()
        loop.close()

    if not fast:
        # Force a collection so loop-held resources are reclaimed promptly.
        gc.collect()

    asyncio.set_event_loop(None)
|
| 563 |
+
|
| 564 |
+
|
| 565 |
+
def _create_app_mock() -> mock.MagicMock:
    """Build a MagicMock standing in for a web Application.

    The mock supports item access backed by a private dict and exposes a
    frozen on_response_prepare signal.
    """

    def getter(app: Any, key: str) -> Any:
        return app.__app_dict[key]

    def setter(app: Any, key: str, value: Any) -> None:
        app.__app_dict[key] = value

    app = mock.MagicMock()
    app.__app_dict = {}
    app.__getitem__ = getter
    app.__setitem__ = setter

    app._debug = False
    # Frozen so request handling sees the same immutable signal a real
    # (started) Application would expose.
    app.on_response_prepare = Signal(app)
    app.on_response_prepare.freeze()
    return app
|
| 581 |
+
|
| 582 |
+
|
| 583 |
+
def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock:
|
| 584 |
+
transport = mock.Mock()
|
| 585 |
+
|
| 586 |
+
def get_extra_info(key: str) -> Optional[SSLContext]:
|
| 587 |
+
if key == "sslcontext":
|
| 588 |
+
return sslcontext
|
| 589 |
+
else:
|
| 590 |
+
return None
|
| 591 |
+
|
| 592 |
+
transport.get_extra_info.side_effect = get_extra_info
|
| 593 |
+
return transport
|
| 594 |
+
|
| 595 |
+
|
| 596 |
+
def make_mocked_request(
    method: str,
    path: str,
    headers: Any = None,
    *,
    match_info: Any = sentinel,
    version: HttpVersion = HttpVersion(1, 1),
    closing: bool = False,
    app: Any = None,
    writer: Any = sentinel,
    protocol: Any = sentinel,
    transport: Any = sentinel,
    payload: Any = sentinel,
    sslcontext: Optional[SSLContext] = None,
    client_max_size: int = 1024 ** 2,
    loop: Any = ...,
) -> Request:
    """Create a mocked web.Request for testing purposes.

    Useful in unit tests, when spinning full web server is overkill or
    specific conditions and errors are hard to trigger.

    Every keyword-only argument left at its default is replaced by a
    suitable mock; pass a real object to take control of that piece.
    """
    task = mock.Mock()
    if loop is ...:
        # No loop supplied: use a mock whose create_future returns a
        # plain tuple placeholder.
        loop = mock.Mock()
        loop.create_future.return_value = ()

    # HTTP/1.0 and earlier do not keep connections alive by default.
    if version < HttpVersion(1, 1):
        closing = True

    if headers:
        headers = CIMultiDictProxy(CIMultiDict(headers))
        # Raw headers are the utf-8 encoded (name, value) byte pairs.
        raw_hdrs = tuple(
            (k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
        )
    else:
        headers = CIMultiDictProxy(CIMultiDict())
        raw_hdrs = ()

    chunked = "chunked" in headers.get(hdrs.TRANSFER_ENCODING, "").lower()

    message = RawRequestMessage(
        method,
        path,
        version,
        headers,
        raw_hdrs,
        closing,
        None,
        False,
        chunked,
        URL(path),
    )
    if app is None:
        app = _create_app_mock()

    if transport is sentinel:
        transport = _create_transport(sslcontext)

    if protocol is sentinel:
        protocol = mock.Mock()
        protocol.transport = transport

    if writer is sentinel:
        # Mock writer: all write operations succeed and return None.
        writer = mock.Mock()
        writer.write_headers = make_mocked_coro(None)
        writer.write = make_mocked_coro(None)
        writer.write_eof = make_mocked_coro(None)
        writer.drain = make_mocked_coro(None)
        writer.transport = transport

    # Wire transport/writer onto the protocol even when the caller
    # supplied its own protocol object.
    protocol.transport = transport
    protocol.writer = writer

    if payload is sentinel:
        payload = mock.Mock()

    req = Request(
        message, payload, protocol, writer, task, loop, client_max_size=client_max_size
    )

    match_info = UrlMappingMatchInfo(
        {} if match_info is sentinel else match_info, mock.Mock()
    )
    match_info.add_app(app)
    req._match_info = match_info

    return req
|
| 684 |
+
|
| 685 |
+
|
| 686 |
+
def make_mocked_coro(
    return_value: Any = sentinel, raise_exception: Any = sentinel
) -> Any:
    """Create a coroutine mock.

    The returned mock records calls like any Mock; awaiting it raises
    *raise_exception* if given, otherwise yields *return_value* (an
    awaitable return_value is awaited, and None is returned).
    """

    async def stub(*args: Any, **kwargs: Any) -> Any:
        if raise_exception is not sentinel:
            raise raise_exception
        if inspect.isawaitable(return_value):
            # Awaitable payloads are driven to completion; the result is
            # deliberately discarded (None is returned).
            await return_value
            return None
        return return_value

    return mock.Mock(wraps=stub)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web.py
ADDED
|
@@ -0,0 +1,586 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import socket
|
| 4 |
+
import sys
|
| 5 |
+
from argparse import ArgumentParser
|
| 6 |
+
from collections.abc import Iterable
|
| 7 |
+
from importlib import import_module
|
| 8 |
+
from typing import (
|
| 9 |
+
Any,
|
| 10 |
+
Awaitable,
|
| 11 |
+
Callable,
|
| 12 |
+
Iterable as TypingIterable,
|
| 13 |
+
List,
|
| 14 |
+
Optional,
|
| 15 |
+
Set,
|
| 16 |
+
Type,
|
| 17 |
+
Union,
|
| 18 |
+
cast,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from .abc import AbstractAccessLogger
|
| 22 |
+
from .helpers import all_tasks
|
| 23 |
+
from .log import access_logger
|
| 24 |
+
from .web_app import Application as Application, CleanupError as CleanupError
|
| 25 |
+
from .web_exceptions import (
|
| 26 |
+
HTTPAccepted as HTTPAccepted,
|
| 27 |
+
HTTPBadGateway as HTTPBadGateway,
|
| 28 |
+
HTTPBadRequest as HTTPBadRequest,
|
| 29 |
+
HTTPClientError as HTTPClientError,
|
| 30 |
+
HTTPConflict as HTTPConflict,
|
| 31 |
+
HTTPCreated as HTTPCreated,
|
| 32 |
+
HTTPError as HTTPError,
|
| 33 |
+
HTTPException as HTTPException,
|
| 34 |
+
HTTPExpectationFailed as HTTPExpectationFailed,
|
| 35 |
+
HTTPFailedDependency as HTTPFailedDependency,
|
| 36 |
+
HTTPForbidden as HTTPForbidden,
|
| 37 |
+
HTTPFound as HTTPFound,
|
| 38 |
+
HTTPGatewayTimeout as HTTPGatewayTimeout,
|
| 39 |
+
HTTPGone as HTTPGone,
|
| 40 |
+
HTTPInsufficientStorage as HTTPInsufficientStorage,
|
| 41 |
+
HTTPInternalServerError as HTTPInternalServerError,
|
| 42 |
+
HTTPLengthRequired as HTTPLengthRequired,
|
| 43 |
+
HTTPMethodNotAllowed as HTTPMethodNotAllowed,
|
| 44 |
+
HTTPMisdirectedRequest as HTTPMisdirectedRequest,
|
| 45 |
+
HTTPMovedPermanently as HTTPMovedPermanently,
|
| 46 |
+
HTTPMultipleChoices as HTTPMultipleChoices,
|
| 47 |
+
HTTPNetworkAuthenticationRequired as HTTPNetworkAuthenticationRequired,
|
| 48 |
+
HTTPNoContent as HTTPNoContent,
|
| 49 |
+
HTTPNonAuthoritativeInformation as HTTPNonAuthoritativeInformation,
|
| 50 |
+
HTTPNotAcceptable as HTTPNotAcceptable,
|
| 51 |
+
HTTPNotExtended as HTTPNotExtended,
|
| 52 |
+
HTTPNotFound as HTTPNotFound,
|
| 53 |
+
HTTPNotImplemented as HTTPNotImplemented,
|
| 54 |
+
HTTPNotModified as HTTPNotModified,
|
| 55 |
+
HTTPOk as HTTPOk,
|
| 56 |
+
HTTPPartialContent as HTTPPartialContent,
|
| 57 |
+
HTTPPaymentRequired as HTTPPaymentRequired,
|
| 58 |
+
HTTPPermanentRedirect as HTTPPermanentRedirect,
|
| 59 |
+
HTTPPreconditionFailed as HTTPPreconditionFailed,
|
| 60 |
+
HTTPPreconditionRequired as HTTPPreconditionRequired,
|
| 61 |
+
HTTPProxyAuthenticationRequired as HTTPProxyAuthenticationRequired,
|
| 62 |
+
HTTPRedirection as HTTPRedirection,
|
| 63 |
+
HTTPRequestEntityTooLarge as HTTPRequestEntityTooLarge,
|
| 64 |
+
HTTPRequestHeaderFieldsTooLarge as HTTPRequestHeaderFieldsTooLarge,
|
| 65 |
+
HTTPRequestRangeNotSatisfiable as HTTPRequestRangeNotSatisfiable,
|
| 66 |
+
HTTPRequestTimeout as HTTPRequestTimeout,
|
| 67 |
+
HTTPRequestURITooLong as HTTPRequestURITooLong,
|
| 68 |
+
HTTPResetContent as HTTPResetContent,
|
| 69 |
+
HTTPSeeOther as HTTPSeeOther,
|
| 70 |
+
HTTPServerError as HTTPServerError,
|
| 71 |
+
HTTPServiceUnavailable as HTTPServiceUnavailable,
|
| 72 |
+
HTTPSuccessful as HTTPSuccessful,
|
| 73 |
+
HTTPTemporaryRedirect as HTTPTemporaryRedirect,
|
| 74 |
+
HTTPTooManyRequests as HTTPTooManyRequests,
|
| 75 |
+
HTTPUnauthorized as HTTPUnauthorized,
|
| 76 |
+
HTTPUnavailableForLegalReasons as HTTPUnavailableForLegalReasons,
|
| 77 |
+
HTTPUnprocessableEntity as HTTPUnprocessableEntity,
|
| 78 |
+
HTTPUnsupportedMediaType as HTTPUnsupportedMediaType,
|
| 79 |
+
HTTPUpgradeRequired as HTTPUpgradeRequired,
|
| 80 |
+
HTTPUseProxy as HTTPUseProxy,
|
| 81 |
+
HTTPVariantAlsoNegotiates as HTTPVariantAlsoNegotiates,
|
| 82 |
+
HTTPVersionNotSupported as HTTPVersionNotSupported,
|
| 83 |
+
)
|
| 84 |
+
from .web_fileresponse import FileResponse as FileResponse
|
| 85 |
+
from .web_log import AccessLogger
|
| 86 |
+
from .web_middlewares import (
|
| 87 |
+
middleware as middleware,
|
| 88 |
+
normalize_path_middleware as normalize_path_middleware,
|
| 89 |
+
)
|
| 90 |
+
from .web_protocol import (
|
| 91 |
+
PayloadAccessError as PayloadAccessError,
|
| 92 |
+
RequestHandler as RequestHandler,
|
| 93 |
+
RequestPayloadError as RequestPayloadError,
|
| 94 |
+
)
|
| 95 |
+
from .web_request import (
|
| 96 |
+
BaseRequest as BaseRequest,
|
| 97 |
+
FileField as FileField,
|
| 98 |
+
Request as Request,
|
| 99 |
+
)
|
| 100 |
+
from .web_response import (
|
| 101 |
+
ContentCoding as ContentCoding,
|
| 102 |
+
Response as Response,
|
| 103 |
+
StreamResponse as StreamResponse,
|
| 104 |
+
json_response as json_response,
|
| 105 |
+
)
|
| 106 |
+
from .web_routedef import (
|
| 107 |
+
AbstractRouteDef as AbstractRouteDef,
|
| 108 |
+
RouteDef as RouteDef,
|
| 109 |
+
RouteTableDef as RouteTableDef,
|
| 110 |
+
StaticDef as StaticDef,
|
| 111 |
+
delete as delete,
|
| 112 |
+
get as get,
|
| 113 |
+
head as head,
|
| 114 |
+
options as options,
|
| 115 |
+
patch as patch,
|
| 116 |
+
post as post,
|
| 117 |
+
put as put,
|
| 118 |
+
route as route,
|
| 119 |
+
static as static,
|
| 120 |
+
view as view,
|
| 121 |
+
)
|
| 122 |
+
from .web_runner import (
|
| 123 |
+
AppRunner as AppRunner,
|
| 124 |
+
BaseRunner as BaseRunner,
|
| 125 |
+
BaseSite as BaseSite,
|
| 126 |
+
GracefulExit as GracefulExit,
|
| 127 |
+
NamedPipeSite as NamedPipeSite,
|
| 128 |
+
ServerRunner as ServerRunner,
|
| 129 |
+
SockSite as SockSite,
|
| 130 |
+
TCPSite as TCPSite,
|
| 131 |
+
UnixSite as UnixSite,
|
| 132 |
+
)
|
| 133 |
+
from .web_server import Server as Server
|
| 134 |
+
from .web_urldispatcher import (
|
| 135 |
+
AbstractResource as AbstractResource,
|
| 136 |
+
AbstractRoute as AbstractRoute,
|
| 137 |
+
DynamicResource as DynamicResource,
|
| 138 |
+
PlainResource as PlainResource,
|
| 139 |
+
Resource as Resource,
|
| 140 |
+
ResourceRoute as ResourceRoute,
|
| 141 |
+
StaticResource as StaticResource,
|
| 142 |
+
UrlDispatcher as UrlDispatcher,
|
| 143 |
+
UrlMappingMatchInfo as UrlMappingMatchInfo,
|
| 144 |
+
View as View,
|
| 145 |
+
)
|
| 146 |
+
from .web_ws import (
|
| 147 |
+
WebSocketReady as WebSocketReady,
|
| 148 |
+
WebSocketResponse as WebSocketResponse,
|
| 149 |
+
WSMsgType as WSMsgType,
|
| 150 |
+
)
|
| 151 |
+
|
| 152 |
+
__all__ = (
|
| 153 |
+
# web_app
|
| 154 |
+
"Application",
|
| 155 |
+
"CleanupError",
|
| 156 |
+
# web_exceptions
|
| 157 |
+
"HTTPAccepted",
|
| 158 |
+
"HTTPBadGateway",
|
| 159 |
+
"HTTPBadRequest",
|
| 160 |
+
"HTTPClientError",
|
| 161 |
+
"HTTPConflict",
|
| 162 |
+
"HTTPCreated",
|
| 163 |
+
"HTTPError",
|
| 164 |
+
"HTTPException",
|
| 165 |
+
"HTTPExpectationFailed",
|
| 166 |
+
"HTTPFailedDependency",
|
| 167 |
+
"HTTPForbidden",
|
| 168 |
+
"HTTPFound",
|
| 169 |
+
"HTTPGatewayTimeout",
|
| 170 |
+
"HTTPGone",
|
| 171 |
+
"HTTPInsufficientStorage",
|
| 172 |
+
"HTTPInternalServerError",
|
| 173 |
+
"HTTPLengthRequired",
|
| 174 |
+
"HTTPMethodNotAllowed",
|
| 175 |
+
"HTTPMisdirectedRequest",
|
| 176 |
+
"HTTPMovedPermanently",
|
| 177 |
+
"HTTPMultipleChoices",
|
| 178 |
+
"HTTPNetworkAuthenticationRequired",
|
| 179 |
+
"HTTPNoContent",
|
| 180 |
+
"HTTPNonAuthoritativeInformation",
|
| 181 |
+
"HTTPNotAcceptable",
|
| 182 |
+
"HTTPNotExtended",
|
| 183 |
+
"HTTPNotFound",
|
| 184 |
+
"HTTPNotImplemented",
|
| 185 |
+
"HTTPNotModified",
|
| 186 |
+
"HTTPOk",
|
| 187 |
+
"HTTPPartialContent",
|
| 188 |
+
"HTTPPaymentRequired",
|
| 189 |
+
"HTTPPermanentRedirect",
|
| 190 |
+
"HTTPPreconditionFailed",
|
| 191 |
+
"HTTPPreconditionRequired",
|
| 192 |
+
"HTTPProxyAuthenticationRequired",
|
| 193 |
+
"HTTPRedirection",
|
| 194 |
+
"HTTPRequestEntityTooLarge",
|
| 195 |
+
"HTTPRequestHeaderFieldsTooLarge",
|
| 196 |
+
"HTTPRequestRangeNotSatisfiable",
|
| 197 |
+
"HTTPRequestTimeout",
|
| 198 |
+
"HTTPRequestURITooLong",
|
| 199 |
+
"HTTPResetContent",
|
| 200 |
+
"HTTPSeeOther",
|
| 201 |
+
"HTTPServerError",
|
| 202 |
+
"HTTPServiceUnavailable",
|
| 203 |
+
"HTTPSuccessful",
|
| 204 |
+
"HTTPTemporaryRedirect",
|
| 205 |
+
"HTTPTooManyRequests",
|
| 206 |
+
"HTTPUnauthorized",
|
| 207 |
+
"HTTPUnavailableForLegalReasons",
|
| 208 |
+
"HTTPUnprocessableEntity",
|
| 209 |
+
"HTTPUnsupportedMediaType",
|
| 210 |
+
"HTTPUpgradeRequired",
|
| 211 |
+
"HTTPUseProxy",
|
| 212 |
+
"HTTPVariantAlsoNegotiates",
|
| 213 |
+
"HTTPVersionNotSupported",
|
| 214 |
+
# web_fileresponse
|
| 215 |
+
"FileResponse",
|
| 216 |
+
# web_middlewares
|
| 217 |
+
"middleware",
|
| 218 |
+
"normalize_path_middleware",
|
| 219 |
+
# web_protocol
|
| 220 |
+
"PayloadAccessError",
|
| 221 |
+
"RequestHandler",
|
| 222 |
+
"RequestPayloadError",
|
| 223 |
+
# web_request
|
| 224 |
+
"BaseRequest",
|
| 225 |
+
"FileField",
|
| 226 |
+
"Request",
|
| 227 |
+
# web_response
|
| 228 |
+
"ContentCoding",
|
| 229 |
+
"Response",
|
| 230 |
+
"StreamResponse",
|
| 231 |
+
"json_response",
|
| 232 |
+
# web_routedef
|
| 233 |
+
"AbstractRouteDef",
|
| 234 |
+
"RouteDef",
|
| 235 |
+
"RouteTableDef",
|
| 236 |
+
"StaticDef",
|
| 237 |
+
"delete",
|
| 238 |
+
"get",
|
| 239 |
+
"head",
|
| 240 |
+
"options",
|
| 241 |
+
"patch",
|
| 242 |
+
"post",
|
| 243 |
+
"put",
|
| 244 |
+
"route",
|
| 245 |
+
"static",
|
| 246 |
+
"view",
|
| 247 |
+
# web_runner
|
| 248 |
+
"AppRunner",
|
| 249 |
+
"BaseRunner",
|
| 250 |
+
"BaseSite",
|
| 251 |
+
"GracefulExit",
|
| 252 |
+
"ServerRunner",
|
| 253 |
+
"SockSite",
|
| 254 |
+
"TCPSite",
|
| 255 |
+
"UnixSite",
|
| 256 |
+
"NamedPipeSite",
|
| 257 |
+
# web_server
|
| 258 |
+
"Server",
|
| 259 |
+
# web_urldispatcher
|
| 260 |
+
"AbstractResource",
|
| 261 |
+
"AbstractRoute",
|
| 262 |
+
"DynamicResource",
|
| 263 |
+
"PlainResource",
|
| 264 |
+
"Resource",
|
| 265 |
+
"ResourceRoute",
|
| 266 |
+
"StaticResource",
|
| 267 |
+
"UrlDispatcher",
|
| 268 |
+
"UrlMappingMatchInfo",
|
| 269 |
+
"View",
|
| 270 |
+
# web_ws
|
| 271 |
+
"WebSocketReady",
|
| 272 |
+
"WebSocketResponse",
|
| 273 |
+
"WSMsgType",
|
| 274 |
+
# web
|
| 275 |
+
"run_app",
|
| 276 |
+
)
|
| 277 |
+
|
| 278 |
+
|
| 279 |
+
try:
    from ssl import SSLContext
except ImportError:  # pragma: no cover
    # ssl module may be absent on minimal builds; fall back to Any so the
    # annotations below still evaluate.
    SSLContext = Any  # type: ignore[misc,assignment]

# One or more host names to bind TCP sites to.
HostSequence = TypingIterable[str]
|
| 285 |
+
|
| 286 |
+
|
| 287 |
+
async def _run_app(
    app: Union[Application, Awaitable[Application]],
    *,
    host: Optional[Union[str, HostSequence]] = None,
    port: Optional[int] = None,
    path: Optional[str] = None,
    sock: Optional[socket.socket] = None,
    shutdown_timeout: float = 60.0,
    keepalive_timeout: float = 75.0,
    ssl_context: Optional[SSLContext] = None,
    print: Callable[..., None] = print,
    backlog: int = 128,
    access_log_class: Type[AbstractAccessLogger] = AccessLogger,
    access_log_format: str = AccessLogger.LOG_FORMAT,
    access_log: Optional[logging.Logger] = access_logger,
    handle_signals: bool = True,
    reuse_address: Optional[bool] = None,
    reuse_port: Optional[bool] = None,
) -> None:
    """Set up sites for *app* and serve until cancelled.

    Internal function that does the actual dirty work of running the
    application; ``run_app`` drives it on an event loop.
    """
    # The caller may pass a coroutine producing the Application.
    if asyncio.iscoroutine(app):
        app = await app  # type: ignore[misc]

    app = cast(Application, app)

    runner = AppRunner(
        app,
        handle_signals=handle_signals,
        access_log_class=access_log_class,
        access_log_format=access_log_format,
        access_log=access_log,
        keepalive_timeout=keepalive_timeout,
    )

    await runner.setup()

    sites = []  # type: List[BaseSite]

    try:
        if host is not None:
            # A single host string → one TCP site; an iterable → one per host.
            if isinstance(host, (str, bytes, bytearray, memoryview)):
                sites.append(
                    TCPSite(
                        runner,
                        host,
                        port,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                        reuse_address=reuse_address,
                        reuse_port=reuse_port,
                    )
                )
            else:
                for h in host:
                    sites.append(
                        TCPSite(
                            runner,
                            h,
                            port,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                            reuse_address=reuse_address,
                            reuse_port=reuse_port,
                        )
                    )
        # NOTE precedence: (path is None and sock is None) or port is not None
        # — a default TCP site is created when nothing else was requested,
        # or whenever an explicit port was given.
        elif path is None and sock is None or port is not None:
            sites.append(
                TCPSite(
                    runner,
                    port=port,
                    shutdown_timeout=shutdown_timeout,
                    ssl_context=ssl_context,
                    backlog=backlog,
                    reuse_address=reuse_address,
                    reuse_port=reuse_port,
                )
            )

        if path is not None:
            # Unix-domain socket site(s); again single string vs iterable.
            if isinstance(path, (str, bytes, bytearray, memoryview)):
                sites.append(
                    UnixSite(
                        runner,
                        path,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for p in path:
                    sites.append(
                        UnixSite(
                            runner,
                            p,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )

        if sock is not None:
            # Pre-created socket(s) supplied by the caller.
            if not isinstance(sock, Iterable):
                sites.append(
                    SockSite(
                        runner,
                        sock,
                        shutdown_timeout=shutdown_timeout,
                        ssl_context=ssl_context,
                        backlog=backlog,
                    )
                )
            else:
                for s in sock:
                    sites.append(
                        SockSite(
                            runner,
                            s,
                            shutdown_timeout=shutdown_timeout,
                            ssl_context=ssl_context,
                            backlog=backlog,
                        )
                    )
        for site in sites:
            await site.start()

        if print:  # pragma: no branch
            names = sorted(str(s.name) for s in runner.sites)
            print(
                "======== Running on {} ========\n"
                "(Press CTRL+C to quit)".format(", ".join(names))
            )

        # sleep forever by 1 hour intervals,
        # on Windows before Python 3.8 wake up every 1 second to handle
        # Ctrl+C smoothly
        if sys.platform == "win32" and sys.version_info < (3, 8):
            delay = 1
        else:
            delay = 3600

        while True:
            await asyncio.sleep(delay)
    finally:
        # Always tear the runner (and all its sites) down, even when the
        # serving loop is cancelled.
        await runner.cleanup()
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
def _cancel_tasks(
|
| 437 |
+
to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop
|
| 438 |
+
) -> None:
|
| 439 |
+
if not to_cancel:
|
| 440 |
+
return
|
| 441 |
+
|
| 442 |
+
for task in to_cancel:
|
| 443 |
+
task.cancel()
|
| 444 |
+
|
| 445 |
+
loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
|
| 446 |
+
|
| 447 |
+
for task in to_cancel:
|
| 448 |
+
if task.cancelled():
|
| 449 |
+
continue
|
| 450 |
+
if task.exception() is not None:
|
| 451 |
+
loop.call_exception_handler(
|
| 452 |
+
{
|
| 453 |
+
"message": "unhandled exception during asyncio.run() shutdown",
|
| 454 |
+
"exception": task.exception(),
|
| 455 |
+
"task": task,
|
| 456 |
+
}
|
| 457 |
+
)
|
| 458 |
+
|
| 459 |
+
|
| 460 |
+
def run_app(
|
| 461 |
+
app: Union[Application, Awaitable[Application]],
|
| 462 |
+
*,
|
| 463 |
+
host: Optional[Union[str, HostSequence]] = None,
|
| 464 |
+
port: Optional[int] = None,
|
| 465 |
+
path: Optional[str] = None,
|
| 466 |
+
sock: Optional[socket.socket] = None,
|
| 467 |
+
shutdown_timeout: float = 60.0,
|
| 468 |
+
keepalive_timeout: float = 75.0,
|
| 469 |
+
ssl_context: Optional[SSLContext] = None,
|
| 470 |
+
print: Callable[..., None] = print,
|
| 471 |
+
backlog: int = 128,
|
| 472 |
+
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
| 473 |
+
access_log_format: str = AccessLogger.LOG_FORMAT,
|
| 474 |
+
access_log: Optional[logging.Logger] = access_logger,
|
| 475 |
+
handle_signals: bool = True,
|
| 476 |
+
reuse_address: Optional[bool] = None,
|
| 477 |
+
reuse_port: Optional[bool] = None,
|
| 478 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 479 |
+
) -> None:
|
| 480 |
+
"""Run an app locally"""
|
| 481 |
+
if loop is None:
|
| 482 |
+
loop = asyncio.new_event_loop()
|
| 483 |
+
|
| 484 |
+
# Configure if and only if in debugging mode and using the default logger
|
| 485 |
+
if loop.get_debug() and access_log and access_log.name == "aiohttp.access":
|
| 486 |
+
if access_log.level == logging.NOTSET:
|
| 487 |
+
access_log.setLevel(logging.DEBUG)
|
| 488 |
+
if not access_log.hasHandlers():
|
| 489 |
+
access_log.addHandler(logging.StreamHandler())
|
| 490 |
+
|
| 491 |
+
main_task = loop.create_task(
|
| 492 |
+
_run_app(
|
| 493 |
+
app,
|
| 494 |
+
host=host,
|
| 495 |
+
port=port,
|
| 496 |
+
path=path,
|
| 497 |
+
sock=sock,
|
| 498 |
+
shutdown_timeout=shutdown_timeout,
|
| 499 |
+
keepalive_timeout=keepalive_timeout,
|
| 500 |
+
ssl_context=ssl_context,
|
| 501 |
+
print=print,
|
| 502 |
+
backlog=backlog,
|
| 503 |
+
access_log_class=access_log_class,
|
| 504 |
+
access_log_format=access_log_format,
|
| 505 |
+
access_log=access_log,
|
| 506 |
+
handle_signals=handle_signals,
|
| 507 |
+
reuse_address=reuse_address,
|
| 508 |
+
reuse_port=reuse_port,
|
| 509 |
+
)
|
| 510 |
+
)
|
| 511 |
+
|
| 512 |
+
try:
|
| 513 |
+
asyncio.set_event_loop(loop)
|
| 514 |
+
loop.run_until_complete(main_task)
|
| 515 |
+
except (GracefulExit, KeyboardInterrupt): # pragma: no cover
|
| 516 |
+
pass
|
| 517 |
+
finally:
|
| 518 |
+
_cancel_tasks({main_task}, loop)
|
| 519 |
+
_cancel_tasks(all_tasks(loop), loop)
|
| 520 |
+
loop.run_until_complete(loop.shutdown_asyncgens())
|
| 521 |
+
loop.close()
|
| 522 |
+
|
| 523 |
+
|
| 524 |
+
def main(argv: List[str]) -> None:
|
| 525 |
+
arg_parser = ArgumentParser(
|
| 526 |
+
description="aiohttp.web Application server", prog="aiohttp.web"
|
| 527 |
+
)
|
| 528 |
+
arg_parser.add_argument(
|
| 529 |
+
"entry_func",
|
| 530 |
+
help=(
|
| 531 |
+
"Callable returning the `aiohttp.web.Application` instance to "
|
| 532 |
+
"run. Should be specified in the 'module:function' syntax."
|
| 533 |
+
),
|
| 534 |
+
metavar="entry-func",
|
| 535 |
+
)
|
| 536 |
+
arg_parser.add_argument(
|
| 537 |
+
"-H",
|
| 538 |
+
"--hostname",
|
| 539 |
+
help="TCP/IP hostname to serve on (default: %(default)r)",
|
| 540 |
+
default="localhost",
|
| 541 |
+
)
|
| 542 |
+
arg_parser.add_argument(
|
| 543 |
+
"-P",
|
| 544 |
+
"--port",
|
| 545 |
+
help="TCP/IP port to serve on (default: %(default)r)",
|
| 546 |
+
type=int,
|
| 547 |
+
default="8080",
|
| 548 |
+
)
|
| 549 |
+
arg_parser.add_argument(
|
| 550 |
+
"-U",
|
| 551 |
+
"--path",
|
| 552 |
+
help="Unix file system path to serve on. Specifying a path will cause "
|
| 553 |
+
"hostname and port arguments to be ignored.",
|
| 554 |
+
)
|
| 555 |
+
args, extra_argv = arg_parser.parse_known_args(argv)
|
| 556 |
+
|
| 557 |
+
# Import logic
|
| 558 |
+
mod_str, _, func_str = args.entry_func.partition(":")
|
| 559 |
+
if not func_str or not mod_str:
|
| 560 |
+
arg_parser.error("'entry-func' not in 'module:function' syntax")
|
| 561 |
+
if mod_str.startswith("."):
|
| 562 |
+
arg_parser.error("relative module names not supported")
|
| 563 |
+
try:
|
| 564 |
+
module = import_module(mod_str)
|
| 565 |
+
except ImportError as ex:
|
| 566 |
+
arg_parser.error(f"unable to import {mod_str}: {ex}")
|
| 567 |
+
try:
|
| 568 |
+
func = getattr(module, func_str)
|
| 569 |
+
except AttributeError:
|
| 570 |
+
arg_parser.error(f"module {mod_str!r} has no attribute {func_str!r}")
|
| 571 |
+
|
| 572 |
+
# Compatibility logic
|
| 573 |
+
if args.path is not None and not hasattr(socket, "AF_UNIX"):
|
| 574 |
+
arg_parser.error(
|
| 575 |
+
"file system paths not supported by your operating" " environment"
|
| 576 |
+
)
|
| 577 |
+
|
| 578 |
+
logging.basicConfig(level=logging.DEBUG)
|
| 579 |
+
|
| 580 |
+
app = func(extra_argv)
|
| 581 |
+
run_app(app, host=args.hostname, port=args.port, path=args.path)
|
| 582 |
+
arg_parser.exit(message="Stopped\n")
|
| 583 |
+
|
| 584 |
+
|
| 585 |
+
if __name__ == "__main__": # pragma: no branch
|
| 586 |
+
main(sys.argv[1:]) # pragma: no cover
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_app.py
ADDED
|
@@ -0,0 +1,557 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import logging
|
| 3 |
+
import warnings
|
| 4 |
+
from functools import partial, update_wrapper
|
| 5 |
+
from typing import (
|
| 6 |
+
TYPE_CHECKING,
|
| 7 |
+
Any,
|
| 8 |
+
AsyncIterator,
|
| 9 |
+
Awaitable,
|
| 10 |
+
Callable,
|
| 11 |
+
Dict,
|
| 12 |
+
Iterable,
|
| 13 |
+
Iterator,
|
| 14 |
+
List,
|
| 15 |
+
Mapping,
|
| 16 |
+
MutableMapping,
|
| 17 |
+
Optional,
|
| 18 |
+
Sequence,
|
| 19 |
+
Tuple,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from aiosignal import Signal
|
| 26 |
+
from frozenlist import FrozenList
|
| 27 |
+
|
| 28 |
+
from . import hdrs
|
| 29 |
+
from .abc import (
|
| 30 |
+
AbstractAccessLogger,
|
| 31 |
+
AbstractMatchInfo,
|
| 32 |
+
AbstractRouter,
|
| 33 |
+
AbstractStreamWriter,
|
| 34 |
+
)
|
| 35 |
+
from .helpers import DEBUG
|
| 36 |
+
from .http_parser import RawRequestMessage
|
| 37 |
+
from .log import web_logger
|
| 38 |
+
from .streams import StreamReader
|
| 39 |
+
from .web_log import AccessLogger
|
| 40 |
+
from .web_middlewares import _fix_request_current_app
|
| 41 |
+
from .web_protocol import RequestHandler
|
| 42 |
+
from .web_request import Request
|
| 43 |
+
from .web_response import StreamResponse
|
| 44 |
+
from .web_routedef import AbstractRouteDef
|
| 45 |
+
from .web_server import Server
|
| 46 |
+
from .web_urldispatcher import (
|
| 47 |
+
AbstractResource,
|
| 48 |
+
AbstractRoute,
|
| 49 |
+
Domain,
|
| 50 |
+
MaskDomain,
|
| 51 |
+
MatchedSubAppResource,
|
| 52 |
+
PrefixedSubAppResource,
|
| 53 |
+
UrlDispatcher,
|
| 54 |
+
)
|
| 55 |
+
|
| 56 |
+
__all__ = ("Application", "CleanupError")
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 60 |
+
from .typedefs import Handler
|
| 61 |
+
|
| 62 |
+
_AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
|
| 63 |
+
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
|
| 64 |
+
_Middleware = Union[
|
| 65 |
+
Callable[[Request, Handler], Awaitable[StreamResponse]],
|
| 66 |
+
Callable[["Application", Handler], Awaitable[Handler]], # old-style
|
| 67 |
+
]
|
| 68 |
+
_Middlewares = FrozenList[_Middleware]
|
| 69 |
+
_MiddlewaresHandlers = Optional[Sequence[Tuple[_Middleware, bool]]]
|
| 70 |
+
_Subapps = List["Application"]
|
| 71 |
+
else:
|
| 72 |
+
# No type checker mode, skip types
|
| 73 |
+
_AppSignal = Signal
|
| 74 |
+
_RespPrepareSignal = Signal
|
| 75 |
+
_Middleware = Callable
|
| 76 |
+
_Middlewares = FrozenList
|
| 77 |
+
_MiddlewaresHandlers = Optional[Sequence]
|
| 78 |
+
_Subapps = List
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
class Application(MutableMapping[str, Any]):
|
| 82 |
+
ATTRS = frozenset(
|
| 83 |
+
[
|
| 84 |
+
"logger",
|
| 85 |
+
"_debug",
|
| 86 |
+
"_router",
|
| 87 |
+
"_loop",
|
| 88 |
+
"_handler_args",
|
| 89 |
+
"_middlewares",
|
| 90 |
+
"_middlewares_handlers",
|
| 91 |
+
"_run_middlewares",
|
| 92 |
+
"_state",
|
| 93 |
+
"_frozen",
|
| 94 |
+
"_pre_frozen",
|
| 95 |
+
"_subapps",
|
| 96 |
+
"_on_response_prepare",
|
| 97 |
+
"_on_startup",
|
| 98 |
+
"_on_shutdown",
|
| 99 |
+
"_on_cleanup",
|
| 100 |
+
"_client_max_size",
|
| 101 |
+
"_cleanup_ctx",
|
| 102 |
+
]
|
| 103 |
+
)
|
| 104 |
+
|
| 105 |
+
def __init__(
|
| 106 |
+
self,
|
| 107 |
+
*,
|
| 108 |
+
logger: logging.Logger = web_logger,
|
| 109 |
+
router: Optional[UrlDispatcher] = None,
|
| 110 |
+
middlewares: Iterable[_Middleware] = (),
|
| 111 |
+
handler_args: Optional[Mapping[str, Any]] = None,
|
| 112 |
+
client_max_size: int = 1024 ** 2,
|
| 113 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 114 |
+
debug: Any = ..., # mypy doesn't support ellipsis
|
| 115 |
+
) -> None:
|
| 116 |
+
if router is None:
|
| 117 |
+
router = UrlDispatcher()
|
| 118 |
+
else:
|
| 119 |
+
warnings.warn(
|
| 120 |
+
"router argument is deprecated", DeprecationWarning, stacklevel=2
|
| 121 |
+
)
|
| 122 |
+
assert isinstance(router, AbstractRouter), router
|
| 123 |
+
|
| 124 |
+
if loop is not None:
|
| 125 |
+
warnings.warn(
|
| 126 |
+
"loop argument is deprecated", DeprecationWarning, stacklevel=2
|
| 127 |
+
)
|
| 128 |
+
|
| 129 |
+
if debug is not ...:
|
| 130 |
+
warnings.warn(
|
| 131 |
+
"debug argument is deprecated", DeprecationWarning, stacklevel=2
|
| 132 |
+
)
|
| 133 |
+
self._debug = debug
|
| 134 |
+
self._router = router # type: UrlDispatcher
|
| 135 |
+
self._loop = loop
|
| 136 |
+
self._handler_args = handler_args
|
| 137 |
+
self.logger = logger
|
| 138 |
+
|
| 139 |
+
self._middlewares = FrozenList(middlewares) # type: _Middlewares
|
| 140 |
+
|
| 141 |
+
# initialized on freezing
|
| 142 |
+
self._middlewares_handlers = None # type: _MiddlewaresHandlers
|
| 143 |
+
# initialized on freezing
|
| 144 |
+
self._run_middlewares = None # type: Optional[bool]
|
| 145 |
+
|
| 146 |
+
self._state = {} # type: Dict[str, Any]
|
| 147 |
+
self._frozen = False
|
| 148 |
+
self._pre_frozen = False
|
| 149 |
+
self._subapps = [] # type: _Subapps
|
| 150 |
+
|
| 151 |
+
self._on_response_prepare = Signal(self) # type: _RespPrepareSignal
|
| 152 |
+
self._on_startup = Signal(self) # type: _AppSignal
|
| 153 |
+
self._on_shutdown = Signal(self) # type: _AppSignal
|
| 154 |
+
self._on_cleanup = Signal(self) # type: _AppSignal
|
| 155 |
+
self._cleanup_ctx = CleanupContext()
|
| 156 |
+
self._on_startup.append(self._cleanup_ctx._on_startup)
|
| 157 |
+
self._on_cleanup.append(self._cleanup_ctx._on_cleanup)
|
| 158 |
+
self._client_max_size = client_max_size
|
| 159 |
+
|
| 160 |
+
def __init_subclass__(cls: Type["Application"]) -> None:
|
| 161 |
+
warnings.warn(
|
| 162 |
+
"Inheritance class {} from web.Application "
|
| 163 |
+
"is discouraged".format(cls.__name__),
|
| 164 |
+
DeprecationWarning,
|
| 165 |
+
stacklevel=2,
|
| 166 |
+
)
|
| 167 |
+
|
| 168 |
+
if DEBUG: # pragma: no cover
|
| 169 |
+
|
| 170 |
+
def __setattr__(self, name: str, val: Any) -> None:
|
| 171 |
+
if name not in self.ATTRS:
|
| 172 |
+
warnings.warn(
|
| 173 |
+
"Setting custom web.Application.{} attribute "
|
| 174 |
+
"is discouraged".format(name),
|
| 175 |
+
DeprecationWarning,
|
| 176 |
+
stacklevel=2,
|
| 177 |
+
)
|
| 178 |
+
super().__setattr__(name, val)
|
| 179 |
+
|
| 180 |
+
# MutableMapping API
|
| 181 |
+
|
| 182 |
+
def __eq__(self, other: object) -> bool:
|
| 183 |
+
return self is other
|
| 184 |
+
|
| 185 |
+
def __getitem__(self, key: str) -> Any:
|
| 186 |
+
return self._state[key]
|
| 187 |
+
|
| 188 |
+
def _check_frozen(self) -> None:
|
| 189 |
+
if self._frozen:
|
| 190 |
+
warnings.warn(
|
| 191 |
+
"Changing state of started or joined " "application is deprecated",
|
| 192 |
+
DeprecationWarning,
|
| 193 |
+
stacklevel=3,
|
| 194 |
+
)
|
| 195 |
+
|
| 196 |
+
def __setitem__(self, key: str, value: Any) -> None:
|
| 197 |
+
self._check_frozen()
|
| 198 |
+
self._state[key] = value
|
| 199 |
+
|
| 200 |
+
def __delitem__(self, key: str) -> None:
|
| 201 |
+
self._check_frozen()
|
| 202 |
+
del self._state[key]
|
| 203 |
+
|
| 204 |
+
def __len__(self) -> int:
|
| 205 |
+
return len(self._state)
|
| 206 |
+
|
| 207 |
+
def __iter__(self) -> Iterator[str]:
|
| 208 |
+
return iter(self._state)
|
| 209 |
+
|
| 210 |
+
########
|
| 211 |
+
@property
|
| 212 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
| 213 |
+
# Technically the loop can be None
|
| 214 |
+
# but we mask it by explicit type cast
|
| 215 |
+
# to provide more convinient type annotation
|
| 216 |
+
warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2)
|
| 217 |
+
return cast(asyncio.AbstractEventLoop, self._loop)
|
| 218 |
+
|
| 219 |
+
def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None:
|
| 220 |
+
if loop is None:
|
| 221 |
+
loop = asyncio.get_event_loop()
|
| 222 |
+
if self._loop is not None and self._loop is not loop:
|
| 223 |
+
raise RuntimeError(
|
| 224 |
+
"web.Application instance initialized with different loop"
|
| 225 |
+
)
|
| 226 |
+
|
| 227 |
+
self._loop = loop
|
| 228 |
+
|
| 229 |
+
# set loop debug
|
| 230 |
+
if self._debug is ...:
|
| 231 |
+
self._debug = loop.get_debug()
|
| 232 |
+
|
| 233 |
+
# set loop to sub applications
|
| 234 |
+
for subapp in self._subapps:
|
| 235 |
+
subapp._set_loop(loop)
|
| 236 |
+
|
| 237 |
+
@property
|
| 238 |
+
def pre_frozen(self) -> bool:
|
| 239 |
+
return self._pre_frozen
|
| 240 |
+
|
| 241 |
+
def pre_freeze(self) -> None:
|
| 242 |
+
if self._pre_frozen:
|
| 243 |
+
return
|
| 244 |
+
|
| 245 |
+
self._pre_frozen = True
|
| 246 |
+
self._middlewares.freeze()
|
| 247 |
+
self._router.freeze()
|
| 248 |
+
self._on_response_prepare.freeze()
|
| 249 |
+
self._cleanup_ctx.freeze()
|
| 250 |
+
self._on_startup.freeze()
|
| 251 |
+
self._on_shutdown.freeze()
|
| 252 |
+
self._on_cleanup.freeze()
|
| 253 |
+
self._middlewares_handlers = tuple(self._prepare_middleware())
|
| 254 |
+
|
| 255 |
+
# If current app and any subapp do not have middlewares avoid run all
|
| 256 |
+
# of the code footprint that it implies, which have a middleware
|
| 257 |
+
# hardcoded per app that sets up the current_app attribute. If no
|
| 258 |
+
# middlewares are configured the handler will receive the proper
|
| 259 |
+
# current_app without needing all of this code.
|
| 260 |
+
self._run_middlewares = True if self.middlewares else False
|
| 261 |
+
|
| 262 |
+
for subapp in self._subapps:
|
| 263 |
+
subapp.pre_freeze()
|
| 264 |
+
self._run_middlewares = self._run_middlewares or subapp._run_middlewares
|
| 265 |
+
|
| 266 |
+
@property
|
| 267 |
+
def frozen(self) -> bool:
|
| 268 |
+
return self._frozen
|
| 269 |
+
|
| 270 |
+
def freeze(self) -> None:
|
| 271 |
+
if self._frozen:
|
| 272 |
+
return
|
| 273 |
+
|
| 274 |
+
self.pre_freeze()
|
| 275 |
+
self._frozen = True
|
| 276 |
+
for subapp in self._subapps:
|
| 277 |
+
subapp.freeze()
|
| 278 |
+
|
| 279 |
+
@property
|
| 280 |
+
def debug(self) -> bool:
|
| 281 |
+
warnings.warn("debug property is deprecated", DeprecationWarning, stacklevel=2)
|
| 282 |
+
return self._debug # type: ignore[no-any-return]
|
| 283 |
+
|
| 284 |
+
def _reg_subapp_signals(self, subapp: "Application") -> None:
|
| 285 |
+
def reg_handler(signame: str) -> None:
|
| 286 |
+
subsig = getattr(subapp, signame)
|
| 287 |
+
|
| 288 |
+
async def handler(app: "Application") -> None:
|
| 289 |
+
await subsig.send(subapp)
|
| 290 |
+
|
| 291 |
+
appsig = getattr(self, signame)
|
| 292 |
+
appsig.append(handler)
|
| 293 |
+
|
| 294 |
+
reg_handler("on_startup")
|
| 295 |
+
reg_handler("on_shutdown")
|
| 296 |
+
reg_handler("on_cleanup")
|
| 297 |
+
|
| 298 |
+
def add_subapp(self, prefix: str, subapp: "Application") -> AbstractResource:
|
| 299 |
+
if not isinstance(prefix, str):
|
| 300 |
+
raise TypeError("Prefix must be str")
|
| 301 |
+
prefix = prefix.rstrip("/")
|
| 302 |
+
if not prefix:
|
| 303 |
+
raise ValueError("Prefix cannot be empty")
|
| 304 |
+
factory = partial(PrefixedSubAppResource, prefix, subapp)
|
| 305 |
+
return self._add_subapp(factory, subapp)
|
| 306 |
+
|
| 307 |
+
def _add_subapp(
|
| 308 |
+
self, resource_factory: Callable[[], AbstractResource], subapp: "Application"
|
| 309 |
+
) -> AbstractResource:
|
| 310 |
+
if self.frozen:
|
| 311 |
+
raise RuntimeError("Cannot add sub application to frozen application")
|
| 312 |
+
if subapp.frozen:
|
| 313 |
+
raise RuntimeError("Cannot add frozen application")
|
| 314 |
+
resource = resource_factory()
|
| 315 |
+
self.router.register_resource(resource)
|
| 316 |
+
self._reg_subapp_signals(subapp)
|
| 317 |
+
self._subapps.append(subapp)
|
| 318 |
+
subapp.pre_freeze()
|
| 319 |
+
if self._loop is not None:
|
| 320 |
+
subapp._set_loop(self._loop)
|
| 321 |
+
return resource
|
| 322 |
+
|
| 323 |
+
def add_domain(self, domain: str, subapp: "Application") -> AbstractResource:
|
| 324 |
+
if not isinstance(domain, str):
|
| 325 |
+
raise TypeError("Domain must be str")
|
| 326 |
+
elif "*" in domain:
|
| 327 |
+
rule = MaskDomain(domain) # type: Domain
|
| 328 |
+
else:
|
| 329 |
+
rule = Domain(domain)
|
| 330 |
+
factory = partial(MatchedSubAppResource, rule, subapp)
|
| 331 |
+
return self._add_subapp(factory, subapp)
|
| 332 |
+
|
| 333 |
+
def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
|
| 334 |
+
return self.router.add_routes(routes)
|
| 335 |
+
|
| 336 |
+
@property
|
| 337 |
+
def on_response_prepare(self) -> _RespPrepareSignal:
|
| 338 |
+
return self._on_response_prepare
|
| 339 |
+
|
| 340 |
+
@property
|
| 341 |
+
def on_startup(self) -> _AppSignal:
|
| 342 |
+
return self._on_startup
|
| 343 |
+
|
| 344 |
+
@property
|
| 345 |
+
def on_shutdown(self) -> _AppSignal:
|
| 346 |
+
return self._on_shutdown
|
| 347 |
+
|
| 348 |
+
@property
|
| 349 |
+
def on_cleanup(self) -> _AppSignal:
|
| 350 |
+
return self._on_cleanup
|
| 351 |
+
|
| 352 |
+
@property
|
| 353 |
+
def cleanup_ctx(self) -> "CleanupContext":
|
| 354 |
+
return self._cleanup_ctx
|
| 355 |
+
|
| 356 |
+
@property
|
| 357 |
+
def router(self) -> UrlDispatcher:
|
| 358 |
+
return self._router
|
| 359 |
+
|
| 360 |
+
@property
|
| 361 |
+
def middlewares(self) -> _Middlewares:
|
| 362 |
+
return self._middlewares
|
| 363 |
+
|
| 364 |
+
def _make_handler(
|
| 365 |
+
self,
|
| 366 |
+
*,
|
| 367 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 368 |
+
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
| 369 |
+
**kwargs: Any,
|
| 370 |
+
) -> Server:
|
| 371 |
+
|
| 372 |
+
if not issubclass(access_log_class, AbstractAccessLogger):
|
| 373 |
+
raise TypeError(
|
| 374 |
+
"access_log_class must be subclass of "
|
| 375 |
+
"aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class)
|
| 376 |
+
)
|
| 377 |
+
|
| 378 |
+
self._set_loop(loop)
|
| 379 |
+
self.freeze()
|
| 380 |
+
|
| 381 |
+
kwargs["debug"] = self._debug
|
| 382 |
+
kwargs["access_log_class"] = access_log_class
|
| 383 |
+
if self._handler_args:
|
| 384 |
+
for k, v in self._handler_args.items():
|
| 385 |
+
kwargs[k] = v
|
| 386 |
+
|
| 387 |
+
return Server(
|
| 388 |
+
self._handle, # type: ignore[arg-type]
|
| 389 |
+
request_factory=self._make_request,
|
| 390 |
+
loop=self._loop,
|
| 391 |
+
**kwargs,
|
| 392 |
+
)
|
| 393 |
+
|
| 394 |
+
def make_handler(
|
| 395 |
+
self,
|
| 396 |
+
*,
|
| 397 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 398 |
+
access_log_class: Type[AbstractAccessLogger] = AccessLogger,
|
| 399 |
+
**kwargs: Any,
|
| 400 |
+
) -> Server:
|
| 401 |
+
|
| 402 |
+
warnings.warn(
|
| 403 |
+
"Application.make_handler(...) is deprecated, " "use AppRunner API instead",
|
| 404 |
+
DeprecationWarning,
|
| 405 |
+
stacklevel=2,
|
| 406 |
+
)
|
| 407 |
+
|
| 408 |
+
return self._make_handler(
|
| 409 |
+
loop=loop, access_log_class=access_log_class, **kwargs
|
| 410 |
+
)
|
| 411 |
+
|
| 412 |
+
async def startup(self) -> None:
|
| 413 |
+
"""Causes on_startup signal
|
| 414 |
+
|
| 415 |
+
Should be called in the event loop along with the request handler.
|
| 416 |
+
"""
|
| 417 |
+
await self.on_startup.send(self)
|
| 418 |
+
|
| 419 |
+
async def shutdown(self) -> None:
|
| 420 |
+
"""Causes on_shutdown signal
|
| 421 |
+
|
| 422 |
+
Should be called before cleanup()
|
| 423 |
+
"""
|
| 424 |
+
await self.on_shutdown.send(self)
|
| 425 |
+
|
| 426 |
+
async def cleanup(self) -> None:
|
| 427 |
+
"""Causes on_cleanup signal
|
| 428 |
+
|
| 429 |
+
Should be called after shutdown()
|
| 430 |
+
"""
|
| 431 |
+
if self.on_cleanup.frozen:
|
| 432 |
+
await self.on_cleanup.send(self)
|
| 433 |
+
else:
|
| 434 |
+
# If an exception occurs in startup, ensure cleanup contexts are completed.
|
| 435 |
+
await self._cleanup_ctx._on_cleanup(self)
|
| 436 |
+
|
| 437 |
+
def _make_request(
|
| 438 |
+
self,
|
| 439 |
+
message: RawRequestMessage,
|
| 440 |
+
payload: StreamReader,
|
| 441 |
+
protocol: RequestHandler,
|
| 442 |
+
writer: AbstractStreamWriter,
|
| 443 |
+
task: "asyncio.Task[None]",
|
| 444 |
+
_cls: Type[Request] = Request,
|
| 445 |
+
) -> Request:
|
| 446 |
+
return _cls(
|
| 447 |
+
message,
|
| 448 |
+
payload,
|
| 449 |
+
protocol,
|
| 450 |
+
writer,
|
| 451 |
+
task,
|
| 452 |
+
self._loop,
|
| 453 |
+
client_max_size=self._client_max_size,
|
| 454 |
+
)
|
| 455 |
+
|
| 456 |
+
def _prepare_middleware(self) -> Iterator[Tuple[_Middleware, bool]]:
|
| 457 |
+
for m in reversed(self._middlewares):
|
| 458 |
+
if getattr(m, "__middleware_version__", None) == 1:
|
| 459 |
+
yield m, True
|
| 460 |
+
else:
|
| 461 |
+
warnings.warn(
|
| 462 |
+
'old-style middleware "{!r}" deprecated, ' "see #2252".format(m),
|
| 463 |
+
DeprecationWarning,
|
| 464 |
+
stacklevel=2,
|
| 465 |
+
)
|
| 466 |
+
yield m, False
|
| 467 |
+
|
| 468 |
+
yield _fix_request_current_app(self), True
|
| 469 |
+
|
| 470 |
+
async def _handle(self, request: Request) -> StreamResponse:
|
| 471 |
+
loop = asyncio.get_event_loop()
|
| 472 |
+
debug = loop.get_debug()
|
| 473 |
+
match_info = await self._router.resolve(request)
|
| 474 |
+
if debug: # pragma: no cover
|
| 475 |
+
if not isinstance(match_info, AbstractMatchInfo):
|
| 476 |
+
raise TypeError(
|
| 477 |
+
"match_info should be AbstractMatchInfo "
|
| 478 |
+
"instance, not {!r}".format(match_info)
|
| 479 |
+
)
|
| 480 |
+
match_info.add_app(self)
|
| 481 |
+
|
| 482 |
+
match_info.freeze()
|
| 483 |
+
|
| 484 |
+
resp = None
|
| 485 |
+
request._match_info = match_info
|
| 486 |
+
expect = request.headers.get(hdrs.EXPECT)
|
| 487 |
+
if expect:
|
| 488 |
+
resp = await match_info.expect_handler(request)
|
| 489 |
+
await request.writer.drain()
|
| 490 |
+
|
| 491 |
+
if resp is None:
|
| 492 |
+
handler = match_info.handler
|
| 493 |
+
|
| 494 |
+
if self._run_middlewares:
|
| 495 |
+
for app in match_info.apps[::-1]:
|
| 496 |
+
for m, new_style in app._middlewares_handlers: # type: ignore[union-attr] # noqa
|
| 497 |
+
if new_style:
|
| 498 |
+
handler = update_wrapper(
|
| 499 |
+
partial(m, handler=handler), handler
|
| 500 |
+
)
|
| 501 |
+
else:
|
| 502 |
+
handler = await m(app, handler) # type: ignore[arg-type]
|
| 503 |
+
|
| 504 |
+
resp = await handler(request)
|
| 505 |
+
|
| 506 |
+
return resp
|
| 507 |
+
|
| 508 |
+
def __call__(self) -> "Application":
|
| 509 |
+
"""gunicorn compatibility"""
|
| 510 |
+
return self
|
| 511 |
+
|
| 512 |
+
def __repr__(self) -> str:
|
| 513 |
+
return f"<Application 0x{id(self):x}>"
|
| 514 |
+
|
| 515 |
+
def __bool__(self) -> bool:
|
| 516 |
+
return True
|
| 517 |
+
|
| 518 |
+
|
| 519 |
+
class CleanupError(RuntimeError):
|
| 520 |
+
@property
|
| 521 |
+
def exceptions(self) -> List[BaseException]:
|
| 522 |
+
return cast(List[BaseException], self.args[1])
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 526 |
+
_CleanupContextBase = FrozenList[Callable[[Application], AsyncIterator[None]]]
|
| 527 |
+
else:
|
| 528 |
+
_CleanupContextBase = FrozenList
|
| 529 |
+
|
| 530 |
+
|
| 531 |
+
class CleanupContext(_CleanupContextBase):
|
| 532 |
+
def __init__(self) -> None:
|
| 533 |
+
super().__init__()
|
| 534 |
+
self._exits = [] # type: List[AsyncIterator[None]]
|
| 535 |
+
|
| 536 |
+
async def _on_startup(self, app: Application) -> None:
|
| 537 |
+
for cb in self:
|
| 538 |
+
it = cb(app).__aiter__()
|
| 539 |
+
await it.__anext__()
|
| 540 |
+
self._exits.append(it)
|
| 541 |
+
|
| 542 |
+
async def _on_cleanup(self, app: Application) -> None:
|
| 543 |
+
errors = []
|
| 544 |
+
for it in reversed(self._exits):
|
| 545 |
+
try:
|
| 546 |
+
await it.__anext__()
|
| 547 |
+
except StopAsyncIteration:
|
| 548 |
+
pass
|
| 549 |
+
except Exception as exc:
|
| 550 |
+
errors.append(exc)
|
| 551 |
+
else:
|
| 552 |
+
errors.append(RuntimeError(f"{it!r} has more than one 'yield'"))
|
| 553 |
+
if errors:
|
| 554 |
+
if len(errors) == 1:
|
| 555 |
+
raise errors[0]
|
| 556 |
+
else:
|
| 557 |
+
raise CleanupError("Multiple errors on cleanup stage", errors)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_exceptions.py
ADDED
|
@@ -0,0 +1,441 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import warnings
|
| 2 |
+
from typing import Any, Dict, Iterable, List, Optional, Set # noqa
|
| 3 |
+
|
| 4 |
+
from yarl import URL
|
| 5 |
+
|
| 6 |
+
from .typedefs import LooseHeaders, StrOrURL
|
| 7 |
+
from .web_response import Response
|
| 8 |
+
|
| 9 |
+
__all__ = (
|
| 10 |
+
"HTTPException",
|
| 11 |
+
"HTTPError",
|
| 12 |
+
"HTTPRedirection",
|
| 13 |
+
"HTTPSuccessful",
|
| 14 |
+
"HTTPOk",
|
| 15 |
+
"HTTPCreated",
|
| 16 |
+
"HTTPAccepted",
|
| 17 |
+
"HTTPNonAuthoritativeInformation",
|
| 18 |
+
"HTTPNoContent",
|
| 19 |
+
"HTTPResetContent",
|
| 20 |
+
"HTTPPartialContent",
|
| 21 |
+
"HTTPMultipleChoices",
|
| 22 |
+
"HTTPMovedPermanently",
|
| 23 |
+
"HTTPFound",
|
| 24 |
+
"HTTPSeeOther",
|
| 25 |
+
"HTTPNotModified",
|
| 26 |
+
"HTTPUseProxy",
|
| 27 |
+
"HTTPTemporaryRedirect",
|
| 28 |
+
"HTTPPermanentRedirect",
|
| 29 |
+
"HTTPClientError",
|
| 30 |
+
"HTTPBadRequest",
|
| 31 |
+
"HTTPUnauthorized",
|
| 32 |
+
"HTTPPaymentRequired",
|
| 33 |
+
"HTTPForbidden",
|
| 34 |
+
"HTTPNotFound",
|
| 35 |
+
"HTTPMethodNotAllowed",
|
| 36 |
+
"HTTPNotAcceptable",
|
| 37 |
+
"HTTPProxyAuthenticationRequired",
|
| 38 |
+
"HTTPRequestTimeout",
|
| 39 |
+
"HTTPConflict",
|
| 40 |
+
"HTTPGone",
|
| 41 |
+
"HTTPLengthRequired",
|
| 42 |
+
"HTTPPreconditionFailed",
|
| 43 |
+
"HTTPRequestEntityTooLarge",
|
| 44 |
+
"HTTPRequestURITooLong",
|
| 45 |
+
"HTTPUnsupportedMediaType",
|
| 46 |
+
"HTTPRequestRangeNotSatisfiable",
|
| 47 |
+
"HTTPExpectationFailed",
|
| 48 |
+
"HTTPMisdirectedRequest",
|
| 49 |
+
"HTTPUnprocessableEntity",
|
| 50 |
+
"HTTPFailedDependency",
|
| 51 |
+
"HTTPUpgradeRequired",
|
| 52 |
+
"HTTPPreconditionRequired",
|
| 53 |
+
"HTTPTooManyRequests",
|
| 54 |
+
"HTTPRequestHeaderFieldsTooLarge",
|
| 55 |
+
"HTTPUnavailableForLegalReasons",
|
| 56 |
+
"HTTPServerError",
|
| 57 |
+
"HTTPInternalServerError",
|
| 58 |
+
"HTTPNotImplemented",
|
| 59 |
+
"HTTPBadGateway",
|
| 60 |
+
"HTTPServiceUnavailable",
|
| 61 |
+
"HTTPGatewayTimeout",
|
| 62 |
+
"HTTPVersionNotSupported",
|
| 63 |
+
"HTTPVariantAlsoNegotiates",
|
| 64 |
+
"HTTPInsufficientStorage",
|
| 65 |
+
"HTTPNotExtended",
|
| 66 |
+
"HTTPNetworkAuthenticationRequired",
|
| 67 |
+
)
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
############################################################
|
| 71 |
+
# HTTP Exceptions
|
| 72 |
+
############################################################
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class HTTPException(Response, Exception):
    """Base class for all aiohttp HTTP exceptions.

    Inherits from both ``Response`` and ``Exception`` so a handler may
    either ``return`` an instance or ``raise`` it.
    """

    # You should set in subclasses:
    # status = 200

    # Sentinel value: concrete subclasses must override with a real
    # HTTP status code.
    status_code = -1
    # When True the response body is left empty (e.g. 204, 304).
    empty_body = False

    # Marker attribute used to recognize HTTP exception classes.
    __http_exception__ = True

    def __init__(
        self,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        """Build the response side using the subclass's ``status_code``.

        ``body`` is deprecated; prefer ``text``.
        """
        if body is not None:
            warnings.warn(
                "body argument is deprecated for http web exceptions",
                DeprecationWarning,
            )
        # Initialize each base explicitly: Response and Exception have
        # incompatible __init__ signatures, so cooperative super() is
        # not used here.
        Response.__init__(
            self,
            status=self.status_code,
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        Exception.__init__(self, self.reason)
        # Default payload "<status>: <reason>" unless the status code
        # forbids a body.
        if self.body is None and not self.empty_body:
            self.text = f"{self.status}: {self.reason}"

    def __bool__(self) -> bool:
        # A plain response is falsy until prepared; an exception
        # instance must always be truthy so `if exc:` behaves sanely.
        return True
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
# Category base classes: concrete status classes derive from one of these.
class HTTPError(HTTPException):
    """Base class for exceptions with status codes in the 400s and 500s."""


class HTTPRedirection(HTTPException):
    """Base class for exceptions with status codes in the 300s."""


class HTTPSuccessful(HTTPException):
    """Base class for exceptions with status codes in the 200s."""
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
# 2xx success responses; each subclass only fixes the status code.
class HTTPOk(HTTPSuccessful):
    status_code = 200


class HTTPCreated(HTTPSuccessful):
    status_code = 201


class HTTPAccepted(HTTPSuccessful):
    status_code = 202


class HTTPNonAuthoritativeInformation(HTTPSuccessful):
    status_code = 203


class HTTPNoContent(HTTPSuccessful):
    status_code = 204
    # 204 responses must not carry a payload.
    empty_body = True


class HTTPResetContent(HTTPSuccessful):
    status_code = 205
    # 205 responses must not carry a payload.
    empty_body = True


class HTTPPartialContent(HTTPSuccessful):
    status_code = 206
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
############################################################
|
| 159 |
+
# 3xx redirection
|
| 160 |
+
############################################################
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
class _HTTPMove(HTTPRedirection):
    """Base class for 3xx redirects that carry a ``Location`` header.

    :raises ValueError: if *location* is falsy — a redirect without a
        target is meaningless.
    """

    def __init__(
        self,
        location: StrOrURL,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        if not location:
            raise ValueError("HTTP redirects need a location to redirect to.")
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        # Normalize through yarl.URL so the header holds a valid URL string;
        # the raw argument is kept on self.location for callers.
        self.headers["Location"] = str(URL(location))
        self.location = location
|
| 185 |
+
|
| 186 |
+
|
| 187 |
+
# Concrete 3xx classes; the _HTTPMove ones require a location argument.
class HTTPMultipleChoices(_HTTPMove):
    status_code = 300


class HTTPMovedPermanently(_HTTPMove):
    status_code = 301


class HTTPFound(_HTTPMove):
    status_code = 302


# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(_HTTPMove):
    status_code = 303


class HTTPNotModified(HTTPRedirection):
    # FIXME: this should include a date or etag header
    status_code = 304
    # 304 responses never carry a body.
    empty_body = True


class HTTPUseProxy(_HTTPMove):
    # Not a move, but looks a little like one
    status_code = 305


class HTTPTemporaryRedirect(_HTTPMove):
    status_code = 307


class HTTPPermanentRedirect(_HTTPMove):
    status_code = 308
|
| 222 |
+
|
| 223 |
+
|
| 224 |
+
############################################################
|
| 225 |
+
# 4xx client error
|
| 226 |
+
############################################################
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
class HTTPClientError(HTTPError):
    """Base class for 4xx responses (the client appears to be at fault)."""

    pass


class HTTPBadRequest(HTTPClientError):
    status_code = 400


class HTTPUnauthorized(HTTPClientError):
    status_code = 401


class HTTPPaymentRequired(HTTPClientError):
    status_code = 402


class HTTPForbidden(HTTPClientError):
    status_code = 403


class HTTPNotFound(HTTPClientError):
    status_code = 404
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
class HTTPMethodNotAllowed(HTTPClientError):
    """405: the request method is not supported by the target resource.

    Records the offending method (upper-cased) and the set of permitted
    methods, and advertises the latter via the ``Allow`` header.
    """

    status_code = 405

    def __init__(
        self,
        method: str,
        allowed_methods: Iterable[str],
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        allow = ",".join(sorted(allowed_methods))
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        # RFC 7231 requires 405 responses to list the permitted methods.
        self.headers["Allow"] = allow
        self.allowed_methods: Set[str] = set(allowed_methods)
        self.method = method.upper()
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
# Plain 4xx classes (406-412); each only fixes the status code.
class HTTPNotAcceptable(HTTPClientError):
    status_code = 406


class HTTPProxyAuthenticationRequired(HTTPClientError):
    status_code = 407


class HTTPRequestTimeout(HTTPClientError):
    status_code = 408


class HTTPConflict(HTTPClientError):
    status_code = 409


class HTTPGone(HTTPClientError):
    status_code = 410


class HTTPLengthRequired(HTTPClientError):
    status_code = 411


class HTTPPreconditionFailed(HTTPClientError):
    status_code = 412
|
| 306 |
+
|
| 307 |
+
|
| 308 |
+
class HTTPRequestEntityTooLarge(HTTPClientError):
    """413: the request payload exceeds the configured size limit.

    Supplies a default ``text`` describing the limit and the offending
    size unless the caller already provided one.
    """

    status_code = 413

    def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None:
        if "text" not in kwargs:
            kwargs["text"] = (
                f"Maximum request body size {max_size} exceeded, "
                f"actual body size {actual_size}"
            )
        super().__init__(**kwargs)
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
# Plain 4xx classes (414-431); each only fixes the status code.
class HTTPRequestURITooLong(HTTPClientError):
    status_code = 414


class HTTPUnsupportedMediaType(HTTPClientError):
    status_code = 415


class HTTPRequestRangeNotSatisfiable(HTTPClientError):
    status_code = 416


class HTTPExpectationFailed(HTTPClientError):
    status_code = 417


class HTTPMisdirectedRequest(HTTPClientError):
    status_code = 421


class HTTPUnprocessableEntity(HTTPClientError):
    status_code = 422


class HTTPFailedDependency(HTTPClientError):
    status_code = 424


class HTTPUpgradeRequired(HTTPClientError):
    status_code = 426


class HTTPPreconditionRequired(HTTPClientError):
    status_code = 428


class HTTPTooManyRequests(HTTPClientError):
    status_code = 429


class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
    status_code = 431
|
| 362 |
+
|
| 363 |
+
|
| 364 |
+
class HTTPUnavailableForLegalReasons(HTTPClientError):
    """451: access to the resource is denied for legal reasons.

    Emits a ``Link`` header with ``rel="blocked-by"`` pointing at the
    party imposing the block, per RFC 7725.
    """

    status_code = 451

    def __init__(
        self,
        link: str,
        *,
        headers: Optional[LooseHeaders] = None,
        reason: Optional[str] = None,
        body: Any = None,
        text: Optional[str] = None,
        content_type: Optional[str] = None,
    ) -> None:
        super().__init__(
            headers=headers,
            reason=reason,
            body=body,
            text=text,
            content_type=content_type,
        )
        self.headers["Link"] = f'<{link}>; rel="blocked-by"'
        self.link = link
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
############################################################
|
| 389 |
+
# 5xx Server Error
|
| 390 |
+
############################################################
|
| 391 |
+
# Response status codes beginning with the digit "5" indicate cases in
|
| 392 |
+
# which the server is aware that it has erred or is incapable of
|
| 393 |
+
# performing the request. Except when responding to a HEAD request, the
|
| 394 |
+
# server SHOULD include an entity containing an explanation of the error
|
| 395 |
+
# situation, and whether it is a temporary or permanent condition. User
|
| 396 |
+
# agents SHOULD display any included entity to the user. These response
|
| 397 |
+
# codes are applicable to any request method.
|
| 398 |
+
|
| 399 |
+
|
| 400 |
+
class HTTPServerError(HTTPError):
    """Base class for 5xx responses (the server failed or cannot comply)."""

    pass


class HTTPInternalServerError(HTTPServerError):
    status_code = 500


class HTTPNotImplemented(HTTPServerError):
    status_code = 501


class HTTPBadGateway(HTTPServerError):
    status_code = 502


class HTTPServiceUnavailable(HTTPServerError):
    status_code = 503


class HTTPGatewayTimeout(HTTPServerError):
    status_code = 504


class HTTPVersionNotSupported(HTTPServerError):
    status_code = 505


class HTTPVariantAlsoNegotiates(HTTPServerError):
    status_code = 506


class HTTPInsufficientStorage(HTTPServerError):
    status_code = 507


class HTTPNotExtended(HTTPServerError):
    status_code = 510


class HTTPNetworkAuthenticationRequired(HTTPServerError):
    status_code = 511
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_fileresponse.py
ADDED
|
@@ -0,0 +1,288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import mimetypes
|
| 3 |
+
import os
|
| 4 |
+
import pathlib
|
| 5 |
+
import sys
|
| 6 |
+
from typing import ( # noqa
|
| 7 |
+
IO,
|
| 8 |
+
TYPE_CHECKING,
|
| 9 |
+
Any,
|
| 10 |
+
Awaitable,
|
| 11 |
+
Callable,
|
| 12 |
+
Iterator,
|
| 13 |
+
List,
|
| 14 |
+
Optional,
|
| 15 |
+
Tuple,
|
| 16 |
+
Union,
|
| 17 |
+
cast,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
from . import hdrs
|
| 21 |
+
from .abc import AbstractStreamWriter
|
| 22 |
+
from .helpers import ETAG_ANY, ETag
|
| 23 |
+
from .typedefs import Final, LooseHeaders
|
| 24 |
+
from .web_exceptions import (
|
| 25 |
+
HTTPNotModified,
|
| 26 |
+
HTTPPartialContent,
|
| 27 |
+
HTTPPreconditionFailed,
|
| 28 |
+
HTTPRequestRangeNotSatisfiable,
|
| 29 |
+
)
|
| 30 |
+
from .web_response import StreamResponse
|
| 31 |
+
|
| 32 |
+
__all__ = ("FileResponse",)
|
| 33 |
+
|
| 34 |
+
if TYPE_CHECKING:  # pragma: no cover
    # Imported only for type checking to avoid an import cycle at runtime.
    from .web_request import BaseRequest


# Optional per-chunk callback type (awaited after each chunk is sent).
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]


# Setting the AIOHTTP_NOSENDFILE environment variable (to any non-empty
# value) forces the chunked-copy fallback instead of loop.sendfile().
NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class FileResponse(StreamResponse):
    """A response object can be used to send files.

    Handles conditional requests (ETag / modification-time validators),
    byte-range requests, optional pre-compressed ``.gz`` siblings, and
    uses ``loop.sendfile()`` for the transfer when available.
    """

    def __init__(
        self,
        path: Union[str, pathlib.Path],
        chunk_size: int = 256 * 1024,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        """Create a response that will stream the file at *path*.

        ``chunk_size`` controls read size on the non-sendfile fallback path.
        """
        super().__init__(status=status, reason=reason, headers=headers)

        if isinstance(path, str):
            path = pathlib.Path(path)

        self._path = path
        self._chunk_size = chunk_size

    async def _sendfile_fallback(
        self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        # To keep memory usage low, fobj is transferred in chunks
        # controlled by the constructor's chunk_size argument.

        chunk_size = self._chunk_size
        loop = asyncio.get_event_loop()

        # File I/O is blocking, so every file operation is pushed to the
        # default executor.
        await loop.run_in_executor(None, fobj.seek, offset)

        chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
        while chunk:
            await writer.write(chunk)
            count = count - chunk_size
            if count <= 0:
                break
            chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))

        await writer.drain()
        return writer

    async def _sendfile(
        self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
    ) -> AbstractStreamWriter:
        # Send `count` bytes of fobj starting at `offset`, preferring the
        # zero-copy loop.sendfile() path when available.
        writer = await super().prepare(request)
        assert writer is not None

        # Fall back to chunked copying when sendfile is disabled via env
        # var, Python predates loop.sendfile (3.7+), or the payload is
        # being compressed on the fly.
        if NOSENDFILE or sys.version_info < (3, 7) or self.compression:
            return await self._sendfile_fallback(writer, fobj, offset, count)

        loop = request._loop
        transport = request.transport
        assert transport is not None

        try:
            await loop.sendfile(transport, fobj, offset, count)
        except NotImplementedError:
            # Event loop / transport combination lacks sendfile support.
            return await self._sendfile_fallback(writer, fobj, offset, count)

        await super().write_eof()
        return writer

    @staticmethod
    def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
        # RFC 7232 strong comparison: weak tags never match; "*" matches
        # anything, but only when it is the sole member of the list.
        if len(etags) == 1 and etags[0].value == ETAG_ANY:
            return True
        return any(etag.value == etag_value for etag in etags if not etag.is_weak)

    async def _not_modified(
        self, request: "BaseRequest", etag_value: str, last_modified: float
    ) -> Optional[AbstractStreamWriter]:
        # Answer 304 carrying the validators but no payload.
        self.set_status(HTTPNotModified.status_code)
        self._length_check = False
        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = last_modified  # type: ignore[assignment]
        # Delete any Content-Length headers provided by user. HTTP 304
        # should always have empty response body
        return await super().prepare(request)

    async def _precondition_failed(
        self, request: "BaseRequest"
    ) -> Optional[AbstractStreamWriter]:
        # Answer 412 with an explicitly empty body.
        self.set_status(HTTPPreconditionFailed.status_code)
        self.content_length = 0
        return await super().prepare(request)

    async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
        """Evaluate conditional/range headers, then start sending the file."""
        filepath = self._path

        # Serve a pre-compressed "<name>.gz" sibling when the client
        # accepts gzip and such a file exists on disk.
        gzip = False
        if "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, ""):
            gzip_path = filepath.with_name(filepath.name + ".gz")

            if gzip_path.is_file():
                filepath = gzip_path
                gzip = True

        loop = asyncio.get_event_loop()
        st: os.stat_result = await loop.run_in_executor(None, filepath.stat)

        # Validators derived from the stat result: mtime+size in hex.
        etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
        last_modified = st.st_mtime

        # https://tools.ietf.org/html/rfc7232#section-6
        ifmatch = request.if_match
        if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
            return await self._precondition_failed(request)

        unmodsince = request.if_unmodified_since
        if (
            unmodsince is not None
            and ifmatch is None
            and st.st_mtime > unmodsince.timestamp()
        ):
            return await self._precondition_failed(request)

        ifnonematch = request.if_none_match
        if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
            return await self._not_modified(request, etag_value, last_modified)

        modsince = request.if_modified_since
        if (
            modsince is not None
            and ifnonematch is None
            and st.st_mtime <= modsince.timestamp()
        ):
            return await self._not_modified(request, etag_value, last_modified)

        # Only guess a Content-Type when the user did not set one.
        if hdrs.CONTENT_TYPE not in self.headers:
            ct, encoding = mimetypes.guess_type(str(filepath))
            if not ct:
                ct = "application/octet-stream"
            should_set_ct = True
        else:
            encoding = "gzip" if gzip else None
            should_set_ct = False

        status = self._status
        file_size = st.st_size
        count = file_size

        start = None

        ifrange = request.if_range
        if ifrange is None or st.st_mtime <= ifrange.timestamp():
            # If-Range header check:
            # condition = cached date >= last modification date
            # return 206 if True else 200.
            # if False:
            #   Range header would not be processed, return 200
            # if True but Range header missing
            #   return 200
            try:
                rng = request.http_range
                start = rng.start
                end = rng.stop
            except ValueError:
                # https://tools.ietf.org/html/rfc7233:
                # A server generating a 416 (Range Not Satisfiable) response to
                # a byte-range request SHOULD send a Content-Range header field
                # with an unsatisfied-range value.
                # The complete-length in a 416 response indicates the current
                # length of the selected representation.
                #
                # Will do the same below. Many servers ignore this and do not
                # send a Content-Range header with HTTP 416
                self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                return await super().prepare(request)

            # If a range request has been made, convert start, end slice
            # notation into file pointer offset and count
            if start is not None or end is not None:
                if start < 0 and end is None:  # return tail of file
                    start += file_size
                    if start < 0:
                        # if Range:bytes=-1000 in request header but file size
                        # is only 200, there would be trouble without this
                        start = 0
                    count = file_size - start
                else:
                    # rfc7233:If the last-byte-pos value is
                    # absent, or if the value is greater than or equal to
                    # the current length of the representation data,
                    # the byte range is interpreted as the remainder
                    # of the representation (i.e., the server replaces the
                    # value of last-byte-pos with a value that is one less than
                    # the current length of the selected representation).
                    count = (
                        min(end if end is not None else file_size, file_size) - start
                    )

                if start >= file_size:
                    # HTTP 416 should be returned in this case.
                    #
                    # According to https://tools.ietf.org/html/rfc7233:
                    # If a valid byte-range-set includes at least one
                    # byte-range-spec with a first-byte-pos that is less than
                    # the current length of the representation, or at least one
                    # suffix-byte-range-spec with a non-zero suffix-length,
                    # then the byte-range-set is satisfiable. Otherwise, the
                    # byte-range-set is unsatisfiable.
                    self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
                    self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
                    return await super().prepare(request)

                status = HTTPPartialContent.status_code
                # Even though you are sending the whole file, you should still
                # return a HTTP 206 for a Range request.
                self.set_status(status)

        if should_set_ct:
            self.content_type = ct  # type: ignore[assignment]
        if encoding:
            self.headers[hdrs.CONTENT_ENCODING] = encoding
        if gzip:
            # Proxies must key their cache on Accept-Encoding when a
            # gzip variant was selected.
            self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING

        self.etag = etag_value  # type: ignore[assignment]
        self.last_modified = st.st_mtime  # type: ignore[assignment]
        self.content_length = count

        self.headers[hdrs.ACCEPT_RANGES] = "bytes"

        real_start = cast(int, start)

        if status == HTTPPartialContent.status_code:
            self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
                real_start, real_start + count - 1, file_size
            )

        # If we are sending 0 bytes calling sendfile() will throw a ValueError
        if count == 0 or request.method == hdrs.METH_HEAD or self.status in [204, 304]:
            return await super().prepare(request)

        fobj = await loop.run_in_executor(None, filepath.open, "rb")
        if start:  # be aware that start could be None or int=0 here.
            offset = start
        else:
            offset = 0

        try:
            return await self._sendfile(request, fobj, offset, count)
        finally:
            # Always close the file, even if the transfer fails midway.
            await loop.run_in_executor(None, fobj.close)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_log.py
ADDED
|
@@ -0,0 +1,208 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import datetime
|
| 2 |
+
import functools
|
| 3 |
+
import logging
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
from collections import namedtuple
|
| 7 |
+
from typing import Any, Callable, Dict, Iterable, List, Tuple # noqa
|
| 8 |
+
|
| 9 |
+
from .abc import AbstractAccessLogger
|
| 10 |
+
from .web_request import BaseRequest
|
| 11 |
+
from .web_response import StreamResponse
|
| 12 |
+
|
| 13 |
+
# Pairing of a log-format atom's lookup key with the callable that renders it.
KeyMethod = namedtuple("KeyMethod", "key method")
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class AccessLogger(AbstractAccessLogger):
|
| 17 |
+
"""Helper object to log access.
|
| 18 |
+
|
| 19 |
+
Usage:
|
| 20 |
+
log = logging.getLogger("spam")
|
| 21 |
+
log_format = "%a %{User-Agent}i"
|
| 22 |
+
access_logger = AccessLogger(log, log_format)
|
| 23 |
+
access_logger.log(request, response, time)
|
| 24 |
+
|
| 25 |
+
Format:
|
| 26 |
+
%% The percent sign
|
| 27 |
+
%a Remote IP-address (IP-address of proxy if using reverse proxy)
|
| 28 |
+
%t Time when the request was started to process
|
| 29 |
+
%P The process ID of the child that serviced the request
|
| 30 |
+
%r First line of request
|
| 31 |
+
%s Response status code
|
| 32 |
+
%b Size of response in bytes, including HTTP headers
|
| 33 |
+
%T Time taken to serve the request, in seconds
|
| 34 |
+
%Tf Time taken to serve the request, in seconds with floating fraction
|
| 35 |
+
in .06f format
|
| 36 |
+
%D Time taken to serve the request, in microseconds
|
| 37 |
+
%{FOO}i request.headers['FOO']
|
| 38 |
+
%{FOO}o response.headers['FOO']
|
| 39 |
+
%{FOO}e os.environ['FOO']
|
| 40 |
+
|
| 41 |
+
"""
|
| 42 |
+
|
| 43 |
+
LOG_FORMAT_MAP = {
|
| 44 |
+
"a": "remote_address",
|
| 45 |
+
"t": "request_start_time",
|
| 46 |
+
"P": "process_id",
|
| 47 |
+
"r": "first_request_line",
|
| 48 |
+
"s": "response_status",
|
| 49 |
+
"b": "response_size",
|
| 50 |
+
"T": "request_time",
|
| 51 |
+
"Tf": "request_time_frac",
|
| 52 |
+
"D": "request_time_micro",
|
| 53 |
+
"i": "request_header",
|
| 54 |
+
"o": "response_header",
|
| 55 |
+
}
|
| 56 |
+
|
| 57 |
+
LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
|
| 58 |
+
FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)")
|
| 59 |
+
CLEANUP_RE = re.compile(r"(%[^s])")
|
| 60 |
+
_FORMAT_CACHE = {} # type: Dict[str, Tuple[str, List[KeyMethod]]]
|
| 61 |
+
|
| 62 |
+
def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None:
|
| 63 |
+
"""Initialise the logger.
|
| 64 |
+
|
| 65 |
+
logger is a logger object to be used for logging.
|
| 66 |
+
log_format is a string with apache compatible log format description.
|
| 67 |
+
|
| 68 |
+
"""
|
| 69 |
+
super().__init__(logger, log_format=log_format)
|
| 70 |
+
|
| 71 |
+
_compiled_format = AccessLogger._FORMAT_CACHE.get(log_format)
|
| 72 |
+
if not _compiled_format:
|
| 73 |
+
_compiled_format = self.compile_format(log_format)
|
| 74 |
+
AccessLogger._FORMAT_CACHE[log_format] = _compiled_format
|
| 75 |
+
|
| 76 |
+
self._log_format, self._methods = _compiled_format
|
| 77 |
+
|
| 78 |
+
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]:
|
| 79 |
+
"""Translate log_format into form usable by modulo formatting
|
| 80 |
+
|
| 81 |
+
All known atoms will be replaced with %s
|
| 82 |
+
Also methods for formatting of those atoms will be added to
|
| 83 |
+
_methods in appropriate order
|
| 84 |
+
|
| 85 |
+
For example we have log_format = "%a %t"
|
| 86 |
+
This format will be translated to "%s %s"
|
| 87 |
+
Also contents of _methods will be
|
| 88 |
+
[self._format_a, self._format_t]
|
| 89 |
+
These method will be called and results will be passed
|
| 90 |
+
to translated string format.
|
| 91 |
+
|
| 92 |
+
Each _format_* method receive 'args' which is list of arguments
|
| 93 |
+
given to self.log
|
| 94 |
+
|
| 95 |
+
Exceptions are _format_e, _format_i and _format_o methods which
|
| 96 |
+
also receive key name (by functools.partial)
|
| 97 |
+
|
| 98 |
+
"""
|
| 99 |
+
# list of (key, method) tuples, we don't use an OrderedDict as users
|
| 100 |
+
# can repeat the same key more than once
|
| 101 |
+
methods = list()
|
| 102 |
+
|
| 103 |
+
for atom in self.FORMAT_RE.findall(log_format):
|
| 104 |
+
if atom[1] == "":
|
| 105 |
+
format_key1 = self.LOG_FORMAT_MAP[atom[0]]
|
| 106 |
+
m = getattr(AccessLogger, "_format_%s" % atom[0])
|
| 107 |
+
key_method = KeyMethod(format_key1, m)
|
| 108 |
+
else:
|
| 109 |
+
format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1])
|
| 110 |
+
m = getattr(AccessLogger, "_format_%s" % atom[2])
|
| 111 |
+
key_method = KeyMethod(format_key2, functools.partial(m, atom[1]))
|
| 112 |
+
|
| 113 |
+
methods.append(key_method)
|
| 114 |
+
|
| 115 |
+
log_format = self.FORMAT_RE.sub(r"%s", log_format)
|
| 116 |
+
log_format = self.CLEANUP_RE.sub(r"%\1", log_format)
|
| 117 |
+
return log_format, methods
|
| 118 |
+
|
| 119 |
+
@staticmethod
|
| 120 |
+
def _format_i(
|
| 121 |
+
key: str, request: BaseRequest, response: StreamResponse, time: float
|
| 122 |
+
) -> str:
|
| 123 |
+
if request is None:
|
| 124 |
+
return "(no headers)"
|
| 125 |
+
|
| 126 |
+
# suboptimal, make istr(key) once
|
| 127 |
+
return request.headers.get(key, "-")
|
| 128 |
+
|
| 129 |
+
@staticmethod
|
| 130 |
+
def _format_o(
|
| 131 |
+
key: str, request: BaseRequest, response: StreamResponse, time: float
|
| 132 |
+
) -> str:
|
| 133 |
+
# suboptimal, make istr(key) once
|
| 134 |
+
return response.headers.get(key, "-")
|
| 135 |
+
|
| 136 |
+
@staticmethod
|
| 137 |
+
def _format_a(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 138 |
+
if request is None:
|
| 139 |
+
return "-"
|
| 140 |
+
ip = request.remote
|
| 141 |
+
return ip if ip is not None else "-"
|
| 142 |
+
|
| 143 |
+
@staticmethod
|
| 144 |
+
def _format_t(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 145 |
+
now = datetime.datetime.utcnow()
|
| 146 |
+
start_time = now - datetime.timedelta(seconds=time)
|
| 147 |
+
return start_time.strftime("[%d/%b/%Y:%H:%M:%S +0000]")
|
| 148 |
+
|
| 149 |
+
@staticmethod
|
| 150 |
+
def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 151 |
+
return "<%s>" % os.getpid()
|
| 152 |
+
|
| 153 |
+
@staticmethod
|
| 154 |
+
def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 155 |
+
if request is None:
|
| 156 |
+
return "-"
|
| 157 |
+
return "{} {} HTTP/{}.{}".format(
|
| 158 |
+
request.method,
|
| 159 |
+
request.path_qs,
|
| 160 |
+
request.version.major,
|
| 161 |
+
request.version.minor,
|
| 162 |
+
)
|
| 163 |
+
|
| 164 |
+
@staticmethod
|
| 165 |
+
def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int:
|
| 166 |
+
return response.status
|
| 167 |
+
|
| 168 |
+
@staticmethod
|
| 169 |
+
def _format_b(request: BaseRequest, response: StreamResponse, time: float) -> int:
|
| 170 |
+
return response.body_length
|
| 171 |
+
|
| 172 |
+
@staticmethod
|
| 173 |
+
def _format_T(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 174 |
+
return str(round(time))
|
| 175 |
+
|
| 176 |
+
@staticmethod
|
| 177 |
+
def _format_Tf(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 178 |
+
return "%06f" % time
|
| 179 |
+
|
| 180 |
+
@staticmethod
|
| 181 |
+
def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> str:
|
| 182 |
+
return str(round(time * 1000000))
|
| 183 |
+
|
| 184 |
+
def _format_line(
|
| 185 |
+
self, request: BaseRequest, response: StreamResponse, time: float
|
| 186 |
+
) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]:
|
| 187 |
+
return [(key, method(request, response, time)) for key, method in self._methods]
|
| 188 |
+
|
| 189 |
+
def log(self, request: BaseRequest, response: StreamResponse, time: float) -> None:
|
| 190 |
+
try:
|
| 191 |
+
fmt_info = self._format_line(request, response, time)
|
| 192 |
+
|
| 193 |
+
values = list()
|
| 194 |
+
extra = dict()
|
| 195 |
+
for key, value in fmt_info:
|
| 196 |
+
values.append(value)
|
| 197 |
+
|
| 198 |
+
if key.__class__ is str:
|
| 199 |
+
extra[key] = value
|
| 200 |
+
else:
|
| 201 |
+
k1, k2 = key # type: ignore[misc]
|
| 202 |
+
dct = extra.get(k1, {}) # type: ignore[var-annotated,has-type]
|
| 203 |
+
dct[k2] = value # type: ignore[index,has-type]
|
| 204 |
+
extra[k1] = dct # type: ignore[has-type,assignment]
|
| 205 |
+
|
| 206 |
+
self.logger.info(self._log_format % tuple(values), extra=extra)
|
| 207 |
+
except Exception:
|
| 208 |
+
self.logger.exception("Error in logging")
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_request.py
ADDED
|
@@ -0,0 +1,874 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import datetime
|
| 3 |
+
import io
|
| 4 |
+
import re
|
| 5 |
+
import socket
|
| 6 |
+
import string
|
| 7 |
+
import tempfile
|
| 8 |
+
import types
|
| 9 |
+
import warnings
|
| 10 |
+
from http.cookies import SimpleCookie
|
| 11 |
+
from types import MappingProxyType
|
| 12 |
+
from typing import (
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterator,
|
| 17 |
+
Mapping,
|
| 18 |
+
MutableMapping,
|
| 19 |
+
Optional,
|
| 20 |
+
Pattern,
|
| 21 |
+
Tuple,
|
| 22 |
+
Union,
|
| 23 |
+
cast,
|
| 24 |
+
)
|
| 25 |
+
from urllib.parse import parse_qsl
|
| 26 |
+
|
| 27 |
+
import attr
|
| 28 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
|
| 29 |
+
from yarl import URL
|
| 30 |
+
|
| 31 |
+
from . import hdrs
|
| 32 |
+
from .abc import AbstractStreamWriter
|
| 33 |
+
from .helpers import (
|
| 34 |
+
DEBUG,
|
| 35 |
+
ETAG_ANY,
|
| 36 |
+
LIST_QUOTED_ETAG_RE,
|
| 37 |
+
ChainMapProxy,
|
| 38 |
+
ETag,
|
| 39 |
+
HeadersMixin,
|
| 40 |
+
parse_http_date,
|
| 41 |
+
reify,
|
| 42 |
+
sentinel,
|
| 43 |
+
)
|
| 44 |
+
from .http_parser import RawRequestMessage
|
| 45 |
+
from .http_writer import HttpVersion
|
| 46 |
+
from .multipart import BodyPartReader, MultipartReader
|
| 47 |
+
from .streams import EmptyStreamReader, StreamReader
|
| 48 |
+
from .typedefs import (
|
| 49 |
+
DEFAULT_JSON_DECODER,
|
| 50 |
+
Final,
|
| 51 |
+
JSONDecoder,
|
| 52 |
+
LooseHeaders,
|
| 53 |
+
RawHeaders,
|
| 54 |
+
StrOrURL,
|
| 55 |
+
)
|
| 56 |
+
from .web_exceptions import HTTPRequestEntityTooLarge
|
| 57 |
+
from .web_response import StreamResponse
|
| 58 |
+
|
| 59 |
+
__all__ = ("BaseRequest", "FileField", "Request")
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 63 |
+
from .web_app import Application
|
| 64 |
+
from .web_protocol import RequestHandler
|
| 65 |
+
from .web_urldispatcher import UrlMappingMatchInfo
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class FileField:
    """One file uploaded through a multipart/form-data POST body."""

    name: str  # form field name
    filename: str  # file name as supplied by the client
    file: io.BufferedReader  # handle to the (spooled) file contents
    content_type: str  # Content-Type of the part
    headers: "CIMultiDictProxy[str]"  # full headers of the part
# Grammar fragments for parsing RFC 7239 "Forwarded" headers.

_TCHAR: Final[str] = string.digits + string.ascii_letters + r"!#$%&'*+.^_`|~-"
# '-' at the end to prevent interpretation as range in a char class

_TOKEN: Final[str] = fr"[{_TCHAR}]+"

_QDTEXT: Final[str] = r"[{}]".format(
    r"".join(chr(c) for c in (0x09, 0x20, 0x21) + tuple(range(0x23, 0x7F)))
)
# qdtext includes 0x5C to escape 0x5D ('\]')
# qdtext excludes obs-text (because obsoleted, and encoding not specified)

_QUOTED_PAIR: Final[str] = r"\\[\t !-~]"

_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format(
    qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR
)

# One "name=value" pair with an optional ":port" suffix.
_FORWARDED_PAIR: Final[
    str
] = r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format(
    token=_TOKEN, quoted_string=_QUOTED_STRING
)

# Same pattern as _QUOTED_PAIR but with a capture group, for unescaping.
_QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])")

_FORWARDED_PAIR_RE: Final[Pattern[str]] = re.compile(_FORWARDED_PAIR)
############################################################
|
| 106 |
+
# HTTP Request
|
| 107 |
+
############################################################
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class BaseRequest(MutableMapping[str, Any], HeadersMixin):
|
| 111 |
+
|
| 112 |
+
POST_METHODS = {
|
| 113 |
+
hdrs.METH_PATCH,
|
| 114 |
+
hdrs.METH_POST,
|
| 115 |
+
hdrs.METH_PUT,
|
| 116 |
+
hdrs.METH_TRACE,
|
| 117 |
+
hdrs.METH_DELETE,
|
| 118 |
+
}
|
| 119 |
+
|
| 120 |
+
ATTRS = HeadersMixin.ATTRS | frozenset(
|
| 121 |
+
[
|
| 122 |
+
"_message",
|
| 123 |
+
"_protocol",
|
| 124 |
+
"_payload_writer",
|
| 125 |
+
"_payload",
|
| 126 |
+
"_headers",
|
| 127 |
+
"_method",
|
| 128 |
+
"_version",
|
| 129 |
+
"_rel_url",
|
| 130 |
+
"_post",
|
| 131 |
+
"_read_bytes",
|
| 132 |
+
"_state",
|
| 133 |
+
"_cache",
|
| 134 |
+
"_task",
|
| 135 |
+
"_client_max_size",
|
| 136 |
+
"_loop",
|
| 137 |
+
"_transport_sslcontext",
|
| 138 |
+
"_transport_peername",
|
| 139 |
+
]
|
| 140 |
+
)
|
| 141 |
+
|
| 142 |
+
def __init__(
|
| 143 |
+
self,
|
| 144 |
+
message: RawRequestMessage,
|
| 145 |
+
payload: StreamReader,
|
| 146 |
+
protocol: "RequestHandler",
|
| 147 |
+
payload_writer: AbstractStreamWriter,
|
| 148 |
+
task: "asyncio.Task[None]",
|
| 149 |
+
loop: asyncio.AbstractEventLoop,
|
| 150 |
+
*,
|
| 151 |
+
client_max_size: int = 1024 ** 2,
|
| 152 |
+
state: Optional[Dict[str, Any]] = None,
|
| 153 |
+
scheme: Optional[str] = None,
|
| 154 |
+
host: Optional[str] = None,
|
| 155 |
+
remote: Optional[str] = None,
|
| 156 |
+
) -> None:
|
| 157 |
+
if state is None:
|
| 158 |
+
state = {}
|
| 159 |
+
self._message = message
|
| 160 |
+
self._protocol = protocol
|
| 161 |
+
self._payload_writer = payload_writer
|
| 162 |
+
|
| 163 |
+
self._payload = payload
|
| 164 |
+
self._headers = message.headers
|
| 165 |
+
self._method = message.method
|
| 166 |
+
self._version = message.version
|
| 167 |
+
self._rel_url = message.url
|
| 168 |
+
self._post = (
|
| 169 |
+
None
|
| 170 |
+
) # type: Optional[MultiDictProxy[Union[str, bytes, FileField]]]
|
| 171 |
+
self._read_bytes = None # type: Optional[bytes]
|
| 172 |
+
|
| 173 |
+
self._state = state
|
| 174 |
+
self._cache = {} # type: Dict[str, Any]
|
| 175 |
+
self._task = task
|
| 176 |
+
self._client_max_size = client_max_size
|
| 177 |
+
self._loop = loop
|
| 178 |
+
|
| 179 |
+
transport = self._protocol.transport
|
| 180 |
+
assert transport is not None
|
| 181 |
+
self._transport_sslcontext = transport.get_extra_info("sslcontext")
|
| 182 |
+
self._transport_peername = transport.get_extra_info("peername")
|
| 183 |
+
|
| 184 |
+
if scheme is not None:
|
| 185 |
+
self._cache["scheme"] = scheme
|
| 186 |
+
if host is not None:
|
| 187 |
+
self._cache["host"] = host
|
| 188 |
+
if remote is not None:
|
| 189 |
+
self._cache["remote"] = remote
|
| 190 |
+
|
| 191 |
+
def clone(
|
| 192 |
+
self,
|
| 193 |
+
*,
|
| 194 |
+
method: str = sentinel,
|
| 195 |
+
rel_url: StrOrURL = sentinel,
|
| 196 |
+
headers: LooseHeaders = sentinel,
|
| 197 |
+
scheme: str = sentinel,
|
| 198 |
+
host: str = sentinel,
|
| 199 |
+
remote: str = sentinel,
|
| 200 |
+
) -> "BaseRequest":
|
| 201 |
+
"""Clone itself with replacement some attributes.
|
| 202 |
+
|
| 203 |
+
Creates and returns a new instance of Request object. If no parameters
|
| 204 |
+
are given, an exact copy is returned. If a parameter is not passed, it
|
| 205 |
+
will reuse the one from the current request object.
|
| 206 |
+
"""
|
| 207 |
+
if self._read_bytes:
|
| 208 |
+
raise RuntimeError("Cannot clone request " "after reading its content")
|
| 209 |
+
|
| 210 |
+
dct = {} # type: Dict[str, Any]
|
| 211 |
+
if method is not sentinel:
|
| 212 |
+
dct["method"] = method
|
| 213 |
+
if rel_url is not sentinel:
|
| 214 |
+
new_url = URL(rel_url)
|
| 215 |
+
dct["url"] = new_url
|
| 216 |
+
dct["path"] = str(new_url)
|
| 217 |
+
if headers is not sentinel:
|
| 218 |
+
# a copy semantic
|
| 219 |
+
dct["headers"] = CIMultiDictProxy(CIMultiDict(headers))
|
| 220 |
+
dct["raw_headers"] = tuple(
|
| 221 |
+
(k.encode("utf-8"), v.encode("utf-8")) for k, v in headers.items()
|
| 222 |
+
)
|
| 223 |
+
|
| 224 |
+
message = self._message._replace(**dct)
|
| 225 |
+
|
| 226 |
+
kwargs = {}
|
| 227 |
+
if scheme is not sentinel:
|
| 228 |
+
kwargs["scheme"] = scheme
|
| 229 |
+
if host is not sentinel:
|
| 230 |
+
kwargs["host"] = host
|
| 231 |
+
if remote is not sentinel:
|
| 232 |
+
kwargs["remote"] = remote
|
| 233 |
+
|
| 234 |
+
return self.__class__(
|
| 235 |
+
message,
|
| 236 |
+
self._payload,
|
| 237 |
+
self._protocol,
|
| 238 |
+
self._payload_writer,
|
| 239 |
+
self._task,
|
| 240 |
+
self._loop,
|
| 241 |
+
client_max_size=self._client_max_size,
|
| 242 |
+
state=self._state.copy(),
|
| 243 |
+
**kwargs,
|
| 244 |
+
)
|
| 245 |
+
|
| 246 |
+
@property
|
| 247 |
+
def task(self) -> "asyncio.Task[None]":
|
| 248 |
+
return self._task
|
| 249 |
+
|
| 250 |
+
@property
|
| 251 |
+
def protocol(self) -> "RequestHandler":
|
| 252 |
+
return self._protocol
|
| 253 |
+
|
| 254 |
+
@property
|
| 255 |
+
def transport(self) -> Optional[asyncio.Transport]:
|
| 256 |
+
if self._protocol is None:
|
| 257 |
+
return None
|
| 258 |
+
return self._protocol.transport
|
| 259 |
+
|
| 260 |
+
@property
|
| 261 |
+
def writer(self) -> AbstractStreamWriter:
|
| 262 |
+
return self._payload_writer
|
| 263 |
+
|
| 264 |
+
@reify
|
| 265 |
+
def message(self) -> RawRequestMessage:
|
| 266 |
+
warnings.warn("Request.message is deprecated", DeprecationWarning, stacklevel=3)
|
| 267 |
+
return self._message
|
| 268 |
+
|
| 269 |
+
@reify
|
| 270 |
+
def rel_url(self) -> URL:
|
| 271 |
+
return self._rel_url
|
| 272 |
+
|
| 273 |
+
@reify
|
| 274 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
| 275 |
+
warnings.warn(
|
| 276 |
+
"request.loop property is deprecated", DeprecationWarning, stacklevel=2
|
| 277 |
+
)
|
| 278 |
+
return self._loop
|
| 279 |
+
|
| 280 |
+
# MutableMapping API
|
| 281 |
+
|
| 282 |
+
def __getitem__(self, key: str) -> Any:
|
| 283 |
+
return self._state[key]
|
| 284 |
+
|
| 285 |
+
def __setitem__(self, key: str, value: Any) -> None:
|
| 286 |
+
self._state[key] = value
|
| 287 |
+
|
| 288 |
+
def __delitem__(self, key: str) -> None:
|
| 289 |
+
del self._state[key]
|
| 290 |
+
|
| 291 |
+
def __len__(self) -> int:
|
| 292 |
+
return len(self._state)
|
| 293 |
+
|
| 294 |
+
def __iter__(self) -> Iterator[str]:
|
| 295 |
+
return iter(self._state)
|
| 296 |
+
|
| 297 |
+
########
|
| 298 |
+
|
| 299 |
+
@reify
|
| 300 |
+
def secure(self) -> bool:
|
| 301 |
+
"""A bool indicating if the request is handled with SSL."""
|
| 302 |
+
return self.scheme == "https"
|
| 303 |
+
|
| 304 |
+
@reify
|
| 305 |
+
def forwarded(self) -> Tuple[Mapping[str, str], ...]:
|
| 306 |
+
"""A tuple containing all parsed Forwarded header(s).
|
| 307 |
+
|
| 308 |
+
Makes an effort to parse Forwarded headers as specified by RFC 7239:
|
| 309 |
+
|
| 310 |
+
- It adds one (immutable) dictionary per Forwarded 'field-value', ie
|
| 311 |
+
per proxy. The element corresponds to the data in the Forwarded
|
| 312 |
+
field-value added by the first proxy encountered by the client. Each
|
| 313 |
+
subsequent item corresponds to those added by later proxies.
|
| 314 |
+
- It checks that every value has valid syntax in general as specified
|
| 315 |
+
in section 4: either a 'token' or a 'quoted-string'.
|
| 316 |
+
- It un-escapes found escape sequences.
|
| 317 |
+
- It does NOT validate 'by' and 'for' contents as specified in section
|
| 318 |
+
6.
|
| 319 |
+
- It does NOT validate 'host' contents (Host ABNF).
|
| 320 |
+
- It does NOT validate 'proto' contents for valid URI scheme names.
|
| 321 |
+
|
| 322 |
+
Returns a tuple containing one or more immutable dicts
|
| 323 |
+
"""
|
| 324 |
+
elems = []
|
| 325 |
+
for field_value in self._message.headers.getall(hdrs.FORWARDED, ()):
|
| 326 |
+
length = len(field_value)
|
| 327 |
+
pos = 0
|
| 328 |
+
need_separator = False
|
| 329 |
+
elem = {} # type: Dict[str, str]
|
| 330 |
+
elems.append(types.MappingProxyType(elem))
|
| 331 |
+
while 0 <= pos < length:
|
| 332 |
+
match = _FORWARDED_PAIR_RE.match(field_value, pos)
|
| 333 |
+
if match is not None: # got a valid forwarded-pair
|
| 334 |
+
if need_separator:
|
| 335 |
+
# bad syntax here, skip to next comma
|
| 336 |
+
pos = field_value.find(",", pos)
|
| 337 |
+
else:
|
| 338 |
+
name, value, port = match.groups()
|
| 339 |
+
if value[0] == '"':
|
| 340 |
+
# quoted string: remove quotes and unescape
|
| 341 |
+
value = _QUOTED_PAIR_REPLACE_RE.sub(r"\1", value[1:-1])
|
| 342 |
+
if port:
|
| 343 |
+
value += port
|
| 344 |
+
elem[name.lower()] = value
|
| 345 |
+
pos += len(match.group(0))
|
| 346 |
+
need_separator = True
|
| 347 |
+
elif field_value[pos] == ",": # next forwarded-element
|
| 348 |
+
need_separator = False
|
| 349 |
+
elem = {}
|
| 350 |
+
elems.append(types.MappingProxyType(elem))
|
| 351 |
+
pos += 1
|
| 352 |
+
elif field_value[pos] == ";": # next forwarded-pair
|
| 353 |
+
need_separator = False
|
| 354 |
+
pos += 1
|
| 355 |
+
elif field_value[pos] in " \t":
|
| 356 |
+
# Allow whitespace even between forwarded-pairs, though
|
| 357 |
+
# RFC 7239 doesn't. This simplifies code and is in line
|
| 358 |
+
# with Postel's law.
|
| 359 |
+
pos += 1
|
| 360 |
+
else:
|
| 361 |
+
# bad syntax here, skip to next comma
|
| 362 |
+
pos = field_value.find(",", pos)
|
| 363 |
+
return tuple(elems)
|
| 364 |
+
|
| 365 |
+
@reify
|
| 366 |
+
def scheme(self) -> str:
|
| 367 |
+
"""A string representing the scheme of the request.
|
| 368 |
+
|
| 369 |
+
Hostname is resolved in this order:
|
| 370 |
+
|
| 371 |
+
- overridden value by .clone(scheme=new_scheme) call.
|
| 372 |
+
- type of connection to peer: HTTPS if socket is SSL, HTTP otherwise.
|
| 373 |
+
|
| 374 |
+
'http' or 'https'.
|
| 375 |
+
"""
|
| 376 |
+
if self._transport_sslcontext:
|
| 377 |
+
return "https"
|
| 378 |
+
else:
|
| 379 |
+
return "http"
|
| 380 |
+
|
| 381 |
+
@reify
|
| 382 |
+
def method(self) -> str:
|
| 383 |
+
"""Read only property for getting HTTP method.
|
| 384 |
+
|
| 385 |
+
The value is upper-cased str like 'GET', 'POST', 'PUT' etc.
|
| 386 |
+
"""
|
| 387 |
+
return self._method
|
| 388 |
+
|
| 389 |
+
@reify
|
| 390 |
+
def version(self) -> HttpVersion:
|
| 391 |
+
"""Read only property for getting HTTP version of request.
|
| 392 |
+
|
| 393 |
+
Returns aiohttp.protocol.HttpVersion instance.
|
| 394 |
+
"""
|
| 395 |
+
return self._version
|
| 396 |
+
|
| 397 |
+
@reify
|
| 398 |
+
def host(self) -> str:
|
| 399 |
+
"""Hostname of the request.
|
| 400 |
+
|
| 401 |
+
Hostname is resolved in this order:
|
| 402 |
+
|
| 403 |
+
- overridden value by .clone(host=new_host) call.
|
| 404 |
+
- HOST HTTP header
|
| 405 |
+
- socket.getfqdn() value
|
| 406 |
+
"""
|
| 407 |
+
host = self._message.headers.get(hdrs.HOST)
|
| 408 |
+
if host is not None:
|
| 409 |
+
return host
|
| 410 |
+
return socket.getfqdn()
|
| 411 |
+
|
| 412 |
+
@reify
|
| 413 |
+
def remote(self) -> Optional[str]:
|
| 414 |
+
"""Remote IP of client initiated HTTP request.
|
| 415 |
+
|
| 416 |
+
The IP is resolved in this order:
|
| 417 |
+
|
| 418 |
+
- overridden value by .clone(remote=new_remote) call.
|
| 419 |
+
- peername of opened socket
|
| 420 |
+
"""
|
| 421 |
+
if self._transport_peername is None:
|
| 422 |
+
return None
|
| 423 |
+
if isinstance(self._transport_peername, (list, tuple)):
|
| 424 |
+
return str(self._transport_peername[0])
|
| 425 |
+
return str(self._transport_peername)
|
| 426 |
+
|
| 427 |
+
@reify
|
| 428 |
+
def url(self) -> URL:
|
| 429 |
+
url = URL.build(scheme=self.scheme, host=self.host)
|
| 430 |
+
return url.join(self._rel_url)
|
| 431 |
+
|
| 432 |
+
@reify
|
| 433 |
+
def path(self) -> str:
|
| 434 |
+
"""The URL including *PATH INFO* without the host or scheme.
|
| 435 |
+
|
| 436 |
+
E.g., ``/app/blog``
|
| 437 |
+
"""
|
| 438 |
+
return self._rel_url.path
|
| 439 |
+
|
| 440 |
+
@reify
|
| 441 |
+
def path_qs(self) -> str:
|
| 442 |
+
"""The URL including PATH_INFO and the query string.
|
| 443 |
+
|
| 444 |
+
E.g, /app/blog?id=10
|
| 445 |
+
"""
|
| 446 |
+
return str(self._rel_url)
|
| 447 |
+
|
| 448 |
+
@reify
|
| 449 |
+
def raw_path(self) -> str:
|
| 450 |
+
"""The URL including raw *PATH INFO* without the host or scheme.
|
| 451 |
+
|
| 452 |
+
Warning, the path is unquoted and may contains non valid URL characters
|
| 453 |
+
|
| 454 |
+
E.g., ``/my%2Fpath%7Cwith%21some%25strange%24characters``
|
| 455 |
+
"""
|
| 456 |
+
return self._message.path
|
| 457 |
+
|
| 458 |
+
@reify
|
| 459 |
+
def query(self) -> "MultiDictProxy[str]":
|
| 460 |
+
"""A multidict with all the variables in the query string."""
|
| 461 |
+
return MultiDictProxy(self._rel_url.query)
|
| 462 |
+
|
| 463 |
+
@reify
|
| 464 |
+
def query_string(self) -> str:
|
| 465 |
+
"""The query string in the URL.
|
| 466 |
+
|
| 467 |
+
E.g., id=10
|
| 468 |
+
"""
|
| 469 |
+
return self._rel_url.query_string
|
| 470 |
+
|
| 471 |
+
@reify
|
| 472 |
+
def headers(self) -> "CIMultiDictProxy[str]":
|
| 473 |
+
"""A case-insensitive multidict proxy with all headers."""
|
| 474 |
+
return self._headers
|
| 475 |
+
|
| 476 |
+
@reify
|
| 477 |
+
def raw_headers(self) -> RawHeaders:
|
| 478 |
+
"""A sequence of pairs for all headers."""
|
| 479 |
+
return self._message.raw_headers
|
| 480 |
+
|
| 481 |
+
@reify
|
| 482 |
+
def if_modified_since(self) -> Optional[datetime.datetime]:
|
| 483 |
+
"""The value of If-Modified-Since HTTP header, or None.
|
| 484 |
+
|
| 485 |
+
This header is represented as a `datetime` object.
|
| 486 |
+
"""
|
| 487 |
+
return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
|
| 488 |
+
|
| 489 |
+
@reify
|
| 490 |
+
def if_unmodified_since(self) -> Optional[datetime.datetime]:
|
| 491 |
+
"""The value of If-Unmodified-Since HTTP header, or None.
|
| 492 |
+
|
| 493 |
+
This header is represented as a `datetime` object.
|
| 494 |
+
"""
|
| 495 |
+
return parse_http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
|
| 496 |
+
|
| 497 |
+
@staticmethod
|
| 498 |
+
def _etag_values(etag_header: str) -> Iterator[ETag]:
|
| 499 |
+
"""Extract `ETag` objects from raw header."""
|
| 500 |
+
if etag_header == ETAG_ANY:
|
| 501 |
+
yield ETag(
|
| 502 |
+
is_weak=False,
|
| 503 |
+
value=ETAG_ANY,
|
| 504 |
+
)
|
| 505 |
+
else:
|
| 506 |
+
for match in LIST_QUOTED_ETAG_RE.finditer(etag_header):
|
| 507 |
+
is_weak, value, garbage = match.group(2, 3, 4)
|
| 508 |
+
# Any symbol captured by 4th group means
|
| 509 |
+
# that the following sequence is invalid.
|
| 510 |
+
if garbage:
|
| 511 |
+
break
|
| 512 |
+
|
| 513 |
+
yield ETag(
|
| 514 |
+
is_weak=bool(is_weak),
|
| 515 |
+
value=value,
|
| 516 |
+
)
|
| 517 |
+
|
| 518 |
+
@classmethod
|
| 519 |
+
def _if_match_or_none_impl(
|
| 520 |
+
cls, header_value: Optional[str]
|
| 521 |
+
) -> Optional[Tuple[ETag, ...]]:
|
| 522 |
+
if not header_value:
|
| 523 |
+
return None
|
| 524 |
+
|
| 525 |
+
return tuple(cls._etag_values(header_value))
|
| 526 |
+
|
| 527 |
+
@reify
|
| 528 |
+
def if_match(self) -> Optional[Tuple[ETag, ...]]:
|
| 529 |
+
"""The value of If-Match HTTP header, or None.
|
| 530 |
+
|
| 531 |
+
This header is represented as a `tuple` of `ETag` objects.
|
| 532 |
+
"""
|
| 533 |
+
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH))
|
| 534 |
+
|
| 535 |
+
@reify
|
| 536 |
+
def if_none_match(self) -> Optional[Tuple[ETag, ...]]:
|
| 537 |
+
"""The value of If-None-Match HTTP header, or None.
|
| 538 |
+
|
| 539 |
+
This header is represented as a `tuple` of `ETag` objects.
|
| 540 |
+
"""
|
| 541 |
+
return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH))
|
| 542 |
+
|
| 543 |
+
@reify
|
| 544 |
+
def if_range(self) -> Optional[datetime.datetime]:
|
| 545 |
+
"""The value of If-Range HTTP header, or None.
|
| 546 |
+
|
| 547 |
+
This header is represented as a `datetime` object.
|
| 548 |
+
"""
|
| 549 |
+
return parse_http_date(self.headers.get(hdrs.IF_RANGE))
|
| 550 |
+
|
| 551 |
+
@reify
|
| 552 |
+
def keep_alive(self) -> bool:
|
| 553 |
+
"""Is keepalive enabled by client?"""
|
| 554 |
+
return not self._message.should_close
|
| 555 |
+
|
| 556 |
+
@reify
|
| 557 |
+
def cookies(self) -> Mapping[str, str]:
|
| 558 |
+
"""Return request cookies.
|
| 559 |
+
|
| 560 |
+
A read-only dictionary-like object.
|
| 561 |
+
"""
|
| 562 |
+
raw = self.headers.get(hdrs.COOKIE, "")
|
| 563 |
+
parsed = SimpleCookie(raw) # type: SimpleCookie[str]
|
| 564 |
+
return MappingProxyType({key: val.value for key, val in parsed.items()})
|
| 565 |
+
|
| 566 |
+
@reify
|
| 567 |
+
def http_range(self) -> slice:
|
| 568 |
+
"""The content of Range HTTP header.
|
| 569 |
+
|
| 570 |
+
Return a slice instance.
|
| 571 |
+
|
| 572 |
+
"""
|
| 573 |
+
rng = self._headers.get(hdrs.RANGE)
|
| 574 |
+
start, end = None, None
|
| 575 |
+
if rng is not None:
|
| 576 |
+
try:
|
| 577 |
+
pattern = r"^bytes=(\d*)-(\d*)$"
|
| 578 |
+
start, end = re.findall(pattern, rng)[0]
|
| 579 |
+
except IndexError: # pattern was not found in header
|
| 580 |
+
raise ValueError("range not in acceptable format")
|
| 581 |
+
|
| 582 |
+
end = int(end) if end else None
|
| 583 |
+
start = int(start) if start else None
|
| 584 |
+
|
| 585 |
+
if start is None and end is not None:
|
| 586 |
+
# end with no start is to return tail of content
|
| 587 |
+
start = -end
|
| 588 |
+
end = None
|
| 589 |
+
|
| 590 |
+
if start is not None and end is not None:
|
| 591 |
+
# end is inclusive in range header, exclusive for slice
|
| 592 |
+
end += 1
|
| 593 |
+
|
| 594 |
+
if start >= end:
|
| 595 |
+
raise ValueError("start cannot be after end")
|
| 596 |
+
|
| 597 |
+
if start is end is None: # No valid range supplied
|
| 598 |
+
raise ValueError("No start or end of range specified")
|
| 599 |
+
|
| 600 |
+
return slice(start, end, 1)
|
| 601 |
+
|
| 602 |
+
@reify
|
| 603 |
+
def content(self) -> StreamReader:
|
| 604 |
+
"""Return raw payload stream."""
|
| 605 |
+
return self._payload
|
| 606 |
+
|
| 607 |
+
@property
|
| 608 |
+
def has_body(self) -> bool:
|
| 609 |
+
"""Return True if request's HTTP BODY can be read, False otherwise."""
|
| 610 |
+
warnings.warn(
|
| 611 |
+
"Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2
|
| 612 |
+
)
|
| 613 |
+
return not self._payload.at_eof()
|
| 614 |
+
|
| 615 |
+
@property
|
| 616 |
+
def can_read_body(self) -> bool:
|
| 617 |
+
"""Return True if request's HTTP BODY can be read, False otherwise."""
|
| 618 |
+
return not self._payload.at_eof()
|
| 619 |
+
|
| 620 |
+
@reify
|
| 621 |
+
def body_exists(self) -> bool:
|
| 622 |
+
"""Return True if request has HTTP BODY, False otherwise."""
|
| 623 |
+
return type(self._payload) is not EmptyStreamReader
|
| 624 |
+
|
| 625 |
+
async def release(self) -> None:
|
| 626 |
+
"""Release request.
|
| 627 |
+
|
| 628 |
+
Eat unread part of HTTP BODY if present.
|
| 629 |
+
"""
|
| 630 |
+
while not self._payload.at_eof():
|
| 631 |
+
await self._payload.readany()
|
| 632 |
+
|
| 633 |
+
    async def read(self) -> bytes:
        """Read request body if present.

        Returns bytes object with full request content.

        Raises:
            HTTPRequestEntityTooLarge: when the accumulated body reaches
                the configured client_max_size limit.
        """
        # Cache the body so repeated calls do not re-read the stream.
        if self._read_bytes is None:
            body = bytearray()
            while True:
                chunk = await self._payload.readany()
                body.extend(chunk)
                # Enforce the size limit while streaming so an oversized
                # upload is rejected before being fully buffered.
                if self._client_max_size:
                    body_size = len(body)
                    if body_size >= self._client_max_size:
                        raise HTTPRequestEntityTooLarge(
                            max_size=self._client_max_size, actual_size=body_size
                        )
                if not chunk:
                    break
            self._read_bytes = bytes(body)
        return self._read_bytes
|
| 653 |
+
|
| 654 |
+
async def text(self) -> str:
|
| 655 |
+
"""Return BODY as text using encoding from .charset."""
|
| 656 |
+
bytes_body = await self.read()
|
| 657 |
+
encoding = self.charset or "utf-8"
|
| 658 |
+
return bytes_body.decode(encoding)
|
| 659 |
+
|
| 660 |
+
async def json(self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER) -> Any:
|
| 661 |
+
"""Return BODY as JSON."""
|
| 662 |
+
body = await self.text()
|
| 663 |
+
return loads(body)
|
| 664 |
+
|
| 665 |
+
async def multipart(self) -> MultipartReader:
|
| 666 |
+
"""Return async iterator to process BODY as multipart."""
|
| 667 |
+
return MultipartReader(self._headers, self._payload)
|
| 668 |
+
|
| 669 |
+
    async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]":
        """Return POST parameters.

        The parsed result is cached on the request; non-POST methods and
        unsupported content types yield an empty proxy.
        """
        if self._post is not None:
            return self._post
        if self._method not in self.POST_METHODS:
            self._post = MultiDictProxy(MultiDict())
            return self._post

        content_type = self.content_type
        if content_type not in (
            "",
            "application/x-www-form-urlencoded",
            "multipart/form-data",
        ):
            self._post = MultiDictProxy(MultiDict())
            return self._post

        out = MultiDict()  # type: MultiDict[Union[str, bytes, FileField]]

        if content_type == "multipart/form-data":
            multipart = await self.multipart()
            max_size = self._client_max_size

            field = await multipart.next()
            while field is not None:
                size = 0
                field_ct = field.headers.get(hdrs.CONTENT_TYPE)

                if isinstance(field, BodyPartReader):
                    assert field.name is not None

                    # Note that according to RFC 7578, the Content-Type header
                    # is optional, even for files, so we can't assume it's
                    # present.
                    # https://tools.ietf.org/html/rfc7578#section-4.4
                    if field.filename:
                        # store file in temp file
                        tmp = tempfile.TemporaryFile()
                        chunk = await field.read_chunk(size=2 ** 16)
                        while chunk:
                            chunk = field.decode(chunk)
                            tmp.write(chunk)
                            size += len(chunk)
                            # Size limit is checked per chunk so large
                            # uploads abort early; close tmp on failure.
                            if 0 < max_size < size:
                                tmp.close()
                                raise HTTPRequestEntityTooLarge(
                                    max_size=max_size, actual_size=size
                                )
                            chunk = await field.read_chunk(size=2 ** 16)
                        tmp.seek(0)

                        if field_ct is None:
                            field_ct = "application/octet-stream"

                        ff = FileField(
                            field.name,
                            field.filename,
                            cast(io.BufferedReader, tmp),
                            field_ct,
                            field.headers,
                        )
                        out.add(field.name, ff)
                    else:
                        # deal with ordinary data
                        value = await field.read(decode=True)
                        # text/* (or missing type) is decoded to str,
                        # everything else stays as bytes.
                        if field_ct is None or field_ct.startswith("text/"):
                            charset = field.get_charset(default="utf-8")
                            out.add(field.name, value.decode(charset))
                        else:
                            out.add(field.name, value)
                        size += len(value)
                        if 0 < max_size < size:
                            raise HTTPRequestEntityTooLarge(
                                max_size=max_size, actual_size=size
                            )
                else:
                    raise ValueError(
                        "To decode nested multipart you need " "to use custom reader",
                    )

                field = await multipart.next()
        else:
            # urlencoded (or empty content type) body.
            data = await self.read()
            if data:
                charset = self.charset or "utf-8"
                out.extend(
                    parse_qsl(
                        data.rstrip().decode(charset),
                        keep_blank_values=True,
                        encoding=charset,
                    )
                )

        self._post = MultiDictProxy(out)
        return self._post
|
| 764 |
+
|
| 765 |
+
def get_extra_info(self, name: str, default: Any = None) -> Any:
|
| 766 |
+
"""Extra info from protocol transport"""
|
| 767 |
+
protocol = self._protocol
|
| 768 |
+
if protocol is None:
|
| 769 |
+
return default
|
| 770 |
+
|
| 771 |
+
transport = protocol.transport
|
| 772 |
+
if transport is None:
|
| 773 |
+
return default
|
| 774 |
+
|
| 775 |
+
return transport.get_extra_info(name, default)
|
| 776 |
+
|
| 777 |
+
def __repr__(self) -> str:
|
| 778 |
+
ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode(
|
| 779 |
+
"ascii"
|
| 780 |
+
)
|
| 781 |
+
return "<{} {} {} >".format(
|
| 782 |
+
self.__class__.__name__, self._method, ascii_encodable_path
|
| 783 |
+
)
|
| 784 |
+
|
| 785 |
+
def __eq__(self, other: object) -> bool:
|
| 786 |
+
return id(self) == id(other)
|
| 787 |
+
|
| 788 |
+
def __bool__(self) -> bool:
|
| 789 |
+
return True
|
| 790 |
+
|
| 791 |
+
    async def _prepare_hook(self, response: StreamResponse) -> None:
        # Called just before response headers are written.  The base
        # request has no application signals to fire, so this is a no-op;
        # Request overrides it to dispatch on_response_prepare.
        return
|
| 793 |
+
|
| 794 |
+
def _cancel(self, exc: BaseException) -> None:
|
| 795 |
+
self._payload.set_exception(exc)
|
| 796 |
+
|
| 797 |
+
|
| 798 |
+
class Request(BaseRequest):
    """Web request bound to a resolved route (adds match_info / app)."""

    # _match_info is the only attribute added on top of BaseRequest.
    ATTRS = BaseRequest.ATTRS | frozenset(["_match_info"])

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)

        # matchdict, route_name, handler
        # or information about traversal lookup

        # initialized after route resolving
        self._match_info = None  # type: Optional[UrlMappingMatchInfo]

    if DEBUG:
        # In debug mode, warn about ad-hoc attributes that are not part
        # of the declared ATTRS set (slots-like discipline).
        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom {}.{} attribute "
                    "is discouraged".format(self.__class__.__name__, name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)

    def clone(
        self,
        *,
        method: str = sentinel,
        rel_url: StrOrURL = sentinel,
        headers: LooseHeaders = sentinel,
        scheme: str = sentinel,
        host: str = sentinel,
        remote: str = sentinel,
    ) -> "Request":
        """Clone the request, carrying over the resolved match_info."""
        ret = super().clone(
            method=method,
            rel_url=rel_url,
            headers=headers,
            scheme=scheme,
            host=host,
            remote=remote,
        )
        new_ret = cast(Request, ret)
        new_ret._match_info = self._match_info
        return new_ret

    @reify
    def match_info(self) -> "UrlMappingMatchInfo":
        """Result of route resolving."""
        match_info = self._match_info
        assert match_info is not None
        return match_info

    @property
    def app(self) -> "Application":
        """Application instance."""
        match_info = self._match_info
        assert match_info is not None
        return match_info.current_app

    @property
    def config_dict(self) -> ChainMapProxy:
        # Layered view of app configs from the current sub-app up to
        # (and including) its ancestors, nearest first.
        match_info = self._match_info
        assert match_info is not None
        lst = match_info.apps
        app = self.app
        idx = lst.index(app)
        sublist = list(reversed(lst[: idx + 1]))
        return ChainMapProxy(sublist)

    async def _prepare_hook(self, response: StreamResponse) -> None:
        # Fire each application's on_response_prepare signal before the
        # response headers go out; no-op when the route is unresolved.
        match_info = self._match_info
        if match_info is None:
            return
        for app in match_info._apps:
            await app.on_response_prepare.send(self, response)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_response.py
ADDED
|
@@ -0,0 +1,825 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections.abc
|
| 3 |
+
import datetime
|
| 4 |
+
import enum
|
| 5 |
+
import json
|
| 6 |
+
import math
|
| 7 |
+
import time
|
| 8 |
+
import warnings
|
| 9 |
+
import zlib
|
| 10 |
+
from concurrent.futures import Executor
|
| 11 |
+
from http.cookies import Morsel, SimpleCookie
|
| 12 |
+
from typing import (
|
| 13 |
+
TYPE_CHECKING,
|
| 14 |
+
Any,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterator,
|
| 17 |
+
Mapping,
|
| 18 |
+
MutableMapping,
|
| 19 |
+
Optional,
|
| 20 |
+
Tuple,
|
| 21 |
+
Union,
|
| 22 |
+
cast,
|
| 23 |
+
)
|
| 24 |
+
|
| 25 |
+
from multidict import CIMultiDict, istr
|
| 26 |
+
|
| 27 |
+
from . import hdrs, payload
|
| 28 |
+
from .abc import AbstractStreamWriter
|
| 29 |
+
from .helpers import (
|
| 30 |
+
ETAG_ANY,
|
| 31 |
+
PY_38,
|
| 32 |
+
QUOTED_ETAG_RE,
|
| 33 |
+
ETag,
|
| 34 |
+
HeadersMixin,
|
| 35 |
+
parse_http_date,
|
| 36 |
+
rfc822_formatted_time,
|
| 37 |
+
sentinel,
|
| 38 |
+
validate_etag_value,
|
| 39 |
+
)
|
| 40 |
+
from .http import RESPONSES, SERVER_SOFTWARE, HttpVersion10, HttpVersion11
|
| 41 |
+
from .payload import Payload
|
| 42 |
+
from .typedefs import JSONEncoder, LooseHeaders
|
| 43 |
+
|
| 44 |
+
__all__ = ("ContentCoding", "StreamResponse", "Response", "json_response")
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
if TYPE_CHECKING:  # pragma: no cover
    from .web_request import BaseRequest

    # For type checkers StreamResponse is a MutableMapping[str, Any];
    # at runtime we only need the abstract base class.
    BaseClass = MutableMapping[str, Any]
else:
    BaseClass = collections.abc.MutableMapping
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
if not PY_38:
    # allow samesite to be used in python < 3.8
    # already permitted in python 3.8, see https://bugs.python.org/issue29613
    # NOTE: reaches into http.cookies internals; _reserved maps cookie
    # attribute keys to their serialized names.
    Morsel._reserved["samesite"] = "SameSite"  # type: ignore[attr-defined]
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
class ContentCoding(enum.Enum):
    """Content codings supported for response compression."""

    # The content codings that we have support for.
    #
    # Additional registered codings are listed at:
    # https://www.iana.org/assignments/http-parameters/http-parameters.xhtml#content-coding
    deflate = "deflate"
    gzip = "gzip"
    identity = "identity"  # no transformation
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
############################################################
|
| 72 |
+
# HTTP Response classes
|
| 73 |
+
############################################################
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class StreamResponse(BaseClass, HeadersMixin):
|
| 77 |
+
|
| 78 |
+
_length_check = True
|
| 79 |
+
|
| 80 |
+
    def __init__(
        self,
        *,
        status: int = 200,
        reason: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        """Initialize a streaming response.

        :param status: HTTP status code (default 200).
        :param reason: reason phrase; derived from *status* when None.
        :param headers: optional initial headers.
        """
        self._body = None
        self._keep_alive = None  # type: Optional[bool]  # None = follow request
        self._chunked = False
        self._compression = False
        self._compression_force = None  # type: Optional[ContentCoding]
        self._cookies = SimpleCookie()  # type: SimpleCookie[str]

        # Filled in by _start() during prepare().
        self._req = None  # type: Optional[BaseRequest]
        self._payload_writer = None  # type: Optional[AbstractStreamWriter]
        self._eof_sent = False
        self._body_length = 0
        # Backing store for the MutableMapping interface.
        self._state = {}  # type: Dict[str, Any]

        if headers is not None:
            self._headers = CIMultiDict(headers)  # type: CIMultiDict[str]
        else:
            self._headers = CIMultiDict()

        self.set_status(status, reason)
|
| 106 |
+
|
| 107 |
+
@property
|
| 108 |
+
def prepared(self) -> bool:
|
| 109 |
+
return self._payload_writer is not None
|
| 110 |
+
|
| 111 |
+
@property
|
| 112 |
+
def task(self) -> "Optional[asyncio.Task[None]]":
|
| 113 |
+
if self._req:
|
| 114 |
+
return self._req.task
|
| 115 |
+
else:
|
| 116 |
+
return None
|
| 117 |
+
|
| 118 |
+
@property
|
| 119 |
+
def status(self) -> int:
|
| 120 |
+
return self._status
|
| 121 |
+
|
| 122 |
+
@property
|
| 123 |
+
def chunked(self) -> bool:
|
| 124 |
+
return self._chunked
|
| 125 |
+
|
| 126 |
+
@property
|
| 127 |
+
def compression(self) -> bool:
|
| 128 |
+
return self._compression
|
| 129 |
+
|
| 130 |
+
@property
|
| 131 |
+
def reason(self) -> str:
|
| 132 |
+
return self._reason
|
| 133 |
+
|
| 134 |
+
def set_status(
|
| 135 |
+
self,
|
| 136 |
+
status: int,
|
| 137 |
+
reason: Optional[str] = None,
|
| 138 |
+
_RESPONSES: Mapping[int, Tuple[str, str]] = RESPONSES,
|
| 139 |
+
) -> None:
|
| 140 |
+
assert not self.prepared, (
|
| 141 |
+
"Cannot change the response status code after " "the headers have been sent"
|
| 142 |
+
)
|
| 143 |
+
self._status = int(status)
|
| 144 |
+
if reason is None:
|
| 145 |
+
try:
|
| 146 |
+
reason = _RESPONSES[self._status][0]
|
| 147 |
+
except Exception:
|
| 148 |
+
reason = ""
|
| 149 |
+
self._reason = reason
|
| 150 |
+
|
| 151 |
+
@property
|
| 152 |
+
def keep_alive(self) -> Optional[bool]:
|
| 153 |
+
return self._keep_alive
|
| 154 |
+
|
| 155 |
+
def force_close(self) -> None:
|
| 156 |
+
self._keep_alive = False
|
| 157 |
+
|
| 158 |
+
@property
|
| 159 |
+
def body_length(self) -> int:
|
| 160 |
+
return self._body_length
|
| 161 |
+
|
| 162 |
+
@property
|
| 163 |
+
def output_length(self) -> int:
|
| 164 |
+
warnings.warn("output_length is deprecated", DeprecationWarning)
|
| 165 |
+
assert self._payload_writer
|
| 166 |
+
return self._payload_writer.buffer_size
|
| 167 |
+
|
| 168 |
+
def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None:
|
| 169 |
+
"""Enables automatic chunked transfer encoding."""
|
| 170 |
+
self._chunked = True
|
| 171 |
+
|
| 172 |
+
if hdrs.CONTENT_LENGTH in self._headers:
|
| 173 |
+
raise RuntimeError(
|
| 174 |
+
"You can't enable chunked encoding when " "a content length is set"
|
| 175 |
+
)
|
| 176 |
+
if chunk_size is not None:
|
| 177 |
+
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
|
| 178 |
+
|
| 179 |
+
    def enable_compression(
        self, force: Optional[Union[bool, ContentCoding]] = None
    ) -> None:
        """Enables response compression encoding.

        :param force: a ContentCoding to force, None to negotiate from the
            request's Accept-Encoding, or (deprecated) a bool.
        """
        # Backwards compatibility for when force was a bool <0.17.
        if type(force) == bool:
            force = ContentCoding.deflate if force else ContentCoding.identity
            warnings.warn(
                "Using boolean for force is deprecated #3318", DeprecationWarning
            )
        elif force is not None:
            assert isinstance(force, ContentCoding), (
                "force should one of " "None, bool or " "ContentEncoding"
            )

        # Actual coding selection happens later in _start_compression().
        self._compression = True
        self._compression_force = force
|
| 196 |
+
|
| 197 |
+
@property
|
| 198 |
+
def headers(self) -> "CIMultiDict[str]":
|
| 199 |
+
return self._headers
|
| 200 |
+
|
| 201 |
+
@property
|
| 202 |
+
def cookies(self) -> "SimpleCookie[str]":
|
| 203 |
+
return self._cookies
|
| 204 |
+
|
| 205 |
+
    def set_cookie(
        self,
        name: str,
        value: str,
        *,
        expires: Optional[str] = None,
        domain: Optional[str] = None,
        max_age: Optional[Union[int, str]] = None,
        path: str = "/",
        secure: Optional[bool] = None,
        httponly: Optional[bool] = None,
        version: Optional[str] = None,
        samesite: Optional[str] = None,
    ) -> None:
        """Set or update response cookie.

        Sets new cookie or updates existent with new value.
        Also updates only those params which are not None.
        """
        old = self._cookies.get(name)
        if old is not None and old.coded_value == "":
            # deleted cookie: drop the expired placeholder set by
            # del_cookie() so the new value starts clean
            self._cookies.pop(name, None)

        self._cookies[name] = value
        c = self._cookies[name]

        if expires is not None:
            c["expires"] = expires
        elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
            # stale deletion timestamp left over from del_cookie()
            del c["expires"]

        if domain is not None:
            c["domain"] = domain

        if max_age is not None:
            c["max-age"] = str(max_age)
        elif "max-age" in c:
            del c["max-age"]

        c["path"] = path

        if secure is not None:
            c["secure"] = secure
        if httponly is not None:
            c["httponly"] = httponly
        if version is not None:
            c["version"] = version
        if samesite is not None:
            c["samesite"] = samesite
|
| 255 |
+
|
| 256 |
+
def del_cookie(
|
| 257 |
+
self, name: str, *, domain: Optional[str] = None, path: str = "/"
|
| 258 |
+
) -> None:
|
| 259 |
+
"""Delete cookie.
|
| 260 |
+
|
| 261 |
+
Creates new empty expired cookie.
|
| 262 |
+
"""
|
| 263 |
+
# TODO: do we need domain/path here?
|
| 264 |
+
self._cookies.pop(name, None)
|
| 265 |
+
self.set_cookie(
|
| 266 |
+
name,
|
| 267 |
+
"",
|
| 268 |
+
max_age=0,
|
| 269 |
+
expires="Thu, 01 Jan 1970 00:00:00 GMT",
|
| 270 |
+
domain=domain,
|
| 271 |
+
path=path,
|
| 272 |
+
)
|
| 273 |
+
|
| 274 |
+
@property
|
| 275 |
+
def content_length(self) -> Optional[int]:
|
| 276 |
+
# Just a placeholder for adding setter
|
| 277 |
+
return super().content_length
|
| 278 |
+
|
| 279 |
+
@content_length.setter
|
| 280 |
+
def content_length(self, value: Optional[int]) -> None:
|
| 281 |
+
if value is not None:
|
| 282 |
+
value = int(value)
|
| 283 |
+
if self._chunked:
|
| 284 |
+
raise RuntimeError(
|
| 285 |
+
"You can't set content length when " "chunked encoding is enable"
|
| 286 |
+
)
|
| 287 |
+
self._headers[hdrs.CONTENT_LENGTH] = str(value)
|
| 288 |
+
else:
|
| 289 |
+
self._headers.pop(hdrs.CONTENT_LENGTH, None)
|
| 290 |
+
|
| 291 |
+
@property
|
| 292 |
+
def content_type(self) -> str:
|
| 293 |
+
# Just a placeholder for adding setter
|
| 294 |
+
return super().content_type
|
| 295 |
+
|
| 296 |
+
    @content_type.setter
    def content_type(self, value: str) -> None:
        # Accessing the property first ensures the current header has been
        # parsed into self._content_type / self._content_dict.
        self.content_type  # read header values if needed
        self._content_type = str(value)
        self._generate_content_type_header()
|
| 301 |
+
|
| 302 |
+
@property
|
| 303 |
+
def charset(self) -> Optional[str]:
|
| 304 |
+
# Just a placeholder for adding setter
|
| 305 |
+
return super().charset
|
| 306 |
+
|
| 307 |
+
    @charset.setter
    def charset(self, value: Optional[str]) -> None:
        # Reading content_type first parses the header into
        # self._content_type / self._content_dict.
        ctype = self.content_type  # read header values if needed
        if ctype == "application/octet-stream":
            raise RuntimeError(
                "Setting charset for application/octet-stream "
                "doesn't make sense, setup content_type first"
            )
        assert self._content_dict is not None
        if value is None:
            # Remove the charset parameter entirely.
            self._content_dict.pop("charset", None)
        else:
            # Charset names are case-insensitive; normalize to lowercase.
            self._content_dict["charset"] = str(value).lower()
        self._generate_content_type_header()
|
| 321 |
+
|
| 322 |
+
@property
|
| 323 |
+
def last_modified(self) -> Optional[datetime.datetime]:
|
| 324 |
+
"""The value of Last-Modified HTTP header, or None.
|
| 325 |
+
|
| 326 |
+
This header is represented as a `datetime` object.
|
| 327 |
+
"""
|
| 328 |
+
return parse_http_date(self._headers.get(hdrs.LAST_MODIFIED))
|
| 329 |
+
|
| 330 |
+
@last_modified.setter
|
| 331 |
+
def last_modified(
|
| 332 |
+
self, value: Optional[Union[int, float, datetime.datetime, str]]
|
| 333 |
+
) -> None:
|
| 334 |
+
if value is None:
|
| 335 |
+
self._headers.pop(hdrs.LAST_MODIFIED, None)
|
| 336 |
+
elif isinstance(value, (int, float)):
|
| 337 |
+
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
|
| 338 |
+
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(math.ceil(value))
|
| 339 |
+
)
|
| 340 |
+
elif isinstance(value, datetime.datetime):
|
| 341 |
+
self._headers[hdrs.LAST_MODIFIED] = time.strftime(
|
| 342 |
+
"%a, %d %b %Y %H:%M:%S GMT", value.utctimetuple()
|
| 343 |
+
)
|
| 344 |
+
elif isinstance(value, str):
|
| 345 |
+
self._headers[hdrs.LAST_MODIFIED] = value
|
| 346 |
+
|
| 347 |
+
    @property
    def etag(self) -> Optional[ETag]:
        """Parsed ETag header as an ETag tuple, or None.

        Returns None for a missing or malformed header; the wildcard '*'
        is represented as ETag(value=ETAG_ANY).
        """
        quoted_value = self._headers.get(hdrs.ETAG)
        if not quoted_value:
            return None
        elif quoted_value == ETAG_ANY:
            return ETag(value=ETAG_ANY)
        match = QUOTED_ETAG_RE.fullmatch(quoted_value)
        if not match:
            return None
        # group 1 is the optional W/ weakness marker, group 2 the value.
        is_weak, value = match.group(1, 2)
        return ETag(
            is_weak=bool(is_weak),
            value=value,
        )
|
| 362 |
+
|
| 363 |
+
    @etag.setter
    def etag(self, value: Optional[Union[ETag, str]]) -> None:
        # None removes the header; the wildcard is emitted unquoted;
        # plain strings and ETag values are validated and quoted
        # (with a W/ prefix for weak tags).
        if value is None:
            self._headers.pop(hdrs.ETAG, None)
        elif (isinstance(value, str) and value == ETAG_ANY) or (
            isinstance(value, ETag) and value.value == ETAG_ANY
        ):
            self._headers[hdrs.ETAG] = ETAG_ANY
        elif isinstance(value, str):
            validate_etag_value(value)
            self._headers[hdrs.ETAG] = f'"{value}"'
        elif isinstance(value, ETag) and isinstance(value.value, str):
            validate_etag_value(value.value)
            hdr_value = f'W/"{value.value}"' if value.is_weak else f'"{value.value}"'
            self._headers[hdrs.ETAG] = hdr_value
        else:
            raise ValueError(
                f"Unsupported etag type: {type(value)}. "
                f"etag must be str, ETag or None"
            )
|
| 383 |
+
|
| 384 |
+
def _generate_content_type_header(
|
| 385 |
+
self, CONTENT_TYPE: istr = hdrs.CONTENT_TYPE
|
| 386 |
+
) -> None:
|
| 387 |
+
assert self._content_dict is not None
|
| 388 |
+
assert self._content_type is not None
|
| 389 |
+
params = "; ".join(f"{k}={v}" for k, v in self._content_dict.items())
|
| 390 |
+
if params:
|
| 391 |
+
ctype = self._content_type + "; " + params
|
| 392 |
+
else:
|
| 393 |
+
ctype = self._content_type
|
| 394 |
+
self._headers[CONTENT_TYPE] = ctype
|
| 395 |
+
|
| 396 |
+
    async def _do_start_compression(self, coding: ContentCoding) -> None:
        # Apply the chosen coding; identity means "no compression", so
        # nothing needs to happen in that case.
        if coding != ContentCoding.identity:
            assert self._payload_writer is not None
            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._payload_writer.enable_compression(coding.value)
            # Compressed payload may have different content length,
            # remove the header
            self._headers.popall(hdrs.CONTENT_LENGTH, None)
|
| 404 |
+
|
| 405 |
+
async def _start_compression(self, request: "BaseRequest") -> None:
|
| 406 |
+
if self._compression_force:
|
| 407 |
+
await self._do_start_compression(self._compression_force)
|
| 408 |
+
else:
|
| 409 |
+
accept_encoding = request.headers.get(hdrs.ACCEPT_ENCODING, "").lower()
|
| 410 |
+
for coding in ContentCoding:
|
| 411 |
+
if coding.value in accept_encoding:
|
| 412 |
+
await self._do_start_compression(coding)
|
| 413 |
+
return
|
| 414 |
+
|
| 415 |
+
async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
|
| 416 |
+
if self._eof_sent:
|
| 417 |
+
return None
|
| 418 |
+
if self._payload_writer is not None:
|
| 419 |
+
return self._payload_writer
|
| 420 |
+
|
| 421 |
+
return await self._start(request)
|
| 422 |
+
|
| 423 |
+
    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        # Bind the request and share its payload writer, then emit headers.
        self._req = request
        writer = self._payload_writer = request._payload_writer

        await self._prepare_headers()
        # Let the request fire on_response_prepare signals before
        # anything hits the wire.
        await request._prepare_hook(self)
        await self._write_headers()

        return writer
|
| 432 |
+
|
| 433 |
+
    async def _prepare_headers(self) -> None:
        """Finalize headers: cookies, compression, framing, keep-alive."""
        request = self._req
        assert request is not None
        writer = self._payload_writer
        assert writer is not None
        # Keep-alive defaults to whatever the request negotiated.
        keep_alive = self._keep_alive
        if keep_alive is None:
            keep_alive = request.keep_alive
        self._keep_alive = keep_alive

        version = request.version

        headers = self._headers
        for cookie in self._cookies.values():
            # output(header="") yields "  name=value; ..."; strip the
            # leading space before adding as a Set-Cookie header.
            value = cookie.output(header="")[1:]
            headers.add(hdrs.SET_COOKIE, value)

        if self._compression:
            await self._start_compression(request)

        if self._chunked:
            if version != HttpVersion11:
                raise RuntimeError(
                    "Using chunked encoding is forbidden "
                    "for HTTP/{0.major}.{0.minor}".format(request.version)
                )
            writer.enable_chunking()
            headers[hdrs.TRANSFER_ENCODING] = "chunked"
            # Chunked framing and Content-Length are mutually exclusive.
            if hdrs.CONTENT_LENGTH in headers:
                del headers[hdrs.CONTENT_LENGTH]
        elif self._length_check:
            writer.length = self.content_length
            if writer.length is None:
                # Unknown length: chunk on HTTP/1.1 (except 204), or fall
                # back to close-delimited framing on HTTP/1.0.
                if version >= HttpVersion11 and self.status != 204:
                    writer.enable_chunking()
                    headers[hdrs.TRANSFER_ENCODING] = "chunked"
                    if hdrs.CONTENT_LENGTH in headers:
                        del headers[hdrs.CONTENT_LENGTH]
                else:
                    keep_alive = False
                # HTTP 1.1: https://tools.ietf.org/html/rfc7230#section-3.3.2
                # HTTP 1.0: https://tools.ietf.org/html/rfc1945#section-10.4
            elif version >= HttpVersion11 and self.status in (100, 101, 102, 103, 204):
                # Bodyless statuses must not advertise a Content-Length.
                del headers[hdrs.CONTENT_LENGTH]

        if self.status not in (204, 304):
            headers.setdefault(hdrs.CONTENT_TYPE, "application/octet-stream")
        headers.setdefault(hdrs.DATE, rfc822_formatted_time())
        headers.setdefault(hdrs.SERVER, SERVER_SOFTWARE)

        # connection header
        if hdrs.CONNECTION not in headers:
            if keep_alive:
                # HTTP/1.0 needs an explicit keep-alive opt-in.
                if version == HttpVersion10:
                    headers[hdrs.CONNECTION] = "keep-alive"
            else:
                # HTTP/1.1 defaults to keep-alive, so closing needs opt-out.
                if version == HttpVersion11:
                    headers[hdrs.CONNECTION] = "close"
|
| 491 |
+
|
| 492 |
+
async def _write_headers(self) -> None:
|
| 493 |
+
request = self._req
|
| 494 |
+
assert request is not None
|
| 495 |
+
writer = self._payload_writer
|
| 496 |
+
assert writer is not None
|
| 497 |
+
# status line
|
| 498 |
+
version = request.version
|
| 499 |
+
status_line = "HTTP/{}.{} {} {}".format(
|
| 500 |
+
version[0], version[1], self._status, self._reason
|
| 501 |
+
)
|
| 502 |
+
await writer.write_headers(status_line, self._headers)
|
| 503 |
+
|
| 504 |
+
async def write(self, data: bytes) -> None:
|
| 505 |
+
assert isinstance(
|
| 506 |
+
data, (bytes, bytearray, memoryview)
|
| 507 |
+
), "data argument must be byte-ish (%r)" % type(data)
|
| 508 |
+
|
| 509 |
+
if self._eof_sent:
|
| 510 |
+
raise RuntimeError("Cannot call write() after write_eof()")
|
| 511 |
+
if self._payload_writer is None:
|
| 512 |
+
raise RuntimeError("Cannot call write() before prepare()")
|
| 513 |
+
|
| 514 |
+
await self._payload_writer.write(data)
|
| 515 |
+
|
| 516 |
+
async def drain(self) -> None:
|
| 517 |
+
assert not self._eof_sent, "EOF has already been sent"
|
| 518 |
+
assert self._payload_writer is not None, "Response has not been started"
|
| 519 |
+
warnings.warn(
|
| 520 |
+
"drain method is deprecated, use await resp.write()",
|
| 521 |
+
DeprecationWarning,
|
| 522 |
+
stacklevel=2,
|
| 523 |
+
)
|
| 524 |
+
await self._payload_writer.drain()
|
| 525 |
+
|
| 526 |
+
    async def write_eof(self, data: bytes = b"") -> None:
        """Finish the response payload and release per-request resources.

        Idempotent: a second call after EOF is a silent no-op.
        """
        assert isinstance(
            data, (bytes, bytearray, memoryview)
        ), "data argument must be byte-ish (%r)" % type(data)

        if self._eof_sent:
            return

        assert self._payload_writer is not None, "Response has not been started"

        await self._payload_writer.write_eof(data)
        self._eof_sent = True
        # Drop the request reference to break the request<->response cycle.
        self._req = None
        # Capture the final byte count before the writer is discarded below.
        self._body_length = self._payload_writer.output_size
        self._payload_writer = None
|
| 541 |
+
|
| 542 |
+
def __repr__(self) -> str:
|
| 543 |
+
if self._eof_sent:
|
| 544 |
+
info = "eof"
|
| 545 |
+
elif self.prepared:
|
| 546 |
+
assert self._req is not None
|
| 547 |
+
info = f"{self._req.method} {self._req.path} "
|
| 548 |
+
else:
|
| 549 |
+
info = "not prepared"
|
| 550 |
+
return f"<{self.__class__.__name__} {self.reason} {info}>"
|
| 551 |
+
|
| 552 |
+
    # --- Mapping interface backed by the per-response ``_state`` dict ---

    def __getitem__(self, key: str) -> Any:
        return self._state[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self._state[key] = value

    def __delitem__(self, key: str) -> None:
        del self._state[key]

    def __len__(self) -> int:
        return len(self._state)

    def __iter__(self) -> Iterator[str]:
        return iter(self._state)

    def __hash__(self) -> int:
        # Identity hash: responses hash/compare by object identity,
        # never by their mapping contents.
        return hash(id(self))

    def __eq__(self, other: object) -> bool:
        return self is other
|
| 572 |
+
|
| 573 |
+
|
| 574 |
+
class Response(StreamResponse):
    """A response with a fully buffered body (``bytes`` or a Payload).

    Unlike the streaming base class, the whole body is known up front, so
    Content-Length can be computed and the body can be compressed in one
    pass (optionally off-loop via *zlib_executor*).
    """

    def __init__(
        self,
        *,
        body: Any = None,
        status: int = 200,
        reason: Optional[str] = None,
        text: Optional[str] = None,
        headers: Optional[LooseHeaders] = None,
        content_type: Optional[str] = None,
        charset: Optional[str] = None,
        zlib_executor_size: Optional[int] = None,
        zlib_executor: Optional[Executor] = None,
    ) -> None:
        # *body* and *text* are mutually exclusive ways to supply a payload.
        if body is not None and text is not None:
            raise ValueError("body and text are not allowed together")

        # Normalize *headers* into a case-insensitive multidict we own.
        if headers is None:
            real_headers = CIMultiDict()  # type: CIMultiDict[str]
        elif not isinstance(headers, CIMultiDict):
            real_headers = CIMultiDict(headers)
        else:
            real_headers = headers  # = cast('CIMultiDict[str]', headers)

        # The charset must come via the *charset* parameter, never embedded
        # in *content_type* (it would be duplicated below).
        if content_type is not None and "charset" in content_type:
            raise ValueError("charset must not be in content_type " "argument")

        if text is not None:
            if hdrs.CONTENT_TYPE in real_headers:
                # An explicit Content-Type header wins; forbid conflicting params.
                if content_type or charset:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                # fast path for filling headers
                if not isinstance(text, str):
                    raise TypeError("text argument must be str (%r)" % type(text))
                if content_type is None:
                    content_type = "text/plain"
                if charset is None:
                    charset = "utf-8"
                real_headers[hdrs.CONTENT_TYPE] = content_type + "; charset=" + charset
                # Encode here and fall through to the body path below.
                body = text.encode(charset)
                text = None
        else:
            if hdrs.CONTENT_TYPE in real_headers:
                if content_type is not None or charset is not None:
                    raise ValueError(
                        "passing both Content-Type header and "
                        "content_type or charset params "
                        "is forbidden"
                    )
            else:
                if content_type is not None:
                    if charset is not None:
                        content_type += "; charset=" + charset
                    real_headers[hdrs.CONTENT_TYPE] = content_type

        super().__init__(status=status, reason=reason, headers=real_headers)

        # Route through the property setters so headers stay consistent.
        if text is not None:
            self.text = text
        else:
            self.body = body

        # Cache for the zlib-compressed body (filled by _compress_body).
        self._compressed_body = None  # type: Optional[bytes]
        # Bodies larger than this are compressed in *zlib_executor* to keep
        # the event loop responsive; smaller ones are compressed inline.
        self._zlib_executor_size = zlib_executor_size
        self._zlib_executor = zlib_executor

    @property
    def body(self) -> Optional[Union[bytes, Payload]]:
        """The buffered body: bytes, a registered Payload, or None."""
        return self._body

    @body.setter
    def body(
        self,
        body: bytes,
        CONTENT_TYPE: istr = hdrs.CONTENT_TYPE,
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
    ) -> None:
        # NOTE: CONTENT_TYPE/CONTENT_LENGTH are default-bound constants,
        # a micro-optimization — callers never pass them.
        if body is None:
            self._body = None  # type: Optional[bytes]
            self._body_payload = False  # type: bool
        elif isinstance(body, (bytes, bytearray)):
            self._body = body
            self._body_payload = False
        else:
            # Anything else must be convertible through the payload registry.
            try:
                self._body = body = payload.PAYLOAD_REGISTRY.get(body)
            except payload.LookupError:
                raise ValueError("Unsupported body type %r" % type(body))

            self._body_payload = True

            headers = self._headers

            # set content-length header if needed
            if not self._chunked and CONTENT_LENGTH not in headers:
                size = body.size
                if size is not None:
                    headers[CONTENT_LENGTH] = str(size)

            # set content-type
            if CONTENT_TYPE not in headers:
                headers[CONTENT_TYPE] = body.content_type

            # copy payload headers
            if body.headers:
                for (key, value) in body.headers.items():
                    if key not in headers:
                        headers[key] = value

        # Any body change invalidates the compressed cache.
        self._compressed_body = None

    @property
    def text(self) -> Optional[str]:
        """The body decoded with the response charset (utf-8 fallback)."""
        if self._body is None:
            return None
        return self._body.decode(self.charset or "utf-8")

    @text.setter
    def text(self, text: str) -> None:
        assert text is None or isinstance(
            text, str
        ), "text argument must be str (%r)" % type(text)

        # Upgrade the default octet-stream type to text/plain for text bodies.
        if self.content_type == "application/octet-stream":
            self.content_type = "text/plain"
        if self.charset is None:
            self.charset = "utf-8"

        # NOTE(review): despite the assert above allowing None, encoding
        # None here would raise AttributeError — callers pass str only.
        self._body = text.encode(self.charset)
        self._body_payload = False
        self._compressed_body = None

    @property
    def content_length(self) -> Optional[int]:
        """Effective Content-Length, or None when it cannot be known."""
        if self._chunked:
            return None

        if hdrs.CONTENT_LENGTH in self._headers:
            return super().content_length

        if self._compressed_body is not None:
            # Return length of the compressed body
            return len(self._compressed_body)
        elif self._body_payload:
            # A payload without content length, or a compressed payload
            return None
        elif self._body is not None:
            return len(self._body)
        else:
            return 0

    @content_length.setter
    def content_length(self, value: Optional[int]) -> None:
        # Derived from the body; never settable on buffered responses.
        raise RuntimeError("Content length is set automatically")

    async def write_eof(self, data: bytes = b"") -> None:
        """Send the buffered (possibly compressed) body and finish."""
        if self._eof_sent:
            return
        # Prefer the compressed cache when _do_start_compression filled it.
        if self._compressed_body is None:
            body = self._body  # type: Optional[Union[bytes, Payload]]
        else:
            body = self._compressed_body
        assert not data, f"data arg is not supported, got {data!r}"
        assert self._req is not None
        assert self._payload_writer is not None
        if body is not None:
            # HEAD requests and bodiless statuses send headers only.
            if self._req._method == hdrs.METH_HEAD or self._status in [204, 304]:
                await super().write_eof()
            elif self._body_payload:
                # Local name shadows the module-level ``payload`` import here.
                payload = cast(Payload, body)
                await payload.write(self._payload_writer)
                await super().write_eof()
            else:
                await super().write_eof(cast(bytes, body))
        else:
            await super().write_eof()

    async def _start(self, request: "BaseRequest") -> AbstractStreamWriter:
        """Fill in Content-Length for plain bytes bodies, then start."""
        if not self._chunked and hdrs.CONTENT_LENGTH not in self._headers:
            if not self._body_payload:
                if self._body is not None:
                    self._headers[hdrs.CONTENT_LENGTH] = str(len(self._body))
                else:
                    self._headers[hdrs.CONTENT_LENGTH] = "0"

        return await super()._start(request)

    def _compress_body(self, zlib_mode: int) -> None:
        """Compress the whole body in one shot into _compressed_body.

        May run in an executor thread; touches only local state.
        """
        assert zlib_mode > 0
        compressobj = zlib.compressobj(wbits=zlib_mode)
        body_in = self._body
        assert body_in is not None
        self._compressed_body = compressobj.compress(body_in) + compressobj.flush()

    async def _do_start_compression(self, coding: ContentCoding) -> None:
        """Compress the buffered body eagerly instead of streaming.

        Falls back to the streaming base implementation for payload
        bodies and chunked responses.
        """
        if self._body_payload or self._chunked:
            return await super()._do_start_compression(coding)

        if coding != ContentCoding.identity:
            # Instead of using _payload_writer.enable_compression,
            # compress the whole body
            # wbits 16+MAX_WBITS selects a gzip container, MAX_WBITS zlib/deflate.
            zlib_mode = (
                16 + zlib.MAX_WBITS if coding == ContentCoding.gzip else zlib.MAX_WBITS
            )
            body_in = self._body
            assert body_in is not None
            # Off-load large bodies so compression doesn't block the loop.
            if (
                self._zlib_executor_size is not None
                and len(body_in) > self._zlib_executor_size
            ):
                await asyncio.get_event_loop().run_in_executor(
                    self._zlib_executor, self._compress_body, zlib_mode
                )
            else:
                self._compress_body(zlib_mode)

            body_out = self._compressed_body
            assert body_out is not None

            # Advertise the encoding and the new (compressed) length.
            self._headers[hdrs.CONTENT_ENCODING] = coding.value
            self._headers[hdrs.CONTENT_LENGTH] = str(len(body_out))
|
| 800 |
+
|
| 801 |
+
|
| 802 |
+
def json_response(
    data: Any = sentinel,
    *,
    text: Optional[str] = None,
    body: Optional[bytes] = None,
    status: int = 200,
    reason: Optional[str] = None,
    headers: Optional[LooseHeaders] = None,
    content_type: str = "application/json",
    dumps: JSONEncoder = json.dumps,
) -> Response:
    """Build a :class:`Response` whose payload is *data* serialized via *dumps*."""
    if data is not sentinel:
        # *data* is mutually exclusive with an explicit text/body payload.
        if text or body:
            raise ValueError("only one of data, text, or body should be specified")
        text = dumps(data)
    return Response(
        text=text,
        body=body,
        status=status,
        reason=reason,
        headers=headers,
        content_type=content_type,
    )
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_urldispatcher.py
ADDED
|
@@ -0,0 +1,1220 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import base64
|
| 4 |
+
import hashlib
|
| 5 |
+
import inspect
|
| 6 |
+
import keyword
|
| 7 |
+
import os
|
| 8 |
+
import re
|
| 9 |
+
import warnings
|
| 10 |
+
from contextlib import contextmanager
|
| 11 |
+
from functools import wraps
|
| 12 |
+
from pathlib import Path
|
| 13 |
+
from types import MappingProxyType
|
| 14 |
+
from typing import (
|
| 15 |
+
TYPE_CHECKING,
|
| 16 |
+
Any,
|
| 17 |
+
Awaitable,
|
| 18 |
+
Callable,
|
| 19 |
+
Container,
|
| 20 |
+
Dict,
|
| 21 |
+
Generator,
|
| 22 |
+
Iterable,
|
| 23 |
+
Iterator,
|
| 24 |
+
List,
|
| 25 |
+
Mapping,
|
| 26 |
+
Optional,
|
| 27 |
+
Pattern,
|
| 28 |
+
Set,
|
| 29 |
+
Sized,
|
| 30 |
+
Tuple,
|
| 31 |
+
Type,
|
| 32 |
+
Union,
|
| 33 |
+
cast,
|
| 34 |
+
)
|
| 35 |
+
|
| 36 |
+
from yarl import URL, __version__ as yarl_version # type: ignore[attr-defined]
|
| 37 |
+
|
| 38 |
+
from . import hdrs
|
| 39 |
+
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
|
| 40 |
+
from .helpers import DEBUG
|
| 41 |
+
from .http import HttpVersion11
|
| 42 |
+
from .typedefs import Final, Handler, PathLike, TypedDict
|
| 43 |
+
from .web_exceptions import (
|
| 44 |
+
HTTPException,
|
| 45 |
+
HTTPExpectationFailed,
|
| 46 |
+
HTTPForbidden,
|
| 47 |
+
HTTPMethodNotAllowed,
|
| 48 |
+
HTTPNotFound,
|
| 49 |
+
)
|
| 50 |
+
from .web_fileresponse import FileResponse
|
| 51 |
+
from .web_request import Request
|
| 52 |
+
from .web_response import Response, StreamResponse
|
| 53 |
+
from .web_routedef import AbstractRouteDef
|
| 54 |
+
|
| 55 |
+
__all__ = (
|
| 56 |
+
"UrlDispatcher",
|
| 57 |
+
"UrlMappingMatchInfo",
|
| 58 |
+
"AbstractResource",
|
| 59 |
+
"Resource",
|
| 60 |
+
"PlainResource",
|
| 61 |
+
"DynamicResource",
|
| 62 |
+
"AbstractRoute",
|
| 63 |
+
"ResourceRoute",
|
| 64 |
+
"StaticResource",
|
| 65 |
+
"View",
|
| 66 |
+
)
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 70 |
+
from .web_app import Application
|
| 71 |
+
|
| 72 |
+
BaseDict = Dict[str, str]
|
| 73 |
+
else:
|
| 74 |
+
BaseDict = dict
|
| 75 |
+
|
| 76 |
+
YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2]))
|
| 77 |
+
|
| 78 |
+
HTTP_METHOD_RE: Final[Pattern[str]] = re.compile(
|
| 79 |
+
r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$"
|
| 80 |
+
)
|
| 81 |
+
ROUTE_RE: Final[Pattern[str]] = re.compile(
|
| 82 |
+
r"(\{[_a-zA-Z][^{}]*(?:\{[^{}]*\}[^{}]*)*\})"
|
| 83 |
+
)
|
| 84 |
+
PATH_SEP: Final[str] = re.escape("/")
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
_ExpectHandler = Callable[[Request], Awaitable[None]]
|
| 88 |
+
_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]]
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
class _InfoDict(TypedDict, total=False):
    """Introspection data returned by ``get_info()``.

    All keys are optional; each resource/route kind fills in its own subset.
    """

    # PlainResource
    path: str

    # DynamicResource
    formatter: str
    pattern: Pattern[str]

    # StaticResource
    directory: Path
    prefix: str
    routes: Mapping[str, "AbstractRoute"]

    # Sub-application resources
    app: "Application"

    # Domain-matched resources
    domain: str

    # Rule-based matching
    rule: "AbstractRuleMatching"

    # SystemRoute
    http_exception: HTTPException
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
class AbstractResource(Sized, Iterable["AbstractRoute"]):
    """Base class for URL resources: a named, sized, iterable set of routes."""

    def __init__(self, *, name: Optional[str] = None) -> None:
        # Optional symbolic name used for reverse URL construction.
        self._name = name

    @property
    def name(self) -> Optional[str]:
        """The resource's symbolic name, or None if unnamed."""
        return self._name

    @property
    @abc.abstractmethod
    def canonical(self) -> str:
        """Exposes the resource's canonical path.

        For example '/foo/bar/{name}'

        """

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, **kwargs: str) -> URL:
        """Construct url for resource with additional params."""

    @abc.abstractmethod  # pragma: no branch
    async def resolve(self, request: Request) -> _Resolve:
        """Resolve resource.

        Return (UrlMappingMatchInfo, allowed_methods) pair.
        """

    @abc.abstractmethod
    def add_prefix(self, prefix: str) -> None:
        """Add a prefix to processed URLs.

        Required for subapplications support.
        """

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    def freeze(self) -> None:
        # Hook called when the application starts; subclasses may finalize
        # mutable state here. Default is a no-op.
        pass

    @abc.abstractmethod
    def raw_match(self, path: str) -> bool:
        """Perform a raw match against path"""
|
| 155 |
+
|
| 156 |
+
|
| 157 |
+
class AbstractRoute(abc.ABC):
    """A single (HTTP method, handler) pair, optionally bound to a resource."""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
        resource: Optional[AbstractResource] = None,
    ) -> None:

        if expect_handler is None:
            expect_handler = _default_expect_handler

        assert asyncio.iscoroutinefunction(
            expect_handler
        ), f"Coroutine is expected, got {expect_handler!r}"

        # Methods are stored upper-cased and validated against RFC 7230's
        # token grammar.
        method = method.upper()
        if not HTTP_METHOD_RE.match(method):
            raise ValueError(f"{method} is not allowed HTTP method")

        assert callable(handler), handler
        if asyncio.iscoroutinefunction(handler):
            pass
        elif inspect.isgeneratorfunction(handler):
            warnings.warn(
                "Bare generators are deprecated, " "use @coroutine wrapper",
                DeprecationWarning,
            )
        elif isinstance(handler, type) and issubclass(handler, AbstractView):
            pass
        else:
            # Plain (synchronous) callables are deprecated but still
            # supported: wrap them so the dispatcher can always await.
            warnings.warn(
                "Bare functions are deprecated, " "use async ones", DeprecationWarning
            )

            @wraps(handler)
            async def handler_wrapper(request: Request) -> StreamResponse:
                result = old_handler(request)
                if asyncio.iscoroutine(result):
                    return await result
                return result  # type: ignore[return-value]

            # Bind the original before rebinding ``handler`` so the closure
            # above refers to the user's callable, not the wrapper itself.
            old_handler = handler
            handler = handler_wrapper

        self._method = method
        self._handler = handler
        self._expect_handler = expect_handler
        self._resource = resource

    @property
    def method(self) -> str:
        """Upper-cased HTTP method this route answers to."""
        return self._method

    @property
    def handler(self) -> Handler:
        """The (possibly wrapped) request handler coroutine."""
        return self._handler

    @property
    @abc.abstractmethod
    def name(self) -> Optional[str]:
        """Optional route's name, always equals to resource's name."""

    @property
    def resource(self) -> Optional[AbstractResource]:
        """The owning resource, or None for standalone routes."""
        return self._resource

    @abc.abstractmethod
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @abc.abstractmethod  # pragma: no branch
    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""

    async def handle_expect_header(self, request: Request) -> None:
        """Delegate ``Expect:`` header processing to the configured handler."""
        await self._expect_handler(request)
|
| 235 |
+
|
| 236 |
+
|
| 237 |
+
class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
    """Result of a successful URL match.

    Behaves as a dict of extracted path variables and also carries the
    matched route plus the (sub)application stack.
    """

    def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
        super().__init__(match_dict)
        self._route = route
        # Application stack, outermost first (built via add_app during resolve).
        self._apps = []  # type: List[Application]
        self._current_app = None  # type: Optional[Application]
        self._frozen = False

    @property
    def handler(self) -> Handler:
        """Handler of the matched route."""
        return self._route.handler

    @property
    def route(self) -> AbstractRoute:
        """The matched route object."""
        return self._route

    @property
    def expect_handler(self) -> _ExpectHandler:
        """Bound ``Expect:`` header handler of the matched route."""
        return self._route.handle_expect_header

    @property
    def http_exception(self) -> Optional[HTTPException]:
        # A successful match carries no error; MatchInfoError overrides this.
        return None

    def get_info(self) -> _InfoDict:  # type: ignore[override]
        """Introspection info from the underlying route."""
        return self._route.get_info()

    @property
    def apps(self) -> Tuple["Application", ...]:
        """Immutable snapshot of the application stack."""
        return tuple(self._apps)

    def add_app(self, app: "Application") -> None:
        """Prepend *app* to the application stack (outermost app last-added)."""
        if self._frozen:
            raise RuntimeError("Cannot change apps stack after .freeze() call")
        # The first app added becomes the initial "current" app.
        if self._current_app is None:
            self._current_app = app
        self._apps.insert(0, app)

    @property
    def current_app(self) -> "Application":
        """The application currently handling the request."""
        app = self._current_app
        assert app is not None
        return app

    @contextmanager
    def set_current_app(self, app: "Application") -> Generator[None, None, None]:
        """Temporarily switch the current app (used for sub-app middleware)."""
        if DEBUG:  # pragma: no cover
            if app not in self._apps:
                raise RuntimeError(
                    "Expected one of the following apps {!r}, got {!r}".format(
                        self._apps, app
                    )
                )
        prev = self._current_app
        self._current_app = app
        try:
            yield
        finally:
            # Always restore, even if the handler raised.
            self._current_app = prev

    def freeze(self) -> None:
        # After freezing, the app stack becomes immutable (see add_app).
        self._frozen = True

    def __repr__(self) -> str:
        return f"<MatchInfo {super().__repr__()}: {self._route}>"
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
class MatchInfoError(UrlMappingMatchInfo):
    """Match info representing a routing failure (e.g. 404/405)."""

    def __init__(self, http_exception: HTTPException) -> None:
        self._exception = http_exception
        # Dispatch through a SystemRoute that raises the stored HTTP error.
        super().__init__({}, SystemRoute(self._exception))

    @property
    def http_exception(self) -> HTTPException:
        """The HTTP error describing why the match failed."""
        return self._exception

    def __repr__(self) -> str:
        exc = self._exception
        return "<MatchInfoError {}: {}>".format(exc.status, exc.reason)
|
| 317 |
+
|
| 318 |
+
|
| 319 |
+
async def _default_expect_handler(request: Request) -> None:
    """Default handler for Expect header.

    Just send "100 Continue" to client.
    raise HTTPExpectationFailed if value of header is not "100-continue"

    Pre-HTTP/1.1 requests are ignored: Expect is only defined for 1.1+.
    """
    expect = request.headers.get(hdrs.EXPECT, "")
    if request.version == HttpVersion11:
        if expect.lower() == "100-continue":
            # Interim response written raw, before the real status line.
            await request.writer.write(b"HTTP/1.1 100 Continue\r\n\r\n")
        else:
            raise HTTPExpectationFailed(text="Unknown Expect: %s" % expect)
|
| 331 |
+
|
| 332 |
+
|
| 333 |
+
class Resource(AbstractResource):
    """Partially-concrete resource holding an ordered list of routes.

    Subclasses supply ``_match()`` to decide whether a raw path belongs
    to this resource.
    """

    def __init__(self, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        self._routes = []  # type: List[ResourceRoute]

    def add_route(
        self,
        method: str,
        handler: Union[Type[AbstractView], Handler],
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> "ResourceRoute":
        """Create and register a route; rejects duplicate/shadowed methods."""

        # A duplicate method — or an existing catch-all ('*') route —
        # would shadow the new route entirely.
        for route_obj in self._routes:
            if route_obj.method == method or route_obj.method == hdrs.METH_ANY:
                raise RuntimeError(
                    "Added route will never be executed, "
                    "method {route.method} is already "
                    "registered".format(route=route_obj)
                )

        route_obj = ResourceRoute(method, handler, self, expect_handler=expect_handler)
        self.register_route(route_obj)
        return route_obj

    def register_route(self, route: "ResourceRoute") -> None:
        """Append an already-constructed ResourceRoute to this resource."""
        assert isinstance(
            route, ResourceRoute
        ), f"Instance of Route class is required, got {route!r}"
        self._routes.append(route)

    async def resolve(self, request: Request) -> _Resolve:
        """Match the request path, then the method.

        Returns (match_info, allowed_methods); match_info is None when
        the path doesn't match or no route accepts the method.
        """
        allowed_methods = set()  # type: Set[str]

        match_dict = self._match(request.rel_url.raw_path)
        if match_dict is None:
            return None, allowed_methods

        for route_obj in self._routes:
            route_method = route_obj.method
            allowed_methods.add(route_method)

            if route_method == request.method or route_method == hdrs.METH_ANY:
                return (UrlMappingMatchInfo(match_dict, route_obj), allowed_methods)
        else:
            # for/else: no route accepted the method — path matched but
            # method didn't, so the caller can build a 405 from
            # allowed_methods.
            return None, allowed_methods

    @abc.abstractmethod
    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # Return extracted path variables on match, None otherwise.
        pass  # pragma: no cover

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes)

    # TODO: implement all abstract methods
|
| 391 |
+
|
| 392 |
+
|
| 393 |
+
class PlainResource(Resource):
    """Resource matching a single literal path by exact string comparison."""

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        assert not path or path.startswith("/")
        self._path = path

    @property
    def canonical(self) -> str:
        """The literal path itself."""
        return self._path

    def freeze(self) -> None:
        # An empty path becomes the root path when the router is frozen.
        if not self._path:
            self._path = "/"

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._path = prefix + self._path

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        # string comparison is about 10 times faster than regexp matching
        return {} if self._path == path else None

    def raw_match(self, path: str) -> bool:
        return path == self._path

    def get_info(self) -> _InfoDict:
        return {"path": self._path}

    def url_for(self) -> URL:  # type: ignore[override]
        return URL.build(path=self._path, encoded=True)

    def __repr__(self) -> str:
        name = "" if self.name is None else "'" + self.name + "' "
        return f"<PlainResource {name} {self._path}>"
|
| 432 |
+
|
| 433 |
+
|
| 434 |
+
class DynamicResource(Resource):
    """Resource whose path contains ``{name}`` / ``{name:regex}`` variable
    parts, matched against the request path with a compiled regexp."""

    # ``{var}`` placeholder without a custom pattern.
    DYN = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*)\}")
    # ``{var:regex}`` placeholder with an explicit pattern.
    DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
    # Default pattern for a bare ``{var}``: exactly one path segment.
    GOOD = r"[^{}/]+"

    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
        super().__init__(name=name)
        pattern = ""
        formatter = ""
        # Split the path into literal parts and ``{...}`` placeholders and
        # build both a regexp (for matching) and a str.format template
        # (for url_for) in parallel.
        for part in ROUTE_RE.split(path):
            match = self.DYN.fullmatch(part)
            if match:
                pattern += "(?P<{}>{})".format(match.group("var"), self.GOOD)
                formatter += "{" + match.group("var") + "}"
                continue

            match = self.DYN_WITH_RE.fullmatch(part)
            if match:
                pattern += "(?P<{var}>{re})".format(**match.groupdict())
                formatter += "{" + match.group("var") + "}"
                continue

            if "{" in part or "}" in part:
                # Unbalanced/garbled placeholder syntax.
                raise ValueError(f"Invalid path '{path}'['{part}']")

            part = _requote_path(part)
            formatter += part
            pattern += re.escape(part)

        try:
            compiled = re.compile(pattern)
        except re.error as exc:
            raise ValueError(f"Bad pattern '{pattern}': {exc}") from None
        assert compiled.pattern.startswith(PATH_SEP)
        assert formatter.startswith("/")
        self._pattern = compiled
        self._formatter = formatter

    @property
    def canonical(self) -> str:
        """The path with placeholders, e.g. ``/users/{name}``."""
        return self._formatter

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        # Keep the compiled pattern and the formatter template in sync.
        self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
        self._formatter = prefix + self._formatter

    def _match(self, path: str) -> Optional[Dict[str, str]]:
        match = self._pattern.fullmatch(path)
        if match is None:
            return None
        else:
            # Unquote each captured variable before handing it to handlers.
            return {
                key: _unquote_path(value) for key, value in match.groupdict().items()
            }

    def raw_match(self, path: str) -> bool:
        return self._formatter == path

    def get_info(self) -> _InfoDict:
        return {"formatter": self._formatter, "pattern": self._pattern}

    def url_for(self, **parts: str) -> URL:
        """Fill the placeholders with quoted *parts* and build the URL."""
        url = self._formatter.format_map({k: _quote_path(v) for k, v in parts.items()})
        return URL.build(path=url, encoded=True)

    def __repr__(self) -> str:
        name = "'" + self.name + "' " if self.name is not None else ""
        return "<DynamicResource {name} {formatter}>".format(
            name=name, formatter=self._formatter
        )
|
| 508 |
+
|
| 509 |
+
|
| 510 |
+
class PrefixResource(AbstractResource):
    """Common base for resources that own an entire URL prefix
    (static files, mounted sub-applications)."""

    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
        assert not prefix or prefix.startswith("/"), prefix
        assert prefix in ("", "/") or not prefix.endswith("/"), prefix
        super().__init__(name=name)
        self._prefix = _requote_path(prefix)
        # Cached "prefix + '/'" used for fast startswith() checks in resolve().
        self._prefix2 = self._prefix + "/"

    @property
    def canonical(self) -> str:
        return self._prefix

    def add_prefix(self, prefix: str) -> None:
        assert prefix.startswith("/")
        assert not prefix.endswith("/")
        assert len(prefix) > 1
        self._prefix = prefix + self._prefix
        self._prefix2 = self._prefix + "/"

    def raw_match(self, prefix: str) -> bool:
        # Prefix resources never participate in add_resource() reuse.
        return False

    # TODO: impl missing abstract methods
|
| 533 |
+
|
| 534 |
+
|
| 535 |
+
class StaticResource(PrefixResource):
    """Serves files from a local directory under a URL prefix."""

    # Query-string key carrying the cache-busting content hash.
    VERSION_KEY = "v"

    def __init__(
        self,
        prefix: str,
        directory: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> None:
        super().__init__(prefix, name=name)
        try:
            directory = Path(directory)
            if str(directory).startswith("~"):
                # Expand "~user" style paths before resolving.
                directory = Path(os.path.expanduser(str(directory)))
            directory = directory.resolve()
            if not directory.is_dir():
                raise ValueError("Not a directory")
        except (FileNotFoundError, ValueError) as error:
            raise ValueError(f"No directory exists at '{directory}'") from error
        self._directory = directory
        self._show_index = show_index
        self._chunk_size = chunk_size
        self._follow_symlinks = follow_symlinks
        self._expect_handler = expect_handler
        self._append_version = append_version

        # Static files answer GET and HEAD only; an OPTIONS route can be
        # added later via set_options_route().
        self._routes = {
            "GET": ResourceRoute(
                "GET", self._handle, self, expect_handler=expect_handler
            ),
            "HEAD": ResourceRoute(
                "HEAD", self._handle, self, expect_handler=expect_handler
            ),
        }

    def url_for(  # type: ignore[override]
        self,
        *,
        filename: Union[str, Path],
        append_version: Optional[bool] = None,
    ) -> URL:
        """Build the URL for *filename*; optionally append a ``?v=`` hash
        of the file contents for cache busting."""
        if append_version is None:
            append_version = self._append_version
        if isinstance(filename, Path):
            filename = str(filename)
        filename = filename.lstrip("/")

        url = URL.build(path=self._prefix, encoded=True)
        # filename is not encoded
        if YARL_VERSION < (1, 6):
            # Older yarl mangled literal '%'; protect it manually.
            url = url / filename.replace("%", "%25")
        else:
            url = url / filename

        if append_version:
            try:
                filepath = self._directory.joinpath(filename).resolve()
                if not self._follow_symlinks:
                    # Raises ValueError if the target escapes the directory.
                    filepath.relative_to(self._directory)
            except (ValueError, FileNotFoundError):
                # ValueError for case when path point to symlink
                # with follow_symlinks is False
                return url  # relatively safe
            if filepath.is_file():
                # TODO cache file content
                # with file watcher for cache invalidation
                with filepath.open("rb") as f:
                    file_bytes = f.read()
                h = self._get_file_hash(file_bytes)
                url = url.with_query({self.VERSION_KEY: h})
                return url
        return url

    @staticmethod
    def _get_file_hash(byte_array: bytes) -> str:
        """Return a URL-safe base64 SHA-256 digest of *byte_array*."""
        m = hashlib.sha256()  # todo sha256 can be configurable param
        m.update(byte_array)
        b64 = base64.urlsafe_b64encode(m.digest())
        return b64.decode("ascii")

    def get_info(self) -> _InfoDict:
        return {
            "directory": self._directory,
            "prefix": self._prefix,
            "routes": self._routes,
        }

    def set_options_route(self, handler: Handler) -> None:
        """Install an OPTIONS route (e.g. for CORS preflight)."""
        if "OPTIONS" in self._routes:
            raise RuntimeError("OPTIONS route was set already")
        self._routes["OPTIONS"] = ResourceRoute(
            "OPTIONS", handler, self, expect_handler=self._expect_handler
        )

    async def resolve(self, request: Request) -> _Resolve:
        path = request.rel_url.raw_path
        method = request.method
        allowed_methods = set(self._routes)
        if not path.startswith(self._prefix2) and path != self._prefix:
            # Not under our prefix at all.
            return None, set()

        if method not in allowed_methods:
            return None, allowed_methods

        # Everything after the prefix (and its slash) is the file name.
        match_dict = {"filename": _unquote_path(path[len(self._prefix) + 1 :])}
        return (UrlMappingMatchInfo(match_dict, self._routes[method]), allowed_methods)

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes.values())

    async def _handle(self, request: Request) -> StreamResponse:
        """Serve the matched file, a directory index, or raise 403/404."""
        rel_url = request.match_info["filename"]
        try:
            filename = Path(rel_url)
            if filename.anchor:
                # rel_url is an absolute name like
                # /static/\\machine_name\c$ or /static/D:\path
                # where the static dir is totally different
                raise HTTPForbidden()
            filepath = self._directory.joinpath(filename).resolve()
            if not self._follow_symlinks:
                # Path-traversal guard: must stay inside the directory.
                filepath.relative_to(self._directory)
        except (ValueError, FileNotFoundError) as error:
            # relatively safe
            raise HTTPNotFound() from error
        except HTTPForbidden:
            raise
        except Exception as error:
            # perm error or other kind!
            request.app.logger.exception(error)
            raise HTTPNotFound() from error

        # on opening a dir, load its contents if allowed
        if filepath.is_dir():
            if self._show_index:
                try:
                    return Response(
                        text=self._directory_as_html(filepath), content_type="text/html"
                    )
                except PermissionError:
                    raise HTTPForbidden()
            else:
                raise HTTPForbidden()
        elif filepath.is_file():
            return FileResponse(filepath, chunk_size=self._chunk_size)
        else:
            raise HTTPNotFound

    def _directory_as_html(self, filepath: Path) -> str:
        # returns directory's index as html

        # sanity check
        assert filepath.is_dir()

        relative_path_to_dir = filepath.relative_to(self._directory).as_posix()
        index_of = f"Index of /{relative_path_to_dir}"
        h1 = f"<h1>{index_of}</h1>"

        index_list = []
        dir_index = filepath.iterdir()
        for _file in sorted(dir_index):
            # show file url as relative to static path
            rel_path = _file.relative_to(self._directory).as_posix()
            file_url = self._prefix + "/" + rel_path

            # if file is a directory, add '/' to the end of the name
            if _file.is_dir():
                file_name = f"{_file.name}/"
            else:
                file_name = _file.name

            index_list.append(
                '<li><a href="{url}">{name}</a></li>'.format(
                    url=file_url, name=file_name
                )
            )
        ul = "<ul>\n{}\n</ul>".format("\n".join(index_list))
        body = f"<body>\n{h1}\n{ul}\n</body>"

        head_str = f"<head>\n<title>{index_of}</title>\n</head>"
        html = f"<html>\n{head_str}\n{body}\n</html>"

        return html

    def __repr__(self) -> str:
        name = "'" + self.name + "'" if self.name is not None else ""
        return "<StaticResource {name} {path} -> {directory!r}>".format(
            name=name, path=self._prefix, directory=self._directory
        )
|
| 733 |
+
|
| 734 |
+
|
| 735 |
+
class PrefixedSubAppResource(PrefixResource):
    """Mounts a sub-Application under a URL prefix."""

    def __init__(self, prefix: str, app: "Application") -> None:
        super().__init__(prefix)
        self._app = app
        # Push the prefix down into every resource of the sub-app's router.
        for resource in app.router.resources():
            resource.add_prefix(prefix)

    def add_prefix(self, prefix: str) -> None:
        super().add_prefix(prefix)
        # Re-prefix the sub-app's resources as well.
        for resource in self._app.router.resources():
            resource.add_prefix(prefix)

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not supported " "by sub-application root")

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "prefix": self._prefix}

    async def resolve(self, request: Request) -> _Resolve:
        # NOTE(review): checks request.url.raw_path (absolute URL) while
        # other resources use request.rel_url.raw_path — confirm intended.
        if (
            not request.url.raw_path.startswith(self._prefix2)
            and request.url.raw_path != self._prefix
        ):
            return None, set()
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            # Surface the sub-app's allowed methods to the outer router.
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __len__(self) -> int:
        return len(self._app.router.routes())

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._app.router.routes())

    def __repr__(self) -> str:
        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
            prefix=self._prefix, app=self._app
        )
|
| 777 |
+
|
| 778 |
+
|
| 779 |
+
class AbstractRuleMatching(abc.ABC):
    """Interface for request-matching rules used by MatchedSubAppResource."""

    @abc.abstractmethod  # pragma: no branch
    async def match(self, request: Request) -> bool:
        """Return bool if the request satisfies the criteria"""

    @abc.abstractmethod  # pragma: no branch
    def get_info(self) -> _InfoDict:
        """Return a dict with additional info useful for introspection"""

    @property
    @abc.abstractmethod  # pragma: no branch
    def canonical(self) -> str:
        """Return a str"""
|
| 792 |
+
|
| 793 |
+
|
| 794 |
+
class Domain(AbstractRuleMatching):
    """Rule matching requests whose ``Host`` header equals a fixed domain."""

    # One DNS label: 1-63 chars, no leading/trailing hyphen.
    re_part = re.compile(r"(?!-)[a-z\d-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__()
        self._domain = self.validation(domain)

    @property
    def canonical(self) -> str:
        return self._domain

    def validation(self, domain: str) -> str:
        """Normalize and validate *domain*; return ``host`` or ``host:port``."""
        if not isinstance(domain, str):
            raise TypeError("Domain must be str")
        domain = domain.rstrip(".").lower()
        if not domain:
            raise ValueError("Domain cannot be empty")
        if "://" in domain:
            raise ValueError("Scheme not supported")
        url = URL("http://" + domain)
        assert url.raw_host is not None
        labels = url.raw_host.split(".")
        if not all(self.re_part.fullmatch(label) for label in labels):
            raise ValueError("Domain not valid")
        if url.port == 80:
            # Default HTTP port is omitted from the canonical form.
            return url.raw_host
        return f"{url.raw_host}:{url.port}"

    async def match(self, request: Request) -> bool:
        host = request.headers.get(hdrs.HOST)
        return bool(host) and self.match_domain(host)

    def match_domain(self, host: str) -> bool:
        return self._domain == host.lower()

    def get_info(self) -> _InfoDict:
        return {"domain": self._domain}
|
| 832 |
+
|
| 833 |
+
|
| 834 |
+
class MaskDomain(Domain):
    """Domain rule supporting ``*`` wildcards in the host name."""

    # Like Domain.re_part, but '*' is additionally permitted in a label.
    re_part = re.compile(r"(?!-)[a-z\d\*-]{1,63}(?<!-)")

    def __init__(self, domain: str) -> None:
        super().__init__(domain)
        # Translate the glob-style mask into a regular expression.
        escaped = self._domain.replace(".", r"\.")
        self._mask = re.compile(escaped.replace("*", ".*"))

    @property
    def canonical(self) -> str:
        return self._mask.pattern

    def match_domain(self, host: str) -> bool:
        return self._mask.fullmatch(host) is not None
|
| 848 |
+
|
| 849 |
+
|
| 850 |
+
class MatchedSubAppResource(PrefixedSubAppResource):
    """Mounts a sub-Application selected by an AbstractRuleMatching rule
    (e.g. a Domain) instead of a URL prefix."""

    def __init__(self, rule: AbstractRuleMatching, app: "Application") -> None:
        # Deliberately skip PrefixedSubAppResource/PrefixResource __init__:
        # there is no prefix and the sub-app resources must not be re-prefixed.
        AbstractResource.__init__(self)
        self._prefix = ""
        self._app = app
        self._rule = rule

    @property
    def canonical(self) -> str:
        return self._rule.canonical

    def get_info(self) -> _InfoDict:
        return {"app": self._app, "rule": self._rule}

    async def resolve(self, request: Request) -> _Resolve:
        if not await self._rule.match(request):
            # Rule rejected the request; this sub-app is not involved.
            return None, set()
        match_info = await self._app.router.resolve(request)
        match_info.add_app(self._app)
        if isinstance(match_info.http_exception, HTTPMethodNotAllowed):
            methods = match_info.http_exception.allowed_methods
        else:
            methods = set()
        return match_info, methods

    def __repr__(self) -> str:
        return "<MatchedSubAppResource -> {app!r}>" "".format(app=self._app)
|
| 877 |
+
|
| 878 |
+
|
| 879 |
+
class ResourceRoute(AbstractRoute):
    """A route with resource"""

    def __init__(
        self,
        method: str,
        handler: Union[Handler, Type[AbstractView]],
        resource: AbstractResource,
        *,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> None:
        super().__init__(
            method, handler, expect_handler=expect_handler, resource=resource
        )

    def __repr__(self) -> str:
        # Fix: the format string was missing its closing ">", producing an
        # unbalanced repr like "<ResourceRoute [GET] ... -> handler".
        return "<ResourceRoute [{method}] {resource} -> {handler!r}>".format(
            method=self.method, resource=self._resource, handler=self.handler
        )

    @property
    def name(self) -> Optional[str]:
        """Name of the owning resource, or None when detached."""
        if self._resource is None:
            return None
        return self._resource.name

    def url_for(self, *args: str, **kwargs: str) -> URL:
        """Construct url for route with additional params."""
        assert self._resource is not None
        return self._resource.url_for(*args, **kwargs)

    def get_info(self) -> _InfoDict:
        assert self._resource is not None
        return self._resource.get_info()
|
| 913 |
+
|
| 914 |
+
|
| 915 |
+
class SystemRoute(AbstractRoute):
    """Synthetic route that raises a fixed HTTP exception when handled
    (used by MatchInfoError for 404/405 responses)."""

    def __init__(self, http_exception: HTTPException) -> None:
        super().__init__(hdrs.METH_ANY, self._handle)
        self._http_exception = http_exception

    def url_for(self, *args: str, **kwargs: str) -> URL:
        raise RuntimeError(".url_for() is not allowed for SystemRoute")

    @property
    def name(self) -> Optional[str]:
        # System routes are anonymous.
        return None

    def get_info(self) -> _InfoDict:
        return {"http_exception": self._http_exception}

    async def _handle(self, request: Request) -> StreamResponse:
        raise self._http_exception

    @property
    def status(self) -> int:
        """HTTP status code of the wrapped exception."""
        return self._http_exception.status

    @property
    def reason(self) -> str:
        """Reason phrase of the wrapped exception."""
        return self._http_exception.reason

    def __repr__(self) -> str:
        return f"<SystemRoute {self.status}: {self.reason}>"
|
| 943 |
+
|
| 944 |
+
|
| 945 |
+
class View(AbstractView):
    """Class based view: dispatches the request to the method named after
    the lower-cased HTTP verb (``get``, ``post``, ...)."""

    async def _iter(self) -> StreamResponse:
        if self.request.method not in hdrs.METH_ALL:
            self._raise_allowed_methods()
        # Look up e.g. self.get / self.post; None when not implemented.
        method: Callable[[], Awaitable[StreamResponse]] = getattr(
            self, self.request.method.lower(), None
        )
        if method is None:
            self._raise_allowed_methods()
        resp = await method()
        return resp

    def __await__(self) -> Generator[Any, None, StreamResponse]:
        # Allows "await ViewInstance" — delegates to _iter().
        return self._iter().__await__()

    def _raise_allowed_methods(self) -> None:
        # 405 advertising the verbs this view actually implements.
        allowed_methods = {m for m in hdrs.METH_ALL if hasattr(self, m.lower())}
        raise HTTPMethodNotAllowed(self.request.method, allowed_methods)
|
| 963 |
+
|
| 964 |
+
|
| 965 |
+
class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
    """Read-only view over the router's resource list."""

    def __init__(self, resources: List[AbstractResource]) -> None:
        self._resources = resources

    def __len__(self) -> int:
        return len(self._resources)

    def __iter__(self) -> Iterator[AbstractResource]:
        return iter(self._resources)

    def __contains__(self, resource: object) -> bool:
        return resource in self._resources
|
| 977 |
+
|
| 978 |
+
|
| 979 |
+
class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
    """Flattened read-only view over every route of every resource."""

    def __init__(self, resources: List[AbstractResource]):
        # Collect the routes of all resources into one flat list.
        self._routes: List[AbstractRoute] = [
            route for resource in resources for route in resource
        ]

    def __len__(self) -> int:
        return len(self._routes)

    def __iter__(self) -> Iterator[AbstractRoute]:
        return iter(self._routes)

    def __contains__(self, route: object) -> bool:
        return route in self._routes
|
| 994 |
+
|
| 995 |
+
|
| 996 |
+
class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
    """Default aiohttp router: an ordered list of resources plus a
    name -> resource mapping for url_for lookups."""

    # Route names may be identifiers joined by '.', ':' or '-'.
    NAME_SPLIT_RE = re.compile(r"[.:-]")

    def __init__(self) -> None:
        super().__init__()
        self._resources = []  # type: List[AbstractResource]
        self._named_resources = {}  # type: Dict[str, AbstractResource]

    async def resolve(self, request: Request) -> UrlMappingMatchInfo:
        """Resolve *request* to a match info.

        Never returns None: unmatched requests yield a MatchInfoError
        wrapping 405 (path matched, wrong method) or 404 (no match).
        """
        method = request.method
        allowed_methods = set()  # type: Set[str]

        for resource in self._resources:
            match_dict, allowed = await resource.resolve(request)
            if match_dict is not None:
                return match_dict
            else:
                # Accumulate methods other resources would have accepted.
                allowed_methods |= allowed

        if allowed_methods:
            return MatchInfoError(HTTPMethodNotAllowed(method, allowed_methods))
        else:
            return MatchInfoError(HTTPNotFound())

    def __iter__(self) -> Iterator[str]:
        return iter(self._named_resources)

    def __len__(self) -> int:
        return len(self._named_resources)

    def __contains__(self, resource: object) -> bool:
        return resource in self._named_resources

    def __getitem__(self, name: str) -> AbstractResource:
        return self._named_resources[name]

    def resources(self) -> ResourcesView:
        """View over all resources, named or not."""
        return ResourcesView(self._resources)

    def routes(self) -> RoutesView:
        """Flattened view over all routes of all resources."""
        return RoutesView(self._resources)

    def named_resources(self) -> Mapping[str, AbstractResource]:
        """Read-only mapping of route name to resource."""
        return MappingProxyType(self._named_resources)

    def register_resource(self, resource: AbstractResource) -> None:
        """Add *resource* to the router, validating its name if any."""
        assert isinstance(
            resource, AbstractResource
        ), f"Instance of AbstractResource class is required, got {resource!r}"
        if self.frozen:
            raise RuntimeError("Cannot register a resource into frozen router.")

        name = resource.name

        if name is not None:
            # Each '.'/':'/'-' separated part must be a valid identifier
            # (url_for uses the name in attribute-style lookups).
            parts = self.NAME_SPLIT_RE.split(name)
            for part in parts:
                if keyword.iskeyword(part):
                    raise ValueError(
                        f"Incorrect route name {name!r}, "
                        "python keywords cannot be used "
                        "for route name"
                    )
                if not part.isidentifier():
                    raise ValueError(
                        "Incorrect route name {!r}, "
                        "the name should be a sequence of "
                        "python identifiers separated "
                        "by dash, dot or column".format(name)
                    )
            if name in self._named_resources:
                raise ValueError(
                    "Duplicate {!r}, "
                    "already handled by {!r}".format(name, self._named_resources[name])
                )
            self._named_resources[name] = resource
        self._resources.append(resource)

    def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource:
        """Create (or reuse) a Plain/Dynamic resource for *path*."""
        if path and not path.startswith("/"):
            raise ValueError("path should be started with / or be empty")
        # Reuse last added resource if path and name are the same
        if self._resources:
            resource = self._resources[-1]
            if resource.name == name and resource.raw_match(path):
                return cast(Resource, resource)
        if not ("{" in path or "}" in path or ROUTE_RE.search(path)):
            # No placeholders: cheap exact-string resource.
            resource = PlainResource(_requote_path(path), name=name)
            self.register_resource(resource)
            return resource
        resource = DynamicResource(path, name=name)
        self.register_resource(resource)
        return resource

    def add_route(
        self,
        method: str,
        path: str,
        handler: Union[Handler, Type[AbstractView]],
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
    ) -> AbstractRoute:
        """Register *handler* for *method* on *path* and return the route."""
        resource = self.add_resource(path, name=name)
        return resource.add_route(method, handler, expect_handler=expect_handler)

    def add_static(
        self,
        prefix: str,
        path: PathLike,
        *,
        name: Optional[str] = None,
        expect_handler: Optional[_ExpectHandler] = None,
        chunk_size: int = 256 * 1024,
        show_index: bool = False,
        follow_symlinks: bool = False,
        append_version: bool = False,
    ) -> AbstractResource:
        """Add static files view.

        prefix - url prefix
        path - folder with files

        """
        assert prefix.startswith("/")
        if prefix.endswith("/"):
            prefix = prefix[:-1]
        resource = StaticResource(
            prefix,
            path,
            name=name,
            expect_handler=expect_handler,
            chunk_size=chunk_size,
            show_index=show_index,
            follow_symlinks=follow_symlinks,
            append_version=append_version,
        )
        self.register_resource(resource)
        return resource

    def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method HEAD."""
        return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)

    def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method OPTIONS."""
        return self.add_route(hdrs.METH_OPTIONS, path, handler, **kwargs)

    def add_get(
        self,
        path: str,
        handler: Handler,
        *,
        name: Optional[str] = None,
        allow_head: bool = True,
        **kwargs: Any,
    ) -> AbstractRoute:
        """Shortcut for add_route with method GET.

        If allow_head is true, another
        route is added allowing head requests to the same endpoint.
        """
        resource = self.add_resource(path, name=name)
        if allow_head:
            resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
        return resource.add_route(hdrs.METH_GET, handler, **kwargs)

    def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method POST."""
        return self.add_route(hdrs.METH_POST, path, handler, **kwargs)

    def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method PUT."""
        return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)

    def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method PATCH."""
        return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)

    def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
        """Shortcut for add_route with method DELETE."""
        return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs)

    def add_view(
        self, path: str, handler: Type[AbstractView], **kwargs: Any
    ) -> AbstractRoute:
        """Shortcut for add_route with ANY methods for a class-based view."""
        return self.add_route(hdrs.METH_ANY, path, handler, **kwargs)

    def freeze(self) -> None:
        """Freeze the router and every registered resource."""
        super().freeze()
        for resource in self._resources:
            resource.freeze()

    def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]:
        """Append routes to route table.

        Parameter should be a sequence of RouteDef objects.

        Returns a list of registered AbstractRoute instances.
        """
        registered_routes = []
        for route_def in routes:
            registered_routes.extend(route_def.register(self))
        return registered_routes
|
| 1202 |
+
|
| 1203 |
+
|
| 1204 |
+
def _quote_path(value: str) -> str:
|
| 1205 |
+
if YARL_VERSION < (1, 6):
|
| 1206 |
+
value = value.replace("%", "%25")
|
| 1207 |
+
return URL.build(path=value, encoded=False).raw_path
|
| 1208 |
+
|
| 1209 |
+
|
| 1210 |
+
def _unquote_path(value: str) -> str:
|
| 1211 |
+
return URL.build(path=value, encoded=True).path
|
| 1212 |
+
|
| 1213 |
+
|
| 1214 |
+
def _requote_path(value: str) -> str:
|
| 1215 |
+
# Quote non-ascii characters and other characters which must be quoted,
|
| 1216 |
+
# but preserve existing %-sequences.
|
| 1217 |
+
result = _quote_path(value)
|
| 1218 |
+
if "%" in value:
|
| 1219 |
+
result = result.replace("%25", "%")
|
| 1220 |
+
return result
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/worker.py
ADDED
|
@@ -0,0 +1,269 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Async gunicorn worker for aiohttp.web"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import os
|
| 5 |
+
import re
|
| 6 |
+
import signal
|
| 7 |
+
import sys
|
| 8 |
+
from types import FrameType
|
| 9 |
+
from typing import Any, Awaitable, Callable, Optional, Union # noqa
|
| 10 |
+
|
| 11 |
+
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
|
| 12 |
+
from gunicorn.workers import base
|
| 13 |
+
|
| 14 |
+
from aiohttp import web
|
| 15 |
+
|
| 16 |
+
from .helpers import set_result
|
| 17 |
+
from .web_app import Application
|
| 18 |
+
from .web_log import AccessLogger
|
| 19 |
+
|
| 20 |
+
try:
|
| 21 |
+
import ssl
|
| 22 |
+
|
| 23 |
+
SSLContext = ssl.SSLContext
|
| 24 |
+
except ImportError: # pragma: no cover
|
| 25 |
+
ssl = None # type: ignore[assignment]
|
| 26 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
|
| 33 |
+
|
| 34 |
+
DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
|
| 35 |
+
DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
|
| 36 |
+
|
| 37 |
+
def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
|
| 38 |
+
super().__init__(*args, **kw)
|
| 39 |
+
|
| 40 |
+
self._task = None # type: Optional[asyncio.Task[None]]
|
| 41 |
+
self.exit_code = 0
|
| 42 |
+
self._notify_waiter = None # type: Optional[asyncio.Future[bool]]
|
| 43 |
+
|
| 44 |
+
def init_process(self) -> None:
|
| 45 |
+
# create new event_loop after fork
|
| 46 |
+
asyncio.get_event_loop().close()
|
| 47 |
+
|
| 48 |
+
self.loop = asyncio.new_event_loop()
|
| 49 |
+
asyncio.set_event_loop(self.loop)
|
| 50 |
+
|
| 51 |
+
super().init_process()
|
| 52 |
+
|
| 53 |
+
def run(self) -> None:
|
| 54 |
+
self._task = self.loop.create_task(self._run())
|
| 55 |
+
|
| 56 |
+
try: # ignore all finalization problems
|
| 57 |
+
self.loop.run_until_complete(self._task)
|
| 58 |
+
except Exception:
|
| 59 |
+
self.log.exception("Exception in gunicorn worker")
|
| 60 |
+
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
|
| 61 |
+
self.loop.close()
|
| 62 |
+
|
| 63 |
+
sys.exit(self.exit_code)
|
| 64 |
+
|
| 65 |
+
async def _run(self) -> None:
|
| 66 |
+
runner = None
|
| 67 |
+
if isinstance(self.wsgi, Application):
|
| 68 |
+
app = self.wsgi
|
| 69 |
+
elif asyncio.iscoroutinefunction(self.wsgi):
|
| 70 |
+
wsgi = await self.wsgi()
|
| 71 |
+
if isinstance(wsgi, web.AppRunner):
|
| 72 |
+
runner = wsgi
|
| 73 |
+
app = runner.app
|
| 74 |
+
else:
|
| 75 |
+
app = wsgi
|
| 76 |
+
else:
|
| 77 |
+
raise RuntimeError(
|
| 78 |
+
"wsgi app should be either Application or "
|
| 79 |
+
"async function returning Application, got {}".format(self.wsgi)
|
| 80 |
+
)
|
| 81 |
+
|
| 82 |
+
if runner is None:
|
| 83 |
+
access_log = self.log.access_log if self.cfg.accesslog else None
|
| 84 |
+
runner = web.AppRunner(
|
| 85 |
+
app,
|
| 86 |
+
logger=self.log,
|
| 87 |
+
keepalive_timeout=self.cfg.keepalive,
|
| 88 |
+
access_log=access_log,
|
| 89 |
+
access_log_format=self._get_valid_log_format(
|
| 90 |
+
self.cfg.access_log_format
|
| 91 |
+
),
|
| 92 |
+
)
|
| 93 |
+
await runner.setup()
|
| 94 |
+
|
| 95 |
+
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
|
| 96 |
+
|
| 97 |
+
runner = runner
|
| 98 |
+
assert runner is not None
|
| 99 |
+
server = runner.server
|
| 100 |
+
assert server is not None
|
| 101 |
+
for sock in self.sockets:
|
| 102 |
+
site = web.SockSite(
|
| 103 |
+
runner,
|
| 104 |
+
sock,
|
| 105 |
+
ssl_context=ctx,
|
| 106 |
+
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
|
| 107 |
+
)
|
| 108 |
+
await site.start()
|
| 109 |
+
|
| 110 |
+
# If our parent changed then we shut down.
|
| 111 |
+
pid = os.getpid()
|
| 112 |
+
try:
|
| 113 |
+
while self.alive: # type: ignore[has-type]
|
| 114 |
+
self.notify()
|
| 115 |
+
|
| 116 |
+
cnt = server.requests_count
|
| 117 |
+
if self.cfg.max_requests and cnt > self.cfg.max_requests:
|
| 118 |
+
self.alive = False
|
| 119 |
+
self.log.info("Max requests, shutting down: %s", self)
|
| 120 |
+
|
| 121 |
+
elif pid == os.getpid() and self.ppid != os.getppid():
|
| 122 |
+
self.alive = False
|
| 123 |
+
self.log.info("Parent changed, shutting down: %s", self)
|
| 124 |
+
else:
|
| 125 |
+
await self._wait_next_notify()
|
| 126 |
+
except BaseException:
|
| 127 |
+
pass
|
| 128 |
+
|
| 129 |
+
await runner.cleanup()
|
| 130 |
+
|
| 131 |
+
def _wait_next_notify(self) -> "asyncio.Future[bool]":
|
| 132 |
+
self._notify_waiter_done()
|
| 133 |
+
|
| 134 |
+
loop = self.loop
|
| 135 |
+
assert loop is not None
|
| 136 |
+
self._notify_waiter = waiter = loop.create_future()
|
| 137 |
+
self.loop.call_later(1.0, self._notify_waiter_done, waiter)
|
| 138 |
+
|
| 139 |
+
return waiter
|
| 140 |
+
|
| 141 |
+
def _notify_waiter_done(
|
| 142 |
+
self, waiter: Optional["asyncio.Future[bool]"] = None
|
| 143 |
+
) -> None:
|
| 144 |
+
if waiter is None:
|
| 145 |
+
waiter = self._notify_waiter
|
| 146 |
+
if waiter is not None:
|
| 147 |
+
set_result(waiter, True)
|
| 148 |
+
|
| 149 |
+
if waiter is self._notify_waiter:
|
| 150 |
+
self._notify_waiter = None
|
| 151 |
+
|
| 152 |
+
def init_signals(self) -> None:
|
| 153 |
+
# Set up signals through the event loop API.
|
| 154 |
+
|
| 155 |
+
self.loop.add_signal_handler(
|
| 156 |
+
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
|
| 157 |
+
)
|
| 158 |
+
|
| 159 |
+
self.loop.add_signal_handler(
|
| 160 |
+
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
self.loop.add_signal_handler(
|
| 164 |
+
signal.SIGINT, self.handle_quit, signal.SIGINT, None
|
| 165 |
+
)
|
| 166 |
+
|
| 167 |
+
self.loop.add_signal_handler(
|
| 168 |
+
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
|
| 169 |
+
)
|
| 170 |
+
|
| 171 |
+
self.loop.add_signal_handler(
|
| 172 |
+
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
|
| 173 |
+
)
|
| 174 |
+
|
| 175 |
+
self.loop.add_signal_handler(
|
| 176 |
+
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
|
| 177 |
+
)
|
| 178 |
+
|
| 179 |
+
# Don't let SIGTERM and SIGUSR1 disturb active requests
|
| 180 |
+
# by interrupting system calls
|
| 181 |
+
signal.siginterrupt(signal.SIGTERM, False)
|
| 182 |
+
signal.siginterrupt(signal.SIGUSR1, False)
|
| 183 |
+
# Reset signals so Gunicorn doesn't swallow subprocess return codes
|
| 184 |
+
# See: https://github.com/aio-libs/aiohttp/issues/6130
|
| 185 |
+
if sys.version_info < (3, 8):
|
| 186 |
+
# Starting from Python 3.8,
|
| 187 |
+
# the default child watcher is ThreadedChildWatcher.
|
| 188 |
+
# The watcher doesn't depend on SIGCHLD signal,
|
| 189 |
+
# there is no need to reset it.
|
| 190 |
+
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
|
| 191 |
+
|
| 192 |
+
def handle_quit(self, sig: int, frame: FrameType) -> None:
|
| 193 |
+
self.alive = False
|
| 194 |
+
|
| 195 |
+
# worker_int callback
|
| 196 |
+
self.cfg.worker_int(self)
|
| 197 |
+
|
| 198 |
+
# wakeup closing process
|
| 199 |
+
self._notify_waiter_done()
|
| 200 |
+
|
| 201 |
+
def handle_abort(self, sig: int, frame: FrameType) -> None:
|
| 202 |
+
self.alive = False
|
| 203 |
+
self.exit_code = 1
|
| 204 |
+
self.cfg.worker_abort(self)
|
| 205 |
+
sys.exit(1)
|
| 206 |
+
|
| 207 |
+
@staticmethod
|
| 208 |
+
def _create_ssl_context(cfg: Any) -> "SSLContext":
|
| 209 |
+
"""Creates SSLContext instance for usage in asyncio.create_server.
|
| 210 |
+
|
| 211 |
+
See ssl.SSLSocket.__init__ for more details.
|
| 212 |
+
"""
|
| 213 |
+
if ssl is None: # pragma: no cover
|
| 214 |
+
raise RuntimeError("SSL is not supported.")
|
| 215 |
+
|
| 216 |
+
ctx = ssl.SSLContext(cfg.ssl_version)
|
| 217 |
+
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
|
| 218 |
+
ctx.verify_mode = cfg.cert_reqs
|
| 219 |
+
if cfg.ca_certs:
|
| 220 |
+
ctx.load_verify_locations(cfg.ca_certs)
|
| 221 |
+
if cfg.ciphers:
|
| 222 |
+
ctx.set_ciphers(cfg.ciphers)
|
| 223 |
+
return ctx
|
| 224 |
+
|
| 225 |
+
def _get_valid_log_format(self, source_format: str) -> str:
|
| 226 |
+
if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
|
| 227 |
+
return self.DEFAULT_AIOHTTP_LOG_FORMAT
|
| 228 |
+
elif re.search(r"%\([^\)]+\)", source_format):
|
| 229 |
+
raise ValueError(
|
| 230 |
+
"Gunicorn's style options in form of `%(name)s` are not "
|
| 231 |
+
"supported for the log formatting. Please use aiohttp's "
|
| 232 |
+
"format specification to configure access log formatting: "
|
| 233 |
+
"http://docs.aiohttp.org/en/stable/logging.html"
|
| 234 |
+
"#format-specification"
|
| 235 |
+
)
|
| 236 |
+
else:
|
| 237 |
+
return source_format
|
| 238 |
+
|
| 239 |
+
|
| 240 |
+
class GunicornUVLoopWebWorker(GunicornWebWorker):
|
| 241 |
+
def init_process(self) -> None:
|
| 242 |
+
import uvloop
|
| 243 |
+
|
| 244 |
+
# Close any existing event loop before setting a
|
| 245 |
+
# new policy.
|
| 246 |
+
asyncio.get_event_loop().close()
|
| 247 |
+
|
| 248 |
+
# Setup uvloop policy, so that every
|
| 249 |
+
# asyncio.get_event_loop() will create an instance
|
| 250 |
+
# of uvloop event loop.
|
| 251 |
+
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
| 252 |
+
|
| 253 |
+
super().init_process()
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
class GunicornTokioWebWorker(GunicornWebWorker):
|
| 257 |
+
def init_process(self) -> None: # pragma: no cover
|
| 258 |
+
import tokio
|
| 259 |
+
|
| 260 |
+
# Close any existing event loop before setting a
|
| 261 |
+
# new policy.
|
| 262 |
+
asyncio.get_event_loop().close()
|
| 263 |
+
|
| 264 |
+
# Setup tokio policy, so that every
|
| 265 |
+
# asyncio.get_event_loop() will create an instance
|
| 266 |
+
# of tokio event loop.
|
| 267 |
+
asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
|
| 268 |
+
|
| 269 |
+
super().init_process()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/brotli/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
# flake8: noqa
|
| 3 |
+
from .brotli import (
|
| 4 |
+
decompress, Decompressor, compress, BrotliEncoderMode, DEFAULT_MODE,
|
| 5 |
+
Compressor, MODE_GENERIC, MODE_TEXT, MODE_FONT, error, Error
|
| 6 |
+
)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/brotli/brotli.py
ADDED
|
@@ -0,0 +1,466 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
|
| 2 |
+
import math
|
| 3 |
+
import enum
|
| 4 |
+
|
| 5 |
+
from ._brotli import ffi, lib
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class Error(Exception):
|
| 9 |
+
"""
|
| 10 |
+
Raised whenever an error is encountered with compressing or decompressing
|
| 11 |
+
data using brotlipy.
|
| 12 |
+
|
| 13 |
+
.. versionadded:: 0.5.1
|
| 14 |
+
"""
|
| 15 |
+
pass
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
#: An alias of :class:`Error <brotli.Error>` that exists for compatibility with
|
| 19 |
+
#: the original C brotli module.
|
| 20 |
+
#:
|
| 21 |
+
#: .. versionadded: 0.5.1
|
| 22 |
+
error = Error
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class BrotliEncoderMode(enum.IntEnum):
|
| 26 |
+
"""
|
| 27 |
+
Compression modes for the Brotli encoder.
|
| 28 |
+
|
| 29 |
+
.. versionadded:: 0.5.0
|
| 30 |
+
"""
|
| 31 |
+
#: Default compression mode. The compressor does not know anything in
|
| 32 |
+
#: advance about the properties of the input.
|
| 33 |
+
GENERIC = lib.BROTLI_MODE_GENERIC
|
| 34 |
+
|
| 35 |
+
#: Compression mode for UTF-8 format text input.
|
| 36 |
+
TEXT = lib.BROTLI_MODE_TEXT
|
| 37 |
+
|
| 38 |
+
#: Compression mode used in WOFF 2.0
|
| 39 |
+
FONT = lib.BROTLI_MODE_FONT
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# Define some names for compatibility with the C module.
|
| 43 |
+
|
| 44 |
+
#: The default compression mode for brotli.
|
| 45 |
+
DEFAULT_MODE = BrotliEncoderMode(lib.BROTLI_DEFAULT_MODE)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
#: A compression mode where the compressor does not know anything in advance
|
| 49 |
+
#: about the properties of the input.
|
| 50 |
+
#:
|
| 51 |
+
#: .. note:: This name is defined for compatibility with the Brotli C
|
| 52 |
+
#: extension. If you're not interested in that compatibility, it is
|
| 53 |
+
#: recommended that you use :class:`BrotliEncoderMode
|
| 54 |
+
#: <brotli.BrotliEncoderMode>` instead.
|
| 55 |
+
#:
|
| 56 |
+
#: .. versionadded:: 0.5.0
|
| 57 |
+
MODE_GENERIC = BrotliEncoderMode.GENERIC
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
#: A compression mode for UTF-8 format text input.
|
| 61 |
+
#:
|
| 62 |
+
#: .. note:: This name is defined for compatibility with the Brotli C
|
| 63 |
+
#: extension. If you're not interested in that compatibility, it is
|
| 64 |
+
#: recommended that you use :class:`BrotliEncoderMode
|
| 65 |
+
#: <brotli.BrotliEncoderMode>` instead.
|
| 66 |
+
#:
|
| 67 |
+
#: .. versionadded:: 0.5.0
|
| 68 |
+
MODE_TEXT = BrotliEncoderMode.TEXT
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
#: The compression mode used in WOFF 2.0.
|
| 72 |
+
#:
|
| 73 |
+
#: .. note:: This name is defined for compatibility with the Brotli C
|
| 74 |
+
#: extension. If you're not interested in that compatibility, it is
|
| 75 |
+
#: recommended that you use :class:`BrotliEncoderMode
|
| 76 |
+
#: <brotli.BrotliEncoderMode>` instead.
|
| 77 |
+
#:
|
| 78 |
+
#: .. versionadded:: 0.5.0
|
| 79 |
+
MODE_FONT = BrotliEncoderMode.FONT
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def decompress(data):
|
| 83 |
+
"""
|
| 84 |
+
Decompress a complete Brotli-compressed string.
|
| 85 |
+
|
| 86 |
+
:param data: A bytestring containing Brotli-compressed data.
|
| 87 |
+
"""
|
| 88 |
+
d = Decompressor()
|
| 89 |
+
data = d.decompress(data)
|
| 90 |
+
d.finish()
|
| 91 |
+
return data
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
def compress(data,
|
| 95 |
+
mode=DEFAULT_MODE,
|
| 96 |
+
quality=lib.BROTLI_DEFAULT_QUALITY,
|
| 97 |
+
lgwin=lib.BROTLI_DEFAULT_WINDOW,
|
| 98 |
+
lgblock=0,
|
| 99 |
+
dictionary=b''):
|
| 100 |
+
"""
|
| 101 |
+
Compress a string using Brotli.
|
| 102 |
+
|
| 103 |
+
.. versionchanged:: 0.5.0
|
| 104 |
+
Added ``mode``, ``quality``, `lgwin``, ``lgblock``, and ``dictionary``
|
| 105 |
+
parameters.
|
| 106 |
+
|
| 107 |
+
:param data: A bytestring containing the data to compress.
|
| 108 |
+
:type data: ``bytes``
|
| 109 |
+
|
| 110 |
+
:param mode: The encoder mode.
|
| 111 |
+
:type mode: :class:`BrotliEncoderMode` or ``int``
|
| 112 |
+
|
| 113 |
+
:param quality: Controls the compression-speed vs compression-density
|
| 114 |
+
tradeoffs. The higher the quality, the slower the compression. The
|
| 115 |
+
range of this value is 0 to 11.
|
| 116 |
+
:type quality: ``int``
|
| 117 |
+
|
| 118 |
+
:param lgwin: The base-2 logarithm of the sliding window size. The range of
|
| 119 |
+
this value is 10 to 24.
|
| 120 |
+
:type lgwin: ``int``
|
| 121 |
+
|
| 122 |
+
:param lgblock: The base-2 logarithm of the maximum input block size. The
|
| 123 |
+
range of this value is 16 to 24. If set to 0, the value will be set
|
| 124 |
+
based on ``quality``.
|
| 125 |
+
:type lgblock: ``int``
|
| 126 |
+
|
| 127 |
+
:param dictionary: A pre-set dictionary for LZ77. Please use this with
|
| 128 |
+
caution: if a dictionary is used for compression, the same dictionary
|
| 129 |
+
**must** be used for decompression!
|
| 130 |
+
:type dictionary: ``bytes``
|
| 131 |
+
|
| 132 |
+
:returns: The compressed bytestring.
|
| 133 |
+
:rtype: ``bytes``
|
| 134 |
+
"""
|
| 135 |
+
# This method uses private variables on the Compressor object, and
|
| 136 |
+
# generally does a whole lot of stuff that's not supported by the public
|
| 137 |
+
# API. The goal here is to minimise the number of allocations and copies
|
| 138 |
+
# we have to do. Users should prefer this method over the Compressor if
|
| 139 |
+
# they know they have single-shot data.
|
| 140 |
+
compressor = Compressor(
|
| 141 |
+
mode=mode,
|
| 142 |
+
quality=quality,
|
| 143 |
+
lgwin=lgwin,
|
| 144 |
+
lgblock=lgblock,
|
| 145 |
+
dictionary=dictionary
|
| 146 |
+
)
|
| 147 |
+
compressed_data = compressor._compress(data, lib.BROTLI_OPERATION_FINISH)
|
| 148 |
+
assert lib.BrotliEncoderIsFinished(compressor._encoder) == lib.BROTLI_TRUE
|
| 149 |
+
assert (
|
| 150 |
+
lib.BrotliEncoderHasMoreOutput(compressor._encoder) == lib.BROTLI_FALSE
|
| 151 |
+
)
|
| 152 |
+
return compressed_data
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
def _validate_mode(val):
|
| 156 |
+
"""
|
| 157 |
+
Validate that the mode is valid.
|
| 158 |
+
"""
|
| 159 |
+
try:
|
| 160 |
+
val = BrotliEncoderMode(val)
|
| 161 |
+
except ValueError:
|
| 162 |
+
raise Error("%s is not a valid encoder mode" % val)
|
| 163 |
+
|
| 164 |
+
|
| 165 |
+
def _validate_quality(val):
|
| 166 |
+
"""
|
| 167 |
+
Validate that the quality setting is valid.
|
| 168 |
+
"""
|
| 169 |
+
if not (0 <= val <= 11):
|
| 170 |
+
raise Error(
|
| 171 |
+
"%d is not a valid quality, must be between 0 and 11" % val
|
| 172 |
+
)
|
| 173 |
+
|
| 174 |
+
|
| 175 |
+
def _validate_lgwin(val):
|
| 176 |
+
"""
|
| 177 |
+
Validate that the lgwin setting is valid.
|
| 178 |
+
"""
|
| 179 |
+
if not (10 <= val <= 24):
|
| 180 |
+
raise Error("%d is not a valid lgwin, must be between 10 and 24" % val)
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def _validate_lgblock(val):
|
| 184 |
+
"""
|
| 185 |
+
Validate that the lgblock setting is valid.
|
| 186 |
+
"""
|
| 187 |
+
if (val != 0) and not (16 <= val <= 24):
|
| 188 |
+
raise Error(
|
| 189 |
+
"%d is not a valid lgblock, must be either 0 or between 16 and 24"
|
| 190 |
+
% val
|
| 191 |
+
)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def _set_parameter(encoder, parameter, parameter_name, val):
|
| 195 |
+
"""
|
| 196 |
+
This helper function sets a specific Brotli encoder parameter, checking
|
| 197 |
+
the return code and raising :class:`Error <brotli.Error>` if it is
|
| 198 |
+
invalid.
|
| 199 |
+
"""
|
| 200 |
+
rc = lib.BrotliEncoderSetParameter(encoder, parameter, val)
|
| 201 |
+
|
| 202 |
+
if parameter == lib.BROTLI_PARAM_MODE:
|
| 203 |
+
_validate_mode(val)
|
| 204 |
+
elif parameter == lib.BROTLI_PARAM_QUALITY:
|
| 205 |
+
_validate_quality(val)
|
| 206 |
+
elif parameter == lib.BROTLI_PARAM_LGWIN:
|
| 207 |
+
_validate_lgwin(val)
|
| 208 |
+
elif parameter == lib.BROTLI_PARAM_LGBLOCK:
|
| 209 |
+
_validate_lgblock(val)
|
| 210 |
+
else: # pragma: no cover
|
| 211 |
+
raise RuntimeError("Unexpected parameter!")
|
| 212 |
+
|
| 213 |
+
# This block is defensive: I see no way to hit it, but as long as the
|
| 214 |
+
# function returns a value we can live in hope that the brotli folks will
|
| 215 |
+
# enforce their own constraints.
|
| 216 |
+
if rc != lib.BROTLI_TRUE: # pragma: no cover
|
| 217 |
+
raise Error(
|
| 218 |
+
"Error setting parameter %s: %d" % (parameter_name, val)
|
| 219 |
+
)
|
| 220 |
+
|
| 221 |
+
|
| 222 |
+
class Compressor(object):
|
| 223 |
+
"""
|
| 224 |
+
An object that allows for streaming compression of data using the Brotli
|
| 225 |
+
compression algorithm.
|
| 226 |
+
|
| 227 |
+
.. versionadded:: 0.5.0
|
| 228 |
+
|
| 229 |
+
:param mode: The encoder mode.
|
| 230 |
+
:type mode: :class:`BrotliEncoderMode` or ``int``
|
| 231 |
+
|
| 232 |
+
:param quality: Controls the compression-speed vs compression-density
|
| 233 |
+
tradeoffs. The higher the quality, the slower the compression. The
|
| 234 |
+
range of this value is 0 to 11.
|
| 235 |
+
:type quality: ``int``
|
| 236 |
+
|
| 237 |
+
:param lgwin: The base-2 logarithm of the sliding window size. The range of
|
| 238 |
+
this value is 10 to 24.
|
| 239 |
+
:type lgwin: ``int``
|
| 240 |
+
|
| 241 |
+
:param lgblock: The base-2 logarithm of the maximum input block size. The
|
| 242 |
+
range of this value is 16 to 24. If set to 0, the value will be set
|
| 243 |
+
based on ``quality``.
|
| 244 |
+
:type lgblock: ``int``
|
| 245 |
+
|
| 246 |
+
:param dictionary: A pre-set dictionary for LZ77. Please use this with
|
| 247 |
+
caution: if a dictionary is used for compression, the same dictionary
|
| 248 |
+
**must** be used for decompression!
|
| 249 |
+
:type dictionary: ``bytes``
|
| 250 |
+
"""
|
| 251 |
+
_dictionary = None
|
| 252 |
+
_dictionary_size = None
|
| 253 |
+
|
| 254 |
+
def __init__(self,
|
| 255 |
+
mode=DEFAULT_MODE,
|
| 256 |
+
quality=lib.BROTLI_DEFAULT_QUALITY,
|
| 257 |
+
lgwin=lib.BROTLI_DEFAULT_WINDOW,
|
| 258 |
+
lgblock=0,
|
| 259 |
+
dictionary=b''):
|
| 260 |
+
enc = lib.BrotliEncoderCreateInstance(
|
| 261 |
+
ffi.NULL, ffi.NULL, ffi.NULL
|
| 262 |
+
)
|
| 263 |
+
if not enc: # pragma: no cover
|
| 264 |
+
raise RuntimeError("Unable to allocate Brotli encoder!")
|
| 265 |
+
|
| 266 |
+
enc = ffi.gc(enc, lib.BrotliEncoderDestroyInstance)
|
| 267 |
+
|
| 268 |
+
# Configure the encoder appropriately.
|
| 269 |
+
_set_parameter(enc, lib.BROTLI_PARAM_MODE, "mode", mode)
|
| 270 |
+
_set_parameter(enc, lib.BROTLI_PARAM_QUALITY, "quality", quality)
|
| 271 |
+
_set_parameter(enc, lib.BROTLI_PARAM_LGWIN, "lgwin", lgwin)
|
| 272 |
+
_set_parameter(enc, lib.BROTLI_PARAM_LGBLOCK, "lgblock", lgblock)
|
| 273 |
+
|
| 274 |
+
if dictionary:
|
| 275 |
+
self._dictionary = ffi.new("uint8_t []", dictionary)
|
| 276 |
+
self._dictionary_size = len(dictionary)
|
| 277 |
+
lib.BrotliEncoderSetCustomDictionary(
|
| 278 |
+
enc, self._dictionary_size, self._dictionary
|
| 279 |
+
)
|
| 280 |
+
|
| 281 |
+
self._encoder = enc
|
| 282 |
+
|
| 283 |
+
def _compress(self, data, operation):
|
| 284 |
+
"""
|
| 285 |
+
This private method compresses some data in a given mode. This is used
|
| 286 |
+
because almost all of the code uses the exact same setup. It wouldn't
|
| 287 |
+
have to, but it doesn't hurt at all.
|
| 288 |
+
"""
|
| 289 |
+
# The 'algorithm' for working out how big to make this buffer is from
|
| 290 |
+
# the Brotli source code, brotlimodule.cc.
|
| 291 |
+
original_output_size = int(
|
| 292 |
+
math.ceil(len(data) + (len(data) >> 2) + 10240)
|
| 293 |
+
)
|
| 294 |
+
available_out = ffi.new("size_t *")
|
| 295 |
+
available_out[0] = original_output_size
|
| 296 |
+
output_buffer = ffi.new("uint8_t []", available_out[0])
|
| 297 |
+
ptr_to_output_buffer = ffi.new("uint8_t **", output_buffer)
|
| 298 |
+
input_size = ffi.new("size_t *", len(data))
|
| 299 |
+
input_buffer = ffi.new("uint8_t []", data)
|
| 300 |
+
ptr_to_input_buffer = ffi.new("uint8_t **", input_buffer)
|
| 301 |
+
|
| 302 |
+
rc = lib.BrotliEncoderCompressStream(
|
| 303 |
+
self._encoder,
|
| 304 |
+
operation,
|
| 305 |
+
input_size,
|
| 306 |
+
ptr_to_input_buffer,
|
| 307 |
+
available_out,
|
| 308 |
+
ptr_to_output_buffer,
|
| 309 |
+
ffi.NULL
|
| 310 |
+
)
|
| 311 |
+
if rc != lib.BROTLI_TRUE: # pragma: no cover
|
| 312 |
+
raise Error("Error encountered compressing data.")
|
| 313 |
+
|
| 314 |
+
assert not input_size[0]
|
| 315 |
+
|
| 316 |
+
size_of_output = original_output_size - available_out[0]
|
| 317 |
+
return ffi.buffer(output_buffer, size_of_output)[:]
|
| 318 |
+
|
| 319 |
+
def compress(self, data):
|
| 320 |
+
"""
|
| 321 |
+
Incrementally compress more data.
|
| 322 |
+
|
| 323 |
+
:param data: A bytestring containing data to compress.
|
| 324 |
+
:returns: A bytestring containing some compressed data. May return the
|
| 325 |
+
empty bytestring if not enough data has been inserted into the
|
| 326 |
+
compressor to create the output yet.
|
| 327 |
+
"""
|
| 328 |
+
return self._compress(data, lib.BROTLI_OPERATION_PROCESS)
|
| 329 |
+
|
| 330 |
+
def flush(self):
|
| 331 |
+
"""
|
| 332 |
+
Flush the compressor. This will emit the remaining output data, but
|
| 333 |
+
will not destroy the compressor. It can be used, for example, to ensure
|
| 334 |
+
that given chunks of content will decompress immediately.
|
| 335 |
+
"""
|
| 336 |
+
chunks = []
|
| 337 |
+
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
|
| 338 |
+
|
| 339 |
+
while lib.BrotliEncoderHasMoreOutput(self._encoder) == lib.BROTLI_TRUE:
|
| 340 |
+
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FLUSH))
|
| 341 |
+
|
| 342 |
+
return b''.join(chunks)
|
| 343 |
+
|
| 344 |
+
def finish(self):
|
| 345 |
+
"""
|
| 346 |
+
Finish the compressor. This will emit the remaining output data and
|
| 347 |
+
transition the compressor to a completed state. The compressor cannot
|
| 348 |
+
be used again after this point, and must be replaced.
|
| 349 |
+
"""
|
| 350 |
+
chunks = []
|
| 351 |
+
while lib.BrotliEncoderIsFinished(self._encoder) == lib.BROTLI_FALSE:
|
| 352 |
+
chunks.append(self._compress(b'', lib.BROTLI_OPERATION_FINISH))
|
| 353 |
+
|
| 354 |
+
return b''.join(chunks)
|
| 355 |
+
|
| 356 |
+
|
| 357 |
+
class Decompressor(object):
    """
    An object that allows for streaming decompression of Brotli-compressed
    data.

    .. versionchanged:: 0.5.0
       Added ``dictionary`` parameter.

    :param dictionary: A pre-set dictionary for LZ77. Please use this with
        caution: if a dictionary is used for compression, the same dictionary
        **must** be used for decompression!
    :type dictionary: ``bytes``
    """
    # Class-level defaults; when a dictionary is supplied, __init__ stores the
    # cffi buffer (and its length) on the instance so the underlying memory
    # stays alive for as long as the decoder may read from it.
    _dictionary = None
    _dictionary_size = None

    def __init__(self, dictionary=b''):
        dec = lib.BrotliDecoderCreateInstance(ffi.NULL, ffi.NULL, ffi.NULL)
        # ffi.gc ties the C decoder state's lifetime to this Python object:
        # BrotliDecoderDestroyInstance runs when self._decoder is collected.
        self._decoder = ffi.gc(dec, lib.BrotliDecoderDestroyInstance)

        if dictionary:
            self._dictionary = ffi.new("uint8_t []", dictionary)
            self._dictionary_size = len(dictionary)
            lib.BrotliDecoderSetCustomDictionary(
                self._decoder,
                self._dictionary_size,
                self._dictionary
            )

    def decompress(self, data):
        """
        Decompress part of a complete Brotli-compressed string.

        :param data: A bytestring containing Brotli-compressed data.
        :returns: A bytestring containing the decompressed data.
        :raises Error: if the decoder reports corrupt input or another
            decoding failure.
        """
        chunks = []

        # Input bookkeeping handed to the C API: remaining byte count plus a
        # moving pointer into a cffi copy of ``data``.
        available_in = ffi.new("size_t *", len(data))
        in_buffer = ffi.new("uint8_t[]", data)
        next_in = ffi.new("uint8_t **", in_buffer)

        while True:
            # Allocate a buffer that's hopefully overlarge, but if it's not we
            # don't mind: we'll spin around again.
            buffer_size = 5 * len(data)
            available_out = ffi.new("size_t *", buffer_size)
            out_buffer = ffi.new("uint8_t[]", buffer_size)
            next_out = ffi.new("uint8_t **", out_buffer)

            rc = lib.BrotliDecoderDecompressStream(self._decoder,
                                                   available_in,
                                                   next_in,
                                                   available_out,
                                                   next_out,
                                                   ffi.NULL)

            # First, check for errors.
            if rc == lib.BROTLI_DECODER_RESULT_ERROR:
                error_code = lib.BrotliDecoderGetErrorCode(self._decoder)
                error_message = lib.BrotliDecoderErrorString(error_code)
                raise Error(
                    "Decompression error: %s" % ffi.string(error_message)
                )

            # Next, copy the result out.  The decoder decremented
            # available_out by the number of bytes it wrote, so the produced
            # span is buffer_size - available_out[0].
            chunk = ffi.buffer(out_buffer, buffer_size - available_out[0])[:]
            chunks.append(chunk)

            if rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT:
                # The decoder consumed everything we gave it; wait for the
                # caller's next decompress() call.
                assert available_in[0] == 0
                break
            elif rc == lib.BROTLI_DECODER_RESULT_SUCCESS:
                break
            else:
                # It's cool if we need more output, we just loop again.
                assert rc == lib.BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT

        return b''.join(chunks)

    def flush(self):
        """
        Complete the decompression, return whatever data is remaining to be
        decompressed.

        .. deprecated:: 0.4.0

            This method is no longer required, as decompress() will now
            decompress eagerly.

        :returns: A bytestring containing the remaining decompressed data.
        """
        return b''

    def finish(self):
        """
        Finish the decompressor. As the decompressor decompresses eagerly, this
        will never actually emit any data. However, it will potentially throw
        errors if a truncated or damaged data stream has been used.

        Note that, once this method is called, the decompressor is no longer
        safe for further use and must be thrown away.

        :raises Error: if the compressed stream ended prematurely.
        """
        # Eager decompression in decompress() means no output may be pending
        # inside the decoder at this point.
        assert (
            lib.BrotliDecoderHasMoreOutput(self._decoder) == lib.BROTLI_FALSE
        )
        if lib.BrotliDecoderIsFinished(self._decoder) == lib.BROTLI_FALSE:
            raise Error("Decompression error: incomplete compressed stream.")

        return b''
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/brotli/build.py
ADDED
|
@@ -0,0 +1,224 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# -*- coding: utf-8 -*-
# cffi out-of-line build script for the ``_brotli`` extension module.
# It declares (via ffi.cdef) the subset of the Brotli C decoder/encoder API
# used by the Python bindings, and compiles/links against the vendored
# libbrotli sources when executed directly.
import sys

from cffi import FFI
ffi = FFI()

# Link against the bundled libbrotli.  The C math library ('m') must be
# linked explicitly on POSIX platforms; it does not exist as a separate
# library on Windows.
libraries = ['libbrotli']
if 'win32' not in str(sys.platform).lower():
    libraries.append('m')


ffi.set_source(
    "_brotli",
    """#include <brotli/decode.h>
       #include <brotli/encode.h>
    """,
    libraries=libraries,
    include_dirs=["libbrotli", "libbrotli/include"]
)

# C declarations parsed by cffi.  ``...`` and ``#define NAME ...`` entries
# tell cffi to obtain the real values/layouts from the compiler at build time.
ffi.cdef("""
    /* common/types.h */
    typedef bool BROTLI_BOOL;
    #define BROTLI_TRUE ...
    #define BROTLI_FALSE ...

    /* dec/state.h */
    /* Allocating function pointer. Function MUST return 0 in the case of
       failure. Otherwise it MUST return a valid pointer to a memory region of
       at least size length. Neither items nor size are allowed to be 0.
       opaque argument is a pointer provided by client and could be used to
       bind function to specific object (memory pool). */
    typedef void* (*brotli_alloc_func)(void* opaque, size_t size);

    /* Deallocating function pointer. Function SHOULD be no-op in the case the
       address is 0. */
    typedef void (*brotli_free_func)(void* opaque, void* address);

    /* dec/decode.h */

    typedef enum {
        /* Decoding error, e.g. corrupt input or memory allocation problem */
        BROTLI_DECODER_RESULT_ERROR = 0,
        /* Decoding successfully completed */
        BROTLI_DECODER_RESULT_SUCCESS = 1,
        /* Partially done; should be called again with more input */
        BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT = 2,
        /* Partially done; should be called again with more output */
        BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT = 3
    } BrotliDecoderResult;

    typedef enum {...} BrotliDecoderErrorCode;
    typedef ... BrotliDecoderState;

    /* Creates the instance of BrotliDecoderState and initializes it.
       |alloc_func| and |free_func| MUST be both zero or both non-zero. In the
       case they are both zero, default memory allocators are used. |opaque| is
       passed to |alloc_func| and |free_func| when they are called. */
    BrotliDecoderState* BrotliDecoderCreateInstance(brotli_alloc_func,
                                                    brotli_free_func,
                                                    void *);

    /* Deinitializes and frees BrotliDecoderState instance. */
    void BrotliDecoderDestroyInstance(BrotliDecoderState* state);

    /* Decompresses the data. Supports partial input and output.

       Must be called with an allocated input buffer in |*next_in| and an
       allocated output buffer in |*next_out|. The values |*available_in| and
       |*available_out| must specify the allocated size in |*next_in| and
       |*next_out| respectively.

       After each call, |*available_in| will be decremented by the amount of
       input bytes consumed, and the |*next_in| pointer will be incremented by
       that amount. Similarly, |*available_out| will be decremented by the
       amount of output bytes written, and the |*next_out| pointer will be
       incremented by that amount. |total_out|, if it is not a null-pointer,
       will be set to the number of bytes decompressed since the last state
       initialization.

       Input is never overconsumed, so |next_in| and |available_in| could be
       passed to the next consumer after decoding is complete. */
    BrotliDecoderResult BrotliDecoderDecompressStream(BrotliDecoderState* s,
                                                      size_t* available_in,
                                                      const uint8_t** next_in,
                                                      size_t* available_out,
                                                      uint8_t** next_out,
                                                      size_t* total_out);

    /* Fills the new state with a dictionary for LZ77, warming up the
       ringbuffer, e.g. for custom static dictionaries for data formats.
       Not to be confused with the built-in transformable dictionary of Brotli.
       |size| should be less or equal to 2^24 (16MiB), otherwise the dictionary
       will be ignored. The dictionary must exist in memory until decoding is
       done and is owned by the caller. To use:
       1) Allocate and initialize state with BrotliCreateInstance
       2) Use BrotliSetCustomDictionary
       3) Use BrotliDecompressStream
       4) Clean up and free state with BrotliDestroyState
    */
    void BrotliDecoderSetCustomDictionary(
        BrotliDecoderState* s, size_t size, const uint8_t* dict);

    /* Returns true, if decoder has some unconsumed output.
       Otherwise returns false. */
    BROTLI_BOOL BrotliDecoderHasMoreOutput(const BrotliDecoderState* s);

    /* Returns true, if decoder has already received some input bytes.
       Otherwise returns false. */
    BROTLI_BOOL BrotliDecoderIsUsed(const BrotliDecoderState* s);

    /* Returns true, if decoder is in a state where we reached the end of the
       input and produced all of the output; returns false otherwise. */
    BROTLI_BOOL BrotliDecoderIsFinished(const BrotliDecoderState* s);

    /* Returns detailed error code after BrotliDecompressStream returns
       BROTLI_DECODER_RESULT_ERROR. */
    BrotliDecoderErrorCode BrotliDecoderGetErrorCode(
        const BrotliDecoderState* s);

    const char* BrotliDecoderErrorString(BrotliDecoderErrorCode c);

    /* enc/encode.h */
    typedef ... BrotliEncoderState;

    typedef enum BrotliEncoderParameter {
        BROTLI_PARAM_MODE = 0,
        /* Controls the compression-speed vs compression-density tradeoffs. The
           higher the quality, the slower the compression. Range is 0 to 11. */
        BROTLI_PARAM_QUALITY = 1,
        /* Base 2 logarithm of the sliding window size. Range is 10 to 24. */
        BROTLI_PARAM_LGWIN = 2,
        /* Base 2 logarithm of the maximum input block size. Range is 16 to 24.
           If set to 0, the value will be set based on the quality. */
        BROTLI_PARAM_LGBLOCK = 3
    } BrotliEncoderParameter;

    typedef enum BrotliEncoderMode {
        /* Default compression mode. The compressor does not know anything in
           advance about the properties of the input. */
        BROTLI_MODE_GENERIC = 0,
        /* Compression mode for UTF-8 format text input. */
        BROTLI_MODE_TEXT = 1,
        /* Compression mode used in WOFF 2.0. */
        BROTLI_MODE_FONT = 2
    } BrotliEncoderMode;

    int BROTLI_DEFAULT_QUALITY = 11;
    int BROTLI_DEFAULT_WINDOW = 22;
    #define BROTLI_DEFAULT_MODE ...

    typedef enum BrotliEncoderOperation {
        BROTLI_OPERATION_PROCESS = 0,
        /* Request output stream to flush. Performed when input stream is
           depleted and there is enough space in output stream. */
        BROTLI_OPERATION_FLUSH = 1,
        /* Request output stream to finish. Performed when input stream is
           depleted and there is enough space in output stream. */
        BROTLI_OPERATION_FINISH = 2
    } BrotliEncoderOperation;

    /* Creates the instance of BrotliEncoderState and initializes it.
       |alloc_func| and |free_func| MUST be both zero or both non-zero. In the
       case they are both zero, default memory allocators are used. |opaque| is
       passed to |alloc_func| and |free_func| when they are called. */
    BrotliEncoderState* BrotliEncoderCreateInstance(brotli_alloc_func,
                                                    brotli_free_func,
                                                    void *);

    /* Deinitializes and frees BrotliEncoderState instance. */
    void BrotliEncoderDestroyInstance(BrotliEncoderState* state);

    /* Compresses the data in |input_buffer| into |encoded_buffer|, and sets
       |*encoded_size| to the compressed length.
       BROTLI_DEFAULT_QUALITY, BROTLI_DEFAULT_WINDOW and BROTLI_DEFAULT_MODE
       should be used as |quality|, |lgwin| and |mode| if there are no specific
       requirements to encoder speed and compression ratio.
       If compression fails, |*encoded_size| is set to 0.
       If BrotliEncoderMaxCompressedSize(|input_size|) is not zero, then
       |*encoded_size| is never set to the bigger value.
       Returns false if there was an error and true otherwise. */
    BROTLI_BOOL BrotliEncoderCompress(int quality,
                                      int lgwin,
                                      BrotliEncoderMode mode,
                                      size_t input_size,
                                      const uint8_t* input_buffer,
                                      size_t* encoded_size,
                                      uint8_t* encoded_buffer);

    BROTLI_BOOL BrotliEncoderCompressStream(BrotliEncoderState* s,
                                            BrotliEncoderOperation op,
                                            size_t* available_in,
                                            const uint8_t** next_in,
                                            size_t* available_out,
                                            uint8_t** next_out,
                                            size_t* total_out);

    BROTLI_BOOL BrotliEncoderSetParameter(BrotliEncoderState* state,
                                          BrotliEncoderParameter p,
                                          uint32_t value);

    /* Fills the new state with a dictionary for LZ77, warming up the
       ringbuffer, e.g. for custom static dictionaries for data formats.
       Not to be confused with the built-in transformable dictionary of Brotli.
       To decode, use BrotliSetCustomDictionary() of the decoder with the same
       dictionary. */
    void BrotliEncoderSetCustomDictionary(BrotliEncoderState* state,
                                          size_t size,
                                          const uint8_t* dict);

    /* Check if encoder is in "finished" state, i.e. no more input is
       acceptable and no more output will be produced.
       Works only with BrotliEncoderCompressStream workflow.
       Returns 1 if stream is finished and 0 otherwise. */
    BROTLI_BOOL BrotliEncoderIsFinished(BrotliEncoderState* s);

    /* Check if encoder has more output bytes in internal buffer.
       Works only with BrotliEncoderCompressStream workflow.
       Returns 1 if has more output (in internal buffer) and 0 otherwise. */
    BROTLI_BOOL BrotliEncoderHasMoreOutput(BrotliEncoderState* s);
""")

if __name__ == '__main__':
    ffi.compile()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/LICENSE-3RD-PARTY.txt
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/LICENSE.txt
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
MIT License
|
| 2 |
+
|
| 3 |
+
Copyright (c) Olli-Pekka Heinisuo
|
| 4 |
+
|
| 5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
| 6 |
+
of this software and associated documentation files (the "Software"), to deal
|
| 7 |
+
in the Software without restriction, including without limitation the rights
|
| 8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
| 9 |
+
copies of the Software, and to permit persons to whom the Software is
|
| 10 |
+
furnished to do so, subject to the following conditions:
|
| 11 |
+
|
| 12 |
+
The above copyright notice and this permission notice shall be included in all
|
| 13 |
+
copies or substantial portions of the Software.
|
| 14 |
+
|
| 15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
| 16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
| 17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
| 18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
| 19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
| 20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
| 21 |
+
SOFTWARE.
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/__init__.pyi
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/cv2/config-3.py
ADDED
|
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# OpenCV loader configuration fragment.  It is executed by the cv2 package
# loader, which supplies LOADER_DIR and PYTHON_EXTENSIONS_PATHS (and has
# ``os`` and ``sys`` in scope) before running this file -- presumably; the
# injection itself happens outside this fragment (TODO confirm against the
# cv2/__init__.py loader).
PYTHON_EXTENSIONS_PATHS = [
    LOADER_DIR
] + PYTHON_EXTENSIONS_PATHS

ci_and_not_headless = False

try:
    from .version import ci_build, headless

    ci_and_not_headless = ci_build and not headless
except ImportError:
    # Fix: the original bare ``except:`` swallowed *every* exception,
    # including KeyboardInterrupt/SystemExit.  The only failure expected
    # here is the .version module (or its attributes) being absent, which
    # raises ImportError; anything else should propagate.
    pass

# the Qt plugin is included currently only in the pre-built wheels
if sys.platform.startswith("linux") and ci_and_not_headless:
    os.environ["QT_QPA_PLATFORM_PLUGIN_PATH"] = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "qt", "plugins"
    )

# Qt will throw warning on Linux if fonts are not found
if sys.platform.startswith("linux") and ci_and_not_headless:
    os.environ["QT_QPA_FONTDIR"] = os.path.join(
        os.path.dirname(os.path.abspath(__file__)), "qt", "fonts"
    )
|