Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +4 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_cparser.pxd +190 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_helpers.pyi +6 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_websocket.c +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/base_protocol.py +87 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client.py +1302 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_exceptions.py +342 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_ws.py +300 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/cookiejar.py +413 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/helpers.py +875 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http.py +72 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_parser.py +956 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/locks.py +41 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/log.py +8 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/payload.py +465 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/py.typed +1 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/resolver.py +160 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/streams.py +660 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/tracing.py +472 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/typedefs.py +64 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_middlewares.py +119 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_protocol.py +681 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_routedef.py +213 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_runner.py +381 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_server.py +62 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_ws.py +487 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/INSTALLER +1 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/METADATA +10 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/RECORD +119 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/REQUESTED +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/WHEEL +5 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/top_level.txt +1 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/__init__.pyi +484 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/_cmp.pyi +13 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/_config.py +33 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/_make.py +0 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/converters.py +155 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/exceptions.pyi +17 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/__init__.py +40 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/base.py +315 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/const.py +4 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/exc.py +46 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/fun.py +704 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/pack.py +1031 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/stream.py +730 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/typ.py +10 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/util.py +398 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_proxy.cpython-38-x86_64-linux-gnu.so +3 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5z.cpython-38-x86_64-linux-gnu.so +3 -0
- my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/_qhull.cpython-38-x86_64-linux-gnu.so +3 -0
.gitattributes
CHANGED
|
@@ -279,3 +279,7 @@ my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5p.cp
|
|
| 279 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5i.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 280 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_conv.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 281 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/zstandard/_cffi.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 279 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5i.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 280 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_conv.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 281 |
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/zstandard/_cffi.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 282 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_proxy.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 283 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/torchaudio/_torchaudio_ffmpeg.so filter=lfs diff=lfs merge=lfs -text
|
| 284 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5z.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
| 285 |
+
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/_qhull.cpython-38-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_cparser.pxd
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from libc.stdint cimport (
|
| 2 |
+
int8_t,
|
| 3 |
+
int16_t,
|
| 4 |
+
int32_t,
|
| 5 |
+
int64_t,
|
| 6 |
+
uint8_t,
|
| 7 |
+
uint16_t,
|
| 8 |
+
uint32_t,
|
| 9 |
+
uint64_t,
|
| 10 |
+
)
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
cdef extern from "../vendor/llhttp/build/llhttp.h":
|
| 14 |
+
|
| 15 |
+
struct llhttp__internal_s:
|
| 16 |
+
int32_t _index
|
| 17 |
+
void* _span_pos0
|
| 18 |
+
void* _span_cb0
|
| 19 |
+
int32_t error
|
| 20 |
+
const char* reason
|
| 21 |
+
const char* error_pos
|
| 22 |
+
void* data
|
| 23 |
+
void* _current
|
| 24 |
+
uint64_t content_length
|
| 25 |
+
uint8_t type
|
| 26 |
+
uint8_t method
|
| 27 |
+
uint8_t http_major
|
| 28 |
+
uint8_t http_minor
|
| 29 |
+
uint8_t header_state
|
| 30 |
+
uint8_t lenient_flags
|
| 31 |
+
uint8_t upgrade
|
| 32 |
+
uint8_t finish
|
| 33 |
+
uint16_t flags
|
| 34 |
+
uint16_t status_code
|
| 35 |
+
void* settings
|
| 36 |
+
|
| 37 |
+
ctypedef llhttp__internal_s llhttp__internal_t
|
| 38 |
+
ctypedef llhttp__internal_t llhttp_t
|
| 39 |
+
|
| 40 |
+
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
|
| 41 |
+
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
|
| 42 |
+
|
| 43 |
+
struct llhttp_settings_s:
|
| 44 |
+
llhttp_cb on_message_begin
|
| 45 |
+
llhttp_data_cb on_url
|
| 46 |
+
llhttp_data_cb on_status
|
| 47 |
+
llhttp_data_cb on_header_field
|
| 48 |
+
llhttp_data_cb on_header_value
|
| 49 |
+
llhttp_cb on_headers_complete
|
| 50 |
+
llhttp_data_cb on_body
|
| 51 |
+
llhttp_cb on_message_complete
|
| 52 |
+
llhttp_cb on_chunk_header
|
| 53 |
+
llhttp_cb on_chunk_complete
|
| 54 |
+
|
| 55 |
+
llhttp_cb on_url_complete
|
| 56 |
+
llhttp_cb on_status_complete
|
| 57 |
+
llhttp_cb on_header_field_complete
|
| 58 |
+
llhttp_cb on_header_value_complete
|
| 59 |
+
|
| 60 |
+
ctypedef llhttp_settings_s llhttp_settings_t
|
| 61 |
+
|
| 62 |
+
enum llhttp_errno:
|
| 63 |
+
HPE_OK,
|
| 64 |
+
HPE_INTERNAL,
|
| 65 |
+
HPE_STRICT,
|
| 66 |
+
HPE_LF_EXPECTED,
|
| 67 |
+
HPE_UNEXPECTED_CONTENT_LENGTH,
|
| 68 |
+
HPE_CLOSED_CONNECTION,
|
| 69 |
+
HPE_INVALID_METHOD,
|
| 70 |
+
HPE_INVALID_URL,
|
| 71 |
+
HPE_INVALID_CONSTANT,
|
| 72 |
+
HPE_INVALID_VERSION,
|
| 73 |
+
HPE_INVALID_HEADER_TOKEN,
|
| 74 |
+
HPE_INVALID_CONTENT_LENGTH,
|
| 75 |
+
HPE_INVALID_CHUNK_SIZE,
|
| 76 |
+
HPE_INVALID_STATUS,
|
| 77 |
+
HPE_INVALID_EOF_STATE,
|
| 78 |
+
HPE_INVALID_TRANSFER_ENCODING,
|
| 79 |
+
HPE_CB_MESSAGE_BEGIN,
|
| 80 |
+
HPE_CB_HEADERS_COMPLETE,
|
| 81 |
+
HPE_CB_MESSAGE_COMPLETE,
|
| 82 |
+
HPE_CB_CHUNK_HEADER,
|
| 83 |
+
HPE_CB_CHUNK_COMPLETE,
|
| 84 |
+
HPE_PAUSED,
|
| 85 |
+
HPE_PAUSED_UPGRADE,
|
| 86 |
+
HPE_USER
|
| 87 |
+
|
| 88 |
+
ctypedef llhttp_errno llhttp_errno_t
|
| 89 |
+
|
| 90 |
+
enum llhttp_flags:
|
| 91 |
+
F_CONNECTION_KEEP_ALIVE,
|
| 92 |
+
F_CONNECTION_CLOSE,
|
| 93 |
+
F_CONNECTION_UPGRADE,
|
| 94 |
+
F_CHUNKED,
|
| 95 |
+
F_UPGRADE,
|
| 96 |
+
F_CONTENT_LENGTH,
|
| 97 |
+
F_SKIPBODY,
|
| 98 |
+
F_TRAILING,
|
| 99 |
+
F_TRANSFER_ENCODING
|
| 100 |
+
|
| 101 |
+
enum llhttp_lenient_flags:
|
| 102 |
+
LENIENT_HEADERS,
|
| 103 |
+
LENIENT_CHUNKED_LENGTH
|
| 104 |
+
|
| 105 |
+
enum llhttp_type:
|
| 106 |
+
HTTP_REQUEST,
|
| 107 |
+
HTTP_RESPONSE,
|
| 108 |
+
HTTP_BOTH
|
| 109 |
+
|
| 110 |
+
enum llhttp_finish_t:
|
| 111 |
+
HTTP_FINISH_SAFE,
|
| 112 |
+
HTTP_FINISH_SAFE_WITH_CB,
|
| 113 |
+
HTTP_FINISH_UNSAFE
|
| 114 |
+
|
| 115 |
+
enum llhttp_method:
|
| 116 |
+
HTTP_DELETE,
|
| 117 |
+
HTTP_GET,
|
| 118 |
+
HTTP_HEAD,
|
| 119 |
+
HTTP_POST,
|
| 120 |
+
HTTP_PUT,
|
| 121 |
+
HTTP_CONNECT,
|
| 122 |
+
HTTP_OPTIONS,
|
| 123 |
+
HTTP_TRACE,
|
| 124 |
+
HTTP_COPY,
|
| 125 |
+
HTTP_LOCK,
|
| 126 |
+
HTTP_MKCOL,
|
| 127 |
+
HTTP_MOVE,
|
| 128 |
+
HTTP_PROPFIND,
|
| 129 |
+
HTTP_PROPPATCH,
|
| 130 |
+
HTTP_SEARCH,
|
| 131 |
+
HTTP_UNLOCK,
|
| 132 |
+
HTTP_BIND,
|
| 133 |
+
HTTP_REBIND,
|
| 134 |
+
HTTP_UNBIND,
|
| 135 |
+
HTTP_ACL,
|
| 136 |
+
HTTP_REPORT,
|
| 137 |
+
HTTP_MKACTIVITY,
|
| 138 |
+
HTTP_CHECKOUT,
|
| 139 |
+
HTTP_MERGE,
|
| 140 |
+
HTTP_MSEARCH,
|
| 141 |
+
HTTP_NOTIFY,
|
| 142 |
+
HTTP_SUBSCRIBE,
|
| 143 |
+
HTTP_UNSUBSCRIBE,
|
| 144 |
+
HTTP_PATCH,
|
| 145 |
+
HTTP_PURGE,
|
| 146 |
+
HTTP_MKCALENDAR,
|
| 147 |
+
HTTP_LINK,
|
| 148 |
+
HTTP_UNLINK,
|
| 149 |
+
HTTP_SOURCE,
|
| 150 |
+
HTTP_PRI,
|
| 151 |
+
HTTP_DESCRIBE,
|
| 152 |
+
HTTP_ANNOUNCE,
|
| 153 |
+
HTTP_SETUP,
|
| 154 |
+
HTTP_PLAY,
|
| 155 |
+
HTTP_PAUSE,
|
| 156 |
+
HTTP_TEARDOWN,
|
| 157 |
+
HTTP_GET_PARAMETER,
|
| 158 |
+
HTTP_SET_PARAMETER,
|
| 159 |
+
HTTP_REDIRECT,
|
| 160 |
+
HTTP_RECORD,
|
| 161 |
+
HTTP_FLUSH
|
| 162 |
+
|
| 163 |
+
ctypedef llhttp_method llhttp_method_t;
|
| 164 |
+
|
| 165 |
+
void llhttp_settings_init(llhttp_settings_t* settings)
|
| 166 |
+
void llhttp_init(llhttp_t* parser, llhttp_type type,
|
| 167 |
+
const llhttp_settings_t* settings)
|
| 168 |
+
|
| 169 |
+
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
|
| 170 |
+
llhttp_errno_t llhttp_finish(llhttp_t* parser)
|
| 171 |
+
|
| 172 |
+
int llhttp_message_needs_eof(const llhttp_t* parser)
|
| 173 |
+
|
| 174 |
+
int llhttp_should_keep_alive(const llhttp_t* parser)
|
| 175 |
+
|
| 176 |
+
void llhttp_pause(llhttp_t* parser)
|
| 177 |
+
void llhttp_resume(llhttp_t* parser)
|
| 178 |
+
|
| 179 |
+
void llhttp_resume_after_upgrade(llhttp_t* parser)
|
| 180 |
+
|
| 181 |
+
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
|
| 182 |
+
const char* llhttp_get_error_reason(const llhttp_t* parser)
|
| 183 |
+
void llhttp_set_error_reason(llhttp_t* parser, const char* reason)
|
| 184 |
+
const char* llhttp_get_error_pos(const llhttp_t* parser)
|
| 185 |
+
const char* llhttp_errno_name(llhttp_errno_t err)
|
| 186 |
+
|
| 187 |
+
const char* llhttp_method_name(llhttp_method_t method)
|
| 188 |
+
|
| 189 |
+
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
|
| 190 |
+
void llhttp_set_lenient_chunked_length(llhttp_t* parser, int enabled)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_helpers.pyi
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any
|
| 2 |
+
|
| 3 |
+
class reify:
|
| 4 |
+
def __init__(self, wrapped: Any) -> None: ...
|
| 5 |
+
def __get__(self, inst: Any, owner: Any) -> Any: ...
|
| 6 |
+
def __set__(self, inst: Any, value: Any) -> None: ...
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/_websocket.c
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/base_protocol.py
ADDED
|
@@ -0,0 +1,87 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from typing import Optional, cast
|
| 3 |
+
|
| 4 |
+
from .tcp_helpers import tcp_nodelay
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class BaseProtocol(asyncio.Protocol):
|
| 8 |
+
__slots__ = (
|
| 9 |
+
"_loop",
|
| 10 |
+
"_paused",
|
| 11 |
+
"_drain_waiter",
|
| 12 |
+
"_connection_lost",
|
| 13 |
+
"_reading_paused",
|
| 14 |
+
"transport",
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
| 18 |
+
self._loop = loop # type: asyncio.AbstractEventLoop
|
| 19 |
+
self._paused = False
|
| 20 |
+
self._drain_waiter = None # type: Optional[asyncio.Future[None]]
|
| 21 |
+
self._connection_lost = False
|
| 22 |
+
self._reading_paused = False
|
| 23 |
+
|
| 24 |
+
self.transport = None # type: Optional[asyncio.Transport]
|
| 25 |
+
|
| 26 |
+
def pause_writing(self) -> None:
|
| 27 |
+
assert not self._paused
|
| 28 |
+
self._paused = True
|
| 29 |
+
|
| 30 |
+
def resume_writing(self) -> None:
|
| 31 |
+
assert self._paused
|
| 32 |
+
self._paused = False
|
| 33 |
+
|
| 34 |
+
waiter = self._drain_waiter
|
| 35 |
+
if waiter is not None:
|
| 36 |
+
self._drain_waiter = None
|
| 37 |
+
if not waiter.done():
|
| 38 |
+
waiter.set_result(None)
|
| 39 |
+
|
| 40 |
+
def pause_reading(self) -> None:
|
| 41 |
+
if not self._reading_paused and self.transport is not None:
|
| 42 |
+
try:
|
| 43 |
+
self.transport.pause_reading()
|
| 44 |
+
except (AttributeError, NotImplementedError, RuntimeError):
|
| 45 |
+
pass
|
| 46 |
+
self._reading_paused = True
|
| 47 |
+
|
| 48 |
+
def resume_reading(self) -> None:
|
| 49 |
+
if self._reading_paused and self.transport is not None:
|
| 50 |
+
try:
|
| 51 |
+
self.transport.resume_reading()
|
| 52 |
+
except (AttributeError, NotImplementedError, RuntimeError):
|
| 53 |
+
pass
|
| 54 |
+
self._reading_paused = False
|
| 55 |
+
|
| 56 |
+
def connection_made(self, transport: asyncio.BaseTransport) -> None:
|
| 57 |
+
tr = cast(asyncio.Transport, transport)
|
| 58 |
+
tcp_nodelay(tr, True)
|
| 59 |
+
self.transport = tr
|
| 60 |
+
|
| 61 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
| 62 |
+
self._connection_lost = True
|
| 63 |
+
# Wake up the writer if currently paused.
|
| 64 |
+
self.transport = None
|
| 65 |
+
if not self._paused:
|
| 66 |
+
return
|
| 67 |
+
waiter = self._drain_waiter
|
| 68 |
+
if waiter is None:
|
| 69 |
+
return
|
| 70 |
+
self._drain_waiter = None
|
| 71 |
+
if waiter.done():
|
| 72 |
+
return
|
| 73 |
+
if exc is None:
|
| 74 |
+
waiter.set_result(None)
|
| 75 |
+
else:
|
| 76 |
+
waiter.set_exception(exc)
|
| 77 |
+
|
| 78 |
+
async def _drain_helper(self) -> None:
|
| 79 |
+
if self._connection_lost:
|
| 80 |
+
raise ConnectionResetError("Connection lost")
|
| 81 |
+
if not self._paused:
|
| 82 |
+
return
|
| 83 |
+
waiter = self._drain_waiter
|
| 84 |
+
if waiter is None:
|
| 85 |
+
waiter = self._loop.create_future()
|
| 86 |
+
self._drain_waiter = waiter
|
| 87 |
+
await asyncio.shield(waiter)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client.py
ADDED
|
@@ -0,0 +1,1302 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP Client for asyncio."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import hashlib
|
| 6 |
+
import json
|
| 7 |
+
import os
|
| 8 |
+
import sys
|
| 9 |
+
import traceback
|
| 10 |
+
import warnings
|
| 11 |
+
from contextlib import suppress
|
| 12 |
+
from types import SimpleNamespace, TracebackType
|
| 13 |
+
from typing import (
|
| 14 |
+
Any,
|
| 15 |
+
Awaitable,
|
| 16 |
+
Callable,
|
| 17 |
+
Coroutine,
|
| 18 |
+
FrozenSet,
|
| 19 |
+
Generator,
|
| 20 |
+
Generic,
|
| 21 |
+
Iterable,
|
| 22 |
+
List,
|
| 23 |
+
Mapping,
|
| 24 |
+
Optional,
|
| 25 |
+
Set,
|
| 26 |
+
Tuple,
|
| 27 |
+
Type,
|
| 28 |
+
TypeVar,
|
| 29 |
+
Union,
|
| 30 |
+
)
|
| 31 |
+
|
| 32 |
+
import attr
|
| 33 |
+
from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
|
| 34 |
+
from yarl import URL
|
| 35 |
+
|
| 36 |
+
from . import hdrs, http, payload
|
| 37 |
+
from .abc import AbstractCookieJar
|
| 38 |
+
from .client_exceptions import (
|
| 39 |
+
ClientConnectionError as ClientConnectionError,
|
| 40 |
+
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
| 41 |
+
ClientConnectorError as ClientConnectorError,
|
| 42 |
+
ClientConnectorSSLError as ClientConnectorSSLError,
|
| 43 |
+
ClientError as ClientError,
|
| 44 |
+
ClientHttpProxyError as ClientHttpProxyError,
|
| 45 |
+
ClientOSError as ClientOSError,
|
| 46 |
+
ClientPayloadError as ClientPayloadError,
|
| 47 |
+
ClientProxyConnectionError as ClientProxyConnectionError,
|
| 48 |
+
ClientResponseError as ClientResponseError,
|
| 49 |
+
ClientSSLError as ClientSSLError,
|
| 50 |
+
ContentTypeError as ContentTypeError,
|
| 51 |
+
InvalidURL as InvalidURL,
|
| 52 |
+
ServerConnectionError as ServerConnectionError,
|
| 53 |
+
ServerDisconnectedError as ServerDisconnectedError,
|
| 54 |
+
ServerFingerprintMismatch as ServerFingerprintMismatch,
|
| 55 |
+
ServerTimeoutError as ServerTimeoutError,
|
| 56 |
+
TooManyRedirects as TooManyRedirects,
|
| 57 |
+
WSServerHandshakeError as WSServerHandshakeError,
|
| 58 |
+
)
|
| 59 |
+
from .client_reqrep import (
|
| 60 |
+
ClientRequest as ClientRequest,
|
| 61 |
+
ClientResponse as ClientResponse,
|
| 62 |
+
Fingerprint as Fingerprint,
|
| 63 |
+
RequestInfo as RequestInfo,
|
| 64 |
+
_merge_ssl_params,
|
| 65 |
+
)
|
| 66 |
+
from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
|
| 67 |
+
from .connector import (
|
| 68 |
+
BaseConnector as BaseConnector,
|
| 69 |
+
NamedPipeConnector as NamedPipeConnector,
|
| 70 |
+
TCPConnector as TCPConnector,
|
| 71 |
+
UnixConnector as UnixConnector,
|
| 72 |
+
)
|
| 73 |
+
from .cookiejar import CookieJar
|
| 74 |
+
from .helpers import (
|
| 75 |
+
DEBUG,
|
| 76 |
+
PY_36,
|
| 77 |
+
BasicAuth,
|
| 78 |
+
TimeoutHandle,
|
| 79 |
+
ceil_timeout,
|
| 80 |
+
get_env_proxy_for_url,
|
| 81 |
+
get_running_loop,
|
| 82 |
+
sentinel,
|
| 83 |
+
strip_auth_from_url,
|
| 84 |
+
)
|
| 85 |
+
from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
|
| 86 |
+
from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
|
| 87 |
+
from .streams import FlowControlDataQueue
|
| 88 |
+
from .tracing import Trace, TraceConfig
|
| 89 |
+
from .typedefs import Final, JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
|
| 90 |
+
|
| 91 |
+
__all__ = (
|
| 92 |
+
# client_exceptions
|
| 93 |
+
"ClientConnectionError",
|
| 94 |
+
"ClientConnectorCertificateError",
|
| 95 |
+
"ClientConnectorError",
|
| 96 |
+
"ClientConnectorSSLError",
|
| 97 |
+
"ClientError",
|
| 98 |
+
"ClientHttpProxyError",
|
| 99 |
+
"ClientOSError",
|
| 100 |
+
"ClientPayloadError",
|
| 101 |
+
"ClientProxyConnectionError",
|
| 102 |
+
"ClientResponseError",
|
| 103 |
+
"ClientSSLError",
|
| 104 |
+
"ContentTypeError",
|
| 105 |
+
"InvalidURL",
|
| 106 |
+
"ServerConnectionError",
|
| 107 |
+
"ServerDisconnectedError",
|
| 108 |
+
"ServerFingerprintMismatch",
|
| 109 |
+
"ServerTimeoutError",
|
| 110 |
+
"TooManyRedirects",
|
| 111 |
+
"WSServerHandshakeError",
|
| 112 |
+
# client_reqrep
|
| 113 |
+
"ClientRequest",
|
| 114 |
+
"ClientResponse",
|
| 115 |
+
"Fingerprint",
|
| 116 |
+
"RequestInfo",
|
| 117 |
+
# connector
|
| 118 |
+
"BaseConnector",
|
| 119 |
+
"TCPConnector",
|
| 120 |
+
"UnixConnector",
|
| 121 |
+
"NamedPipeConnector",
|
| 122 |
+
# client_ws
|
| 123 |
+
"ClientWebSocketResponse",
|
| 124 |
+
# client
|
| 125 |
+
"ClientSession",
|
| 126 |
+
"ClientTimeout",
|
| 127 |
+
"request",
|
| 128 |
+
)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
try:
|
| 132 |
+
from ssl import SSLContext
|
| 133 |
+
except ImportError: # pragma: no cover
|
| 134 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 135 |
+
|
| 136 |
+
|
| 137 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
|
| 138 |
+
class ClientTimeout:
|
| 139 |
+
total: Optional[float] = None
|
| 140 |
+
connect: Optional[float] = None
|
| 141 |
+
sock_read: Optional[float] = None
|
| 142 |
+
sock_connect: Optional[float] = None
|
| 143 |
+
|
| 144 |
+
# pool_queue_timeout: Optional[float] = None
|
| 145 |
+
# dns_resolution_timeout: Optional[float] = None
|
| 146 |
+
# socket_connect_timeout: Optional[float] = None
|
| 147 |
+
# connection_acquiring_timeout: Optional[float] = None
|
| 148 |
+
# new_connection_timeout: Optional[float] = None
|
| 149 |
+
# http_header_timeout: Optional[float] = None
|
| 150 |
+
# response_body_timeout: Optional[float] = None
|
| 151 |
+
|
| 152 |
+
# to create a timeout specific for a single request, either
|
| 153 |
+
# - create a completely new one to overwrite the default
|
| 154 |
+
# - or use http://www.attrs.org/en/stable/api.html#attr.evolve
|
| 155 |
+
# to overwrite the defaults
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
# 5 Minute default read timeout
|
| 159 |
+
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)
|
| 160 |
+
|
| 161 |
+
_RetType = TypeVar("_RetType")
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class ClientSession:
|
| 165 |
+
"""First-class interface for making HTTP requests."""
|
| 166 |
+
|
| 167 |
+
    # Attribute names that may legitimately be assigned on an instance.
    # Used by the DEBUG-only __setattr__ below to warn about accidental
    # monkey-patching / typos on ClientSession objects.
    ATTRS = frozenset(
        [
            "_base_url",
            "_source_traceback",
            "_connector",
            "requote_redirect_url",
            "_loop",
            "_cookie_jar",
            "_connector_owner",
            "_default_auth",
            "_version",
            "_json_serialize",
            "_requote_redirect_url",
            "_timeout",
            "_raise_for_status",
            "_auto_decompress",
            "_trust_env",
            "_default_headers",
            "_skip_auto_headers",
            "_request_class",
            "_response_class",
            "_ws_response_class",
            "_trace_configs",
            "_read_bufsize",
        ]
    )

    # Set to a traceback in __init__ when the loop runs in debug mode so the
    # "Unclosed client session" warning (see __del__) can point at the
    # allocation site.
    _source_traceback = None
|
| 195 |
+
|
| 196 |
+
    def __init__(
        self,
        base_url: Optional[StrOrURL] = None,
        *,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        json_serialize: JSONEncoder = json.dumps,
        request_class: Type[ClientRequest] = ClientRequest,
        response_class: Type[ClientResponse] = ClientResponse,
        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
        version: HttpVersion = http.HttpVersion11,
        cookie_jar: Optional[AbstractCookieJar] = None,
        connector_owner: bool = True,
        raise_for_status: bool = False,
        read_timeout: Union[float, object] = sentinel,
        conn_timeout: Optional[float] = None,
        timeout: Union[object, ClientTimeout] = sentinel,
        auto_decompress: bool = True,
        trust_env: bool = False,
        requote_redirect_url: bool = True,
        trace_configs: Optional[List[TraceConfig]] = None,
        read_bufsize: int = 2 ** 16,
    ) -> None:
        """Create the session.

        ``base_url`` must be an absolute URL without a path part; relative
        request URLs are resolved against it.  ``read_timeout`` and
        ``conn_timeout`` are deprecated aliases that fold into ``timeout``
        and conflict with an explicit ``timeout`` argument.
        """
        # Prefer the connector's loop when none was given explicitly.
        if loop is None:
            if connector is not None:
                loop = connector._loop

        loop = get_running_loop(loop)

        if base_url is None or isinstance(base_url, URL):
            self._base_url: Optional[URL] = base_url
        else:
            self._base_url = URL(base_url)
            assert (
                self._base_url.origin() == self._base_url
            ), "Only absolute URLs without path part are supported"

        if connector is None:
            connector = TCPConnector(loop=loop)

        # Session and connector must share one event loop.
        if connector._loop is not loop:
            raise RuntimeError("Session and connector has to use same event loop")

        self._loop = loop

        # Remember where the session was created for the unclosed-session warning.
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar

        if cookies is not None:
            self._cookie_jar.update_cookies(cookies)

        self._connector = connector  # type: Optional[BaseConnector]
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        if timeout is sentinel:
            # No explicit ClientTimeout: start from the default and fold in
            # the deprecated per-field arguments, if supplied.
            self._timeout = DEFAULT_TIMEOUT
            if read_timeout is not sentinel:
                warnings.warn(
                    "read_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
                self._timeout = attr.evolve(self._timeout, total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
                warnings.warn(
                    "conn_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
        else:
            # Explicit timeout given: the deprecated arguments may not also
            # be used, since it is ambiguous which value should win.
            self._timeout = timeout  # type: ignore[assignment]
            if read_timeout is not sentinel:
                raise ValueError(
                    "read_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.read"
                )
            if conn_timeout is not None:
                raise ValueError(
                    "conn_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.connect"
                )
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env
        self._requote_redirect_url = requote_redirect_url
        self._read_bufsize = read_bufsize

        # Convert to list of tuples
        if headers:
            real_headers = CIMultiDict(headers)  # type: CIMultiDict[str]
        else:
            real_headers = CIMultiDict()
        self._default_headers = real_headers  # type: CIMultiDict[str]
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
        else:
            self._skip_auto_headers = frozenset()

        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class

        # Freeze tracing configs so they cannot change mid-request.
        self._trace_configs = trace_configs or []
        for trace_config in self._trace_configs:
            trace_config.freeze()
|
| 314 |
+
|
| 315 |
+
def __init_subclass__(cls: Type["ClientSession"]) -> None:
|
| 316 |
+
warnings.warn(
|
| 317 |
+
"Inheritance class {} from ClientSession "
|
| 318 |
+
"is discouraged".format(cls.__name__),
|
| 319 |
+
DeprecationWarning,
|
| 320 |
+
stacklevel=2,
|
| 321 |
+
)
|
| 322 |
+
|
| 323 |
+
    if DEBUG:
        # Only in debug builds: guard against assigning attributes the class
        # does not declare in ATTRS (catches typos and monkey-patching).

        def __setattr__(self, name: str, val: Any) -> None:
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom ClientSession.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            # Always perform the assignment; the warning is advisory only.
            super().__setattr__(name, val)
|
| 334 |
+
|
| 335 |
+
    def __del__(self, _warnings: Any = warnings) -> None:
        # ``_warnings`` is bound as a default so the module is still reachable
        # during interpreter shutdown when globals may already be cleared.
        if not self.closed:
            if PY_36:
                # ResourceWarning gained the ``source`` argument in 3.6.
                kwargs = {"source": self}
            else:
                kwargs = {}
            _warnings.warn(
                f"Unclosed client session {self!r}", ResourceWarning, **kwargs
            )
            context = {"client_session": self, "message": "Unclosed client session"}
            if self._source_traceback is not None:
                # Points at where the session was created (debug mode only).
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)
|
| 348 |
+
|
| 349 |
+
def request(
|
| 350 |
+
self, method: str, url: StrOrURL, **kwargs: Any
|
| 351 |
+
) -> "_RequestContextManager":
|
| 352 |
+
"""Perform HTTP request."""
|
| 353 |
+
return _RequestContextManager(self._request(method, url, **kwargs))
|
| 354 |
+
|
| 355 |
+
def _build_url(self, str_or_url: StrOrURL) -> URL:
|
| 356 |
+
url = URL(str_or_url)
|
| 357 |
+
if self._base_url is None:
|
| 358 |
+
return url
|
| 359 |
+
else:
|
| 360 |
+
assert not url.is_absolute() and url.path.startswith("/")
|
| 361 |
+
return self._base_url.join(url)
|
| 362 |
+
|
| 363 |
+
    async def _request(
        self,
        method: str,
        str_or_url: StrOrURL,
        *,
        params: Optional[Mapping[str, str]] = None,
        data: Any = None,
        json: Any = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        allow_redirects: bool = True,
        max_redirects: int = 10,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        raise_for_status: Optional[bool] = None,
        read_until_eof: bool = True,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timeout: Union[ClientTimeout, object] = sentinel,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        ssl: Optional[Union[SSLContext, bool, Fingerprint]] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        trace_request_ctx: Optional[SimpleNamespace] = None,
        read_bufsize: Optional[int] = None,
    ) -> ClientResponse:
        """Send one HTTP request, following redirects.

        This is the engine behind ``request()`` and every verb helper.
        Raises ``RuntimeError`` if the session is closed, ``InvalidURL`` for
        unparsable request/redirect/proxy URLs, ``ServerTimeoutError`` on
        connect timeout and ``TooManyRedirects`` past ``max_redirects``.
        """

        # NOTE: timeout clamps existing connect and read timeouts. We cannot
        # set the default to None because we need to detect if the user wants
        # to use the existing timeouts by setting timeout to None.

        if self.closed:
            raise RuntimeError("Session is closed")

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        # ``data`` and ``json`` are mutually exclusive bodies.
        if data is not None and json is not None:
            raise ValueError(
                "data and json parameters can not be used at the same time"
            )
        elif json is not None:
            data = payload.JsonPayload(json, dumps=self._json_serialize)

        if not isinstance(chunked, bool) and chunked is not None:
            warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)

        redirects = 0
        history = []
        version = self._version

        # Merge with default headers and transform to CIMultiDict
        headers = self._prepare_headers(headers)
        proxy_headers = self._prepare_headers(proxy_headers)

        try:
            url = self._build_url(str_or_url)
        except ValueError as e:
            raise InvalidURL(str_or_url) from e

        # Per-request skip list extends (does not replace) the session's.
        skip_headers = set(self._skip_auto_headers)
        if skip_auto_headers is not None:
            for i in skip_auto_headers:
                skip_headers.add(istr(i))

        if proxy is not None:
            try:
                proxy = URL(proxy)
            except ValueError as e:
                raise InvalidURL(proxy) from e

        if timeout is sentinel:
            real_timeout = self._timeout  # type: ClientTimeout
        else:
            if not isinstance(timeout, ClientTimeout):
                # A bare number is interpreted as a total timeout.
                real_timeout = ClientTimeout(total=timeout)  # type: ignore[arg-type]
            else:
                real_timeout = timeout
        # timeout is cumulative for all request operations
        # (request, redirects, responses, data consuming)
        tm = TimeoutHandle(self._loop, real_timeout.total)
        handle = tm.start()

        if read_bufsize is None:
            read_bufsize = self._read_bufsize

        traces = [
            Trace(
                self,
                trace_config,
                trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
            )
            for trace_config in self._trace_configs
        ]

        for trace in traces:
            await trace.send_request_start(method, url.update_query(params), headers)

        timer = tm.timer()
        try:
            with timer:
                # One iteration per request; ``continue`` follows a redirect.
                while True:
                    url, auth_from_url = strip_auth_from_url(url)
                    if auth and auth_from_url:
                        raise ValueError(
                            "Cannot combine AUTH argument with "
                            "credentials encoded in URL"
                        )

                    # Credential precedence: argument > URL userinfo > session default.
                    if auth is None:
                        auth = auth_from_url
                    if auth is None:
                        auth = self._default_auth
                    # It would be confusing if we support explicit
                    # Authorization header with auth argument
                    if (
                        headers is not None
                        and auth is not None
                        and hdrs.AUTHORIZATION in headers
                    ):
                        raise ValueError(
                            "Cannot combine AUTHORIZATION header "
                            "with AUTH argument or credentials "
                            "encoded in URL"
                        )

                    all_cookies = self._cookie_jar.filter_cookies(url)

                    if cookies is not None:
                        # Per-request cookies go through a throwaway jar so
                        # they are filtered by domain/path like stored ones.
                        tmp_cookie_jar = CookieJar()
                        tmp_cookie_jar.update_cookies(cookies)
                        req_cookies = tmp_cookie_jar.filter_cookies(url)
                        if req_cookies:
                            all_cookies.load(req_cookies)

                    if proxy is not None:
                        proxy = URL(proxy)
                    elif self._trust_env:
                        with suppress(LookupError):
                            proxy, proxy_auth = get_env_proxy_for_url(url)

                    req = self._request_class(
                        method,
                        url,
                        params=params,
                        headers=headers,
                        skip_auto_headers=skip_headers,
                        data=data,
                        cookies=all_cookies,
                        auth=auth,
                        version=version,
                        compress=compress,
                        chunked=chunked,
                        expect100=expect100,
                        loop=self._loop,
                        response_class=self._response_class,
                        proxy=proxy,
                        proxy_auth=proxy_auth,
                        timer=timer,
                        session=self,
                        ssl=ssl,
                        proxy_headers=proxy_headers,
                        traces=traces,
                    )

                    # connection timeout
                    try:
                        async with ceil_timeout(real_timeout.connect):
                            assert self._connector is not None
                            conn = await self._connector.connect(
                                req, traces=traces, timeout=real_timeout
                            )
                    except asyncio.TimeoutError as exc:
                        raise ServerTimeoutError(
                            "Connection timeout " "to host {}".format(url)
                        ) from exc

                    assert conn.transport is not None

                    assert conn.protocol is not None
                    conn.protocol.set_response_params(
                        timer=timer,
                        skip_payload=method.upper() == "HEAD",
                        read_until_eof=read_until_eof,
                        auto_decompress=self._auto_decompress,
                        read_timeout=real_timeout.sock_read,
                        read_bufsize=read_bufsize,
                    )

                    try:
                        try:
                            resp = await req.send(conn)
                            try:
                                await resp.start(conn)
                            except BaseException:
                                resp.close()
                                raise
                        except BaseException:
                            # Any failure before the response is usable makes
                            # the connection unreusable; close it.
                            conn.close()
                            raise
                    except ClientError:
                        raise
                    except OSError as exc:
                        # Normalize low-level socket errors to the client API.
                        raise ClientOSError(*exc.args) from exc

                    self._cookie_jar.update_cookies(resp.cookies, resp.url)

                    # redirects
                    if resp.status in (301, 302, 303, 307, 308) and allow_redirects:

                        for trace in traces:
                            await trace.send_request_redirect(
                                method, url.update_query(params), headers, resp
                            )

                        redirects += 1
                        history.append(resp)
                        if max_redirects and redirects >= max_redirects:
                            resp.close()
                            raise TooManyRedirects(
                                history[0].request_info, tuple(history)
                            )

                        # For 301 and 302, mimic IE, now changed in RFC
                        # https://github.com/kennethreitz/requests/pull/269
                        if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
                            resp.status in (301, 302) and resp.method == hdrs.METH_POST
                        ):
                            method = hdrs.METH_GET
                            data = None
                            if headers.get(hdrs.CONTENT_LENGTH):
                                headers.pop(hdrs.CONTENT_LENGTH)

                        r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
                            hdrs.URI
                        )
                        if r_url is None:
                            # see github.com/aio-libs/aiohttp/issues/2022
                            break
                        else:
                            # reading from correct redirection
                            # response is forbidden
                            resp.release()

                        try:
                            parsed_url = URL(
                                r_url, encoded=not self._requote_redirect_url
                            )

                        except ValueError as e:
                            raise InvalidURL(r_url) from e

                        scheme = parsed_url.scheme
                        if scheme not in ("http", "https", ""):
                            resp.close()
                            raise ValueError("Can redirect only to http or https")
                        elif not scheme:
                            # Relative redirect target: resolve against origin URL.
                            parsed_url = url.join(parsed_url)

                        if url.origin() != parsed_url.origin():
                            # Never forward credentials across origins.
                            auth = None
                            headers.pop(hdrs.AUTHORIZATION, None)

                        url = parsed_url
                        params = None
                        resp.release()
                        continue

                    break

            # check response status
            if raise_for_status is None:
                raise_for_status = self._raise_for_status
            if raise_for_status:
                resp.raise_for_status()

            # register connection
            if handle is not None:
                if resp.connection is not None:
                    resp.connection.add_callback(handle.cancel)
                else:
                    handle.cancel()

            resp._history = tuple(history)

            for trace in traces:
                await trace.send_request_end(
                    method, url.update_query(params), headers, resp
                )
            return resp

        except BaseException as e:
            # cleanup timer
            tm.close()
            if handle:
                handle.cancel()
                handle = None

            for trace in traces:
                await trace.send_request_exception(
                    method, url.update_query(params), headers, e
                )
            raise
|
| 669 |
+
|
| 670 |
+
    def ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> "_WSRequestContextManager":
        """Initiate websocket connection.

        Thin synchronous wrapper: forwards every argument unchanged to
        ``_ws_connect`` and wraps the coroutine in an async context manager.
        """
        return _WSRequestContextManager(
            self._ws_connect(
                url,
                method=method,
                protocols=protocols,
                timeout=timeout,
                receive_timeout=receive_timeout,
                autoclose=autoclose,
                autoping=autoping,
                heartbeat=heartbeat,
                auth=auth,
                origin=origin,
                params=params,
                headers=headers,
                proxy=proxy,
                proxy_auth=proxy_auth,
                ssl=ssl,
                verify_ssl=verify_ssl,
                fingerprint=fingerprint,
                ssl_context=ssl_context,
                proxy_headers=proxy_headers,
                compress=compress,
                max_msg_size=max_msg_size,
            )
        )
|
| 721 |
+
|
| 722 |
+
    async def _ws_connect(
        self,
        url: StrOrURL,
        *,
        method: str = hdrs.METH_GET,
        protocols: Iterable[str] = (),
        timeout: float = 10.0,
        receive_timeout: Optional[float] = None,
        autoclose: bool = True,
        autoping: bool = True,
        heartbeat: Optional[float] = None,
        auth: Optional[BasicAuth] = None,
        origin: Optional[str] = None,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        proxy: Optional[StrOrURL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        ssl: Union[SSLContext, bool, None, Fingerprint] = None,
        verify_ssl: Optional[bool] = None,
        fingerprint: Optional[bytes] = None,
        ssl_context: Optional[SSLContext] = None,
        proxy_headers: Optional[LooseHeaders] = None,
        compress: int = 0,
        max_msg_size: int = 4 * 1024 * 1024,
    ) -> ClientWebSocketResponse:
        """Perform the WebSocket opening handshake and return the WS response.

        Sends the HTTP upgrade request, validates the server's handshake
        (status 101, Upgrade/Connection headers, Sec-WebSocket-Accept key),
        negotiates subprotocol and permessage-deflate, then hands the
        connection over to ``self._ws_response_class``.  Raises
        ``WSServerHandshakeError`` on any handshake violation.
        """

        if headers is None:
            real_headers = CIMultiDict()  # type: CIMultiDict[str]
        else:
            real_headers = CIMultiDict(headers)

        # Caller-supplied headers take precedence over the upgrade defaults.
        default_headers = {
            hdrs.UPGRADE: "websocket",
            hdrs.CONNECTION: "upgrade",
            hdrs.SEC_WEBSOCKET_VERSION: "13",
        }

        for key, value in default_headers.items():
            real_headers.setdefault(key, value)

        # Random nonce for the Sec-WebSocket-Key/Accept challenge.
        sec_key = base64.b64encode(os.urandom(16))
        real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()

        if protocols:
            real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
        if origin is not None:
            real_headers[hdrs.ORIGIN] = origin
        if compress:
            extstr = ws_ext_gen(compress=compress)
            real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr

        ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

        # send request
        resp = await self.request(
            method,
            url,
            params=params,
            headers=real_headers,
            read_until_eof=False,
            auth=auth,
            proxy=proxy,
            proxy_auth=proxy_auth,
            ssl=ssl,
            proxy_headers=proxy_headers,
        )

        try:
            # check handshake
            if resp.status != 101:
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid response status",
                    status=resp.status,
                    headers=resp.headers,
                )

            if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid upgrade header",
                    status=resp.status,
                    headers=resp.headers,
                )

            if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid connection header",
                    status=resp.status,
                    headers=resp.headers,
                )

            # key calculation
            r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
            match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
            if r_key != match:
                raise WSServerHandshakeError(
                    resp.request_info,
                    resp.history,
                    message="Invalid challenge response",
                    status=resp.status,
                    headers=resp.headers,
                )

            # websocket protocol
            protocol = None
            if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
                resp_protocols = [
                    proto.strip()
                    for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
                ]

                # First server-offered protocol we also requested wins.
                for proto in resp_protocols:
                    if proto in protocols:
                        protocol = proto
                        break

            # websocket compress
            notakeover = False
            if compress:
                compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
                if compress_hdrs:
                    try:
                        compress, notakeover = ws_ext_parse(compress_hdrs)
                    except WSHandshakeError as exc:
                        raise WSServerHandshakeError(
                            resp.request_info,
                            resp.history,
                            message=exc.args[0],
                            status=resp.status,
                            headers=resp.headers,
                        ) from exc
                else:
                    # Server declined compression.
                    compress = 0
                    notakeover = False

            conn = resp.connection
            assert conn is not None
            conn_proto = conn.protocol
            assert conn_proto is not None
            transport = conn.transport
            assert transport is not None
            reader = FlowControlDataQueue(
                conn_proto, 2 ** 16, loop=self._loop
            )  # type: FlowControlDataQueue[WSMessage]
            conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
            # Client frames must be masked per the WebSocket protocol.
            writer = WebSocketWriter(
                conn_proto,
                transport,
                use_mask=True,
                compress=compress,
                notakeover=notakeover,
            )
        except BaseException:
            resp.close()
            raise
        else:
            return self._ws_response_class(
                reader,
                writer,
                protocol,
                resp,
                timeout,
                autoclose,
                autoping,
                self._loop,
                receive_timeout=receive_timeout,
                heartbeat=heartbeat,
                compress=compress,
                client_notakeover=notakeover,
            )
|
| 897 |
+
|
| 898 |
+
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
|
| 899 |
+
"""Add default headers and transform it to CIMultiDict"""
|
| 900 |
+
# Convert headers to MultiDict
|
| 901 |
+
result = CIMultiDict(self._default_headers)
|
| 902 |
+
if headers:
|
| 903 |
+
if not isinstance(headers, (MultiDictProxy, MultiDict)):
|
| 904 |
+
headers = CIMultiDict(headers)
|
| 905 |
+
added_names = set() # type: Set[str]
|
| 906 |
+
for key, value in headers.items():
|
| 907 |
+
if key in added_names:
|
| 908 |
+
result.add(key, value)
|
| 909 |
+
else:
|
| 910 |
+
result[key] = value
|
| 911 |
+
added_names.add(key)
|
| 912 |
+
return result
|
| 913 |
+
|
| 914 |
+
def get(
|
| 915 |
+
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
|
| 916 |
+
) -> "_RequestContextManager":
|
| 917 |
+
"""Perform HTTP GET request."""
|
| 918 |
+
return _RequestContextManager(
|
| 919 |
+
self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
|
| 920 |
+
)
|
| 921 |
+
|
| 922 |
+
def options(
|
| 923 |
+
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
|
| 924 |
+
) -> "_RequestContextManager":
|
| 925 |
+
"""Perform HTTP OPTIONS request."""
|
| 926 |
+
return _RequestContextManager(
|
| 927 |
+
self._request(
|
| 928 |
+
hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
|
| 929 |
+
)
|
| 930 |
+
)
|
| 931 |
+
|
| 932 |
+
def head(
|
| 933 |
+
self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
|
| 934 |
+
) -> "_RequestContextManager":
|
| 935 |
+
"""Perform HTTP HEAD request."""
|
| 936 |
+
return _RequestContextManager(
|
| 937 |
+
self._request(
|
| 938 |
+
hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
|
| 939 |
+
)
|
| 940 |
+
)
|
| 941 |
+
|
| 942 |
+
def post(
|
| 943 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
| 944 |
+
) -> "_RequestContextManager":
|
| 945 |
+
"""Perform HTTP POST request."""
|
| 946 |
+
return _RequestContextManager(
|
| 947 |
+
self._request(hdrs.METH_POST, url, data=data, **kwargs)
|
| 948 |
+
)
|
| 949 |
+
|
| 950 |
+
def put(
|
| 951 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
| 952 |
+
) -> "_RequestContextManager":
|
| 953 |
+
"""Perform HTTP PUT request."""
|
| 954 |
+
return _RequestContextManager(
|
| 955 |
+
self._request(hdrs.METH_PUT, url, data=data, **kwargs)
|
| 956 |
+
)
|
| 957 |
+
|
| 958 |
+
def patch(
|
| 959 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
| 960 |
+
) -> "_RequestContextManager":
|
| 961 |
+
"""Perform HTTP PATCH request."""
|
| 962 |
+
return _RequestContextManager(
|
| 963 |
+
self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
|
| 964 |
+
)
|
| 965 |
+
|
| 966 |
+
def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
|
| 967 |
+
"""Perform HTTP DELETE request."""
|
| 968 |
+
return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
|
| 969 |
+
|
| 970 |
+
async def close(self) -> None:
|
| 971 |
+
"""Close underlying connector.
|
| 972 |
+
|
| 973 |
+
Release all acquired resources.
|
| 974 |
+
"""
|
| 975 |
+
if not self.closed:
|
| 976 |
+
if self._connector is not None and self._connector_owner:
|
| 977 |
+
await self._connector.close()
|
| 978 |
+
self._connector = None
|
| 979 |
+
|
| 980 |
+
@property
|
| 981 |
+
def closed(self) -> bool:
|
| 982 |
+
"""Is client session closed.
|
| 983 |
+
|
| 984 |
+
A readonly property.
|
| 985 |
+
"""
|
| 986 |
+
return self._connector is None or self._connector.closed
|
| 987 |
+
|
| 988 |
+
@property
|
| 989 |
+
def connector(self) -> Optional[BaseConnector]:
|
| 990 |
+
"""Connector instance used for the session."""
|
| 991 |
+
return self._connector
|
| 992 |
+
|
| 993 |
+
@property
|
| 994 |
+
def cookie_jar(self) -> AbstractCookieJar:
|
| 995 |
+
"""The session cookies."""
|
| 996 |
+
return self._cookie_jar
|
| 997 |
+
|
| 998 |
+
@property
|
| 999 |
+
def version(self) -> Tuple[int, int]:
|
| 1000 |
+
"""The session HTTP protocol version."""
|
| 1001 |
+
return self._version
|
| 1002 |
+
|
| 1003 |
+
@property
|
| 1004 |
+
def requote_redirect_url(self) -> bool:
|
| 1005 |
+
"""Do URL requoting on redirection handling."""
|
| 1006 |
+
return self._requote_redirect_url
|
| 1007 |
+
|
| 1008 |
+
@requote_redirect_url.setter
|
| 1009 |
+
def requote_redirect_url(self, val: bool) -> None:
|
| 1010 |
+
"""Do URL requoting on redirection handling."""
|
| 1011 |
+
warnings.warn(
|
| 1012 |
+
"session.requote_redirect_url modification " "is deprecated #2778",
|
| 1013 |
+
DeprecationWarning,
|
| 1014 |
+
stacklevel=2,
|
| 1015 |
+
)
|
| 1016 |
+
self._requote_redirect_url = val
|
| 1017 |
+
|
| 1018 |
+
@property
|
| 1019 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
| 1020 |
+
"""Session's loop."""
|
| 1021 |
+
warnings.warn(
|
| 1022 |
+
"client.loop property is deprecated", DeprecationWarning, stacklevel=2
|
| 1023 |
+
)
|
| 1024 |
+
return self._loop
|
| 1025 |
+
|
| 1026 |
+
@property
|
| 1027 |
+
def timeout(self) -> Union[object, ClientTimeout]:
|
| 1028 |
+
"""Timeout for the session."""
|
| 1029 |
+
return self._timeout
|
| 1030 |
+
|
| 1031 |
+
@property
|
| 1032 |
+
def headers(self) -> "CIMultiDict[str]":
|
| 1033 |
+
"""The default headers of the client session."""
|
| 1034 |
+
return self._default_headers
|
| 1035 |
+
|
| 1036 |
+
@property
|
| 1037 |
+
def skip_auto_headers(self) -> FrozenSet[istr]:
|
| 1038 |
+
"""Headers for which autogeneration should be skipped"""
|
| 1039 |
+
return self._skip_auto_headers
|
| 1040 |
+
|
| 1041 |
+
    @property
    def auth(self) -> Optional[BasicAuth]:
        """An object that represents HTTP Basic Authorization, if configured."""
        return self._default_auth
    @property
    def json_serialize(self) -> JSONEncoder:
        """JSON serializer callable used for ``json=`` request payloads."""
        return self._json_serialize
    @property
    def connector_owner(self) -> bool:
        """Should connector be closed on session closing."""
        return self._connector_owner
    @property
    def raise_for_status(
        self,
    ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
        """Should `ClientResponse.raise_for_status()` be called for each response.

        Either a plain bool or a per-response async callback (per annotation).
        """
        return self._raise_for_status
    @property
    def auto_decompress(self) -> bool:
        """Should the body response be automatically decompressed."""
        return self._auto_decompress
    @property
    def trust_env(self) -> bool:
        """
        Should proxies information from environment or netrc be trusted.

        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
        or ~/.netrc file if present.
        """
        return self._trust_env
    @property
    def trace_configs(self) -> List[TraceConfig]:
        """A list of TraceConfig instances used for client tracing."""
        return self._trace_configs
    def detach(self) -> None:
        """Detach connector from session without closing the former.

        Session is switched to closed state anyway.
        """
        # Dropping the reference transfers connector lifecycle to the caller;
        # a subsequent close() on this session won't touch it.
        self._connector = None
    def __enter__(self) -> None:
        """Reject synchronous ``with``; the session is async-only."""
        raise TypeError("Use async with instead")
    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # __exit__ should exist in pair with __enter__ but never executed
        # (because __enter__ always raises TypeError).
        pass  # pragma: no cover
    async def __aenter__(self) -> "ClientSession":
        """Enter ``async with``; the session itself is the context value."""
        return self
    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Close unconditionally; exceptions (if any) propagate to the caller.
        await self.close()
|
| 1114 |
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
    """Awaitable / async-context-manager wrapper around a request coroutine.

    Implements the Coroutine protocol by delegating to the wrapped
    coroutine, so an instance can either be awaited directly or used with
    ``async with`` (subclasses provide the matching ``__aexit__``).
    """

    __slots__ = ("_coro", "_resp")

    def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
        self._coro = coro

    def send(self, arg: None) -> "asyncio.Future[Any]":
        # Coroutine protocol: delegate to the wrapped coroutine.
        return self._coro.send(arg)

    def throw(self, arg: BaseException) -> None:  # type: ignore[arg-type,override]
        self._coro.throw(arg)

    def close(self) -> None:
        return self._coro.close()

    def __await__(self) -> Generator[Any, None, _RetType]:
        ret = self._coro.__await__()
        return ret

    def __iter__(self) -> Generator[Any, None, _RetType]:
        return self.__await__()

    async def __aenter__(self) -> _RetType:
        # Run the request and keep the result so subclasses can
        # release/close it in __aexit__.
        self._resp = await self._coro
        return self._resp
|
| 1142 |
+
class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
    """Context manager for a plain HTTP request; releases the response on exit."""

    __slots__ = ()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # We're basing behavior on the exception as it can be caused by
        # user code unrelated to the status of the connection.  If you
        # would like to close a connection you must do that
        # explicitly.  Otherwise connection error handling should kick in
        # and close/recycle the connection as required.
        self._resp.release()
|
| 1159 |
+
class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
    """Context manager for a WebSocket handshake; closes the socket on exit."""

    __slots__ = ()

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # Unlike plain HTTP responses, a websocket is closed (not released)
        # when the block exits.
        await self._resp.close()
|
| 1171 |
+
class _SessionRequestContextManager:
    """Context manager used by module-level :func:`request`.

    Owns both the response coroutine and a throwaway session: the session is
    closed when the block exits, and also when starting the request fails.
    """

    __slots__ = ("_coro", "_resp", "_session")

    def __init__(
        self,
        coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
        session: ClientSession,
    ) -> None:
        self._coro = coro
        self._resp = None  # type: Optional[ClientResponse]
        self._session = session

    async def __aenter__(self) -> ClientResponse:
        try:
            self._resp = await self._coro
        except BaseException:
            # The request never produced a response, so nobody else will
            # close the one-shot session — do it here before re-raising.
            await self._session.close()
            raise
        else:
            return self._resp

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[TracebackType],
    ) -> None:
        # __aexit__ only runs after a successful __aenter__, hence the assert.
        assert self._resp is not None
        self._resp.close()
        await self._session.close()
+
|
| 1204 |
+
def request(
    method: str,
    url: StrOrURL,
    *,
    params: Optional[Mapping[str, str]] = None,
    data: Any = None,
    json: Any = None,
    headers: Optional[LooseHeaders] = None,
    skip_auto_headers: Optional[Iterable[str]] = None,
    auth: Optional[BasicAuth] = None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: Optional[str] = None,
    chunked: Optional[bool] = None,
    expect100: bool = False,
    raise_for_status: Optional[bool] = None,
    read_until_eof: bool = True,
    proxy: Optional[StrOrURL] = None,
    proxy_auth: Optional[BasicAuth] = None,
    timeout: Union[ClientTimeout, object] = sentinel,
    cookies: Optional[LooseCookies] = None,
    version: HttpVersion = http.HttpVersion11,
    connector: Optional[BaseConnector] = None,
    read_bufsize: Optional[int] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> _SessionRequestContextManager:
    """Constructs and sends a request.

    Returns response object.
    method - HTTP method
    url - request url
    params - (optional) Dictionary or bytes to be sent in the query
      string of the new request
    data - (optional) Dictionary, bytes, or file-like object to
      send in the body of the request
    json - (optional) Any json compatible python object
    headers - (optional) Dictionary of HTTP Headers to send with
      the request
    cookies - (optional) Dict object to send with the request
    auth - (optional) aiohttp.helpers.BasicAuth named tuple that
      represents HTTP Basic Auth
    allow_redirects - (optional) If set to False, do not follow
      redirects
    version - Request HTTP version.
    compress - Set to True if request has to be compressed
      with deflate encoding.
    chunked - Set to chunk size for chunked transfer encoding.
    expect100 - Expect 100-continue response from server.
    connector - BaseConnector sub-class instance to support
      connection pooling.
    read_until_eof - Read response until eof if response
      does not have Content-Length header.
    loop - Optional event loop.
    timeout - Optional ClientTimeout settings structure, 5min
      total timeout by default.
    Usage::
      >>> import aiohttp
      >>> resp = await aiohttp.request('GET', 'http://python.org/')
      >>> resp
      <ClientResponse(python.org/) [200]>
      >>> data = await resp.read()
    """
    # When we create the connector ourselves it is owned by the one-shot
    # session (and therefore closed with it); force_close avoids pooling
    # connections that would never be reused.
    connector_owner = False
    if connector is None:
        connector_owner = True
        connector = TCPConnector(loop=loop, force_close=True)

    session = ClientSession(
        loop=loop,
        cookies=cookies,
        version=version,
        timeout=timeout,
        connector=connector,
        connector_owner=connector_owner,
    )

    # The context manager closes the temporary session together with the
    # response (or on request failure).
    return _SessionRequestContextManager(
        session._request(
            method,
            url,
            params=params,
            data=data,
            json=json,
            headers=headers,
            skip_auto_headers=skip_auto_headers,
            auth=auth,
            allow_redirects=allow_redirects,
            max_redirects=max_redirects,
            compress=compress,
            chunked=chunked,
            expect100=expect100,
            raise_for_status=raise_for_status,
            read_until_eof=read_until_eof,
            proxy=proxy,
            proxy_auth=proxy_auth,
            read_bufsize=read_bufsize,
        ),
        session,
    )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_exceptions.py
ADDED
|
@@ -0,0 +1,342 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""HTTP related errors."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import warnings
|
| 5 |
+
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
|
| 6 |
+
|
| 7 |
+
from .http_parser import RawResponseMessage
|
| 8 |
+
from .typedefs import LooseHeaders
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import ssl
|
| 12 |
+
|
| 13 |
+
SSLContext = ssl.SSLContext
|
| 14 |
+
except ImportError: # pragma: no cover
|
| 15 |
+
ssl = SSLContext = None # type: ignore[assignment]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 19 |
+
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
| 20 |
+
else:
|
| 21 |
+
RequestInfo = ClientResponse = ConnectionKey = None
|
| 22 |
+
|
| 23 |
+
__all__ = (
|
| 24 |
+
"ClientError",
|
| 25 |
+
"ClientConnectionError",
|
| 26 |
+
"ClientOSError",
|
| 27 |
+
"ClientConnectorError",
|
| 28 |
+
"ClientProxyConnectionError",
|
| 29 |
+
"ClientSSLError",
|
| 30 |
+
"ClientConnectorSSLError",
|
| 31 |
+
"ClientConnectorCertificateError",
|
| 32 |
+
"ServerConnectionError",
|
| 33 |
+
"ServerTimeoutError",
|
| 34 |
+
"ServerDisconnectedError",
|
| 35 |
+
"ServerFingerprintMismatch",
|
| 36 |
+
"ClientResponseError",
|
| 37 |
+
"ClientHttpProxyError",
|
| 38 |
+
"WSServerHandshakeError",
|
| 39 |
+
"ContentTypeError",
|
| 40 |
+
"ClientPayloadError",
|
| 41 |
+
"InvalidURL",
|
| 42 |
+
)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class ClientError(Exception):
    """Base class for all client-side aiohttp errors."""
+
|
| 49 |
+
class ClientResponseError(ClientError):
    """Connection error during reading response.

    request_info: instance of RequestInfo
    """

    def __init__(
        self,
        request_info: RequestInfo,
        history: Tuple[ClientResponse, ...],
        *,
        code: Optional[int] = None,
        status: Optional[int] = None,
        message: str = "",
        headers: Optional[LooseHeaders] = None,
    ) -> None:
        self.request_info = request_info
        # ``code`` is the legacy spelling of ``status``: passing both is an
        # error, passing only ``code`` triggers a deprecation warning.
        if code is not None:
            if status is not None:
                raise ValueError(
                    "Both code and status arguments are provided; "
                    "code is deprecated, use status instead"
                )
            warnings.warn(
                "code argument is deprecated, use status instead",
                DeprecationWarning,
                stacklevel=2,
            )
        self.status = status if status is not None else (code if code is not None else 0)
        self.message = message
        self.headers = headers
        self.history = history
        self.args = (request_info, history)

    def __str__(self) -> str:
        return (
            f"{self.status}, message={self.message!r}, "
            f"url={self.request_info.real_url!r}"
        )

    def __repr__(self) -> str:
        # Positional args first, then only the keyword fields that differ
        # from their defaults.
        parts = [f"{self.request_info!r}", f"{self.history!r}"]
        if self.status != 0:
            parts.append(f"status={self.status!r}")
        if self.message != "":
            parts.append(f"message={self.message!r}")
        if self.headers is not None:
            parts.append(f"headers={self.headers!r}")
        return f"{type(self).__name__}({', '.join(parts)})"

    @property
    def code(self) -> int:
        """Deprecated alias for :attr:`status`."""
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return self.status

    @code.setter
    def code(self, value: int) -> None:
        warnings.warn(
            "code property is deprecated, use status instead",
            DeprecationWarning,
            stacklevel=2,
        )
        self.status = value
+
|
| 124 |
+
class ContentTypeError(ClientResponseError):
    """Content-Type found is not valid."""
+
|
| 128 |
+
class WSServerHandshakeError(ClientResponseError):
    """WebSocket server handshake error."""
+
|
| 132 |
+
class ClientHttpProxyError(ClientResponseError):
    """HTTP proxy error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    proxy responds with status other than ``200 OK``
    on ``CONNECT`` request.
    """
+
|
| 141 |
+
class TooManyRedirects(ClientResponseError):
    """Client was redirected too many times."""
+
|
| 145 |
+
class ClientConnectionError(ClientError):
    """Base class for client socket errors."""
+
|
| 149 |
+
class ClientOSError(ClientConnectionError, OSError):
    """OSError error raised in a client connection context."""
+
|
| 153 |
+
class ClientConnectorError(ClientOSError):
    """Client connector error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """

    def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
        self._conn_key = connection_key
        self._os_error = os_error
        # Forward errno/strerror so OSError attributes work on this instance.
        super().__init__(os_error.errno, os_error.strerror)
        self.args = (connection_key, os_error)

    @property
    def os_error(self) -> OSError:
        """The original OSError that triggered this error."""
        return self._os_error

    @property
    def host(self) -> str:
        """Target host, taken from the connection key."""
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        """Target port, taken from the connection key."""
        return self._conn_key.port

    @property
    def ssl(self) -> Union[SSLContext, None, bool, "Fingerprint"]:
        """SSL option of the connection key."""
        return self._conn_key.ssl

    def __str__(self) -> str:
        return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )

    # OSError.__reduce__ does too much black magic
    __reduce__ = BaseException.__reduce__
+
|
| 191 |
+
class ClientProxyConnectionError(ClientConnectorError):
    """Proxy connection error.

    Raised in :class:`aiohttp.connector.TCPConnector` if
    connection to proxy can not be established.
    """
+
|
| 199 |
+
class UnixClientConnectorError(ClientConnectorError):
    """Unix connector error.

    Raised in :py:class:`aiohttp.connector.UnixConnector`
    if connection to unix socket can not be established.
    """

    def __init__(
        self, path: str, connection_key: ConnectionKey, os_error: OSError
    ) -> None:
        self._path = path
        super().__init__(connection_key, os_error)

    @property
    def path(self) -> str:
        """Filesystem path of the unix socket that could not be reached."""
        return self._path

    def __str__(self) -> str:
        return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
            self, self.ssl if self.ssl is not None else "default", self.strerror
        )
+
|
| 222 |
+
class ServerConnectionError(ClientConnectionError):
    """Base class for server connection errors."""
+
|
| 226 |
+
class ServerDisconnectedError(ServerConnectionError):
    """Server disconnected."""

    def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
        if message is None:
            message = "Server disconnected"

        # NOTE(review): args/message are assigned directly instead of
        # calling super().__init__ — presumably intentional; confirm
        # before changing.
        self.args = (message,)
        self.message = message
+
|
| 237 |
+
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
    """Server timeout error (also catchable as asyncio.TimeoutError)."""
+
|
| 241 |
+
class ServerFingerprintMismatch(ServerConnectionError):
    """SSL certificate does not match expected fingerprint."""

    def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
        # Keep the individual fields and mirror them into args for pickling
        # and generic exception formatting.
        self.expected, self.got = expected, got
        self.host, self.port = host, port
        self.args = (expected, got, host, port)

    def __repr__(self) -> str:
        return (
            f"<{self.__class__.__name__} expected={self.expected!r} "
            f"got={self.got!r} host={self.host!r} port={self.port!r}>"
        )
+
|
| 257 |
+
class ClientPayloadError(ClientError):
    """Response payload error."""
+
|
| 261 |
+
class InvalidURL(ClientError, ValueError):
    """Invalid URL.

    URL used for fetching is malformed, e.g. it doesn't contain a host
    part.
    """

    # Derive from ValueError for backward compatibility

    def __init__(self, url: Any) -> None:
        # The type of url is not yarl.URL because the exception can be raised
        # on URL(url) call
        super().__init__(url)

    @property
    def url(self) -> Any:
        """The offending URL exactly as it was passed in."""
        return self.args[0]

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} {self.url}>"
+
|
| 283 |
+
class ClientSSLError(ClientConnectorError):
    """Base error for ssl.*Errors."""
+
|
| 287 |
+
# Build the exception base tuples depending on whether the ssl module is
# available, so the Client*SSL* classes below degrade gracefully without it.
if ssl is not None:
    cert_errors = (ssl.CertificateError,)
    cert_errors_bases = (
        ClientSSLError,
        ssl.CertificateError,
    )

    ssl_errors = (ssl.SSLError,)
    ssl_error_bases = (ClientSSLError, ssl.SSLError)
else:  # pragma: no cover
    # ValueError stands in for ssl.CertificateError here — presumably
    # because CertificateError derives from ValueError; confirm.
    cert_errors = tuple()
    cert_errors_bases = (
        ClientSSLError,
        ValueError,
    )

    ssl_errors = tuple()
    ssl_error_bases = (ClientSSLError,)
+
|
| 307 |
+
class ClientConnectorSSLError(*ssl_error_bases):  # type: ignore[misc]
    """Response ssl error."""
+
|
| 311 |
+
class ClientConnectorCertificateError(*cert_errors_bases):  # type: ignore[misc]
    """Response certificate error."""

    def __init__(
        self, connection_key: ConnectionKey, certificate_error: Exception
    ) -> None:
        self._conn_key = connection_key
        self._certificate_error = certificate_error
        self.args = (connection_key, certificate_error)

    @property
    def certificate_error(self) -> Exception:
        """The underlying certificate validation exception."""
        return self._certificate_error

    @property
    def host(self) -> str:
        """Target host, taken from the connection key."""
        return self._conn_key.host

    @property
    def port(self) -> Optional[int]:
        """Target port, taken from the connection key."""
        return self._conn_key.port

    @property
    def ssl(self) -> bool:
        """Whether the connection key marks this connection as SSL."""
        return self._conn_key.is_ssl

    def __str__(self) -> str:
        return (
            "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
            "[{0.certificate_error.__class__.__name__}: "
            "{0.certificate_error.args}]".format(self)
        )
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/client_ws.py
ADDED
|
@@ -0,0 +1,300 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""WebSocket client for asyncio."""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
from typing import Any, Optional, cast
|
| 5 |
+
|
| 6 |
+
import async_timeout
|
| 7 |
+
|
| 8 |
+
from .client_exceptions import ClientError
|
| 9 |
+
from .client_reqrep import ClientResponse
|
| 10 |
+
from .helpers import call_later, set_result
|
| 11 |
+
from .http import (
|
| 12 |
+
WS_CLOSED_MESSAGE,
|
| 13 |
+
WS_CLOSING_MESSAGE,
|
| 14 |
+
WebSocketError,
|
| 15 |
+
WSCloseCode,
|
| 16 |
+
WSMessage,
|
| 17 |
+
WSMsgType,
|
| 18 |
+
)
|
| 19 |
+
from .http_websocket import WebSocketWriter # WSMessage
|
| 20 |
+
from .streams import EofStream, FlowControlDataQueue
|
| 21 |
+
from .typedefs import (
|
| 22 |
+
DEFAULT_JSON_DECODER,
|
| 23 |
+
DEFAULT_JSON_ENCODER,
|
| 24 |
+
JSONDecoder,
|
| 25 |
+
JSONEncoder,
|
| 26 |
+
)
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class ClientWebSocketResponse:
|
| 30 |
+
def __init__(
|
| 31 |
+
self,
|
| 32 |
+
reader: "FlowControlDataQueue[WSMessage]",
|
| 33 |
+
writer: WebSocketWriter,
|
| 34 |
+
protocol: Optional[str],
|
| 35 |
+
response: ClientResponse,
|
| 36 |
+
timeout: float,
|
| 37 |
+
autoclose: bool,
|
| 38 |
+
autoping: bool,
|
| 39 |
+
loop: asyncio.AbstractEventLoop,
|
| 40 |
+
*,
|
| 41 |
+
receive_timeout: Optional[float] = None,
|
| 42 |
+
heartbeat: Optional[float] = None,
|
| 43 |
+
compress: int = 0,
|
| 44 |
+
client_notakeover: bool = False,
|
| 45 |
+
) -> None:
|
| 46 |
+
self._response = response
|
| 47 |
+
self._conn = response.connection
|
| 48 |
+
|
| 49 |
+
self._writer = writer
|
| 50 |
+
self._reader = reader
|
| 51 |
+
self._protocol = protocol
|
| 52 |
+
self._closed = False
|
| 53 |
+
self._closing = False
|
| 54 |
+
self._close_code = None # type: Optional[int]
|
| 55 |
+
self._timeout = timeout
|
| 56 |
+
self._receive_timeout = receive_timeout
|
| 57 |
+
self._autoclose = autoclose
|
| 58 |
+
self._autoping = autoping
|
| 59 |
+
self._heartbeat = heartbeat
|
| 60 |
+
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
|
| 61 |
+
if heartbeat is not None:
|
| 62 |
+
self._pong_heartbeat = heartbeat / 2.0
|
| 63 |
+
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
|
| 64 |
+
self._loop = loop
|
| 65 |
+
self._waiting = None # type: Optional[asyncio.Future[bool]]
|
| 66 |
+
self._exception = None # type: Optional[BaseException]
|
| 67 |
+
self._compress = compress
|
| 68 |
+
self._client_notakeover = client_notakeover
|
| 69 |
+
|
| 70 |
+
self._reset_heartbeat()
|
| 71 |
+
|
| 72 |
+
def _cancel_heartbeat(self) -> None:
|
| 73 |
+
if self._pong_response_cb is not None:
|
| 74 |
+
self._pong_response_cb.cancel()
|
| 75 |
+
self._pong_response_cb = None
|
| 76 |
+
|
| 77 |
+
if self._heartbeat_cb is not None:
|
| 78 |
+
self._heartbeat_cb.cancel()
|
| 79 |
+
self._heartbeat_cb = None
|
| 80 |
+
|
| 81 |
+
def _reset_heartbeat(self) -> None:
|
| 82 |
+
self._cancel_heartbeat()
|
| 83 |
+
|
| 84 |
+
if self._heartbeat is not None:
|
| 85 |
+
self._heartbeat_cb = call_later(
|
| 86 |
+
self._send_heartbeat, self._heartbeat, self._loop
|
| 87 |
+
)
|
| 88 |
+
|
| 89 |
+
def _send_heartbeat(self) -> None:
|
| 90 |
+
if self._heartbeat is not None and not self._closed:
|
| 91 |
+
# fire-and-forget a task is not perfect but maybe ok for
|
| 92 |
+
# sending ping. Otherwise we need a long-living heartbeat
|
| 93 |
+
# task in the class.
|
| 94 |
+
self._loop.create_task(self._writer.ping())
|
| 95 |
+
|
| 96 |
+
if self._pong_response_cb is not None:
|
| 97 |
+
self._pong_response_cb.cancel()
|
| 98 |
+
self._pong_response_cb = call_later(
|
| 99 |
+
self._pong_not_received, self._pong_heartbeat, self._loop
|
| 100 |
+
)
|
| 101 |
+
|
| 102 |
+
def _pong_not_received(self) -> None:
|
| 103 |
+
if not self._closed:
|
| 104 |
+
self._closed = True
|
| 105 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 106 |
+
self._exception = asyncio.TimeoutError()
|
| 107 |
+
self._response.close()
|
| 108 |
+
|
| 109 |
+
@property
|
| 110 |
+
def closed(self) -> bool:
|
| 111 |
+
return self._closed
|
| 112 |
+
|
| 113 |
+
@property
|
| 114 |
+
def close_code(self) -> Optional[int]:
|
| 115 |
+
return self._close_code
|
| 116 |
+
|
| 117 |
+
@property
|
| 118 |
+
def protocol(self) -> Optional[str]:
|
| 119 |
+
return self._protocol
|
| 120 |
+
|
| 121 |
+
@property
|
| 122 |
+
def compress(self) -> int:
|
| 123 |
+
return self._compress
|
| 124 |
+
|
| 125 |
+
@property
|
| 126 |
+
def client_notakeover(self) -> bool:
|
| 127 |
+
return self._client_notakeover
|
| 128 |
+
|
| 129 |
+
def get_extra_info(self, name: str, default: Any = None) -> Any:
|
| 130 |
+
"""extra info from connection transport"""
|
| 131 |
+
conn = self._response.connection
|
| 132 |
+
if conn is None:
|
| 133 |
+
return default
|
| 134 |
+
transport = conn.transport
|
| 135 |
+
if transport is None:
|
| 136 |
+
return default
|
| 137 |
+
return transport.get_extra_info(name, default)
|
| 138 |
+
|
| 139 |
+
def exception(self) -> Optional[BaseException]:
|
| 140 |
+
return self._exception
|
| 141 |
+
|
| 142 |
+
async def ping(self, message: bytes = b"") -> None:
|
| 143 |
+
await self._writer.ping(message)
|
| 144 |
+
|
| 145 |
+
async def pong(self, message: bytes = b"") -> None:
|
| 146 |
+
await self._writer.pong(message)
|
| 147 |
+
|
| 148 |
+
async def send_str(self, data: str, compress: Optional[int] = None) -> None:
|
| 149 |
+
if not isinstance(data, str):
|
| 150 |
+
raise TypeError("data argument must be str (%r)" % type(data))
|
| 151 |
+
await self._writer.send(data, binary=False, compress=compress)
|
| 152 |
+
|
| 153 |
+
async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
|
| 154 |
+
if not isinstance(data, (bytes, bytearray, memoryview)):
|
| 155 |
+
raise TypeError("data argument must be byte-ish (%r)" % type(data))
|
| 156 |
+
await self._writer.send(data, binary=True, compress=compress)
|
| 157 |
+
|
| 158 |
+
async def send_json(
|
| 159 |
+
self,
|
| 160 |
+
data: Any,
|
| 161 |
+
compress: Optional[int] = None,
|
| 162 |
+
*,
|
| 163 |
+
dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
|
| 164 |
+
) -> None:
|
| 165 |
+
await self.send_str(dumps(data), compress=compress)
|
| 166 |
+
|
| 167 |
+
    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Perform the websocket close handshake.

        Sends a CLOSE frame with *code*/*message* and waits (bounded by the
        close timeout) for the peer's CLOSE reply.  Returns True if this call
        performed the close, False if the connection was already closed.
        """
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closed:
            # Wake any concurrent receive() with a synthetic CLOSING message
            # and wait until it has released the reader.
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                # Record abnormal closure and drop the HTTP response before
                # propagating the cancellation.
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                # Failed to even send the CLOSE frame: remember the error and
                # report the close as done.
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._closing:
                # The peer already initiated the close; no reply to wait for.
                self._response.close()
                return True

            # Drain incoming frames until the peer's CLOSE frame arrives or
            # the close timeout / an error ends the wait.
            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                if msg.type == WSMsgType.CLOSE:
                    # Handshake complete: remember the peer's close code.
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False
|
| 213 |
+
|
| 214 |
+
    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Wait for the next message from the peer and return it.

        PING/PONG frames are handled transparently when autoping is enabled;
        CLOSE handling honors the autoclose setting.  Returns a synthetic
        CLOSED message once the websocket is closed.  Raises RuntimeError on
        concurrent receive() calls.
        """
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                # Peer asked to close; finish the handshake before reporting.
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                # The waiter lets close() (from another task) block until this
                # receive() has released the reader.
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                        self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                # Clean EOF from the peer: treat as a normal close.
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                # Protocol violation: close with the protocol error code and
                # surface the error to the caller.
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                # Unexpected failure: remember it, close, report as ERROR.
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                # Reply automatically and keep waiting for a data message.
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg
|
| 271 |
+
|
| 272 |
+
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
|
| 273 |
+
msg = await self.receive(timeout)
|
| 274 |
+
if msg.type != WSMsgType.TEXT:
|
| 275 |
+
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
|
| 276 |
+
return cast(str, msg.data)
|
| 277 |
+
|
| 278 |
+
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
|
| 279 |
+
msg = await self.receive(timeout)
|
| 280 |
+
if msg.type != WSMsgType.BINARY:
|
| 281 |
+
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
|
| 282 |
+
return cast(bytes, msg.data)
|
| 283 |
+
|
| 284 |
+
async def receive_json(
|
| 285 |
+
self,
|
| 286 |
+
*,
|
| 287 |
+
loads: JSONDecoder = DEFAULT_JSON_DECODER,
|
| 288 |
+
timeout: Optional[float] = None,
|
| 289 |
+
) -> Any:
|
| 290 |
+
data = await self.receive_str(timeout=timeout)
|
| 291 |
+
return loads(data)
|
| 292 |
+
|
| 293 |
+
    def __aiter__(self) -> "ClientWebSocketResponse":
        """Support ``async for msg in ws``: the response iterates itself."""
        return self
|
| 295 |
+
|
| 296 |
+
async def __anext__(self) -> WSMessage:
|
| 297 |
+
msg = await self.receive()
|
| 298 |
+
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
|
| 299 |
+
raise StopAsyncIteration
|
| 300 |
+
return msg
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/cookiejar.py
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import contextlib
|
| 3 |
+
import datetime
|
| 4 |
+
import os # noqa
|
| 5 |
+
import pathlib
|
| 6 |
+
import pickle
|
| 7 |
+
import re
|
| 8 |
+
from collections import defaultdict
|
| 9 |
+
from http.cookies import BaseCookie, Morsel, SimpleCookie
|
| 10 |
+
from typing import ( # noqa
|
| 11 |
+
DefaultDict,
|
| 12 |
+
Dict,
|
| 13 |
+
Iterable,
|
| 14 |
+
Iterator,
|
| 15 |
+
List,
|
| 16 |
+
Mapping,
|
| 17 |
+
Optional,
|
| 18 |
+
Set,
|
| 19 |
+
Tuple,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from yarl import URL
|
| 25 |
+
|
| 26 |
+
from .abc import AbstractCookieJar, ClearCookiePredicate
|
| 27 |
+
from .helpers import is_ip_address, next_whole_second
|
| 28 |
+
from .typedefs import LooseCookies, PathLike, StrOrURL
|
| 29 |
+
|
| 30 |
+
__all__ = ("CookieJar", "DummyCookieJar")
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
CookieItem = Union[str, "Morsel[str]"]
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class CookieJar(AbstractCookieJar):
    """Implements cookie storage adhering to RFC 6265."""

    # Regexes used by _parse_date() to tokenize and classify the parts of an
    # RFC 6265 cookie-date string.
    DATE_TOKENS_RE = re.compile(
        r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
        r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
    )

    DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")

    DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")

    DATE_MONTH_RE = re.compile(
        "(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
        re.I,
    )

    DATE_YEAR_RE = re.compile(r"(\d{2,4})")

    MAX_TIME = datetime.datetime.max.replace(tzinfo=datetime.timezone.utc)

    # BUGFIX: this 32-bit fallback must be timezone-aware like MAX_TIME and
    # the datetimes stored in self._expirations; the previous
    # ``utcfromtimestamp(2 ** 31 - 1)`` returned a *naive* datetime, and
    # comparing it against aware datetimes (in _expire_cookie / clear)
    # raises ``TypeError: can't compare offset-naive and offset-aware
    # datetimes`` on 32-bit platforms.
    MAX_32BIT_TIME = datetime.datetime.fromtimestamp(
        2 ** 31 - 1, datetime.timezone.utc
    )

    def __init__(
        self,
        *,
        unsafe: bool = False,
        quote_cookie: bool = True,
        treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        """Create an empty jar.

        unsafe: accept cookies from IP-address hosts as well.
        quote_cookie: use SimpleCookie (quoted) vs BaseCookie output.
        treat_as_secure_origin: origins treated as secure for ``Secure``
            cookies even over plain http/ws.
        """
        super().__init__(loop=loop)
        # domain -> SimpleCookie mapping; defaultdict so new domains appear
        # on first write.
        self._cookies = defaultdict(
            SimpleCookie
        )  # type: DefaultDict[str, SimpleCookie[str]]
        self._host_only_cookies = set()  # type: Set[Tuple[str, str]]
        self._unsafe = unsafe
        self._quote_cookie = quote_cookie
        # Normalize treat_as_secure_origin to a list of origin URLs.
        if treat_as_secure_origin is None:
            treat_as_secure_origin = []
        elif isinstance(treat_as_secure_origin, URL):
            treat_as_secure_origin = [treat_as_secure_origin.origin()]
        elif isinstance(treat_as_secure_origin, str):
            treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
        else:
            treat_as_secure_origin = [
                URL(url).origin() if isinstance(url, str) else url.origin()
                for url in treat_as_secure_origin
            ]
        self._treat_as_secure_origin = treat_as_secure_origin
        self._next_expiration = next_whole_second()
        self._expirations = {}  # type: Dict[Tuple[str, str], datetime.datetime]
        # #4515: datetime.max may not be representable on 32-bit platforms
        self._max_time = self.MAX_TIME
        try:
            self._max_time.timestamp()
        except OverflowError:
            self._max_time = self.MAX_32BIT_TIME

    def save(self, file_path: PathLike) -> None:
        """Pickle the whole cookie store to *file_path*."""
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="wb") as f:
            pickle.dump(self._cookies, f, pickle.HIGHEST_PROTOCOL)

    def load(self, file_path: PathLike) -> None:
        """Restore the cookie store from a file written by save().

        SECURITY NOTE: this unpickles *file_path*; only load trusted files.
        """
        file_path = pathlib.Path(file_path)
        with file_path.open(mode="rb") as f:
            self._cookies = pickle.load(f)

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Remove cookies matching *predicate* plus any already expired ones.

        With no predicate the whole jar is dropped (fast path).
        """
        if predicate is None:
            self._next_expiration = next_whole_second()
            self._cookies.clear()
            self._host_only_cookies.clear()
            self._expirations.clear()
            return

        to_del = []
        now = datetime.datetime.now(datetime.timezone.utc)
        for domain, cookie in self._cookies.items():
            for name, morsel in cookie.items():
                key = (domain, name)
                if (
                    key in self._expirations and self._expirations[key] <= now
                ) or predicate(morsel):
                    to_del.append(key)

        for domain, name in to_del:
            key = (domain, name)
            self._host_only_cookies.discard(key)
            if key in self._expirations:
                del self._expirations[(domain, name)]
            self._cookies[domain].pop(name, None)

        # Recompute the earliest future expiration, rounded up to a whole
        # second; fall back to _max_time when nothing expires.
        next_expiration = min(self._expirations.values(), default=self._max_time)
        try:
            self._next_expiration = next_expiration.replace(
                microsecond=0
            ) + datetime.timedelta(seconds=1)
        except OverflowError:
            self._next_expiration = self._max_time

    def clear_domain(self, domain: str) -> None:
        """Remove all cookies whose domain matches *domain*."""
        self.clear(lambda x: self._is_domain_match(domain, x["domain"]))

    def __iter__(self) -> "Iterator[Morsel[str]]":
        """Iterate over all live (non-expired) morsels in the jar."""
        self._do_expiration()
        for val in self._cookies.values():
            yield from val.values()

    def __len__(self) -> int:
        """Number of live cookies (expired ones are purged first)."""
        return sum(1 for i in self)

    def _do_expiration(self) -> None:
        # clear() with an always-false predicate still drops expired cookies.
        self.clear(lambda x: False)

    def _expire_cookie(self, when: datetime.datetime, domain: str, name: str) -> None:
        # Track the soonest expiration so _do_expiration stays cheap.
        self._next_expiration = min(self._next_expiration, when)
        self._expirations[(domain, name)] = when

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Update cookies."""
        hostname = response_url.raw_host

        if not self._unsafe and is_ip_address(hostname):
            # Don't accept cookies from IPs
            return

        if isinstance(cookies, Mapping):
            cookies = cookies.items()

        for name, cookie in cookies:
            if not isinstance(cookie, Morsel):
                # Coerce a bare string value into a Morsel via SimpleCookie.
                tmp = SimpleCookie()  # type: SimpleCookie[str]
                tmp[name] = cookie  # type: ignore[assignment]
                cookie = tmp[name]

            domain = cookie["domain"]

            # ignore domains with trailing dots
            if domain.endswith("."):
                domain = ""
                del cookie["domain"]

            if not domain and hostname is not None:
                # Set the cookie's domain to the response hostname
                # and set its host-only-flag
                self._host_only_cookies.add((hostname, name))
                domain = cookie["domain"] = hostname

            if domain.startswith("."):
                # Remove leading dot
                domain = domain[1:]
                cookie["domain"] = domain

            if hostname and not self._is_domain_match(domain, hostname):
                # Setting cookies for different domains is not allowed
                continue

            path = cookie["path"]
            if not path or not path.startswith("/"):
                # Set the cookie's path to the response path
                path = response_url.path
                if not path.startswith("/"):
                    path = "/"
                else:
                    # Cut everything from the last slash to the end
                    path = "/" + path[1 : path.rfind("/")]
                cookie["path"] = path

            max_age = cookie["max-age"]
            if max_age:
                # Max-Age takes precedence over Expires (RFC 6265 §5.3).
                try:
                    delta_seconds = int(max_age)
                    try:
                        max_age_expiration = datetime.datetime.now(
                            datetime.timezone.utc
                        ) + datetime.timedelta(seconds=delta_seconds)
                    except OverflowError:
                        max_age_expiration = self._max_time
                    self._expire_cookie(max_age_expiration, domain, name)
                except ValueError:
                    cookie["max-age"] = ""

            else:
                expires = cookie["expires"]
                if expires:
                    expire_time = self._parse_date(expires)
                    if expire_time:
                        self._expire_cookie(expire_time, domain, name)
                    else:
                        cookie["expires"] = ""

            self._cookies[domain][name] = cookie

        self._do_expiration()

    def filter_cookies(
        self, request_url: URL = URL()
    ) -> Union["BaseCookie[str]", "SimpleCookie[str]"]:
        """Returns this jar's cookies filtered by their attributes."""
        self._do_expiration()
        request_url = URL(request_url)
        filtered: Union["SimpleCookie[str]", "BaseCookie[str]"] = (
            SimpleCookie() if self._quote_cookie else BaseCookie()
        )
        hostname = request_url.raw_host or ""
        request_origin = URL()
        with contextlib.suppress(ValueError):
            request_origin = request_url.origin()

        is_not_secure = (
            request_url.scheme not in ("https", "wss")
            and request_origin not in self._treat_as_secure_origin
        )

        for cookie in self:
            name = cookie.key
            domain = cookie["domain"]

            # Send shared cookies
            if not domain:
                filtered[name] = cookie.value
                continue

            if not self._unsafe and is_ip_address(hostname):
                continue

            if (domain, name) in self._host_only_cookies:
                if domain != hostname:
                    continue
            elif not self._is_domain_match(domain, hostname):
                continue

            if not self._is_path_match(request_url.path, cookie["path"]):
                continue

            if is_not_secure and cookie["secure"]:
                continue

            # It's critical we use the Morsel so the coded_value
            # (based on cookie version) is preserved
            mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
            mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
            filtered[name] = mrsl_val

        return filtered

    @staticmethod
    def _is_domain_match(domain: str, hostname: str) -> bool:
        """Implements domain matching adhering to RFC 6265."""
        if hostname == domain:
            return True

        if not hostname.endswith(domain):
            return False

        non_matching = hostname[: -len(domain)]

        # The prefix before the matched suffix must end at a label boundary.
        if not non_matching.endswith("."):
            return False

        return not is_ip_address(hostname)

    @staticmethod
    def _is_path_match(req_path: str, cookie_path: str) -> bool:
        """Implements path matching adhering to RFC 6265."""
        if not req_path.startswith("/"):
            req_path = "/"

        if req_path == cookie_path:
            return True

        if not req_path.startswith(cookie_path):
            return False

        if cookie_path.endswith("/"):
            return True

        # Prefix match only counts if the remainder starts at a "/" boundary.
        non_matching = req_path[len(cookie_path) :]

        return non_matching.startswith("/")

    @classmethod
    def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
        """Implements date string parsing adhering to RFC 6265.

        Returns an aware UTC datetime, or None when the string is not a
        valid cookie-date.
        """
        if not date_str:
            return None

        found_time = False
        found_day = False
        found_month = False
        found_year = False

        hour = minute = second = 0
        day = 0
        month = 0
        year = 0

        # Classify each token as the first matching of: time, day, month,
        # year — per the RFC 6265 §5.1.1 algorithm.
        for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

            token = token_match.group("token")

            if not found_time:
                time_match = cls.DATE_HMS_TIME_RE.match(token)
                if time_match:
                    found_time = True
                    hour, minute, second = (int(s) for s in time_match.groups())
                    continue

            if not found_day:
                day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
                if day_match:
                    found_day = True
                    day = int(day_match.group())
                    continue

            if not found_month:
                month_match = cls.DATE_MONTH_RE.match(token)
                if month_match:
                    found_month = True
                    assert month_match.lastindex is not None
                    # The index of the alternation group encodes the month.
                    month = month_match.lastindex
                    continue

            if not found_year:
                year_match = cls.DATE_YEAR_RE.match(token)
                if year_match:
                    found_year = True
                    year = int(year_match.group())

        # Two-digit years: 70-99 -> 19xx, 00-69 -> 20xx (RFC 6265).
        if 70 <= year <= 99:
            year += 1900
        elif 0 <= year <= 69:
            year += 2000

        if False in (found_day, found_month, found_year, found_time):
            return None

        if not 1 <= day <= 31:
            return None

        if year < 1601 or hour > 23 or minute > 59 or second > 59:
            return None

        return datetime.datetime(
            year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc
        )
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        # Generator yielding nothing: the jar is permanently empty.
        yield from ()

    def __len__(self) -> int:
        # Nothing is ever stored.
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """No-op: the jar holds nothing to clear."""

    def clear_domain(self, domain: str) -> None:
        """No-op: the jar holds nothing to clear."""

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """No-op: incoming cookies are discarded."""

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        """Always return an empty cookie collection."""
        return SimpleCookie()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/helpers.py
ADDED
|
@@ -0,0 +1,875 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Various helper functions"""
|
| 2 |
+
|
| 3 |
+
import asyncio
|
| 4 |
+
import base64
|
| 5 |
+
import binascii
|
| 6 |
+
import cgi
|
| 7 |
+
import datetime
|
| 8 |
+
import functools
|
| 9 |
+
import inspect
|
| 10 |
+
import netrc
|
| 11 |
+
import os
|
| 12 |
+
import platform
|
| 13 |
+
import re
|
| 14 |
+
import sys
|
| 15 |
+
import time
|
| 16 |
+
import warnings
|
| 17 |
+
import weakref
|
| 18 |
+
from collections import namedtuple
|
| 19 |
+
from contextlib import suppress
|
| 20 |
+
from email.utils import parsedate
|
| 21 |
+
from math import ceil
|
| 22 |
+
from pathlib import Path
|
| 23 |
+
from types import TracebackType
|
| 24 |
+
from typing import (
|
| 25 |
+
Any,
|
| 26 |
+
Callable,
|
| 27 |
+
ContextManager,
|
| 28 |
+
Dict,
|
| 29 |
+
Generator,
|
| 30 |
+
Generic,
|
| 31 |
+
Iterable,
|
| 32 |
+
Iterator,
|
| 33 |
+
List,
|
| 34 |
+
Mapping,
|
| 35 |
+
Optional,
|
| 36 |
+
Pattern,
|
| 37 |
+
Set,
|
| 38 |
+
Tuple,
|
| 39 |
+
Type,
|
| 40 |
+
TypeVar,
|
| 41 |
+
Union,
|
| 42 |
+
cast,
|
| 43 |
+
)
|
| 44 |
+
from urllib.parse import quote
|
| 45 |
+
from urllib.request import getproxies, proxy_bypass
|
| 46 |
+
|
| 47 |
+
import async_timeout
|
| 48 |
+
import attr
|
| 49 |
+
from multidict import MultiDict, MultiDictProxy
|
| 50 |
+
from yarl import URL
|
| 51 |
+
|
| 52 |
+
from . import hdrs
|
| 53 |
+
from .log import client_logger, internal_logger
|
| 54 |
+
from .typedefs import PathLike, Protocol # noqa
|
| 55 |
+
|
| 56 |
+
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
|
| 57 |
+
|
| 58 |
+
IS_MACOS = platform.system() == "Darwin"
|
| 59 |
+
IS_WINDOWS = platform.system() == "Windows"
|
| 60 |
+
|
| 61 |
+
PY_36 = sys.version_info >= (3, 6)
|
| 62 |
+
PY_37 = sys.version_info >= (3, 7)
|
| 63 |
+
PY_38 = sys.version_info >= (3, 8)
|
| 64 |
+
PY_310 = sys.version_info >= (3, 10)
|
| 65 |
+
|
| 66 |
+
if sys.version_info < (3, 7):
    # Python 3.6 compatibility: patch ssl hostname matching for IDNA
    # domains and provide an all_tasks() shim, since asyncio.all_tasks()
    # only exists on 3.7+.
    import idna_ssl

    idna_ssl.patch_match_hostname()

    def all_tasks(
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> Set["asyncio.Task[Any]"]:
        """Return the set of not-yet-finished tasks for *loop* (3.6 shim)."""
        tasks = list(asyncio.Task.all_tasks(loop))
        return {t for t in tasks if not t.done()}


else:
    all_tasks = asyncio.all_tasks
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
_T = TypeVar("_T")
|
| 83 |
+
_S = TypeVar("_S")
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
sentinel = object() # type: Any
|
| 87 |
+
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
|
| 88 |
+
|
| 89 |
+
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
|
| 90 |
+
# for compatibility with older versions
|
| 91 |
+
DEBUG = getattr(sys.flags, "dev_mode", False) or (
|
| 92 |
+
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
|
| 93 |
+
) # type: bool
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
# Character classes for HTTP token validation (RFC 2616 section 2.2 grammar).
CHAR = {chr(i) for i in range(0, 128)}
# Control characters: 0x00-0x1F plus DEL (0x7F).
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# TOKEN characters = CHAR minus controls and separators; symmetric
# difference works because CTL and SEPARATORS are disjoint subsets of CHAR.
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
class noop:
    """Awaitable no-op: ``await noop()`` yields control to the loop once."""

    def __await__(self) -> Generator[None, None, None]:
        yield
|
| 127 |
+
|
| 128 |
+
|
| 129 |
+
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
    """Http basic authentication helper."""

    def __new__(
        cls, login: str, password: str = "", encoding: str = "latin1"
    ) -> "BasicAuth":
        # Validate eagerly: the namedtuple would happily store None and the
        # failure would only surface later inside encode().
        if login is None:
            raise ValueError("None is not allowed as login value")

        if password is None:
            raise ValueError("None is not allowed as password value")

        if ":" in login:
            raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')

        return super().__new__(cls, login, password, encoding)

    @classmethod
    def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
        """Create a BasicAuth object from an Authorization HTTP header.

        Raises ValueError for a malformed header, a non-Basic scheme,
        invalid base64, or credentials lacking the mandatory colon.
        """
        try:
            auth_type, encoded_credentials = auth_header.split(" ", 1)
        except ValueError:
            raise ValueError("Could not parse authorization header.")

        if auth_type.lower() != "basic":
            raise ValueError("Unknown authorization method %s" % auth_type)

        try:
            decoded = base64.b64decode(
                encoded_credentials.encode("ascii"), validate=True
            ).decode(encoding)
        except binascii.Error:
            raise ValueError("Invalid base64 encoding.")

        try:
            # RFC 2617 HTTP Authentication
            # https://www.ietf.org/rfc/rfc2617.txt
            # the colon must be present, but the username and password may be
            # otherwise blank.
            username, password = decoded.split(":", 1)
        except ValueError:
            raise ValueError("Invalid credentials.")

        return cls(username, password, encoding=encoding)

    @classmethod
    def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
        """Create BasicAuth from url.

        Returns None when the URL carries no user part.
        """
        if not isinstance(url, URL):
            raise TypeError("url should be yarl.URL instance")
        if url.user is None:
            return None
        return cls(url.user, url.password or "", encoding=encoding)

    def encode(self) -> str:
        """Encode credentials as an ``Authorization`` header value."""
        creds = (f"{self.login}:{self.password}").encode(self.encoding)
        return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
|
| 188 |
+
|
| 189 |
+
|
| 190 |
+
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL and its BasicAuth (or None)."""
    auth = BasicAuth.from_url(url)
    if auth is None:
        return url, None
    else:
        # with_user(None) drops both the user and password parts.
        return url.with_user(None), auth
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    Attempt to load it from the path specified by the env-var
    NETRC or in the default location in the user's home directory.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        netrc_path = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as e:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                e,
            )
            return None

        # Windows convention names the file "_netrc" instead of ".netrc".
        netrc_path = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(netrc_path))
    except netrc.NetrcParseError as e:
        client_logger.warning("Could not parse .netrc file: %s", e)
    except OSError as e:
        # we couldn't read the file (doesn't exist, permissions, etc.)
        if netrc_env or netrc_path.is_file():
            # only warn if the environment wanted us to load it,
            # or it appears like the default file does actually exist
            client_logger.warning("Could not read .netrc file: %s", e)

    return None
|
| 236 |
+
|
| 237 |
+
|
| 238 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    # Proxy endpoint URL (scheme http/ws only — https/wss are rejected below).
    proxy: URL
    # Credentials for the proxy, when present in the URL or .netrc.
    proxy_auth: Optional[BasicAuth]


def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Collect usable proxies from the environment, keyed by scheme.

    Reads the standard proxy env-vars via urllib's getproxies(); strips
    inline credentials into BasicAuth; falls back to .netrc for auth.
    HTTPS/WSS proxies are skipped with a warning (not supported).
    """
    proxy_urls = {
        k: URL(v)
        for k, v in getproxies().items()
        if k in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
    ret = {}
    for proto, val in stripped.items():
        proxy, auth = val
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None:
            auth_from_netrc = None
            if proxy.host is not None:
                auth_from_netrc = netrc_obj.authenticators(proxy.host)
            if auth_from_netrc is not None:
                # auth_from_netrc is a (`user`, `account`, `password`) tuple,
                # `user` and `account` both can be username,
                # if `user` is None, use `account`
                *logins, password = auth_from_netrc
                login = logins[0] if logins[0] else logins[-1]
                auth = BasicAuth(cast(str, login), cast(str, password))
        ret[proto] = ProxyInfo(proxy, auth)
    return ret
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    """Version-portable asyncio.current_task() (Task.current_task pre-3.7)."""
    if sys.version_info >= (3, 7):
        return asyncio.current_task(loop=loop)
    else:
        return asyncio.Task.current_task(loop=loop)


def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    """Return *loop* or the current event loop, warning when it isn't running.

    aiohttp objects are expected to be created inside a coroutine; creating
    them outside one is deprecated, hence the DeprecationWarning (and an
    extra stack-trace log when the loop is in debug mode).
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    if not loop.is_running():
        warnings.warn(
            "The object should be created within an async function",
            DeprecationWarning,
            stacklevel=3,
        )
        if loop.get_debug():
            internal_logger.warning(
                "The object should be created within an async function", stack_info=True
            )
    return loop
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
def isasyncgenfunction(obj: Any) -> bool:
    """Return True if *obj* is an async generator function.

    Falls back to False on interpreters whose ``inspect`` module predates
    ``isasyncgenfunction``.
    """
    checker = getattr(inspect, "isasyncgenfunction", None)
    return bool(checker(obj)) if checker is not None else False
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env.

    Raises LookupError when the host is proxy-bypassed (no_proxy) or when
    no proxy is configured for the URL's scheme.
    """
    if url.host is not None and proxy_bypass(url.host):
        raise LookupError(f"Proxying is disallowed for `{url.host!r}`")

    proxies_in_env = proxies_from_env()
    try:
        proxy_info = proxies_in_env[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    else:
        return proxy_info.proxy, proxy_info.proxy_auth
|
| 322 |
+
|
| 323 |
+
|
| 324 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    # Main type, e.g. "text" in "text/html".
    type: str
    # Subtype, e.g. "html".
    subtype: str
    # Structured-syntax suffix, e.g. "xml" in "application/atom+xml".
    suffix: str
    # Parameters such as charset, as an immutable multidict.
    parameters: "MultiDictProxy[str]"


@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
        )

    parts = mimetype.split(";")
    params = MultiDict()  # type: MultiDict[str]
    for item in parts[1:]:
        if not item:
            continue
        key, value = cast(
            Tuple[str, str], item.split("=", 1) if "=" in item else (item, "")
        )
        params.add(key.lower().strip(), value.strip(' "'))

    fulltype = parts[0].strip().lower()
    # A bare "*" is shorthand for the universal media range.
    if fulltype == "*":
        fulltype = "*/*"

    mtype, stype = (
        cast(Tuple[str, str], fulltype.split("/", 1))
        if "/" in fulltype
        else (fulltype, "")
    )
    stype, suffix = (
        cast(Tuple[str, str], stype.split("+", 1)) if "+" in stype else (stype, "")
    )

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
|
| 378 |
+
|
| 379 |
+
|
| 380 |
+
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Best-effort file name for *obj* (e.g. an open file's ``.name``).

    Returns *default* when no usable name exists, including pseudo-names
    like ``<stdin>``.
    """
    candidate = getattr(obj, "name", None)
    if not candidate or not isinstance(candidate, str):
        return default
    if candidate.startswith("<") or candidate.endswith(">"):
        return default
    return Path(candidate).name
|
| 385 |
+
|
| 386 |
+
|
| 387 |
+
# Characters NOT allowed unescaped inside an RFC 5322 quoted-string.
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
# Printable US-ASCII plus TAB: everything permitted inside a quoted-string.
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # Strict superset test: every character of content must be in QCONTENT.
    if not (QCONTENT > set(content)):
        raise ValueError(f"bad content for quoted-string {content!r}")
    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # filename is percent-encoded but kept under its plain key
                    # for maximum client compatibility.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        qval = quoted_string(val)
                    except ValueError:
                        # Non-ASCII value: fall back to the RFC 2231/5987
                        # extended "key*=charset''percent-encoded" form.
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit mode: only escape backslashes and double quotes.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
|
| 455 |
+
|
| 456 |
+
|
| 457 |
+
class _TSelf(Protocol, Generic[_T]):
    # Structural type: any instance carrying a ``_cache`` dict qualifies.
    _cache: Dict[str, _T]


class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                # Fast path: value already computed and cached.
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            # Accessed on the class itself (inst is None): return the
            # descriptor, mirroring @property behaviour.
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        raise AttributeError("reified property is read-only")


# Keep the pure-Python implementation addressable, then prefer the
# C-accelerated version when available and extensions are not disabled.
reify_py = reify

try:
    from ._helpers import reify as reify_c

    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    pass
|
| 502 |
+
|
| 503 |
+
# Full-string IPv4 dotted-quad pattern (anchored with ^...$).
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
# IPv6 pattern covering compressed forms and IPv4-mapped tails.
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
# Compile once for str and bytes inputs alike.
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)


def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    """Match *host* against the str or bytes pattern, depending on its type."""
    if host is None:
        return False
    if isinstance(host, str):
        return bool(regex.match(host))
    elif isinstance(host, (bytes, bytearray, memoryview)):
        return bool(regexb.match(host))
    else:
        raise TypeError(f"{host} [{type(host)}] is not a str or bytes")


# Public predicates bound to the respective pattern pairs.
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """True when *host* is a literal IPv4 or IPv6 address (str or bytes)."""
    return is_ipv4_address(host) or is_ipv6_address(host)
|
| 542 |
+
|
| 543 |
+
|
| 544 |
+
def next_whole_second() -> datetime.datetime:
    """Return the current UTC time truncated to a whole second.

    NOTE(review): despite the name (and the previous docstring's claim of
    "rounded up"), this truncates — the historical ``+ timedelta(seconds=0)``
    was a no-op and has been removed; observable behaviour is unchanged.
    """
    return datetime.datetime.now(datetime.timezone.utc).replace(microsecond=0)
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
# Cache of the last formatted second: (epoch-second, formatted string).
_cached_current_datetime = None  # type: Optional[int]
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    """Return the current time formatted per RFC 822 (``Date`` header style).

    The string is cached and only rebuilt when the wall-clock second
    changes, so repeated calls within the same second are cheap.
    """
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now == _cached_current_datetime:
        return _cached_formatted_datetime

    # English weekday/month names are mandated by the RFC; time.strftime is
    # locale-dependent and therefore unusable here.
    weekdays = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = (
        "",  # placeholder so month numbers can be used 1-based
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
    )

    year, month, day, hh, mm, ss, wd, *_rest = time.gmtime(now)
    _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        weekdays[wd],
        day,
        months[month],
        year,
        hh,
        mm,
        ss,
    )
    _cached_current_datetime = now
    return _cached_formatted_datetime
|
| 593 |
+
|
| 594 |
+
|
| 595 |
+
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
    """Timer callback: call method *name* on the referent if still alive.

    Exceptions from the callback are deliberately suppressed — this fires
    from the event loop where a late failure has no useful handler.
    """
    ref, name = info
    ob = ref()
    if ob is not None:
        with suppress(Exception):
            getattr(ob, name)()


def weakref_handle(
    ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``ob.name()`` after *timeout* without keeping *ob* alive.

    Returns None when timeout is None or non-positive. Deadlines of five
    seconds or more are rounded up to a whole second so many timers can
    share the same loop time slot.
    """
    if timeout is not None and timeout > 0:
        when = loop.time() + timeout
        if timeout >= 5:
            when = ceil(when)

        return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
    return None
|
| 613 |
+
|
| 614 |
+
|
| 615 |
+
def call_later(
    cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* on *loop* after *timeout* seconds.

    Returns the TimerHandle, or None when *timeout* is None or non-positive.
    Deadlines beyond five seconds are rounded up to a whole second.
    NOTE(review): the threshold here is ``> 5`` while weakref_handle uses
    ``>= 5`` — confirm whether that asymmetry is intentional.
    """
    if timeout is None or timeout <= 0:
        return None
    deadline = loop.time() + timeout
    if timeout > 5:
        deadline = ceil(deadline)
    return loop.call_at(deadline, cb)
|
| 624 |
+
|
| 625 |
+
|
| 626 |
+
class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        # Callbacks fired (best-effort, exceptions suppressed) when the
        # timeout elapses.
        self._callbacks = (
            []
        )  # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        """Add *callback* to be invoked when the timeout fires."""
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        """Drop all registered callbacks; the timeout becomes a no-op."""
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        """Arm the timer; returns the loop handle, or None if no timeout."""
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            # Round long deadlines up to a whole second to batch timer slots.
            if timeout >= 5:
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        """Return a timer context tied to this handle (noop if no timeout)."""
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        # Fire every callback, ignoring individual failures, then clear.
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()
|
| 670 |
+
|
| 671 |
+
|
| 672 |
+
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    # Common base so callers can hold either a real TimerContext or a noop.
    pass


class TimerNoop(BaseTimerContext):
    """Timer context used when no timeout is configured: does nothing."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        return


class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context; cancelled when timeout() fires.
        self._tasks = []  # type: List[asyncio.Task[Any]]
        self._cancelled = False

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        # Timeout already fired before (re-)entry: fail immediately.
        if self._cancelled:
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        # Translate our own cancellation into TimeoutError for the caller.
        if exc_type is asyncio.CancelledError and self._cancelled:
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        """Fire the timeout: cancel every task inside the context once."""
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
|
| 730 |
+
|
| 731 |
+
|
| 732 |
+
def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
    """Return an async_timeout context for *delay* seconds.

    None or non-positive delay yields an unbounded timeout. Deadlines
    beyond five seconds are rounded up to a whole second so many timeouts
    can share the same event-loop time slot.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    now = loop.time()
    when = now + delay
    if delay > 5:
        when = ceil(when)
    return async_timeout.timeout_at(when)
|
| 742 |
+
|
| 743 |
+
|
| 744 |
+
class HeadersMixin:
    """Mixin adding lazily-parsed Content-Type/Length accessors.

    The consuming class must provide a ``_headers`` mapping. Parsed results
    are cached until the raw Content-Type header value changes.
    """

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    _content_type = None  # type: Optional[str]
    _content_dict = None  # type: Optional[Dict[str, str]]
    # Raw header value the cached parse corresponds to; `sentinel` means
    # "never parsed yet".
    _stored_content_type = sentinel

    def _parse_content_type(self, raw: str) -> None:
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            self._content_type, self._content_dict = cgi.parse_header(raw)

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)  # type: ignore[attr-defined]
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(  # type: ignore[attr-defined]
            hdrs.CONTENT_LENGTH
        )

        if content_length is not None:
            return int(content_length)
        else:
            return None
|
| 788 |
+
|
| 789 |
+
|
| 790 |
+
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    """Set *result* on *fut* unless it is already done (avoids InvalidStateError)."""
    if fut.done():
        return
    fut.set_result(result)
|
| 793 |
+
|
| 794 |
+
|
| 795 |
+
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
    """Set *exc* on *fut* unless it is already done (avoids InvalidStateError)."""
    if fut.done():
        return
    fut.set_exception(exc)
|
| 798 |
+
|
| 799 |
+
|
| 800 |
+
class ChainMapProxy(Mapping[str, Any]):
    """Read-only view over an ordered sequence of mappings.

    Lookup order is first-to-last: the first mapping containing a key wins.
    Subclassing is explicitly forbidden.
    """

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    def __getitem__(self, key: str) -> Any:
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    def get(self, key: str, default: Any = None) -> Any:
        # EAFP: a single chained lookup instead of the previous
        # "key in self" probe followed by a second full scan in __getitem__.
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))  # type: ignore[arg-type]

    def __iter__(self) -> Iterator[str]:
        d = {}  # type: Dict[str, Any]
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
|
| 843 |
+
|
| 844 |
+
|
| 845 |
+
# https://tools.ietf.org/html/rfc7232#section-2.3
# Characters allowed inside an etag value (etagc production).
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# A single quoted etag, optionally weak ("W/" prefix).
_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated etag list; the trailing "(.)" branch catches garbage.
LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

ETAG_ANY = "*"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    # Raw etag value without the surrounding quotes.
    value: str
    # True when the etag carried the "W/" weak-validator prefix.
    is_weak: bool = False


def validate_etag_value(value: str) -> None:
    """Raise ValueError unless *value* is "*" or a valid etagc string."""
    if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
|
| 866 |
+
|
| 867 |
+
|
| 868 |
+
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Parse an HTTP date string into an aware UTC datetime, or None."""
    if date_str is None:
        return None
    timetuple = parsedate(date_str)
    if timetuple is None:
        return None
    with suppress(ValueError):
        return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import http.server
|
| 2 |
+
import sys
|
| 3 |
+
from typing import Mapping, Tuple
|
| 4 |
+
|
| 5 |
+
from . import __version__
|
| 6 |
+
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
| 7 |
+
from .http_parser import (
|
| 8 |
+
HeadersParser as HeadersParser,
|
| 9 |
+
HttpParser as HttpParser,
|
| 10 |
+
HttpRequestParser as HttpRequestParser,
|
| 11 |
+
HttpResponseParser as HttpResponseParser,
|
| 12 |
+
RawRequestMessage as RawRequestMessage,
|
| 13 |
+
RawResponseMessage as RawResponseMessage,
|
| 14 |
+
)
|
| 15 |
+
from .http_websocket import (
|
| 16 |
+
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
|
| 17 |
+
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
|
| 18 |
+
WS_KEY as WS_KEY,
|
| 19 |
+
WebSocketError as WebSocketError,
|
| 20 |
+
WebSocketReader as WebSocketReader,
|
| 21 |
+
WebSocketWriter as WebSocketWriter,
|
| 22 |
+
WSCloseCode as WSCloseCode,
|
| 23 |
+
WSMessage as WSMessage,
|
| 24 |
+
WSMsgType as WSMsgType,
|
| 25 |
+
ws_ext_gen as ws_ext_gen,
|
| 26 |
+
ws_ext_parse as ws_ext_parse,
|
| 27 |
+
)
|
| 28 |
+
from .http_writer import (
|
| 29 |
+
HttpVersion as HttpVersion,
|
| 30 |
+
HttpVersion10 as HttpVersion10,
|
| 31 |
+
HttpVersion11 as HttpVersion11,
|
| 32 |
+
StreamWriter as StreamWriter,
|
| 33 |
+
)
|
| 34 |
+
|
| 35 |
+
# Public re-export surface of the aiohttp.http facade module.
__all__ = (
    "HttpProcessingError",
    "RESPONSES",
    "SERVER_SOFTWARE",
    # .http_writer
    "StreamWriter",
    "HttpVersion",
    "HttpVersion10",
    "HttpVersion11",
    # .http_parser
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
    # .http_websocket
    "WS_CLOSED_MESSAGE",
    "WS_CLOSING_MESSAGE",
    "WS_KEY",
    "WebSocketReader",
    "WebSocketWriter",
    "ws_ext_gen",
    "ws_ext_parse",
    "WSMessage",
    "WebSocketError",
    "WSMsgType",
    "WSCloseCode",
)


# Default User-Agent/Server token, e.g. "Python/3.8 aiohttp/3.8.1".
SERVER_SOFTWARE = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)  # type: str

# Status code -> (short phrase, long description), borrowed from the stdlib
# HTTP server so aiohttp does not maintain its own table.
RESPONSES = (
    http.server.BaseHTTPRequestHandler.responses
)  # type: Mapping[int, Tuple[str, str]]
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/http_parser.py
ADDED
|
@@ -0,0 +1,956 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import asyncio
|
| 3 |
+
import collections
|
| 4 |
+
import re
|
| 5 |
+
import string
|
| 6 |
+
import zlib
|
| 7 |
+
from contextlib import suppress
|
| 8 |
+
from enum import IntEnum
|
| 9 |
+
from typing import (
|
| 10 |
+
Any,
|
| 11 |
+
Generic,
|
| 12 |
+
List,
|
| 13 |
+
NamedTuple,
|
| 14 |
+
Optional,
|
| 15 |
+
Pattern,
|
| 16 |
+
Set,
|
| 17 |
+
Tuple,
|
| 18 |
+
Type,
|
| 19 |
+
TypeVar,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
| 25 |
+
from yarl import URL
|
| 26 |
+
|
| 27 |
+
from . import hdrs
|
| 28 |
+
from .base_protocol import BaseProtocol
|
| 29 |
+
from .helpers import NO_EXTENSIONS, BaseTimerContext
|
| 30 |
+
from .http_exceptions import (
|
| 31 |
+
BadHttpMessage,
|
| 32 |
+
BadStatusLine,
|
| 33 |
+
ContentEncodingError,
|
| 34 |
+
ContentLengthError,
|
| 35 |
+
InvalidHeader,
|
| 36 |
+
LineTooLong,
|
| 37 |
+
TransferEncodingError,
|
| 38 |
+
)
|
| 39 |
+
from .http_writer import HttpVersion, HttpVersion10
|
| 40 |
+
from .log import internal_logger
|
| 41 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 42 |
+
from .typedefs import Final, RawHeaders
|
| 43 |
+
|
| 44 |
+
try:
|
| 45 |
+
import brotli
|
| 46 |
+
|
| 47 |
+
HAS_BROTLI = True
|
| 48 |
+
except ImportError: # pragma: no cover
|
| 49 |
+
HAS_BROTLI = False
|
| 50 |
+
|
| 51 |
+
|
| 52 |
+
__all__ = (
|
| 53 |
+
"HeadersParser",
|
| 54 |
+
"HttpParser",
|
| 55 |
+
"HttpRequestParser",
|
| 56 |
+
"HttpResponseParser",
|
| 57 |
+
"RawRequestMessage",
|
| 58 |
+
"RawResponseMessage",
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
# Printable ASCII characters, used to validate decoded header/status text.
ASCIISET: Final[Set[str]] = set(string.printable)

# See https://tools.ietf.org/html/rfc7230#section-3.1.1
# and https://tools.ietf.org/html/rfc7230#appendix-B
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#         "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
METHRE: Final[Pattern[str]] = re.compile(r"[!#$%&'*+\-.^_`|~0-9A-Za-z]+")
# HTTP-version = "HTTP/" DIGIT "." DIGIT (RFC 7230 section 2.6).
# FIX: the dot must be escaped.  The previous pattern used an unescaped
# ".", which matches any character, so malformed version strings such as
# "HTTP/1x1" were accepted as valid.
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d+)\.(\d+)")
# Bytes that are forbidden in a header field name: control characters
# plus the RFC 7230 "separators".
HDRRE: Final[Pattern[bytes]] = re.compile(rb"[\x00-\x1F\x7F()<>@,;:\[\]={} \t\\\\\"]")
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
class RawRequestMessage(NamedTuple):
    """Parsed HTTP request line and headers (payload is delivered separately)."""

    method: str  # request method token, e.g. "GET"
    path: str  # raw request-target exactly as received on the request line
    version: HttpVersion  # parsed HTTP-version, e.g. HttpVersion(1, 1)
    headers: "CIMultiDictProxy[str]"  # case-insensitive, immutable header view
    raw_headers: RawHeaders  # tuple of (bytes name, bytes value) pairs
    should_close: bool  # True if the connection must close after this message
    compression: Optional[str]  # Content-Encoding: "gzip"/"deflate"/"br", or None
    upgrade: bool  # True if "Connection: upgrade" was present
    chunked: bool  # True if "Transfer-Encoding: chunked" was present
    url: URL  # request-target parsed into a yarl.URL
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class RawResponseMessage(NamedTuple):
    """Parsed HTTP response status line and headers.

    Converted from ``collections.namedtuple`` to ``typing.NamedTuple`` for
    consistency with ``RawRequestMessage`` and to carry field types.
    Positional construction, iteration, ``_fields`` and ``_replace`` are
    unchanged, so existing callers keep working.
    """

    version: "HttpVersion"  # parsed HTTP-version, e.g. HttpVersion(1, 1)
    code: int  # numeric status code
    reason: str  # reason phrase (may be empty)
    headers: "CIMultiDictProxy[str]"  # case-insensitive, immutable header view
    raw_headers: "RawHeaders"  # tuple of (bytes name, bytes value) pairs
    should_close: bool  # True if the connection must close after this message
    compression: Optional[str]  # Content-Encoding: "gzip"/"deflate"/"br", or None
    upgrade: bool  # True if "Connection: upgrade" was present
    chunked: bool  # True if "Transfer-Encoding: chunked" was present
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
| 105 |
+
|
| 106 |
+
|
| 107 |
+
class ParseState(IntEnum):
    """How the current message's payload length is delimited."""

    PARSE_NONE = 0  # no payload expected
    PARSE_LENGTH = 1  # fixed-size payload (Content-Length)
    PARSE_CHUNKED = 2  # chunked transfer coding
    PARSE_UNTIL_EOF = 3  # read until the connection closes
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
class ChunkState(IntEnum):
    """Sub-states of the chunked transfer-coding parser."""

    PARSE_CHUNKED_SIZE = 0  # expecting "<hex-size>[;extensions]\r\n"
    PARSE_CHUNKED_CHUNK = 1  # reading chunk payload bytes
    PARSE_CHUNKED_CHUNK_EOF = 2  # expecting the CRLF that terminates a chunk
    PARSE_MAYBE_TRAILERS = 3  # after the 0-size chunk: bare CRLF or trailers
    PARSE_TRAILERS = 4  # discarding a trailer line up to its CRLF
|
| 121 |
+
|
| 122 |
+
|
| 123 |
+
class HeadersParser:
    """Parses a block of raw HTTP header lines into a multidict.

    Supports obsolete line folding (continuation lines starting with SP/TAB)
    and enforces per-field size limits.
    """

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
    ) -> None:
        # Size limits; exceeding max_field_size raises LineTooLong.
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Parse ``lines`` (start-line at index 0, headers from index 1,
        terminated by an empty line) into an immutable case-insensitive
        multidict plus the raw ``(name, value)`` byte pairs.

        Raises InvalidHeader for a malformed line or forbidden name bytes,
        and LineTooLong when a name or (folded) value exceeds
        ``max_field_size``.
        """
        headers = CIMultiDict()  # type: CIMultiDict[str]
        raw_headers = []

        # lines[0] is the request/status line; headers start at index 1.
        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                # No colon at all -> not a header line.
                raise InvalidHeader(line) from None

            bname = bname.strip(b" \t")
            bvalue = bvalue.lstrip()
            # Reject control characters / separators in the field name.
            if HDRRE.search(bname):
                raise InvalidHeader(bname)
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "xmlcharrefreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines (obsolete RFC 7230 line folding)
            continuation = line and line[0] in (32, 9)  # (' ', '\t')

            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    # Accumulated (folded) value counts toward the limit.
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "xmlcharrefreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        # Ran off the end of the header block.
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "xmlcharrefreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip()
            # surrogateescape keeps non-UTF-8 bytes round-trippable.
            name = bname.decode("utf-8", "surrogateescape")
            value = bvalue.decode("utf-8", "surrogateescape")

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
class HttpParser(abc.ABC, Generic[_MsgT]):
    """Incremental HTTP/1.x message parser (base for request/response parsers).

    ``feed_data`` consumes raw bytes, splits off complete start-line +
    header blocks, delegates start-line parsing to the subclass's
    ``parse_message`` and hands payload bytes to an ``HttpPayloadParser``.
    """

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2 ** 16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.readall = readall
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Header lines of the message currently being assembled.
        self._lines = []  # type: List[bytes]
        # Unconsumed bytes carried over to the next feed_data call.
        self._tail = b""
        self._upgraded = False
        self._payload = None
        # Active payload parser, or None between messages.
        self._payload_parser = None  # type: Optional[HttpPayloadParser]
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(max_line_size, max_headers, max_field_size)

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT:
        """Parse a complete start-line + header block into a message tuple."""
        pass

    def feed_eof(self) -> Optional[_MsgT]:
        """Signal end of stream; may return a partially received message."""
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                # NOTE(review): bytes-vs-str comparison — always True, since
                # self._lines holds bytes.  Harmless here (lines never contain
                # b"\r\n" because the separator is stripped), but presumably
                # b"\r\n" was intended — confirm before changing.
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: bytes = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Feed raw bytes into the parser.

        Returns ``(messages, upgraded, tail)`` where *messages* is a list of
        ``(message, payload_stream)`` pairs completed by this call, *upgraded*
        reports connection-upgrade mode, and *tail* is any unconsumed data
        (non-empty only after an upgrade).
        """

        messages = []

        if self._tail:
            # Prepend leftovers from the previous call.
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n (skip blank lines before the start-line)
                if pos == start_pos and not self._lines:
                    start_pos = pos + 2
                    continue

                if pos >= start_pos:
                    # line found
                    self._lines.append(data[start_pos:pos])
                    start_pos = pos + 2

                    # \r\n\r\n found — complete header block
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            try:
                                length = int(length_hdr)
                            except ValueError:
                                raise InvalidHeader(CONTENT_LENGTH)

                            if length < 0:
                                raise InvalidHeader(CONTENT_LENGTH)

                            return length

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade

                        # Responses have no .method; fall back to the request
                        # method this parser was configured with.
                        method = getattr(msg, "method", self.method)

                        assert self.protocol is not None
                        # calculate payload
                        # NOTE(review): Python precedence parses this as
                        # A or (B and not C): a positive Content-Length
                        # creates a payload even for upgrades; only the
                        # chunked case is gated on "not upgrade".
                        if (
                            (length is not None and length > 0)
                            or msg.chunked
                            and not msg.upgrade
                        ):
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                readall=self.readall,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            # CONNECT tunnels: switch to pass-through mode.
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                readall=True,
                                auto_decompress=self._auto_decompress,
                            )
                        else:
                            # NOTE(review): ">= 199" includes 199 (a 1xx code)
                            # in the read-until-EOF path — presumably ">= 200"
                            # was intended; confirm before changing.
                            if (
                                getattr(msg, "code", 100) >= 199
                                and length is None
                                and self.read_until_eof
                            ):
                                payload = StreamReader(
                                    self.protocol,
                                    timer=self.timer,
                                    loop=loop,
                                    limit=self._limit,
                                )
                                payload_parser = HttpPayloadParser(
                                    payload,
                                    length=length,
                                    chunked=msg.chunked,
                                    method=method,
                                    compression=msg.compression,
                                    code=self.code,
                                    readall=True,
                                    response_with_body=self.response_with_body,
                                    auto_decompress=self._auto_decompress,
                                )
                                if not payload_parser.done:
                                    self._payload_parser = payload_parser
                            else:
                                payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                else:
                    # Incomplete line: stash and wait for more data.
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store (connection upgraded: caller takes over)
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:])
                except BaseException as exc:
                    # Wrap parse errors into the configured exception type so
                    # payload readers observe a consistent failure.
                    if self.payload_exception is not None:
                        self._payload_parser.payload.set_exception(
                            self.payload_exception(str(exc))
                        )
                    else:
                        self._payload_parser.payload.set_exception(exc)

                    eof = True
                    data = b""

                if eof:
                    # Payload done; remaining bytes start the next message.
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns the header multidict, raw
        header byte pairs, and connection flags derived from Connection,
        Content-Encoding and Transfer-Encoding headers:
        (headers, raw_headers, close_conn, encoding, upgrade, chunked).
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None  # None => decide from the HTTP version later
        encoding = None
        upgrade = False
        chunked = False

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            elif v == "upgrade":
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            # Only encodings we can decode are reported.
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if "chunked" == te.lower():
                chunked = True
            else:
                raise BadHttpMessage("Request has invalid `Transfer-Encoding`")

            # RFC 7230 3.3.3: reject request-smuggling-prone combination.
            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Content-Length can't be present with Transfer-Encoding",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val
|
| 512 |
+
|
| 513 |
+
|
| 514 |
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        # request line: "METHOD request-target HTTP/x.y"
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(None, 2)
        except ValueError:
            raise BadStatusLine(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # Split request-target into path / query / fragment components.
        path_part, _hash_separator, url_fragment = path.partition("#")
        path_part, _question_mark_separator, qs_part = path_part.partition("?")

        # method: must be a valid RFC 7230 token
        if not METHRE.match(method):
            raise BadStatusLine(method)

        # version: manual parse (int() would also reject junk digits)
        try:
            if version.startswith("HTTP/"):
                n1, n2 = version[5:].split(".", 1)
                version_o = HttpVersion(int(n1), int(n2))
            else:
                raise BadStatusLine(version)
        except Exception:
            raise BadStatusLine(version)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then no Connection header was set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 defaults to close
                close = True
            else:  # HTTP 1.1 defaults to keep-alive
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            ),
        )
|
| 589 |
+
|
| 590 |
+
|
| 591 |
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        # status line: "HTTP/x.y CODE [reason]"
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(None, 1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(None, 1)
        except ValueError:
            # Reason phrase is optional.
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.match(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit number
        try:
            status_i = int(status)
        except ValueError:
            raise BadStatusLine(line) from None

        # Only the upper bound is checked here; values below 100 are not
        # rejected by this parser.
        if status_i > 999:
            raise BadStatusLine(line)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            # No Connection header: HTTP/1.0 and earlier default to close.
            close = version_o <= HttpVersion10

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
class HttpPayloadParser:
    """Feeds message body bytes into a StreamReader.

    Supports three delimiting modes (see ParseState): fixed Content-Length,
    chunked transfer coding, and read-until-EOF.  Optionally wraps the
    output stream in a DeflateBuffer for on-the-fly decompression.
    """

    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        response_with_body: bool = True,
        auto_decompress: bool = True,
    ) -> None:
        # Remaining bytes for PARSE_LENGTH mode.
        self._length = 0
        self._type = ParseState.PARSE_NONE
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        # Incomplete chunk-framing bytes carried to the next feed_data call.
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        # True when no payload bytes are expected at all.
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload = DeflateBuffer(
                payload, compression
            )  # type: Union[StreamReader, DeflateBuffer]
        else:
            real_payload = payload

        # payload parser mode selection
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True

        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                real_payload.feed_eof()
                self.done = True
        else:
            # No framing information at all.
            if readall and code != 204:
                self._type = ParseState.PARSE_UNTIL_EOF
            elif method in ("PUT", "POST"):
                internal_logger.warning(  # pragma: no cover
                    "Content-Length or Transfer-Encoding header is required"
                )
                self._type = ParseState.PARSE_NONE
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload

    def feed_eof(self) -> None:
        """Handle end of stream; raise if the payload is known-incomplete."""
        if self._type == ParseState.PARSE_UNTIL_EOF:
            # EOF is the normal terminator in this mode.
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data for satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data for satisfy transfer length header."
            )

    def feed_data(
        self, chunk: bytes, SEP: bytes = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed raw body bytes.

        Returns ``(eof, tail)``: *eof* is True once the payload is complete,
        *tail* holds any bytes that belong to the next message.
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                # More data than needed: the rest starts the next message.
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                # Re-join framing bytes saved from the previous call.
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size line: "<hex-size>[;ext]\r\n"
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                        else:
                            size_b = chunk[:pos]

                        try:
                            size = int(bytes(size_b), 16)
                        except ValueError:
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            self.payload.set_exception(exc)
                            raise exc from None

                        chunk = chunk[pos + 2 :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        # Size line incomplete: wait for more data.
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if chunk[:2] == SEP:
                        chunk = chunk[2:]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n, otherwise
                # trailers need to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[:2]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[2:]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + 2 :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
|
| 845 |
+
|
| 846 |
+
|
| 847 |
+
class DeflateBuffer:
|
| 848 |
+
"""DeflateStream decompress stream and feed data into specified stream."""
|
| 849 |
+
|
| 850 |
+
decompressor: Any
|
| 851 |
+
|
| 852 |
+
def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
|
| 853 |
+
self.out = out
|
| 854 |
+
self.size = 0
|
| 855 |
+
self.encoding = encoding
|
| 856 |
+
self._started_decoding = False
|
| 857 |
+
|
| 858 |
+
if encoding == "br":
|
| 859 |
+
if not HAS_BROTLI: # pragma: no cover
|
| 860 |
+
raise ContentEncodingError(
|
| 861 |
+
"Can not decode content-encoding: brotli (br). "
|
| 862 |
+
"Please install `Brotli`"
|
| 863 |
+
)
|
| 864 |
+
|
| 865 |
+
class BrotliDecoder:
|
| 866 |
+
# Supports both 'brotlipy' and 'Brotli' packages
|
| 867 |
+
# since they share an import name. The top branches
|
| 868 |
+
# are for 'brotlipy' and bottom branches for 'Brotli'
|
| 869 |
+
def __init__(self) -> None:
|
| 870 |
+
self._obj = brotli.Decompressor()
|
| 871 |
+
|
| 872 |
+
def decompress(self, data: bytes) -> bytes:
|
| 873 |
+
if hasattr(self._obj, "decompress"):
|
| 874 |
+
return cast(bytes, self._obj.decompress(data))
|
| 875 |
+
return cast(bytes, self._obj.process(data))
|
| 876 |
+
|
| 877 |
+
def flush(self) -> bytes:
|
| 878 |
+
if hasattr(self._obj, "flush"):
|
| 879 |
+
return cast(bytes, self._obj.flush())
|
| 880 |
+
return b""
|
| 881 |
+
|
| 882 |
+
self.decompressor = BrotliDecoder()
|
| 883 |
+
else:
|
| 884 |
+
zlib_mode = 16 + zlib.MAX_WBITS if encoding == "gzip" else zlib.MAX_WBITS
|
| 885 |
+
self.decompressor = zlib.decompressobj(wbits=zlib_mode)
|
| 886 |
+
|
| 887 |
+
def set_exception(self, exc: BaseException) -> None:
|
| 888 |
+
self.out.set_exception(exc)
|
| 889 |
+
|
| 890 |
+
def feed_data(self, chunk: bytes, size: int) -> None:
|
| 891 |
+
if not size:
|
| 892 |
+
return
|
| 893 |
+
|
| 894 |
+
self.size += size
|
| 895 |
+
|
| 896 |
+
# RFC1950
|
| 897 |
+
# bits 0..3 = CM = 0b1000 = 8 = "deflate"
|
| 898 |
+
# bits 4..7 = CINFO = 1..7 = windows size.
|
| 899 |
+
if (
|
| 900 |
+
not self._started_decoding
|
| 901 |
+
and self.encoding == "deflate"
|
| 902 |
+
and chunk[0] & 0xF != 8
|
| 903 |
+
):
|
| 904 |
+
# Change the decoder to decompress incorrectly compressed data
|
| 905 |
+
# Actually we should issue a warning about non-RFC-compliant data.
|
| 906 |
+
self.decompressor = zlib.decompressobj(wbits=-zlib.MAX_WBITS)
|
| 907 |
+
|
| 908 |
+
try:
|
| 909 |
+
chunk = self.decompressor.decompress(chunk)
|
| 910 |
+
except Exception:
|
| 911 |
+
raise ContentEncodingError(
|
| 912 |
+
"Can not decode content-encoding: %s" % self.encoding
|
| 913 |
+
)
|
| 914 |
+
|
| 915 |
+
self._started_decoding = True
|
| 916 |
+
|
| 917 |
+
if chunk:
|
| 918 |
+
self.out.feed_data(chunk, len(chunk))
|
| 919 |
+
|
| 920 |
+
def feed_eof(self) -> None:
|
| 921 |
+
chunk = self.decompressor.flush()
|
| 922 |
+
|
| 923 |
+
if chunk or self.size > 0:
|
| 924 |
+
self.out.feed_data(chunk, len(chunk))
|
| 925 |
+
if self.encoding == "deflate" and not self.decompressor.eof:
|
| 926 |
+
raise ContentEncodingError("deflate")
|
| 927 |
+
|
| 928 |
+
self.out.feed_eof()
|
| 929 |
+
|
| 930 |
+
def begin_http_chunk_receiving(self) -> None:
|
| 931 |
+
self.out.begin_http_chunk_receiving()
|
| 932 |
+
|
| 933 |
+
def end_http_chunk_receiving(self) -> None:
|
| 934 |
+
self.out.end_http_chunk_receiving()
|
| 935 |
+
|
| 936 |
+
|
| 937 |
+
HttpRequestParserPy = HttpRequestParser
|
| 938 |
+
HttpResponseParserPy = HttpResponseParser
|
| 939 |
+
RawRequestMessagePy = RawRequestMessage
|
| 940 |
+
RawResponseMessagePy = RawResponseMessage
|
| 941 |
+
|
| 942 |
+
try:
|
| 943 |
+
if not NO_EXTENSIONS:
|
| 944 |
+
from ._http_parser import ( # type: ignore[import,no-redef]
|
| 945 |
+
HttpRequestParser,
|
| 946 |
+
HttpResponseParser,
|
| 947 |
+
RawRequestMessage,
|
| 948 |
+
RawResponseMessage,
|
| 949 |
+
)
|
| 950 |
+
|
| 951 |
+
HttpRequestParserC = HttpRequestParser
|
| 952 |
+
HttpResponseParserC = HttpResponseParser
|
| 953 |
+
RawRequestMessageC = RawRequestMessage
|
| 954 |
+
RawResponseMessageC = RawResponseMessage
|
| 955 |
+
except ImportError: # pragma: no cover
|
| 956 |
+
pass
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/locks.py
ADDED
|
@@ -0,0 +1,41 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections
|
| 3 |
+
from typing import Any, Deque, Optional
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class EventResultOrError:
|
| 7 |
+
"""Event asyncio lock helper class.
|
| 8 |
+
|
| 9 |
+
Wraps the Event asyncio lock allowing either to awake the
|
| 10 |
+
locked Tasks without any error or raising an exception.
|
| 11 |
+
|
| 12 |
+
thanks to @vorpalsmith for the simple design.
|
| 13 |
+
"""
|
| 14 |
+
|
| 15 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
| 16 |
+
self._loop = loop
|
| 17 |
+
self._exc = None # type: Optional[BaseException]
|
| 18 |
+
self._event = asyncio.Event()
|
| 19 |
+
self._waiters = collections.deque() # type: Deque[asyncio.Future[Any]]
|
| 20 |
+
|
| 21 |
+
def set(self, exc: Optional[BaseException] = None) -> None:
|
| 22 |
+
self._exc = exc
|
| 23 |
+
self._event.set()
|
| 24 |
+
|
| 25 |
+
async def wait(self) -> Any:
|
| 26 |
+
waiter = self._loop.create_task(self._event.wait())
|
| 27 |
+
self._waiters.append(waiter)
|
| 28 |
+
try:
|
| 29 |
+
val = await waiter
|
| 30 |
+
finally:
|
| 31 |
+
self._waiters.remove(waiter)
|
| 32 |
+
|
| 33 |
+
if self._exc is not None:
|
| 34 |
+
raise self._exc
|
| 35 |
+
|
| 36 |
+
return val
|
| 37 |
+
|
| 38 |
+
def cancel(self) -> None:
|
| 39 |
+
"""Cancel all waiters"""
|
| 40 |
+
for waiter in self._waiters:
|
| 41 |
+
waiter.cancel()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/log.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import logging
|
| 2 |
+
|
| 3 |
+
access_logger = logging.getLogger("aiohttp.access")
|
| 4 |
+
client_logger = logging.getLogger("aiohttp.client")
|
| 5 |
+
internal_logger = logging.getLogger("aiohttp.internal")
|
| 6 |
+
server_logger = logging.getLogger("aiohttp.server")
|
| 7 |
+
web_logger = logging.getLogger("aiohttp.web")
|
| 8 |
+
ws_logger = logging.getLogger("aiohttp.websocket")
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/payload.py
ADDED
|
@@ -0,0 +1,465 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import enum
|
| 3 |
+
import io
|
| 4 |
+
import json
|
| 5 |
+
import mimetypes
|
| 6 |
+
import os
|
| 7 |
+
import warnings
|
| 8 |
+
from abc import ABC, abstractmethod
|
| 9 |
+
from itertools import chain
|
| 10 |
+
from typing import (
|
| 11 |
+
IO,
|
| 12 |
+
TYPE_CHECKING,
|
| 13 |
+
Any,
|
| 14 |
+
ByteString,
|
| 15 |
+
Dict,
|
| 16 |
+
Iterable,
|
| 17 |
+
Optional,
|
| 18 |
+
TextIO,
|
| 19 |
+
Tuple,
|
| 20 |
+
Type,
|
| 21 |
+
Union,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
from multidict import CIMultiDict
|
| 25 |
+
|
| 26 |
+
from . import hdrs
|
| 27 |
+
from .abc import AbstractStreamWriter
|
| 28 |
+
from .helpers import (
|
| 29 |
+
PY_36,
|
| 30 |
+
content_disposition_header,
|
| 31 |
+
guess_filename,
|
| 32 |
+
parse_mimetype,
|
| 33 |
+
sentinel,
|
| 34 |
+
)
|
| 35 |
+
from .streams import StreamReader
|
| 36 |
+
from .typedefs import Final, JSONEncoder, _CIMultiDict
|
| 37 |
+
|
| 38 |
+
__all__ = (
|
| 39 |
+
"PAYLOAD_REGISTRY",
|
| 40 |
+
"get_payload",
|
| 41 |
+
"payload_type",
|
| 42 |
+
"Payload",
|
| 43 |
+
"BytesPayload",
|
| 44 |
+
"StringPayload",
|
| 45 |
+
"IOBasePayload",
|
| 46 |
+
"BytesIOPayload",
|
| 47 |
+
"BufferedReaderPayload",
|
| 48 |
+
"TextIOPayload",
|
| 49 |
+
"StringIOPayload",
|
| 50 |
+
"JsonPayload",
|
| 51 |
+
"AsyncIterablePayload",
|
| 52 |
+
)
|
| 53 |
+
|
| 54 |
+
TOO_LARGE_BYTES_BODY: Final[int] = 2 ** 20 # 1 MB
|
| 55 |
+
|
| 56 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 57 |
+
from typing import List
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class LookupError(Exception):
|
| 61 |
+
pass
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class Order(str, enum.Enum):
|
| 65 |
+
normal = "normal"
|
| 66 |
+
try_first = "try_first"
|
| 67 |
+
try_last = "try_last"
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload":
|
| 71 |
+
return PAYLOAD_REGISTRY.get(data, *args, **kwargs)
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
def register_payload(
|
| 75 |
+
factory: Type["Payload"], type: Any, *, order: Order = Order.normal
|
| 76 |
+
) -> None:
|
| 77 |
+
PAYLOAD_REGISTRY.register(factory, type, order=order)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
class payload_type:
|
| 81 |
+
def __init__(self, type: Any, *, order: Order = Order.normal) -> None:
|
| 82 |
+
self.type = type
|
| 83 |
+
self.order = order
|
| 84 |
+
|
| 85 |
+
def __call__(self, factory: Type["Payload"]) -> Type["Payload"]:
|
| 86 |
+
register_payload(factory, self.type, order=self.order)
|
| 87 |
+
return factory
|
| 88 |
+
|
| 89 |
+
|
| 90 |
+
PayloadType = Type["Payload"]
|
| 91 |
+
_PayloadRegistryItem = Tuple[PayloadType, Any]
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
class PayloadRegistry:
|
| 95 |
+
"""Payload registry.
|
| 96 |
+
|
| 97 |
+
note: we need zope.interface for more efficient adapter search
|
| 98 |
+
"""
|
| 99 |
+
|
| 100 |
+
def __init__(self) -> None:
|
| 101 |
+
self._first = [] # type: List[_PayloadRegistryItem]
|
| 102 |
+
self._normal = [] # type: List[_PayloadRegistryItem]
|
| 103 |
+
self._last = [] # type: List[_PayloadRegistryItem]
|
| 104 |
+
|
| 105 |
+
def get(
|
| 106 |
+
self,
|
| 107 |
+
data: Any,
|
| 108 |
+
*args: Any,
|
| 109 |
+
_CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain,
|
| 110 |
+
**kwargs: Any,
|
| 111 |
+
) -> "Payload":
|
| 112 |
+
if isinstance(data, Payload):
|
| 113 |
+
return data
|
| 114 |
+
for factory, type in _CHAIN(self._first, self._normal, self._last):
|
| 115 |
+
if isinstance(data, type):
|
| 116 |
+
return factory(data, *args, **kwargs)
|
| 117 |
+
|
| 118 |
+
raise LookupError()
|
| 119 |
+
|
| 120 |
+
def register(
|
| 121 |
+
self, factory: PayloadType, type: Any, *, order: Order = Order.normal
|
| 122 |
+
) -> None:
|
| 123 |
+
if order is Order.try_first:
|
| 124 |
+
self._first.append((factory, type))
|
| 125 |
+
elif order is Order.normal:
|
| 126 |
+
self._normal.append((factory, type))
|
| 127 |
+
elif order is Order.try_last:
|
| 128 |
+
self._last.append((factory, type))
|
| 129 |
+
else:
|
| 130 |
+
raise ValueError(f"Unsupported order {order!r}")
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
class Payload(ABC):
|
| 134 |
+
|
| 135 |
+
_default_content_type = "application/octet-stream" # type: str
|
| 136 |
+
_size = None # type: Optional[int]
|
| 137 |
+
|
| 138 |
+
def __init__(
|
| 139 |
+
self,
|
| 140 |
+
value: Any,
|
| 141 |
+
headers: Optional[
|
| 142 |
+
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]]
|
| 143 |
+
] = None,
|
| 144 |
+
content_type: Optional[str] = sentinel,
|
| 145 |
+
filename: Optional[str] = None,
|
| 146 |
+
encoding: Optional[str] = None,
|
| 147 |
+
**kwargs: Any,
|
| 148 |
+
) -> None:
|
| 149 |
+
self._encoding = encoding
|
| 150 |
+
self._filename = filename
|
| 151 |
+
self._headers = CIMultiDict() # type: _CIMultiDict
|
| 152 |
+
self._value = value
|
| 153 |
+
if content_type is not sentinel and content_type is not None:
|
| 154 |
+
self._headers[hdrs.CONTENT_TYPE] = content_type
|
| 155 |
+
elif self._filename is not None:
|
| 156 |
+
content_type = mimetypes.guess_type(self._filename)[0]
|
| 157 |
+
if content_type is None:
|
| 158 |
+
content_type = self._default_content_type
|
| 159 |
+
self._headers[hdrs.CONTENT_TYPE] = content_type
|
| 160 |
+
else:
|
| 161 |
+
self._headers[hdrs.CONTENT_TYPE] = self._default_content_type
|
| 162 |
+
self._headers.update(headers or {})
|
| 163 |
+
|
| 164 |
+
@property
|
| 165 |
+
def size(self) -> Optional[int]:
|
| 166 |
+
"""Size of the payload."""
|
| 167 |
+
return self._size
|
| 168 |
+
|
| 169 |
+
@property
|
| 170 |
+
def filename(self) -> Optional[str]:
|
| 171 |
+
"""Filename of the payload."""
|
| 172 |
+
return self._filename
|
| 173 |
+
|
| 174 |
+
@property
|
| 175 |
+
def headers(self) -> _CIMultiDict:
|
| 176 |
+
"""Custom item headers"""
|
| 177 |
+
return self._headers
|
| 178 |
+
|
| 179 |
+
@property
|
| 180 |
+
def _binary_headers(self) -> bytes:
|
| 181 |
+
return (
|
| 182 |
+
"".join([k + ": " + v + "\r\n" for k, v in self.headers.items()]).encode(
|
| 183 |
+
"utf-8"
|
| 184 |
+
)
|
| 185 |
+
+ b"\r\n"
|
| 186 |
+
)
|
| 187 |
+
|
| 188 |
+
@property
|
| 189 |
+
def encoding(self) -> Optional[str]:
|
| 190 |
+
"""Payload encoding"""
|
| 191 |
+
return self._encoding
|
| 192 |
+
|
| 193 |
+
@property
|
| 194 |
+
def content_type(self) -> str:
|
| 195 |
+
"""Content type"""
|
| 196 |
+
return self._headers[hdrs.CONTENT_TYPE]
|
| 197 |
+
|
| 198 |
+
def set_content_disposition(
|
| 199 |
+
self,
|
| 200 |
+
disptype: str,
|
| 201 |
+
quote_fields: bool = True,
|
| 202 |
+
_charset: str = "utf-8",
|
| 203 |
+
**params: Any,
|
| 204 |
+
) -> None:
|
| 205 |
+
"""Sets ``Content-Disposition`` header."""
|
| 206 |
+
self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header(
|
| 207 |
+
disptype, quote_fields=quote_fields, _charset=_charset, **params
|
| 208 |
+
)
|
| 209 |
+
|
| 210 |
+
@abstractmethod
|
| 211 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
| 212 |
+
"""Write payload.
|
| 213 |
+
|
| 214 |
+
writer is an AbstractStreamWriter instance:
|
| 215 |
+
"""
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
class BytesPayload(Payload):
|
| 219 |
+
def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
|
| 220 |
+
if not isinstance(value, (bytes, bytearray, memoryview)):
|
| 221 |
+
raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
|
| 222 |
+
|
| 223 |
+
if "content_type" not in kwargs:
|
| 224 |
+
kwargs["content_type"] = "application/octet-stream"
|
| 225 |
+
|
| 226 |
+
super().__init__(value, *args, **kwargs)
|
| 227 |
+
|
| 228 |
+
if isinstance(value, memoryview):
|
| 229 |
+
self._size = value.nbytes
|
| 230 |
+
else:
|
| 231 |
+
self._size = len(value)
|
| 232 |
+
|
| 233 |
+
if self._size > TOO_LARGE_BYTES_BODY:
|
| 234 |
+
if PY_36:
|
| 235 |
+
kwargs = {"source": self}
|
| 236 |
+
else:
|
| 237 |
+
kwargs = {}
|
| 238 |
+
warnings.warn(
|
| 239 |
+
"Sending a large body directly with raw bytes might"
|
| 240 |
+
" lock the event loop. You should probably pass an "
|
| 241 |
+
"io.BytesIO object instead",
|
| 242 |
+
ResourceWarning,
|
| 243 |
+
**kwargs,
|
| 244 |
+
)
|
| 245 |
+
|
| 246 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
| 247 |
+
await writer.write(self._value)
|
| 248 |
+
|
| 249 |
+
|
| 250 |
+
class StringPayload(BytesPayload):
|
| 251 |
+
def __init__(
|
| 252 |
+
self,
|
| 253 |
+
value: str,
|
| 254 |
+
*args: Any,
|
| 255 |
+
encoding: Optional[str] = None,
|
| 256 |
+
content_type: Optional[str] = None,
|
| 257 |
+
**kwargs: Any,
|
| 258 |
+
) -> None:
|
| 259 |
+
|
| 260 |
+
if encoding is None:
|
| 261 |
+
if content_type is None:
|
| 262 |
+
real_encoding = "utf-8"
|
| 263 |
+
content_type = "text/plain; charset=utf-8"
|
| 264 |
+
else:
|
| 265 |
+
mimetype = parse_mimetype(content_type)
|
| 266 |
+
real_encoding = mimetype.parameters.get("charset", "utf-8")
|
| 267 |
+
else:
|
| 268 |
+
if content_type is None:
|
| 269 |
+
content_type = "text/plain; charset=%s" % encoding
|
| 270 |
+
real_encoding = encoding
|
| 271 |
+
|
| 272 |
+
super().__init__(
|
| 273 |
+
value.encode(real_encoding),
|
| 274 |
+
encoding=real_encoding,
|
| 275 |
+
content_type=content_type,
|
| 276 |
+
*args,
|
| 277 |
+
**kwargs,
|
| 278 |
+
)
|
| 279 |
+
|
| 280 |
+
|
| 281 |
+
class StringIOPayload(StringPayload):
|
| 282 |
+
def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
|
| 283 |
+
super().__init__(value.read(), *args, **kwargs)
|
| 284 |
+
|
| 285 |
+
|
| 286 |
+
class IOBasePayload(Payload):
|
| 287 |
+
_value: IO[Any]
|
| 288 |
+
|
| 289 |
+
def __init__(
|
| 290 |
+
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
|
| 291 |
+
) -> None:
|
| 292 |
+
if "filename" not in kwargs:
|
| 293 |
+
kwargs["filename"] = guess_filename(value)
|
| 294 |
+
|
| 295 |
+
super().__init__(value, *args, **kwargs)
|
| 296 |
+
|
| 297 |
+
if self._filename is not None and disposition is not None:
|
| 298 |
+
if hdrs.CONTENT_DISPOSITION not in self.headers:
|
| 299 |
+
self.set_content_disposition(disposition, filename=self._filename)
|
| 300 |
+
|
| 301 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
| 302 |
+
loop = asyncio.get_event_loop()
|
| 303 |
+
try:
|
| 304 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
|
| 305 |
+
while chunk:
|
| 306 |
+
await writer.write(chunk)
|
| 307 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
|
| 308 |
+
finally:
|
| 309 |
+
await loop.run_in_executor(None, self._value.close)
|
| 310 |
+
|
| 311 |
+
|
| 312 |
+
class TextIOPayload(IOBasePayload):
|
| 313 |
+
_value: TextIO
|
| 314 |
+
|
| 315 |
+
def __init__(
|
| 316 |
+
self,
|
| 317 |
+
value: TextIO,
|
| 318 |
+
*args: Any,
|
| 319 |
+
encoding: Optional[str] = None,
|
| 320 |
+
content_type: Optional[str] = None,
|
| 321 |
+
**kwargs: Any,
|
| 322 |
+
) -> None:
|
| 323 |
+
|
| 324 |
+
if encoding is None:
|
| 325 |
+
if content_type is None:
|
| 326 |
+
encoding = "utf-8"
|
| 327 |
+
content_type = "text/plain; charset=utf-8"
|
| 328 |
+
else:
|
| 329 |
+
mimetype = parse_mimetype(content_type)
|
| 330 |
+
encoding = mimetype.parameters.get("charset", "utf-8")
|
| 331 |
+
else:
|
| 332 |
+
if content_type is None:
|
| 333 |
+
content_type = "text/plain; charset=%s" % encoding
|
| 334 |
+
|
| 335 |
+
super().__init__(
|
| 336 |
+
value,
|
| 337 |
+
content_type=content_type,
|
| 338 |
+
encoding=encoding,
|
| 339 |
+
*args,
|
| 340 |
+
**kwargs,
|
| 341 |
+
)
|
| 342 |
+
|
| 343 |
+
@property
|
| 344 |
+
def size(self) -> Optional[int]:
|
| 345 |
+
try:
|
| 346 |
+
return os.fstat(self._value.fileno()).st_size - self._value.tell()
|
| 347 |
+
except OSError:
|
| 348 |
+
return None
|
| 349 |
+
|
| 350 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
| 351 |
+
loop = asyncio.get_event_loop()
|
| 352 |
+
try:
|
| 353 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
|
| 354 |
+
while chunk:
|
| 355 |
+
data = (
|
| 356 |
+
chunk.encode(encoding=self._encoding)
|
| 357 |
+
if self._encoding
|
| 358 |
+
else chunk.encode()
|
| 359 |
+
)
|
| 360 |
+
await writer.write(data)
|
| 361 |
+
chunk = await loop.run_in_executor(None, self._value.read, 2 ** 16)
|
| 362 |
+
finally:
|
| 363 |
+
await loop.run_in_executor(None, self._value.close)
|
| 364 |
+
|
| 365 |
+
|
| 366 |
+
class BytesIOPayload(IOBasePayload):
|
| 367 |
+
@property
|
| 368 |
+
def size(self) -> int:
|
| 369 |
+
position = self._value.tell()
|
| 370 |
+
end = self._value.seek(0, os.SEEK_END)
|
| 371 |
+
self._value.seek(position)
|
| 372 |
+
return end - position
|
| 373 |
+
|
| 374 |
+
|
| 375 |
+
class BufferedReaderPayload(IOBasePayload):
|
| 376 |
+
@property
|
| 377 |
+
def size(self) -> Optional[int]:
|
| 378 |
+
try:
|
| 379 |
+
return os.fstat(self._value.fileno()).st_size - self._value.tell()
|
| 380 |
+
except OSError:
|
| 381 |
+
# data.fileno() is not supported, e.g.
|
| 382 |
+
# io.BufferedReader(io.BytesIO(b'data'))
|
| 383 |
+
return None
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class JsonPayload(BytesPayload):
|
| 387 |
+
def __init__(
|
| 388 |
+
self,
|
| 389 |
+
value: Any,
|
| 390 |
+
encoding: str = "utf-8",
|
| 391 |
+
content_type: str = "application/json",
|
| 392 |
+
dumps: JSONEncoder = json.dumps,
|
| 393 |
+
*args: Any,
|
| 394 |
+
**kwargs: Any,
|
| 395 |
+
) -> None:
|
| 396 |
+
|
| 397 |
+
super().__init__(
|
| 398 |
+
dumps(value).encode(encoding),
|
| 399 |
+
content_type=content_type,
|
| 400 |
+
encoding=encoding,
|
| 401 |
+
*args,
|
| 402 |
+
**kwargs,
|
| 403 |
+
)
|
| 404 |
+
|
| 405 |
+
|
| 406 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 407 |
+
from typing import AsyncIterable, AsyncIterator
|
| 408 |
+
|
| 409 |
+
_AsyncIterator = AsyncIterator[bytes]
|
| 410 |
+
_AsyncIterable = AsyncIterable[bytes]
|
| 411 |
+
else:
|
| 412 |
+
from collections.abc import AsyncIterable, AsyncIterator
|
| 413 |
+
|
| 414 |
+
_AsyncIterator = AsyncIterator
|
| 415 |
+
_AsyncIterable = AsyncIterable
|
| 416 |
+
|
| 417 |
+
|
| 418 |
+
class AsyncIterablePayload(Payload):
|
| 419 |
+
|
| 420 |
+
_iter = None # type: Optional[_AsyncIterator]
|
| 421 |
+
|
| 422 |
+
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
|
| 423 |
+
if not isinstance(value, AsyncIterable):
|
| 424 |
+
raise TypeError(
|
| 425 |
+
"value argument must support "
|
| 426 |
+
"collections.abc.AsyncIterablebe interface, "
|
| 427 |
+
"got {!r}".format(type(value))
|
| 428 |
+
)
|
| 429 |
+
|
| 430 |
+
if "content_type" not in kwargs:
|
| 431 |
+
kwargs["content_type"] = "application/octet-stream"
|
| 432 |
+
|
| 433 |
+
super().__init__(value, *args, **kwargs)
|
| 434 |
+
|
| 435 |
+
self._iter = value.__aiter__()
|
| 436 |
+
|
| 437 |
+
async def write(self, writer: AbstractStreamWriter) -> None:
|
| 438 |
+
if self._iter:
|
| 439 |
+
try:
|
| 440 |
+
# iter is not None check prevents rare cases
|
| 441 |
+
# when the case iterable is used twice
|
| 442 |
+
while True:
|
| 443 |
+
chunk = await self._iter.__anext__()
|
| 444 |
+
await writer.write(chunk)
|
| 445 |
+
except StopAsyncIteration:
|
| 446 |
+
self._iter = None
|
| 447 |
+
|
| 448 |
+
|
| 449 |
+
class StreamReaderPayload(AsyncIterablePayload):
|
| 450 |
+
def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
|
| 451 |
+
super().__init__(value.iter_any(), *args, **kwargs)
|
| 452 |
+
|
| 453 |
+
|
| 454 |
+
PAYLOAD_REGISTRY = PayloadRegistry()
|
| 455 |
+
PAYLOAD_REGISTRY.register(BytesPayload, (bytes, bytearray, memoryview))
|
| 456 |
+
PAYLOAD_REGISTRY.register(StringPayload, str)
|
| 457 |
+
PAYLOAD_REGISTRY.register(StringIOPayload, io.StringIO)
|
| 458 |
+
PAYLOAD_REGISTRY.register(TextIOPayload, io.TextIOBase)
|
| 459 |
+
PAYLOAD_REGISTRY.register(BytesIOPayload, io.BytesIO)
|
| 460 |
+
PAYLOAD_REGISTRY.register(BufferedReaderPayload, (io.BufferedReader, io.BufferedRandom))
|
| 461 |
+
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
|
| 462 |
+
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
|
| 463 |
+
# try_last for giving a chance to more specialized async interables like
|
| 464 |
+
# multidict.BodyPartReaderPayload override the default
|
| 465 |
+
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/py.typed
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
Marker
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/resolver.py
ADDED
|
@@ -0,0 +1,160 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import socket
|
| 3 |
+
from typing import Any, Dict, List, Optional, Type, Union
|
| 4 |
+
|
| 5 |
+
from .abc import AbstractResolver
|
| 6 |
+
from .helpers import get_running_loop
|
| 7 |
+
|
| 8 |
+
__all__ = ("ThreadedResolver", "AsyncResolver", "DefaultResolver")
|
| 9 |
+
|
| 10 |
+
try:
|
| 11 |
+
import aiodns
|
| 12 |
+
|
| 13 |
+
# aiodns_default = hasattr(aiodns.DNSResolver, 'gethostbyname')
|
| 14 |
+
except ImportError: # pragma: no cover
|
| 15 |
+
aiodns = None
|
| 16 |
+
|
| 17 |
+
aiodns_default = False
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class ThreadedResolver(AbstractResolver):
|
| 21 |
+
"""Threaded resolver.
|
| 22 |
+
|
| 23 |
+
Uses an Executor for synchronous getaddrinfo() calls.
|
| 24 |
+
concurrent.futures.ThreadPoolExecutor is used by default.
|
| 25 |
+
"""
|
| 26 |
+
|
| 27 |
+
def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
|
| 28 |
+
self._loop = get_running_loop(loop)
|
| 29 |
+
|
| 30 |
+
async def resolve(
|
| 31 |
+
self, hostname: str, port: int = 0, family: int = socket.AF_INET
|
| 32 |
+
) -> List[Dict[str, Any]]:
|
| 33 |
+
infos = await self._loop.getaddrinfo(
|
| 34 |
+
hostname,
|
| 35 |
+
port,
|
| 36 |
+
type=socket.SOCK_STREAM,
|
| 37 |
+
family=family,
|
| 38 |
+
flags=socket.AI_ADDRCONFIG,
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
hosts = []
|
| 42 |
+
for family, _, proto, _, address in infos:
|
| 43 |
+
if family == socket.AF_INET6:
|
| 44 |
+
if len(address) < 3:
|
| 45 |
+
# IPv6 is not supported by Python build,
|
| 46 |
+
# or IPv6 is not enabled in the host
|
| 47 |
+
continue
|
| 48 |
+
if address[3]: # type: ignore[misc]
|
| 49 |
+
# This is essential for link-local IPv6 addresses.
|
| 50 |
+
# LL IPv6 is a VERY rare case. Strictly speaking, we should use
|
| 51 |
+
# getnameinfo() unconditionally, but performance makes sense.
|
| 52 |
+
host, _port = socket.getnameinfo(
|
| 53 |
+
address, socket.NI_NUMERICHOST | socket.NI_NUMERICSERV
|
| 54 |
+
)
|
| 55 |
+
port = int(_port)
|
| 56 |
+
else:
|
| 57 |
+
host, port = address[:2]
|
| 58 |
+
else: # IPv4
|
| 59 |
+
assert family == socket.AF_INET
|
| 60 |
+
host, port = address # type: ignore[misc]
|
| 61 |
+
hosts.append(
|
| 62 |
+
{
|
| 63 |
+
"hostname": hostname,
|
| 64 |
+
"host": host,
|
| 65 |
+
"port": port,
|
| 66 |
+
"family": family,
|
| 67 |
+
"proto": proto,
|
| 68 |
+
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
|
| 69 |
+
}
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
return hosts
|
| 73 |
+
|
| 74 |
+
async def close(self) -> None:
|
| 75 |
+
pass
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class AsyncResolver(AbstractResolver):
|
| 79 |
+
"""Use the `aiodns` package to make asynchronous DNS lookups"""
|
| 80 |
+
|
| 81 |
+
def __init__(
|
| 82 |
+
self,
|
| 83 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
| 84 |
+
*args: Any,
|
| 85 |
+
**kwargs: Any
|
| 86 |
+
) -> None:
|
| 87 |
+
if aiodns is None:
|
| 88 |
+
raise RuntimeError("Resolver requires aiodns library")
|
| 89 |
+
|
| 90 |
+
self._loop = get_running_loop(loop)
|
| 91 |
+
self._resolver = aiodns.DNSResolver(*args, loop=loop, **kwargs)
|
| 92 |
+
|
| 93 |
+
if not hasattr(self._resolver, "gethostbyname"):
|
| 94 |
+
# aiodns 1.1 is not available, fallback to DNSResolver.query
|
| 95 |
+
self.resolve = self._resolve_with_query # type: ignore
|
| 96 |
+
|
| 97 |
+
async def resolve(
|
| 98 |
+
self, host: str, port: int = 0, family: int = socket.AF_INET
|
| 99 |
+
) -> List[Dict[str, Any]]:
|
| 100 |
+
try:
|
| 101 |
+
resp = await self._resolver.gethostbyname(host, family)
|
| 102 |
+
except aiodns.error.DNSError as exc:
|
| 103 |
+
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
|
| 104 |
+
raise OSError(msg) from exc
|
| 105 |
+
hosts = []
|
| 106 |
+
for address in resp.addresses:
|
| 107 |
+
hosts.append(
|
| 108 |
+
{
|
| 109 |
+
"hostname": host,
|
| 110 |
+
"host": address,
|
| 111 |
+
"port": port,
|
| 112 |
+
"family": family,
|
| 113 |
+
"proto": 0,
|
| 114 |
+
"flags": socket.AI_NUMERICHOST | socket.AI_NUMERICSERV,
|
| 115 |
+
}
|
| 116 |
+
)
|
| 117 |
+
|
| 118 |
+
if not hosts:
|
| 119 |
+
raise OSError("DNS lookup failed")
|
| 120 |
+
|
| 121 |
+
return hosts
|
| 122 |
+
|
| 123 |
+
async def _resolve_with_query(
|
| 124 |
+
self, host: str, port: int = 0, family: int = socket.AF_INET
|
| 125 |
+
) -> List[Dict[str, Any]]:
|
| 126 |
+
if family == socket.AF_INET6:
|
| 127 |
+
qtype = "AAAA"
|
| 128 |
+
else:
|
| 129 |
+
qtype = "A"
|
| 130 |
+
|
| 131 |
+
try:
|
| 132 |
+
resp = await self._resolver.query(host, qtype)
|
| 133 |
+
except aiodns.error.DNSError as exc:
|
| 134 |
+
msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed"
|
| 135 |
+
raise OSError(msg) from exc
|
| 136 |
+
|
| 137 |
+
hosts = []
|
| 138 |
+
for rr in resp:
|
| 139 |
+
hosts.append(
|
| 140 |
+
{
|
| 141 |
+
"hostname": host,
|
| 142 |
+
"host": rr.host,
|
| 143 |
+
"port": port,
|
| 144 |
+
"family": family,
|
| 145 |
+
"proto": 0,
|
| 146 |
+
"flags": socket.AI_NUMERICHOST,
|
| 147 |
+
}
|
| 148 |
+
)
|
| 149 |
+
|
| 150 |
+
if not hosts:
|
| 151 |
+
raise OSError("DNS lookup failed")
|
| 152 |
+
|
| 153 |
+
return hosts
|
| 154 |
+
|
| 155 |
+
async def close(self) -> None:
    """Cancel any in-flight aiodns queries held by the resolver."""
    self._resolver.cancel()
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
# Resolver class selected at import time: AsyncResolver when a usable
# aiodns is available (aiodns_default), ThreadedResolver otherwise.
_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]]
DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/streams.py
ADDED
|
@@ -0,0 +1,660 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import collections
|
| 3 |
+
import warnings
|
| 4 |
+
from typing import Awaitable, Callable, Deque, Generic, List, Optional, Tuple, TypeVar
|
| 5 |
+
|
| 6 |
+
from .base_protocol import BaseProtocol
|
| 7 |
+
from .helpers import BaseTimerContext, set_exception, set_result
|
| 8 |
+
from .log import internal_logger
|
| 9 |
+
from .typedefs import Final
|
| 10 |
+
|
| 11 |
+
# Public API of this streams module.
__all__ = (
    "EMPTY_PAYLOAD",
    "EofStream",
    "StreamReader",
    "DataQueue",
    "FlowControlDataQueue",
)

# Generic item type carried by DataQueue / AsyncStreamIterator.
_T = TypeVar("_T")
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class EofStream(Exception):
    """eof stream indication.

    Raised by DataQueue.read() when the queue reached EOF with no
    buffered items left (and no stored exception).
    """
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class AsyncStreamIterator(Generic[_T]):
    """Adapt a zero-argument read coroutine into an async iterator.

    Iteration terminates when the read function raises EofStream or
    yields an empty bytes object.
    """

    def __init__(self, read_func: Callable[[], Awaitable[_T]]) -> None:
        self.read_func = read_func

    def __aiter__(self) -> "AsyncStreamIterator[_T]":
        return self

    async def __anext__(self) -> _T:
        try:
            item = await self.read_func()
        except EofStream:
            raise StopAsyncIteration
        if item == b"":
            # An empty read also marks end-of-stream.
            raise StopAsyncIteration
        return item
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
class ChunkTupleAsyncStreamIterator:
    """Async iterator over ``(bytes, end_of_http_chunk)`` pairs.

    Drives ``stream.readchunk()`` and stops once the stream reports
    ``(b"", False)``, the value StreamReader.readchunk() uses for EOF.
    """

    # Sentinel pair signalling end of stream.
    _EOF_SENTINEL = (b"", False)

    def __init__(self, stream: "StreamReader") -> None:
        self._stream = stream

    def __aiter__(self) -> "ChunkTupleAsyncStreamIterator":
        return self

    async def __anext__(self) -> Tuple[bytes, bool]:
        pair = await self._stream.readchunk()
        if pair == self._EOF_SENTINEL:
            raise StopAsyncIteration
        return pair
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
class AsyncStreamReaderMixin:
    """Mixin adding ``async for`` iteration helpers to stream readers.

    Hosts are expected to provide readline()/read()/readany()/readchunk();
    the mixin only wraps them in async iterator adapters.
    """

    def __aiter__(self) -> AsyncStreamIterator[bytes]:
        # Default iteration is line by line.
        return AsyncStreamIterator(self.readline)  # type: ignore[attr-defined]

    def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
        """Returns an asynchronous iterator that yields chunks of size n.

        Python-3.5 available for Python 3.5+ only
        """
        return AsyncStreamIterator(
            lambda: self.read(n)  # type: ignore[attr-defined,no-any-return]
        )

    def iter_any(self) -> AsyncStreamIterator[bytes]:
        """Yield all available data as soon as it is received.

        Python-3.5 available for Python 3.5+ only
        """
        return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]

    def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
        """Yield chunks of data as they are received by the server.

        The yielded objects are tuples
        of (bytes, bool) as returned by the StreamReader.readchunk method.

        Python-3.5 available for Python 3.5+ only
        """
        return ChunkTupleAsyncStreamIterator(self)  # type: ignore[arg-type]
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
class StreamReader(AsyncStreamReaderMixin):
    """An enhancement of asyncio.StreamReader.

    Supports asynchronous iteration by line, chunk or as available::

        async for line in reader:
            ...
        async for chunk in reader.iter_chunked(1024):
            ...
        async for slice in reader.iter_any():
            ...

    """

    # Running count of every byte ever fed; never decremented by reads.
    total_bytes = 0

    def __init__(
        self,
        protocol: BaseProtocol,
        limit: int,
        *,
        timer: Optional[BaseTimerContext] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        self._protocol = protocol
        # Flow-control watermarks: resume the transport below low,
        # pause it above high (= 2 * limit).
        self._low_water = limit
        self._high_water = limit * 2
        if loop is None:
            loop = asyncio.get_event_loop()
        self._loop = loop
        self._size = 0
        self._cursor = 0
        self._http_chunk_splits = None  # type: Optional[List[int]]
        self._buffer = collections.deque()  # type: Deque[bytes]
        self._buffer_offset = 0
        self._eof = False
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._eof_waiter = None  # type: Optional[asyncio.Future[None]]
        self._exception = None  # type: Optional[BaseException]
        self._timer = timer
        self._eof_callbacks = []  # type: List[Callable[[], None]]

    def __repr__(self) -> str:
        info = [self.__class__.__name__]
        if self._size:
            info.append("%d bytes" % self._size)
        if self._eof:
            info.append("eof")
        if self._low_water != 2 ** 16:  # default limit
            info.append("low=%d high=%d" % (self._low_water, self._high_water))
        if self._waiter:
            info.append("w=%r" % self._waiter)
        if self._exception:
            info.append("e=%r" % self._exception)
        return "<%s>" % " ".join(info)

    def get_read_buffer_limits(self) -> Tuple[int, int]:
        """Return the (low, high) flow-control watermarks."""
        return (self._low_water, self._high_water)

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        """Poison the stream: wake pending read/eof waiters with *exc*."""
        self._exception = exc
        self._eof_callbacks.clear()

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_exception(waiter, exc)

    def on_eof(self, callback: Callable[[], None]) -> None:
        """Register *callback* to run at EOF (immediately if already at EOF)."""
        if self._eof:
            try:
                callback()
            except Exception:
                internal_logger.exception("Exception in eof callback")
        else:
            self._eof_callbacks.append(callback)

    def feed_eof(self) -> None:
        """Mark EOF, wake all waiters, and fire registered EOF callbacks."""
        self._eof = True

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        waiter = self._eof_waiter
        if waiter is not None:
            self._eof_waiter = None
            set_result(waiter, None)

        for cb in self._eof_callbacks:
            try:
                cb()
            except Exception:
                internal_logger.exception("Exception in eof callback")

        self._eof_callbacks.clear()

    def is_eof(self) -> bool:
        """Return True if 'feed_eof' was called."""
        return self._eof

    def at_eof(self) -> bool:
        """Return True if the buffer is empty and 'feed_eof' was called."""
        return self._eof and not self._buffer

    async def wait_eof(self) -> None:
        """Block until feed_eof() is called (returns at once if already EOF)."""
        if self._eof:
            return

        assert self._eof_waiter is None
        self._eof_waiter = self._loop.create_future()
        try:
            await self._eof_waiter
        finally:
            self._eof_waiter = None

    def unread_data(self, data: bytes) -> None:
        """rollback reading some data from stream, inserting it to buffer head."""
        warnings.warn(
            "unread_data() is deprecated "
            "and will be removed in future releases (#3260)",
            DeprecationWarning,
            stacklevel=2,
        )
        if not data:
            return

        if self._buffer_offset:
            # Materialize the partially-consumed head so offsets stay valid.
            self._buffer[0] = self._buffer[0][self._buffer_offset :]
            self._buffer_offset = 0
        self._size += len(data)
        self._cursor -= len(data)
        self._buffer.appendleft(data)
        self._eof_counter = 0

    # TODO: size is ignored, remove the param later
    def feed_data(self, data: bytes, size: int = 0) -> None:
        """Append *data* to the buffer, wake the reader, pause if over high water."""
        assert not self._eof, "feed_data after feed_eof"

        if not data:
            return

        self._size += len(data)
        self._buffer.append(data)
        self.total_bytes += len(data)

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

        if self._size > self._high_water and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    def begin_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            if self.total_bytes:
                # BUGFIX: the two literals previously concatenated without a
                # space, producing "...whensome data was already fed".
                raise RuntimeError(
                    "Called begin_http_chunk_receiving when "
                    "some data was already fed"
                )
            self._http_chunk_splits = []

    def end_http_chunk_receiving(self) -> None:
        if self._http_chunk_splits is None:
            raise RuntimeError(
                "Called end_chunk_receiving without calling "
                "begin_chunk_receiving first"
            )

        # self._http_chunk_splits contains logical byte offsets from start of
        # the body transfer. Each offset is the offset of the end of a chunk.
        # "Logical" means bytes, accessible for a user.
        # If no chunks containing logical data were received, current position
        # is definitely zero.
        pos = self._http_chunk_splits[-1] if self._http_chunk_splits else 0

        if self.total_bytes == pos:
            # We should not add empty chunks here. So we check for that.
            # Note, when chunked + gzip is used, we can receive a chunk
            # of compressed data, but that data may not be enough for gzip FSM
            # to yield any uncompressed data. That's why current position may
            # not change after receiving a chunk.
            return

        self._http_chunk_splits.append(self.total_bytes)

        # wake up readchunk when end of http chunk received
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    async def _wait(self, func_name: str) -> None:
        # StreamReader uses a future to link the protocol feed_data() method
        # to a read coroutine. Running two read coroutines at the same time
        # would have an unexpected behaviour. It would not possible to know
        # which coroutine would get the next data.
        if self._waiter is not None:
            raise RuntimeError(
                "%s() called while another coroutine is "
                "already waiting for incoming data" % func_name
            )

        waiter = self._waiter = self._loop.create_future()
        try:
            if self._timer:
                with self._timer:
                    await waiter
            else:
                await waiter
        finally:
            self._waiter = None

    async def readline(self) -> bytes:
        return await self.readuntil()

    async def readuntil(self, separator: bytes = b"\n") -> bytes:
        """Read until *separator* (inclusive) or EOF; cap at the high watermark."""
        seplen = len(separator)
        if seplen == 0:
            raise ValueError("Separator should be at least one-byte string")

        if self._exception is not None:
            raise self._exception

        chunk = b""
        chunk_size = 0
        not_enough = True

        while not_enough:
            while self._buffer and not_enough:
                offset = self._buffer_offset
                ichar = self._buffer[0].find(separator, offset) + 1
                # Read from current offset to found separator or to the end.
                data = self._read_nowait_chunk(ichar - offset if ichar else -1)
                chunk += data
                chunk_size += len(data)
                if ichar:
                    not_enough = False

                if chunk_size > self._high_water:
                    raise ValueError("Chunk too big")

            if self._eof:
                break

            if not_enough:
                await self._wait("readuntil")

        return chunk

    async def read(self, n: int = -1) -> bytes:
        if self._exception is not None:
            raise self._exception

        # migration problem; with DataQueue you have to catch
        # EofStream exception, so common way is to run payload.read() inside
        # infinite loop. what can cause real infinite loop with StreamReader
        # lets keep this code one major release.
        if __debug__:
            if self._eof and not self._buffer:
                self._eof_counter = getattr(self, "_eof_counter", 0) + 1
                if self._eof_counter > 5:
                    internal_logger.warning(
                        "Multiple access to StreamReader in eof state, "
                        "might be infinite loop.",
                        stack_info=True,
                    )

        if not n:
            return b""

        if n < 0:
            # This used to just loop creating a new waiter hoping to
            # collect everything in self._buffer, but that would
            # deadlock if the subprocess sends more than self.limit
            # bytes. So just call self.readany() until EOF.
            blocks = []
            while True:
                block = await self.readany()
                if not block:
                    break
                blocks.append(block)
            return b"".join(blocks)

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("read")

        return self._read_nowait(n)

    async def readany(self) -> bytes:
        if self._exception is not None:
            raise self._exception

        # TODO: should be `if` instead of `while`
        # because waiter maybe triggered on chunk end,
        # without feeding any data
        while not self._buffer and not self._eof:
            await self._wait("readany")

        return self._read_nowait(-1)

    async def readchunk(self) -> Tuple[bytes, bool]:
        """Returns a tuple of (data, end_of_http_chunk).

        When chunked transfer
        encoding is used, end_of_http_chunk is a boolean indicating if the end
        of the data corresponds to the end of a HTTP chunk , otherwise it is
        always False.
        """
        while True:
            if self._exception is not None:
                raise self._exception

            while self._http_chunk_splits:
                pos = self._http_chunk_splits.pop(0)
                if pos == self._cursor:
                    return (b"", True)
                if pos > self._cursor:
                    return (self._read_nowait(pos - self._cursor), True)
                internal_logger.warning(
                    "Skipping HTTP chunk end due to data "
                    "consumption beyond chunk boundary"
                )

            if self._buffer:
                return (self._read_nowait_chunk(-1), False)
            # return (self._read_nowait(-1), False)

            if self._eof:
                # Special case for signifying EOF.
                # (b'', True) is not a final return value actually.
                return (b"", False)

            await self._wait("readchunk")

    async def readexactly(self, n: int) -> bytes:
        """Read exactly *n* bytes or raise IncompleteReadError with the partial data."""
        if self._exception is not None:
            raise self._exception

        blocks = []  # type: List[bytes]
        while n > 0:
            block = await self.read(n)
            if not block:
                partial = b"".join(blocks)
                raise asyncio.IncompleteReadError(partial, len(partial) + n)
            blocks.append(block)
            n -= len(block)

        return b"".join(blocks)

    def read_nowait(self, n: int = -1) -> bytes:
        # default was changed to be consistent with .read(-1)
        #
        # I believe the most users don't know about the method and
        # they are not affected.
        if self._exception is not None:
            raise self._exception

        if self._waiter and not self._waiter.done():
            raise RuntimeError(
                "Called while some coroutine is waiting for incoming data."
            )

        return self._read_nowait(n)

    def _read_nowait_chunk(self, n: int) -> bytes:
        """Pop up to *n* bytes from the head buffer entry (-1 = whole entry)."""
        first_buffer = self._buffer[0]
        offset = self._buffer_offset
        if n != -1 and len(first_buffer) - offset > n:
            data = first_buffer[offset : offset + n]
            self._buffer_offset += n

        elif offset:
            self._buffer.popleft()
            data = first_buffer[offset:]
            self._buffer_offset = 0

        else:
            data = self._buffer.popleft()

        self._size -= len(data)
        self._cursor += len(data)

        chunk_splits = self._http_chunk_splits
        # Prevent memory leak: drop useless chunk splits
        while chunk_splits and chunk_splits[0] < self._cursor:
            chunk_splits.pop(0)

        if self._size < self._low_water and self._protocol._reading_paused:
            self._protocol.resume_reading()
        return data

    def _read_nowait(self, n: int) -> bytes:
        """Read not more than n bytes, or whole buffer if n == -1"""
        chunks = []

        while self._buffer:
            chunk = self._read_nowait_chunk(n)
            chunks.append(chunk)
            if n != -1:
                n -= len(chunk)
                if n == 0:
                    break

        return b"".join(chunks) if chunks else b""
|
| 505 |
+
|
| 506 |
+
|
| 507 |
+
class EmptyStreamReader(StreamReader):  # lgtm [py/missing-call-to-init]
    """StreamReader that is permanently empty and at EOF.

    Backs the module-level EMPTY_PAYLOAD singleton.  __init__ deliberately
    skips StreamReader.__init__, so instances carry no protocol/loop/buffer
    state and every read-style method short-circuits.
    """

    def __init__(self) -> None:
        # Intentionally no super().__init__(): nothing to buffer.
        pass

    def exception(self) -> Optional[BaseException]:
        # Never carries an error.
        return None

    def set_exception(self, exc: BaseException) -> None:
        # Ignored: there is no state to poison.
        pass

    def on_eof(self, callback: Callable[[], None]) -> None:
        # Already at EOF, so run the callback immediately.
        try:
            callback()
        except Exception:
            internal_logger.exception("Exception in eof callback")

    def feed_eof(self) -> None:
        pass

    def is_eof(self) -> bool:
        return True

    def at_eof(self) -> bool:
        return True

    async def wait_eof(self) -> None:
        return

    def feed_data(self, data: bytes, n: int = 0) -> None:
        # Fed data is silently discarded.
        pass

    async def readline(self) -> bytes:
        return b""

    async def read(self, n: int = -1) -> bytes:
        return b""

    # TODO add async def readuntil

    async def readany(self) -> bytes:
        return b""

    async def readchunk(self) -> Tuple[bytes, bool]:
        return (b"", True)

    async def readexactly(self, n: int) -> bytes:
        # Any positive request necessarily fails with zero bytes read.
        raise asyncio.IncompleteReadError(b"", n)

    def read_nowait(self, n: int = -1) -> bytes:
        return b""
|
| 557 |
+
|
| 558 |
+
|
| 559 |
+
# Shared singleton payload used wherever a message has no body.
EMPTY_PAYLOAD: Final[StreamReader] = EmptyStreamReader()
|
| 560 |
+
|
| 561 |
+
|
| 562 |
+
class DataQueue(Generic[_T]):
    """DataQueue is a general-purpose blocking queue with one reader."""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        self._eof = False
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._exception = None  # type: Optional[BaseException]
        self._size = 0
        self._buffer = collections.deque()  # type: Deque[Tuple[_T, int]]

    def __len__(self) -> int:
        return len(self._buffer)

    def is_eof(self) -> bool:
        """True once feed_eof() was called."""
        return self._eof

    def at_eof(self) -> bool:
        """True once feed_eof() was called and the buffer is drained."""
        return self._eof and not self._buffer

    def exception(self) -> Optional[BaseException]:
        return self._exception

    def _wakeup_reader(self) -> None:
        # Resolve the pending read() future, if a reader is blocked.
        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_result(waiter, None)

    def set_exception(self, exc: BaseException) -> None:
        """Poison the queue; a blocked reader receives *exc* immediately."""
        self._eof = True
        self._exception = exc

        waiter = self._waiter
        if waiter is not None:
            self._waiter = None
            set_exception(waiter, exc)

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue one item (accounting *size* bytes) and wake the reader."""
        self._size += size
        self._buffer.append((data, size))
        self._wakeup_reader()

    def feed_eof(self) -> None:
        """Mark end of stream and wake the reader."""
        self._eof = True
        self._wakeup_reader()

    async def read(self) -> _T:
        """Dequeue the next item, blocking until data, EOF, or an error."""
        if not self._buffer and not self._eof:
            assert not self._waiter
            self._waiter = self._loop.create_future()
            try:
                await self._waiter
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._waiter = None
                raise

        if not self._buffer:
            # Drained and finished: surface the stored error, else EOF.
            if self._exception is not None:
                raise self._exception
            raise EofStream

        payload, nbytes = self._buffer.popleft()
        self._size -= nbytes
        return payload

    def __aiter__(self) -> AsyncStreamIterator[_T]:
        return AsyncStreamIterator(self.read)
|
| 633 |
+
|
| 634 |
+
|
| 635 |
+
class FlowControlDataQueue(DataQueue[_T]):
    """FlowControlDataQueue resumes and pauses an underlying stream.

    It is a destination for parsed data.
    """

    def __init__(
        self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
    ) -> None:
        super().__init__(loop=loop)

        self._protocol = protocol
        # Pause the transport once buffered bytes exceed twice the limit.
        self._limit = limit * 2

    def feed_data(self, data: _T, size: int = 0) -> None:
        """Enqueue an item, pausing the protocol when over the limit."""
        super().feed_data(data, size)

        over_limit = self._size > self._limit
        if over_limit and not self._protocol._reading_paused:
            self._protocol.pause_reading()

    async def read(self) -> _T:
        """Dequeue an item, resuming the protocol once under the limit."""
        try:
            return await super().read()
        finally:
            if self._protocol._reading_paused and self._size < self._limit:
                self._protocol.resume_reading()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/tracing.py
ADDED
|
@@ -0,0 +1,472 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from types import SimpleNamespace
|
| 2 |
+
from typing import TYPE_CHECKING, Awaitable, Optional, Type, TypeVar
|
| 3 |
+
|
| 4 |
+
import attr
|
| 5 |
+
from aiosignal import Signal
|
| 6 |
+
from multidict import CIMultiDict
|
| 7 |
+
from yarl import URL
|
| 8 |
+
|
| 9 |
+
from .client_reqrep import ClientResponse
|
| 10 |
+
|
| 11 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 12 |
+
from .client import ClientSession
|
| 13 |
+
from .typedefs import Protocol
|
| 14 |
+
|
| 15 |
+
_ParamT_contra = TypeVar("_ParamT_contra", contravariant=True)


class _SignalCallback(Protocol[_ParamT_contra]):
    # Structural type of a tracing listener: an async callable invoked with
    # (client_session, trace_config_ctx, params) that returns an awaitable.
    # NOTE(review): this block sits after TYPE_CHECKING-guarded imports of
    # ClientSession/Protocol — presumably it is inside that guard in the
    # original file (type-checking only); confirm indentation on merge.
    def __call__(
        self,
        __client_session: ClientSession,
        __trace_config_ctx: SimpleNamespace,
        __params: _ParamT_contra,
    ) -> Awaitable[None]:
        ...
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
# Public API of this module; keep in sync with the TraceConfig properties and
# the params classes defined below.
__all__ = (
    "TraceConfig",
    "TraceRequestStartParams",
    "TraceRequestEndParams",
    "TraceRequestExceptionParams",
    "TraceConnectionQueuedStartParams",
    "TraceConnectionQueuedEndParams",
    "TraceConnectionCreateStartParams",
    "TraceConnectionCreateEndParams",
    "TraceConnectionReuseconnParams",
    "TraceDnsResolveHostStartParams",
    "TraceDnsResolveHostEndParams",
    "TraceDnsCacheHitParams",
    "TraceDnsCacheMissParams",
    "TraceRequestRedirectParams",
    "TraceRequestChunkSentParams",
    "TraceResponseChunkReceivedParams",
    "TraceRequestHeadersSentParams",
)
|
| 46 |
+
|
| 47 |
+
|
| 48 |
+
class TraceConfig:
    """First-class used to trace requests launched via ClientSession objects."""

    def __init__(
        self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace
    ) -> None:
        # One Signal per traceable event.  Handlers are attached through the
        # matching public property; the signals are fired by the internal
        # Trace helper class defined below.
        self._on_request_start = Signal(self)  # type: Signal[_SignalCallback[TraceRequestStartParams]]
        self._on_request_chunk_sent = Signal(self)  # type: Signal[_SignalCallback[TraceRequestChunkSentParams]]
        self._on_response_chunk_received = Signal(self)  # type: Signal[_SignalCallback[TraceResponseChunkReceivedParams]]
        self._on_request_end = Signal(self)  # type: Signal[_SignalCallback[TraceRequestEndParams]]
        self._on_request_exception = Signal(self)  # type: Signal[_SignalCallback[TraceRequestExceptionParams]]
        self._on_request_redirect = Signal(self)  # type: Signal[_SignalCallback[TraceRequestRedirectParams]]
        self._on_connection_queued_start = Signal(self)  # type: Signal[_SignalCallback[TraceConnectionQueuedStartParams]]
        self._on_connection_queued_end = Signal(self)  # type: Signal[_SignalCallback[TraceConnectionQueuedEndParams]]
        self._on_connection_create_start = Signal(self)  # type: Signal[_SignalCallback[TraceConnectionCreateStartParams]]
        self._on_connection_create_end = Signal(self)  # type: Signal[_SignalCallback[TraceConnectionCreateEndParams]]
        self._on_connection_reuseconn = Signal(self)  # type: Signal[_SignalCallback[TraceConnectionReuseconnParams]]
        self._on_dns_resolvehost_start = Signal(self)  # type: Signal[_SignalCallback[TraceDnsResolveHostStartParams]]
        self._on_dns_resolvehost_end = Signal(self)  # type: Signal[_SignalCallback[TraceDnsResolveHostEndParams]]
        self._on_dns_cache_hit = Signal(self)  # type: Signal[_SignalCallback[TraceDnsCacheHitParams]]
        self._on_dns_cache_miss = Signal(self)  # type: Signal[_SignalCallback[TraceDnsCacheMissParams]]
        self._on_request_headers_sent = Signal(self)  # type: Signal[_SignalCallback[TraceRequestHeadersSentParams]]

        self._trace_config_ctx_factory = trace_config_ctx_factory

    def trace_config_ctx(
        self, trace_request_ctx: Optional[SimpleNamespace] = None
    ) -> SimpleNamespace:
        """Return a new trace_config_ctx instance."""
        return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)

    def freeze(self) -> None:
        """Freeze every signal so that no further handlers can be attached."""
        all_signals = (
            self._on_request_start,
            self._on_request_chunk_sent,
            self._on_response_chunk_received,
            self._on_request_end,
            self._on_request_exception,
            self._on_request_redirect,
            self._on_connection_queued_start,
            self._on_connection_queued_end,
            self._on_connection_create_start,
            self._on_connection_create_end,
            self._on_connection_reuseconn,
            self._on_dns_resolvehost_start,
            self._on_dns_resolvehost_end,
            self._on_dns_cache_hit,
            self._on_dns_cache_miss,
            self._on_request_headers_sent,
        )
        for signal in all_signals:
            signal.freeze()

    # Read-only accessors: one per event, used by callers to register handlers.

    @property
    def on_request_start(self) -> "Signal[_SignalCallback[TraceRequestStartParams]]":
        return self._on_request_start

    @property
    def on_request_chunk_sent(self) -> "Signal[_SignalCallback[TraceRequestChunkSentParams]]":
        return self._on_request_chunk_sent

    @property
    def on_response_chunk_received(self) -> "Signal[_SignalCallback[TraceResponseChunkReceivedParams]]":
        return self._on_response_chunk_received

    @property
    def on_request_end(self) -> "Signal[_SignalCallback[TraceRequestEndParams]]":
        return self._on_request_end

    @property
    def on_request_exception(self) -> "Signal[_SignalCallback[TraceRequestExceptionParams]]":
        return self._on_request_exception

    @property
    def on_request_redirect(self) -> "Signal[_SignalCallback[TraceRequestRedirectParams]]":
        return self._on_request_redirect

    @property
    def on_connection_queued_start(self) -> "Signal[_SignalCallback[TraceConnectionQueuedStartParams]]":
        return self._on_connection_queued_start

    @property
    def on_connection_queued_end(self) -> "Signal[_SignalCallback[TraceConnectionQueuedEndParams]]":
        return self._on_connection_queued_end

    @property
    def on_connection_create_start(self) -> "Signal[_SignalCallback[TraceConnectionCreateStartParams]]":
        return self._on_connection_create_start

    @property
    def on_connection_create_end(self) -> "Signal[_SignalCallback[TraceConnectionCreateEndParams]]":
        return self._on_connection_create_end

    @property
    def on_connection_reuseconn(self) -> "Signal[_SignalCallback[TraceConnectionReuseconnParams]]":
        return self._on_connection_reuseconn

    @property
    def on_dns_resolvehost_start(self) -> "Signal[_SignalCallback[TraceDnsResolveHostStartParams]]":
        return self._on_dns_resolvehost_start

    @property
    def on_dns_resolvehost_end(self) -> "Signal[_SignalCallback[TraceDnsResolveHostEndParams]]":
        return self._on_dns_resolvehost_end

    @property
    def on_dns_cache_hit(self) -> "Signal[_SignalCallback[TraceDnsCacheHitParams]]":
        return self._on_dns_cache_hit

    @property
    def on_dns_cache_miss(self) -> "Signal[_SignalCallback[TraceDnsCacheMissParams]]":
        return self._on_dns_cache_miss

    @property
    def on_request_headers_sent(self) -> "Signal[_SignalCallback[TraceRequestHeadersSentParams]]":
        return self._on_request_headers_sent
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
# Immutable event payloads.  Each class below is delivered as the third
# positional argument to listeners of the correspondingly named signal.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestStartParams:
    """Parameters sent by the `on_request_start` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestChunkSentParams:
    """Parameters sent by the `on_request_chunk_sent` signal"""

    method: str
    url: URL
    chunk: bytes


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceResponseChunkReceivedParams:
    """Parameters sent by the `on_response_chunk_received` signal"""

    method: str
    url: URL
    chunk: bytes


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestEndParams:
    """Parameters sent by the `on_request_end` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    response: ClientResponse


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestExceptionParams:
    """Parameters sent by the `on_request_exception` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    exception: BaseException


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestRedirectParams:
    """Parameters sent by the `on_request_redirect` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
    response: ClientResponse


# The connection-lifecycle events below carry no data; the empty params
# classes exist so every signal has a uniform (session, ctx, params) shape.
@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedStartParams:
    """Parameters sent by the `on_connection_queued_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionQueuedEndParams:
    """Parameters sent by the `on_connection_queued_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateStartParams:
    """Parameters sent by the `on_connection_create_start` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionCreateEndParams:
    """Parameters sent by the `on_connection_create_end` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceConnectionReuseconnParams:
    """Parameters sent by the `on_connection_reuseconn` signal"""


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostStartParams:
    """Parameters sent by the `on_dns_resolvehost_start` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsResolveHostEndParams:
    """Parameters sent by the `on_dns_resolvehost_end` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheHitParams:
    """Parameters sent by the `on_dns_cache_hit` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceDnsCacheMissParams:
    """Parameters sent by the `on_dns_cache_miss` signal"""

    host: str


@attr.s(auto_attribs=True, frozen=True, slots=True)
class TraceRequestHeadersSentParams:
    """Parameters sent by the `on_request_headers_sent` signal"""

    method: str
    url: URL
    headers: "CIMultiDict[str]"
|
| 335 |
+
|
| 336 |
+
|
| 337 |
+
class Trace:
    """Internal dependency holder class.

    Used to keep together the main dependencies used
    at the moment of send a signal.

    Each ``send_*`` coroutine builds the matching params object and fires
    the corresponding signal on the attached :class:`TraceConfig`, passing
    ``(session, trace_config_ctx, params)`` to every registered listener.
    """

    def __init__(
        self,
        session: "ClientSession",
        trace_config: TraceConfig,
        trace_config_ctx: SimpleNamespace,
    ) -> None:
        self._trace_config = trace_config
        self._trace_config_ctx = trace_config_ctx
        self._session = session

    async def send_request_start(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        """Fire the ``on_request_start`` signal."""
        return await self._trace_config.on_request_start.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestStartParams(method, url, headers),
        )

    async def send_request_chunk_sent(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        """Fire the ``on_request_chunk_sent`` signal."""
        return await self._trace_config.on_request_chunk_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestChunkSentParams(method, url, chunk),
        )

    async def send_response_chunk_received(
        self, method: str, url: URL, chunk: bytes
    ) -> None:
        """Fire the ``on_response_chunk_received`` signal."""
        return await self._trace_config.on_response_chunk_received.send(
            self._session,
            self._trace_config_ctx,
            TraceResponseChunkReceivedParams(method, url, chunk),
        )

    async def send_request_end(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        """Fire the ``on_request_end`` signal."""
        return await self._trace_config.on_request_end.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestEndParams(method, url, headers, response),
        )

    async def send_request_exception(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        exception: BaseException,
    ) -> None:
        """Fire the ``on_request_exception`` signal."""
        return await self._trace_config.on_request_exception.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestExceptionParams(method, url, headers, exception),
        )

    async def send_request_redirect(
        self,
        method: str,
        url: URL,
        headers: "CIMultiDict[str]",
        response: ClientResponse,
    ) -> None:
        """Fire the ``on_request_redirect`` signal."""
        # Consistency fix: go through the public property like every other
        # send_* helper instead of the private ``_on_request_redirect``
        # attribute (the property returns the same Signal object).
        return await self._trace_config.on_request_redirect.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestRedirectParams(method, url, headers, response),
        )

    async def send_connection_queued_start(self) -> None:
        """Fire the ``on_connection_queued_start`` signal."""
        return await self._trace_config.on_connection_queued_start.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedStartParams()
        )

    async def send_connection_queued_end(self) -> None:
        """Fire the ``on_connection_queued_end`` signal."""
        return await self._trace_config.on_connection_queued_end.send(
            self._session, self._trace_config_ctx, TraceConnectionQueuedEndParams()
        )

    async def send_connection_create_start(self) -> None:
        """Fire the ``on_connection_create_start`` signal."""
        return await self._trace_config.on_connection_create_start.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateStartParams()
        )

    async def send_connection_create_end(self) -> None:
        """Fire the ``on_connection_create_end`` signal."""
        return await self._trace_config.on_connection_create_end.send(
            self._session, self._trace_config_ctx, TraceConnectionCreateEndParams()
        )

    async def send_connection_reuseconn(self) -> None:
        """Fire the ``on_connection_reuseconn`` signal."""
        return await self._trace_config.on_connection_reuseconn.send(
            self._session, self._trace_config_ctx, TraceConnectionReuseconnParams()
        )

    async def send_dns_resolvehost_start(self, host: str) -> None:
        """Fire the ``on_dns_resolvehost_start`` signal."""
        return await self._trace_config.on_dns_resolvehost_start.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostStartParams(host)
        )

    async def send_dns_resolvehost_end(self, host: str) -> None:
        """Fire the ``on_dns_resolvehost_end`` signal."""
        return await self._trace_config.on_dns_resolvehost_end.send(
            self._session, self._trace_config_ctx, TraceDnsResolveHostEndParams(host)
        )

    async def send_dns_cache_hit(self, host: str) -> None:
        """Fire the ``on_dns_cache_hit`` signal."""
        return await self._trace_config.on_dns_cache_hit.send(
            self._session, self._trace_config_ctx, TraceDnsCacheHitParams(host)
        )

    async def send_dns_cache_miss(self, host: str) -> None:
        """Fire the ``on_dns_cache_miss`` signal."""
        return await self._trace_config.on_dns_cache_miss.send(
            self._session, self._trace_config_ctx, TraceDnsCacheMissParams(host)
        )

    async def send_request_headers(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        """Fire the ``on_request_headers_sent`` signal."""
        # Consistency fix: use the public ``on_request_headers_sent``
        # property rather than the private attribute.
        return await self._trace_config.on_request_headers_sent.send(
            self._session,
            self._trace_config_ctx,
            TraceRequestHeadersSentParams(method, url, headers),
        )
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/typedefs.py
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
from typing import (
|
| 5 |
+
TYPE_CHECKING,
|
| 6 |
+
Any,
|
| 7 |
+
Awaitable,
|
| 8 |
+
Callable,
|
| 9 |
+
Iterable,
|
| 10 |
+
Mapping,
|
| 11 |
+
Tuple,
|
| 12 |
+
Union,
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
|
| 16 |
+
from yarl import URL
|
| 17 |
+
|
| 18 |
+
# These are for other modules to use (to avoid repeating the conditional import).
if sys.version_info >= (3, 8):
    # "X as X" re-export form keeps the names visible to strict type checkers.
    from typing import Final as Final, Protocol as Protocol, TypedDict as TypedDict
else:
    from typing_extensions import (  # noqa: F401
        Final,
        Protocol as Protocol,
        TypedDict as TypedDict,
    )

DEFAULT_JSON_ENCODER = json.dumps
DEFAULT_JSON_DECODER = json.loads

if TYPE_CHECKING:  # pragma: no cover
    # Subscripted multidict aliases are only valid for the type checker;
    # at runtime (Python 3.8-era multidict) plain classes are used instead.
    _CIMultiDict = CIMultiDict[str]
    _CIMultiDictProxy = CIMultiDictProxy[str]
    _MultiDict = MultiDict[str]
    _MultiDictProxy = MultiDictProxy[str]
    from http.cookies import BaseCookie, Morsel

    from .web import Request, StreamResponse
else:
    _CIMultiDict = CIMultiDict
    _CIMultiDictProxy = CIMultiDictProxy
    _MultiDict = MultiDict
    _MultiDictProxy = MultiDictProxy

Byteish = Union[bytes, bytearray, memoryview]
JSONEncoder = Callable[[Any], str]
JSONDecoder = Callable[[str], Any]
LooseHeaders = Union[Mapping[Union[str, istr], str], _CIMultiDict, _CIMultiDictProxy]
RawHeaders = Tuple[Tuple[bytes, bytes], ...]
StrOrURL = Union[str, URL]

# Cookie inputs accepted by the client API: a mapping, an iterable of pairs,
# or a BaseCookie (names are forward references to TYPE_CHECKING-only imports).
LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
LooseCookiesIterables = Iterable[
    Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]]
]
LooseCookies = Union[
    LooseCookiesMappings,
    LooseCookiesIterables,
    "BaseCookie[str]",
]

# Signature of a web handler: async callable taking a Request, returning a
# StreamResponse.
Handler = Callable[["Request"], Awaitable["StreamResponse"]]

PathLike = Union[str, "os.PathLike[str]"]
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_middlewares.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
|
| 3 |
+
|
| 4 |
+
from .typedefs import Handler
|
| 5 |
+
from .web_exceptions import HTTPPermanentRedirect, _HTTPMove
|
| 6 |
+
from .web_request import Request
|
| 7 |
+
from .web_response import StreamResponse
|
| 8 |
+
from .web_urldispatcher import SystemRoute
|
| 9 |
+
|
| 10 |
+
# Public API of this module (helpers prefixed with "_" stay internal).
__all__ = (
    "middleware",
    "normalize_path_middleware",
)
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING: # pragma: no cover
|
| 16 |
+
from .web_app import Application
|
| 17 |
+
|
| 18 |
+
# Placeholder for the decorated callable's type so ``middleware`` returns
# its argument unchanged as far as type checkers are concerned.
_Func = TypeVar("_Func")
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]:
    """Probe whether *path* resolves against the application's router.

    Returns ``(True, cloned_request)`` when a clone of *request* rewritten
    to *path* matches a route without an HTTP error, otherwise
    ``(False, original_request)``.
    """
    candidate = request.clone(rel_url=path)

    resolved = await request.app.router.resolve(candidate)
    candidate._match_info = resolved

    if resolved.http_exception is not None:
        return False, request

    return True, candidate
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
def middleware(f: _Func) -> _Func:
    """Tag *f* as a new-style (version 1) middleware and return it unchanged."""
    # Version marker inspected by the application setup machinery.
    setattr(f, "__middleware_version__", 1)
    return f
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
# Signature of a new-style middleware: async callable of (request, handler).
_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def normalize_path_middleware(
    *,
    append_slash: bool = True,
    remove_slash: bool = False,
    merge_slashes: bool = True,
    redirect_class: Type[_HTTPMove] = HTTPPermanentRedirect,
) -> _Middleware:
    """Factory for producing a middleware that normalizes the path of a request.

    Normalizing means:
        - Add or remove a trailing slash to the path.
        - Double slashes are replaced by one.

    The middleware returns as soon as it finds a path that resolves
    correctly. The order if both merge and append/remove are enabled is
        1) merge slashes
        2) append/remove slash
        3) both merge slashes and append/remove slash.
    If the path resolves with at least one of those conditions, it will
    redirect to the new path.

    Only one of `append_slash` and `remove_slash` can be enabled. If both
    are `True` the factory will raise an assertion error

    If `append_slash` is `True` the middleware will append a slash when
    needed. If a resource is defined with trailing slash and the request
    comes without it, it will append it automatically.

    If `remove_slash` is `True`, `append_slash` must be `False`. When enabled
    the middleware will remove trailing slashes and redirect if the resource
    is defined

    If merge_slashes is True, merge multiple consecutive slashes in the
    path into one.
    """
    correct_configuration = not (append_slash and remove_slash)
    assert correct_configuration, "Cannot both remove and append slash"

    @middleware
    async def impl(request: Request, handler: Handler) -> StreamResponse:
        # Only attempt normalization when routing already failed (a
        # SystemRoute match means e.g. 404/405 rather than a real handler).
        if isinstance(request.match_info.route, SystemRoute):
            paths_to_check = []
            # Split off the query string so it can be re-attached verbatim
            # to whichever normalized path ends up resolving.
            if "?" in request.raw_path:
                path, query = request.raw_path.split("?", 1)
                query = "?" + query
            else:
                query = ""
                path = request.raw_path

            # Candidates are tried in the documented order: merged slashes
            # first, then the slash-appended/removed form, then both.
            if merge_slashes:
                paths_to_check.append(re.sub("//+", "/", path))
            if append_slash and not request.path.endswith("/"):
                paths_to_check.append(path + "/")
            if remove_slash and request.path.endswith("/"):
                paths_to_check.append(path[:-1])
            if merge_slashes and append_slash:
                paths_to_check.append(re.sub("//+", "/", path + "/"))
            if merge_slashes and remove_slash:
                merged_slashes = re.sub("//+", "/", path)
                paths_to_check.append(merged_slashes[:-1])

            for path in paths_to_check:
                # Collapse leading slashes so a redirect target can never be
                # read as a scheme-relative URL ("//evil.example").
                path = re.sub("^//+", "/", path)  # SECURITY: GHSA-v6wp-4m6f-gcjg
                resolves, request = await _check_request_resolves(request, path)
                if resolves:
                    # _check_request_resolves returned the rewritten clone,
                    # so raw_path here is the normalized path.
                    raise redirect_class(request.raw_path + query)

        return await handler(request)

    return impl
|
| 111 |
+
|
| 112 |
+
|
| 113 |
+
def _fix_request_current_app(app: "Application") -> _Middleware:
    """Return a middleware that pins *app* as the request's current app
    while its handler runs (used for sub-application dispatch)."""

    @middleware
    async def _pin_app(request: Request, handler: Handler) -> StreamResponse:
        with request.match_info.set_current_app(app):
            return await handler(request)

    return _pin_app
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_protocol.py
ADDED
|
@@ -0,0 +1,681 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import asyncio.streams
|
| 3 |
+
import traceback
|
| 4 |
+
import warnings
|
| 5 |
+
from collections import deque
|
| 6 |
+
from contextlib import suppress
|
| 7 |
+
from html import escape as html_escape
|
| 8 |
+
from http import HTTPStatus
|
| 9 |
+
from logging import Logger
|
| 10 |
+
from typing import (
|
| 11 |
+
TYPE_CHECKING,
|
| 12 |
+
Any,
|
| 13 |
+
Awaitable,
|
| 14 |
+
Callable,
|
| 15 |
+
Deque,
|
| 16 |
+
Optional,
|
| 17 |
+
Sequence,
|
| 18 |
+
Tuple,
|
| 19 |
+
Type,
|
| 20 |
+
Union,
|
| 21 |
+
cast,
|
| 22 |
+
)
|
| 23 |
+
|
| 24 |
+
import attr
|
| 25 |
+
import yarl
|
| 26 |
+
|
| 27 |
+
from .abc import AbstractAccessLogger, AbstractStreamWriter
|
| 28 |
+
from .base_protocol import BaseProtocol
|
| 29 |
+
from .helpers import ceil_timeout
|
| 30 |
+
from .http import (
|
| 31 |
+
HttpProcessingError,
|
| 32 |
+
HttpRequestParser,
|
| 33 |
+
HttpVersion10,
|
| 34 |
+
RawRequestMessage,
|
| 35 |
+
StreamWriter,
|
| 36 |
+
)
|
| 37 |
+
from .log import access_logger, server_logger
|
| 38 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
| 39 |
+
from .tcp_helpers import tcp_keepalive
|
| 40 |
+
from .web_exceptions import HTTPException
|
| 41 |
+
from .web_log import AccessLogger
|
| 42 |
+
from .web_request import BaseRequest
|
| 43 |
+
from .web_response import Response, StreamResponse
|
| 44 |
+
|
| 45 |
+
# Public API of this module.
__all__ = ("RequestHandler", "RequestPayloadError", "PayloadAccessError")

if TYPE_CHECKING:  # pragma: no cover
    # Imported only for type checking to avoid a runtime import cycle.
    from .web_server import Server
|
| 49 |
+
|
| 50 |
+
|
| 51 |
+
# Factory building a BaseRequest from a parsed message.
# Arguments: (message, payload, protocol, writer, handler task).
_RequestFactory = Callable[
    [
        RawRequestMessage,
        StreamReader,
        "RequestHandler",
        AbstractStreamWriter,
        "asyncio.Task[None]",
    ],
    BaseRequest,
]

# Application-level request handler coroutine signature.
_RequestHandler = Callable[[BaseRequest], Awaitable[StreamResponse]]

# Placeholder message substituted when request parsing fails, so the
# normal request pipeline (request_factory + error handler) can still run.
ERROR = RawRequestMessage(
    "UNKNOWN",
    "/",
    HttpVersion10,
    {},  # type: ignore[arg-type]
    {},  # type: ignore[arg-type]
    True,
    None,
    False,
    False,
    yarl.URL("/"),
)
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class RequestPayloadError(Exception):
    """Payload parsing error.

    Used as ``payload_exception`` for the request parser, so malformed
    request bodies surface as this type while being read.
    """
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class PayloadAccessError(Exception):
    """Payload was accessed after response was sent.

    Set on the request payload stream once its handler finishes, so late
    reads fail loudly instead of hanging.
    """
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class _ErrInfo:
    """Details of a request-parsing failure, queued in place of a parsed
    message so it can be reported through the normal handler pipeline."""

    status: int  # HTTP status code to respond with (e.g. 400)
    exc: BaseException  # the original parser exception
    message: str  # human-readable error text


# A queued item: either a parsed request or a parse error, plus its payload.
_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader]
|
| 94 |
+
|
| 95 |
+
|
| 96 |
+
class RequestHandler(BaseProtocol):
    """HTTP protocol implementation.

    RequestHandler handles incoming HTTP request. It reads request line,
    request headers and request payload and calls handle_request() method.
    By default it always returns with 404 response.

    RequestHandler handles errors in incoming request, like bad
    status line, bad headers or incomplete payload. If any error occurs,
    connection gets closed.

    keepalive_timeout -- number of seconds before closing
                         keep-alive connection

    tcp_keepalive -- TCP keep-alive is on, default is on

    debug -- enable debug mode

    logger -- custom logger object

    access_log_class -- custom class for access_logger

    access_log -- custom logging object

    access_log_format -- access log format string

    loop -- Optional event loop

    max_line_size -- Optional maximum header line size

    max_field_size -- Optional maximum header field size

    max_headers -- Optional maximum header size

    """

    # Interval (seconds) at which _process_keepalive() re-checks a
    # connection that is not yet idle before enforcing the timeout.
    KEEPALIVE_RESCHEDULE_DELAY = 1

    __slots__ = (
        "_request_count",
        "_keepalive",
        "_manager",
        "_request_handler",
        "_request_factory",
        "_tcp_keepalive",
        "_keepalive_time",
        "_keepalive_handle",
        "_keepalive_timeout",
        "_lingering_time",
        "_messages",
        "_message_tail",
        "_waiter",
        "_task_handler",
        "_upgrade",
        "_payload_parser",
        "_request_parser",
        "_reading_paused",
        "logger",
        "debug",
        "access_log",
        "access_logger",
        "_close",
        "_force_close",
        "_current_request",
    )

    def __init__(
        self,
        manager: "Server",
        *,
        loop: asyncio.AbstractEventLoop,
        keepalive_timeout: float = 75.0,  # NGINX default is 75 secs
        tcp_keepalive: bool = True,
        logger: Logger = server_logger,
        access_log_class: Type[AbstractAccessLogger] = AccessLogger,
        access_log: Logger = access_logger,
        access_log_format: str = AccessLogger.LOG_FORMAT,
        debug: bool = False,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lingering_time: float = 10.0,
        read_bufsize: int = 2 ** 16,
        auto_decompress: bool = True,
    ):
        super().__init__(loop)

        self._request_count = 0
        self._keepalive = False
        # Request currently being processed by the handler task (if any);
        # used by shutdown()/connection_lost() to cancel it.
        self._current_request = None  # type: Optional[BaseRequest]
        self._manager = manager  # type: Optional[Server]
        self._request_handler: Optional[_RequestHandler] = manager.request_handler
        self._request_factory: Optional[_RequestFactory] = manager.request_factory

        self._tcp_keepalive = tcp_keepalive
        # placeholder to be replaced on keepalive timeout setup
        self._keepalive_time = 0.0
        self._keepalive_handle = None  # type: Optional[asyncio.Handle]
        self._keepalive_timeout = keepalive_timeout
        self._lingering_time = float(lingering_time)

        # Queue of parsed-but-not-yet-handled pipelined messages.
        self._messages: Deque[_MsgType] = deque()
        self._message_tail = b""

        # Future the start() loop awaits while idle between requests.
        self._waiter = None  # type: Optional[asyncio.Future[None]]
        self._task_handler = None  # type: Optional[asyncio.Task[None]]

        self._upgrade = False
        self._payload_parser = None  # type: Any
        self._request_parser = HttpRequestParser(
            self,
            loop,
            read_bufsize,
            max_line_size=max_line_size,
            max_field_size=max_field_size,
            max_headers=max_headers,
            payload_exception=RequestPayloadError,
            auto_decompress=auto_decompress,
        )  # type: Optional[HttpRequestParser]

        self.logger = logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger = access_log_class(
                access_log, access_log_format
            )  # type: Optional[AbstractAccessLogger]
        else:
            self.access_logger = None

        self._close = False
        self._force_close = False

    def __repr__(self) -> str:
        return "<{} {}>".format(
            self.__class__.__name__,
            "connected" if self.transport is not None else "disconnected",
        )

    @property
    def keepalive_timeout(self) -> float:
        """Seconds an idle keep-alive connection stays open."""
        return self._keepalive_timeout

    async def shutdown(self, timeout: Optional[float] = 15.0) -> None:
        """Do worker process exit preparations.

        We need to clean up everything and stop accepting requests.
        It is especially important for keep-alive connections.
        """
        self._force_close = True

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._waiter:
            self._waiter.cancel()

        # wait for handlers
        with suppress(asyncio.CancelledError, asyncio.TimeoutError):
            async with ceil_timeout(timeout):
                if self._current_request is not None:
                    self._current_request._cancel(asyncio.CancelledError())

                if self._task_handler is not None and not self._task_handler.done():
                    await self._task_handler

        # force-close non-idle handler
        if self._task_handler is not None:
            self._task_handler.cancel()

        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """asyncio protocol callback: start the request-processing task."""
        super().connection_made(transport)

        real_transport = cast(asyncio.Transport, transport)
        if self._tcp_keepalive:
            tcp_keepalive(real_transport)

        self._task_handler = self._loop.create_task(self.start())
        assert self._manager is not None
        self._manager.connection_made(self, real_transport)

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        """asyncio protocol callback: tear down all per-connection state."""
        if self._manager is None:
            return
        self._manager.connection_lost(self, exc)

        super().connection_lost(exc)

        # Drop references so the handler loop cannot continue.
        self._manager = None
        self._force_close = True
        self._request_factory = None
        self._request_handler = None
        self._request_parser = None

        if self._keepalive_handle is not None:
            self._keepalive_handle.cancel()

        if self._current_request is not None:
            if exc is None:
                exc = ConnectionResetError("Connection lost")
            self._current_request._cancel(exc)

        if self._task_handler is not None:
            self._task_handler.cancel()
        if self._waiter is not None:
            self._waiter.cancel()

        self._task_handler = None

        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None

    def set_parser(self, parser: Any) -> None:
        """Install a payload parser and replay any buffered tail bytes."""
        # Actual type is WebReader
        assert self._payload_parser is None

        self._payload_parser = parser

        if self._message_tail:
            self._payload_parser.feed_data(self._message_tail)
            self._message_tail = b""

    def eof_received(self) -> None:
        pass

    def data_received(self, data: bytes) -> None:
        """asyncio protocol callback: feed bytes into the active parser."""
        if self._force_close or self._close:
            return
        # parse http messages
        messages: Sequence[_MsgType]
        if self._payload_parser is None and not self._upgrade:
            assert self._request_parser is not None
            try:
                messages, upgraded, tail = self._request_parser.feed_data(data)
            except HttpProcessingError as exc:
                # Parsing failed: queue an error marker so the handler loop
                # can emit a 400 response through the normal pipeline.
                messages = [
                    (_ErrInfo(status=400, exc=exc, message=exc.message), EMPTY_PAYLOAD)
                ]
                upgraded = False
                tail = b""

            for msg, payload in messages or ():
                self._request_count += 1
                self._messages.append((msg, payload))

            waiter = self._waiter
            if messages and waiter is not None and not waiter.done():
                # don't set result twice
                waiter.set_result(None)

            self._upgrade = upgraded
            if upgraded and tail:
                self._message_tail = tail

        # no parser, just store
        elif self._payload_parser is None and self._upgrade and data:
            self._message_tail += data

        # feed payload
        elif data:
            eof, tail = self._payload_parser.feed_data(data)
            if eof:
                self.close()

    def keep_alive(self, val: bool) -> None:
        """Set keep-alive connection mode.

        :param bool val: new state.
        """
        self._keepalive = val
        if self._keepalive_handle:
            self._keepalive_handle.cancel()
            self._keepalive_handle = None

    def close(self) -> None:
        """Close connection.

        Stop accepting new pipelining messages and close
        connection when handlers done processing messages.
        """
        self._close = True
        if self._waiter:
            self._waiter.cancel()

    def force_close(self) -> None:
        """Forcefully close connection."""
        self._force_close = True
        if self._waiter:
            self._waiter.cancel()
        if self.transport is not None:
            self.transport.close()
            self.transport = None

    def log_access(
        self, request: BaseRequest, response: StreamResponse, time: float
    ) -> None:
        """Emit an access-log record if access logging is enabled."""
        if self.access_logger is not None:
            self.access_logger.log(request, response, self._loop.time() - time)

    def log_debug(self, *args: Any, **kw: Any) -> None:
        """Log at debug level, but only when debug mode is on."""
        if self.debug:
            self.logger.debug(*args, **kw)

    def log_exception(self, *args: Any, **kw: Any) -> None:
        self.logger.exception(*args, **kw)

    def _process_keepalive(self) -> None:
        """Timer callback: close the connection once keep-alive expires."""
        if self._force_close or not self._keepalive:
            return

        next = self._keepalive_time + self._keepalive_timeout

        # handler in idle state
        if self._waiter:
            if self._loop.time() > next:
                self.force_close()
                return

        # not all request handlers are done,
        # reschedule itself to next second
        self._keepalive_handle = self._loop.call_later(
            self.KEEPALIVE_RESCHEDULE_DELAY, self._process_keepalive
        )

    async def _handle_request(
        self,
        request: BaseRequest,
        start_time: float,
        request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]],
    ) -> Tuple[StreamResponse, bool]:
        """Run one handler invocation and finish its response.

        Returns ``(response, reset)`` where ``reset`` is True if the client
        disconnected before the response could be fully written.
        """
        assert self._request_handler is not None
        try:
            try:
                self._current_request = request
                resp = await request_handler(request)
            finally:
                # Always clear so shutdown() does not cancel a stale request.
                self._current_request = None
        except HTTPException as exc:
            # HTTP exceptions double as responses.
            resp = exc
            reset = await self.finish_response(request, resp, start_time)
        except asyncio.CancelledError:
            raise
        except asyncio.TimeoutError as exc:
            self.log_debug("Request handler timed out.", exc_info=exc)
            resp = self.handle_error(request, 504)
            reset = await self.finish_response(request, resp, start_time)
        except Exception as exc:
            resp = self.handle_error(request, 500, exc)
            reset = await self.finish_response(request, resp, start_time)
        else:
            # Deprecation warning (See #2415)
            if getattr(resp, "__http_exception__", False):
                warnings.warn(
                    "returning HTTPException object is deprecated "
                    "(#2415) and will be removed, "
                    "please raise the exception instead",
                    DeprecationWarning,
                )

            reset = await self.finish_response(request, resp, start_time)

        return resp, reset

    async def start(self) -> None:
        """Process incoming request.

        It reads request line, request headers and request payload, then
        calls handle_request() method. Subclass has to override
        handle_request(). start() handles various exceptions in request
        or response handling. Connection is being closed always unless
        keep_alive(True) specified.
        """
        loop = self._loop
        handler = self._task_handler
        assert handler is not None
        manager = self._manager
        assert manager is not None
        keepalive_timeout = self._keepalive_timeout
        resp = None
        assert self._request_factory is not None
        assert self._request_handler is not None

        while not self._force_close:
            if not self._messages:
                try:
                    # wait for next request
                    self._waiter = loop.create_future()
                    await self._waiter
                except asyncio.CancelledError:
                    break
                finally:
                    self._waiter = None

            message, payload = self._messages.popleft()

            start = loop.time()

            manager.requests_count += 1
            writer = StreamWriter(self, loop)
            if isinstance(message, _ErrInfo):
                # make request_factory work
                request_handler = self._make_error_handler(message)
                message = ERROR
            else:
                request_handler = self._request_handler

            request = self._request_factory(message, payload, self, writer, handler)
            try:
                # a new task is used for copy context vars (#3406)
                task = self._loop.create_task(
                    self._handle_request(request, start, request_handler)
                )
                try:
                    resp, reset = await task
                except (asyncio.CancelledError, ConnectionError):
                    self.log_debug("Ignored premature client disconnection")
                    break

                # Drop the processed task from asyncio.Task.all_tasks() early
                del task
                if reset:
                    self.log_debug("Ignored premature client disconnection 2")
                    break

                # notify server about keep-alive
                self._keepalive = bool(resp.keep_alive)

                # check payload
                if not payload.is_eof():
                    lingering_time = self._lingering_time
                    if not self._force_close and lingering_time:
                        self.log_debug(
                            "Start lingering close timer for %s sec.", lingering_time
                        )

                        now = loop.time()
                        end_t = now + lingering_time

                        # Drain the remaining request body for a bounded time
                        # so the connection can be reused.
                        with suppress(asyncio.TimeoutError, asyncio.CancelledError):
                            while not payload.is_eof() and now < end_t:
                                async with ceil_timeout(end_t - now):
                                    # read and ignore
                                    await payload.readany()
                                now = loop.time()

                    # if payload still uncompleted
                    if not payload.is_eof() and not self._force_close:
                        self.log_debug("Uncompleted request.")
                        self.close()

                # Any later access to this payload is a programming error.
                payload.set_exception(PayloadAccessError())

            except asyncio.CancelledError:
                self.log_debug("Ignored premature client disconnection ")
                break
            except RuntimeError as exc:
                if self.debug:
                    self.log_exception("Unhandled runtime exception", exc_info=exc)
                self.force_close()
            except Exception as exc:
                self.log_exception("Unhandled exception", exc_info=exc)
                self.force_close()
            finally:
                if self.transport is None and resp is not None:
                    self.log_debug("Ignored premature client disconnection.")
                elif not self._force_close:
                    if self._keepalive and not self._close:
                        # start keep-alive timer
                        if keepalive_timeout is not None:
                            now = self._loop.time()
                            self._keepalive_time = now
                            if self._keepalive_handle is None:
                                self._keepalive_handle = loop.call_at(
                                    now + keepalive_timeout, self._process_keepalive
                                )
                    else:
                        break

        # remove handler, close transport if no handlers left
        if not self._force_close:
            self._task_handler = None
            if self.transport is not None:
                self.transport.close()

    async def finish_response(
        self, request: BaseRequest, resp: StreamResponse, start_time: float
    ) -> bool:
        """Prepare the response and write_eof, then log access.

        This has to
        be called within the context of any exception so the access logger
        can get exception information. Returns True if the client disconnects
        prematurely.
        """
        if self._request_parser is not None:
            self._request_parser.set_upgraded(False)
            self._upgrade = False
            if self._message_tail:
                # Replay bytes buffered during a (now abandoned) upgrade.
                self._request_parser.feed_data(self._message_tail)
                self._message_tail = b""
        try:
            prepare_meth = resp.prepare
        except AttributeError:
            if resp is None:
                raise RuntimeError("Missing return " "statement on request handler")
            else:
                raise RuntimeError(
                    "Web-handler should return "
                    "a response instance, "
                    "got {!r}".format(resp)
                )
        try:
            await prepare_meth(request)
            await resp.write_eof()
        except ConnectionError:
            self.log_access(request, resp, start_time)
            return True
        else:
            self.log_access(request, resp, start_time)
            return False

    def handle_error(
        self,
        request: BaseRequest,
        status: int = 500,
        exc: Optional[BaseException] = None,
        message: Optional[str] = None,
    ) -> StreamResponse:
        """Handle errors.

        Returns HTTP response with specific status code. Logs additional
        information. It always closes current connection.
        """
        self.log_exception("Error handling request", exc_info=exc)

        # some data already got sent, connection is broken
        if request.writer.output_size > 0:
            raise ConnectionError(
                "Response is sent already, cannot send another response "
                "with the error message"
            )

        ct = "text/plain"
        if status == HTTPStatus.INTERNAL_SERVER_ERROR:
            title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR)
            msg = HTTPStatus.INTERNAL_SERVER_ERROR.description
            tb = None
            if self.debug:
                with suppress(Exception):
                    tb = traceback.format_exc()

            if "text/html" in request.headers.get("Accept", ""):
                if tb:
                    # Escape the traceback before embedding it in HTML.
                    tb = html_escape(tb)
                    msg = f"<h2>Traceback:</h2>\n<pre>{tb}</pre>"
                message = (
                    "<html><head>"
                    "<title>{title}</title>"
                    "</head><body>\n<h1>{title}</h1>"
                    "\n{msg}\n</body></html>\n"
                ).format(title=title, msg=msg)
                ct = "text/html"
            else:
                if tb:
                    msg = tb
                message = title + "\n\n" + msg

        resp = Response(status=status, text=message, content_type=ct)
        resp.force_close()

        return resp

    def _make_error_handler(
        self, err_info: _ErrInfo
    ) -> Callable[[BaseRequest], Awaitable[StreamResponse]]:
        """Wrap a parse error in a handler-shaped coroutine (see start())."""

        async def handler(request: BaseRequest) -> StreamResponse:
            return self.handle_error(
                request, err_info.status, err_info.exc, err_info.message
            )

        return handler
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_routedef.py
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import abc
|
| 2 |
+
import os # noqa
|
| 3 |
+
from typing import (
|
| 4 |
+
TYPE_CHECKING,
|
| 5 |
+
Any,
|
| 6 |
+
Callable,
|
| 7 |
+
Dict,
|
| 8 |
+
Iterator,
|
| 9 |
+
List,
|
| 10 |
+
Optional,
|
| 11 |
+
Sequence,
|
| 12 |
+
Type,
|
| 13 |
+
Union,
|
| 14 |
+
overload,
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
import attr
|
| 18 |
+
|
| 19 |
+
from . import hdrs
|
| 20 |
+
from .abc import AbstractView
|
| 21 |
+
from .typedefs import Handler, PathLike
|
| 22 |
+
|
| 23 |
+
if TYPE_CHECKING:  # pragma: no cover
    from .web_request import Request
    from .web_response import StreamResponse
    from .web_urldispatcher import AbstractRoute, UrlDispatcher
else:
    # Runtime placeholders for annotation-only names (presumably kept as
    # None to avoid importing heavy modules at runtime — see the guarded
    # imports above).
    Request = StreamResponse = UrlDispatcher = AbstractRoute = None


# Public API of this module.
__all__ = (
    "AbstractRouteDef",
    "RouteDef",
    "StaticDef",
    "RouteTableDef",
    "head",
    "options",
    "get",
    "post",
    "patch",
    "put",
    "delete",
    "route",
    "view",
    "static",
)
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
class AbstractRouteDef(abc.ABC):
    """A deferred route definition.

    Subclasses implement :meth:`register` to apply the definition to a
    concrete router and return the routes that were created.
    """

    @abc.abstractmethod
    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        pass  # pragma: no cover
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# A route handler: either a plain handler callable or a class-based view.
_HandlerType = Union[Type[AbstractView], Handler]
|
| 56 |
+
|
| 57 |
+
|
| 58 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class RouteDef(AbstractRouteDef):
    """A single route definition: HTTP method, path pattern, handler and
    extra keyword arguments forwarded to route registration."""

    method: str
    path: str
    handler: _HandlerType
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extras = "".join(
            f", {key}={val!r}" for key, val in sorted(self.kwargs.items())
        )
        return (
            f"<RouteDef {self.method} {self.path}"
            f" -> {self.handler.__name__!r}{extras}>"
        )

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Add this route to *router* and return the created route(s)."""
        if self.method not in hdrs.METH_ALL:
            # Non-standard verb: fall back to the generic add_route().
            created = router.add_route(
                self.method, self.path, self.handler, **self.kwargs
            )
        else:
            # Standard verb: dispatch to add_get()/add_post()/... by name.
            adder = getattr(router, "add_" + self.method.lower())
            created = adder(self.path, self.handler, **self.kwargs)
        return [created]
|
| 81 |
+
|
| 82 |
+
|
| 83 |
+
@attr.s(auto_attribs=True, frozen=True, repr=False, slots=True)
class StaticDef(AbstractRouteDef):
    """A static-file mount definition: URL prefix, filesystem path and
    extra keyword arguments forwarded to ``add_static()``."""

    prefix: str
    path: PathLike
    kwargs: Dict[str, Any]

    def __repr__(self) -> str:
        extras = "".join(
            f", {key}={val!r}" for key, val in sorted(self.kwargs.items())
        )
        return f"<StaticDef {self.prefix} -> {self.path}{extras}>"

    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
        """Mount the static resource on *router* and return its routes."""
        resource = router.add_static(self.prefix, self.path, **self.kwargs)
        # The generated routes are exposed through the resource info dict.
        generated = resource.get_info().get("routes", {})
        return list(generated.values())
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
def route(method: str, path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Create a :class:`RouteDef` for *method*/*path* served by *handler*."""
    return RouteDef(method, path, handler, kwargs)


def head(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a HEAD route definition."""
    return route(hdrs.METH_HEAD, path, handler, **kwargs)


def options(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for an OPTIONS route definition."""
    return route(hdrs.METH_OPTIONS, path, handler, **kwargs)


def get(
    path: str,
    handler: _HandlerType,
    *,
    name: Optional[str] = None,
    allow_head: bool = True,
    **kwargs: Any,
) -> RouteDef:
    """Shortcut for a GET route definition.

    ``name`` and ``allow_head`` are forwarded to route registration
    (``allow_head`` presumably controls a companion HEAD route — defined
    by the router, not here).
    """
    return route(
        hdrs.METH_GET, path, handler, name=name, allow_head=allow_head, **kwargs
    )


def post(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a POST route definition."""
    return route(hdrs.METH_POST, path, handler, **kwargs)


def put(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a PUT route definition."""
    return route(hdrs.METH_PUT, path, handler, **kwargs)


def patch(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a PATCH route definition."""
    return route(hdrs.METH_PATCH, path, handler, **kwargs)


def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
    """Shortcut for a DELETE route definition."""
    return route(hdrs.METH_DELETE, path, handler, **kwargs)


def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
    """Shortcut for a class-based view matched on any HTTP method."""
    return route(hdrs.METH_ANY, path, handler, **kwargs)


def static(prefix: str, path: PathLike, **kwargs: Any) -> StaticDef:
    """Create a :class:`StaticDef` serving files under *path* at *prefix*."""
    return StaticDef(prefix, path, kwargs)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
# Type of the decorators returned by RouteTableDef.route() and friends.
_Deco = Callable[[_HandlerType], _HandlerType]
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
class RouteTableDef(Sequence[AbstractRouteDef]):
    """Route definition table.

    A sequence of route definitions populated via decorator methods
    (``@routes.get(...)`` etc.) for later registration on a router.
    """

    def __init__(self) -> None:
        self._items: List[AbstractRouteDef] = []

    def __repr__(self) -> str:
        return "<RouteTableDef count={}>".format(len(self._items))

    @overload
    def __getitem__(self, index: int) -> AbstractRouteDef:
        ...

    @overload
    def __getitem__(self, index: slice) -> List[AbstractRouteDef]:
        ...

    def __getitem__(self, index):  # type: ignore[no-untyped-def]
        return self._items[index]

    def __iter__(self) -> Iterator[AbstractRouteDef]:
        return iter(self._items)

    def __len__(self) -> int:
        return len(self._items)

    def __contains__(self, item: object) -> bool:
        return item in self._items

    def route(self, method: str, path: str, **kwargs: Any) -> _Deco:
        """Return a decorator that queues a RouteDef for the handler."""

        def wrapper(handler: _HandlerType) -> _HandlerType:
            self._items.append(RouteDef(method, path, handler, kwargs))
            return handler

        return wrapper

    def head(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a HEAD route."""
        return self.route(hdrs.METH_HEAD, path, **kwargs)

    def get(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a GET route."""
        return self.route(hdrs.METH_GET, path, **kwargs)

    def post(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a POST route."""
        return self.route(hdrs.METH_POST, path, **kwargs)

    def put(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a PUT route."""
        return self.route(hdrs.METH_PUT, path, **kwargs)

    def patch(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a PATCH route."""
        return self.route(hdrs.METH_PATCH, path, **kwargs)

    def delete(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a DELETE route."""
        return self.route(hdrs.METH_DELETE, path, **kwargs)

    def view(self, path: str, **kwargs: Any) -> _Deco:
        """Decorator registering a class-based view on any method."""
        return self.route(hdrs.METH_ANY, path, **kwargs)

    def static(self, prefix: str, path: PathLike, **kwargs: Any) -> None:
        """Queue a StaticDef mounting *path* at *prefix*."""
        self._items.append(StaticDef(prefix, path, kwargs))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_runner.py
ADDED
|
@@ -0,0 +1,381 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import signal
|
| 3 |
+
import socket
|
| 4 |
+
from abc import ABC, abstractmethod
|
| 5 |
+
from typing import Any, List, Optional, Set
|
| 6 |
+
|
| 7 |
+
from yarl import URL
|
| 8 |
+
|
| 9 |
+
from .web_app import Application
|
| 10 |
+
from .web_server import Server
|
| 11 |
+
|
| 12 |
+
try:
|
| 13 |
+
from ssl import SSLContext
|
| 14 |
+
except ImportError:
|
| 15 |
+
SSLContext = object # type: ignore[misc,assignment]
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
__all__ = (
|
| 19 |
+
"BaseSite",
|
| 20 |
+
"TCPSite",
|
| 21 |
+
"UnixSite",
|
| 22 |
+
"NamedPipeSite",
|
| 23 |
+
"SockSite",
|
| 24 |
+
"BaseRunner",
|
| 25 |
+
"AppRunner",
|
| 26 |
+
"ServerRunner",
|
| 27 |
+
"GracefulExit",
|
| 28 |
+
)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class GracefulExit(SystemExit):
    """SystemExit subclass raised by the SIGINT/SIGTERM handlers.

    Raised via ``_raise_graceful_exit`` (registered in
    ``BaseRunner.setup`` when ``handle_signals`` is true) so the event
    loop unwinds cleanly with exit code 1.
    """

    # Process exit status reported when this exception terminates the program.
    code = 1
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def _raise_graceful_exit() -> None:
    # Signal-handler callback: converts SIGINT/SIGTERM into GracefulExit
    # so the running loop terminates with a catchable SystemExit.
    raise GracefulExit()
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class BaseSite(ABC):
    """Abstract base for a single listening endpoint attached to a runner.

    Concrete subclasses (TCP, UNIX socket, named pipe, raw socket) implement
    ``name`` and ``start``; the shared ``stop`` performs the ordered teardown.
    """

    __slots__ = ("_runner", "_shutdown_timeout", "_ssl_context", "_backlog", "_server")

    def __init__(
        self,
        runner: "BaseRunner",
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        # A site needs a fully set-up runner: runner.server is only
        # populated by runner.setup().
        if runner.server is None:
            raise RuntimeError("Call runner.setup() before making a site")
        self._runner = runner
        self._shutdown_timeout = shutdown_timeout
        self._ssl_context = ssl_context
        self._backlog = backlog
        # The asyncio server created by start(); None until started.
        self._server = None  # type: Optional[asyncio.AbstractServer]

    @property
    @abstractmethod
    def name(self) -> str:
        """Human-readable endpoint identifier (URL or path)."""
        pass  # pragma: no cover

    @abstractmethod
    async def start(self) -> None:
        # Subclasses must call super().start() first so the site is
        # registered with the runner before listening begins.
        self._runner._reg_site(self)

    async def stop(self) -> None:
        """Stop listening and shut down handlers (order matters here)."""
        self._runner._check_site(self)
        if self._server is None:
            # Registered but never started: just unregister.
            self._runner._unreg_site(self)
            return  # not started yet
        self._server.close()
        # named pipes do not have a wait_closed method
        if hasattr(self._server, "wait_closed"):
            await self._server.wait_closed()
        # Application-level shutdown first, then drain live connections
        # within the configured timeout.
        await self._runner.shutdown()
        assert self._runner.server
        await self._runner.server.shutdown(self._shutdown_timeout)
        self._runner._unreg_site(self)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class TCPSite(BaseSite):
    """A site listening on a TCP host/port pair."""

    __slots__ = ("_host", "_port", "_reuse_address", "_reuse_port")

    def __init__(
        self,
        runner: "BaseRunner",
        host: Optional[str] = None,
        port: Optional[int] = None,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
        reuse_address: Optional[bool] = None,
        reuse_port: Optional[bool] = None,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._host = host
        # Conventional default port for the scheme when none is given.
        self._port = port if port is not None else (8443 if self._ssl_context else 8080)
        self._reuse_address = reuse_address
        self._reuse_port = reuse_port

    @property
    def name(self) -> str:
        """URL describing this endpoint, e.g. ``http://0.0.0.0:8080``."""
        listen_host = "0.0.0.0" if self._host is None else self._host
        listen_scheme = "https" if self._ssl_context else "http"
        return str(URL.build(scheme=listen_scheme, host=listen_host, port=self._port))

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        runner_server = self._runner.server
        assert runner_server is not None
        self._server = await asyncio.get_event_loop().create_server(
            runner_server,
            self._host,
            self._port,
            ssl=self._ssl_context,
            backlog=self._backlog,
            reuse_address=self._reuse_address,
            reuse_port=self._reuse_port,
        )
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
class UnixSite(BaseSite):
    """A site bound to a UNIX domain socket at a filesystem path."""

    __slots__ = ("_path",)

    def __init__(
        self,
        runner: "BaseRunner",
        path: str,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._path = path

    @property
    def name(self) -> str:
        """Endpoint identifier in ``scheme://unix:<path>:`` form."""
        prefix = "https" if self._ssl_context else "http"
        return f"{prefix}://unix:{self._path}:"

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        runner_server = self._runner.server
        assert runner_server is not None
        self._server = await asyncio.get_event_loop().create_unix_server(
            runner_server, self._path, ssl=self._ssl_context, backlog=self._backlog
        )
|
| 165 |
+
|
| 166 |
+
|
| 167 |
+
class NamedPipeSite(BaseSite):
    """A site serving over a Windows named pipe.

    Only usable with the proactor event loop, which is the only loop
    implementation providing ``start_serving_pipe``.
    """

    __slots__ = ("_path",)

    def __init__(
        self, runner: "BaseRunner", path: str, *, shutdown_timeout: float = 60.0
    ) -> None:
        loop = asyncio.get_event_loop()
        if not isinstance(
            loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            # BUGFIX: the original used implicit string concatenation
            # ("...proactor" "loop...") which rendered as "proactorloop".
            raise RuntimeError(
                "Named Pipes only available in proactor loop under windows"
            )
        super().__init__(runner, shutdown_timeout=shutdown_timeout)
        self._path = path

    @property
    def name(self) -> str:
        """The pipe path serves as the endpoint identifier."""
        return self._path

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        loop = asyncio.get_event_loop()
        server = self._runner.server
        assert server is not None
        # start_serving_pipe returns a list of pipe servers; keep the first.
        _server = await loop.start_serving_pipe(  # type: ignore[attr-defined]
            server, self._path
        )
        self._server = _server[0]
|
| 196 |
+
|
| 197 |
+
|
| 198 |
+
class SockSite(BaseSite):
    """A site serving on an already-created :class:`socket.socket`."""

    __slots__ = ("_sock", "_name")

    def __init__(
        self,
        runner: "BaseRunner",
        sock: socket.socket,
        *,
        shutdown_timeout: float = 60.0,
        ssl_context: Optional[SSLContext] = None,
        backlog: int = 128,
    ) -> None:
        super().__init__(
            runner,
            shutdown_timeout=shutdown_timeout,
            ssl_context=ssl_context,
            backlog=backlog,
        )
        self._sock = sock
        scheme = "https" if self._ssl_context else "http"
        # UNIX sockets get a path-style name; everything else host/port.
        if hasattr(socket, "AF_UNIX") and sock.family == socket.AF_UNIX:
            self._name = f"{scheme}://unix:{sock.getsockname()}:"
        else:
            host, port = sock.getsockname()[:2]
            self._name = str(URL.build(scheme=scheme, host=host, port=port))

    @property
    def name(self) -> str:
        """Endpoint identifier computed once at construction time."""
        return self._name

    async def start(self) -> None:
        await super().start()  # registers the site with the runner
        runner_server = self._runner.server
        assert runner_server is not None
        self._server = await asyncio.get_event_loop().create_server(
            runner_server, sock=self._sock, ssl=self._ssl_context, backlog=self._backlog
        )
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
class BaseRunner(ABC):
    """Abstract base for server runners.

    Owns a ``Server`` instance (created by ``setup``) and the list of
    sites attached to it; subclasses supply ``_make_server`` /
    ``_cleanup_server`` and the application-level ``shutdown``.
    """

    __slots__ = ("_handle_signals", "_kwargs", "_server", "_sites")

    def __init__(self, *, handle_signals: bool = False, **kwargs: Any) -> None:
        # When True, setup() installs SIGINT/SIGTERM handlers that raise
        # GracefulExit.
        self._handle_signals = handle_signals
        # Extra keyword arguments forwarded by subclasses to the handler
        # factory (see AppRunner._make_server).
        self._kwargs = kwargs
        self._server = None  # type: Optional[Server]
        self._sites = []  # type: List[BaseSite]

    @property
    def server(self) -> Optional[Server]:
        """The Server created by setup(), or None before setup/after cleanup."""
        return self._server

    @property
    def addresses(self) -> List[Any]:
        """Socket addresses of all started sites (``getsockname()`` values)."""
        ret = []  # type: List[Any]
        for site in self._sites:
            server = site._server
            if server is not None:
                sockets = server.sockets
                if sockets is not None:
                    for sock in sockets:
                        ret.append(sock.getsockname())
        return ret

    @property
    def sites(self) -> Set[BaseSite]:
        """Snapshot of the currently registered sites."""
        return set(self._sites)

    async def setup(self) -> None:
        """Install signal handlers (optionally) and create the server."""
        loop = asyncio.get_event_loop()

        if self._handle_signals:
            try:
                loop.add_signal_handler(signal.SIGINT, _raise_graceful_exit)
                loop.add_signal_handler(signal.SIGTERM, _raise_graceful_exit)
            except NotImplementedError:  # pragma: no cover
                # add_signal_handler is not implemented on Windows
                pass

        self._server = await self._make_server()

    @abstractmethod
    async def shutdown(self) -> None:
        """Application-level shutdown hook; runs before connection draining."""
        pass  # pragma: no cover

    async def cleanup(self) -> None:
        """Stop all sites, dispose of the server, and remove signal handlers."""
        loop = asyncio.get_event_loop()

        # The loop over sites is intentional, an exception on gather()
        # leaves self._sites in unpredictable state.
        # The loop guarantees that a site is either deleted on success or
        # still present on failure
        for site in list(self._sites):
            await site.stop()
        await self._cleanup_server()
        self._server = None
        if self._handle_signals:
            try:
                loop.remove_signal_handler(signal.SIGINT)
                loop.remove_signal_handler(signal.SIGTERM)
            except NotImplementedError:  # pragma: no cover
                # remove_signal_handler is not implemented on Windows
                pass

    @abstractmethod
    async def _make_server(self) -> Server:
        """Create and return the Server instance; called from setup()."""
        pass  # pragma: no cover

    @abstractmethod
    async def _cleanup_server(self) -> None:
        """Release server resources; called from cleanup()."""
        pass  # pragma: no cover

    def _reg_site(self, site: BaseSite) -> None:
        # Called by BaseSite.start(); duplicate registration is a caller bug.
        if site in self._sites:
            raise RuntimeError(f"Site {site} is already registered in runner {self}")
        self._sites.append(site)

    def _check_site(self, site: BaseSite) -> None:
        # Sanity check used by BaseSite.stop() before teardown.
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")

    def _unreg_site(self, site: BaseSite) -> None:
        if site not in self._sites:
            raise RuntimeError(f"Site {site} is not registered in runner {self}")
        self._sites.remove(site)
|
| 325 |
+
|
| 326 |
+
|
| 327 |
+
class ServerRunner(BaseRunner):
    """Low-level web server runner"""

    __slots__ = ("_web_server",)

    def __init__(
        self, web_server: Server, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        self._web_server = web_server

    async def shutdown(self) -> None:
        """No application-level shutdown for a bare Server."""

    async def _make_server(self) -> Server:
        # The server instance was supplied up front; hand it back unchanged.
        return self._web_server

    async def _cleanup_server(self) -> None:
        """No extra server resources to release."""
|
| 346 |
+
|
| 347 |
+
|
| 348 |
+
class AppRunner(BaseRunner):
    """Web Application runner"""

    __slots__ = ("_app",)

    def __init__(
        self, app: Application, *, handle_signals: bool = False, **kwargs: Any
    ) -> None:
        super().__init__(handle_signals=handle_signals, **kwargs)
        if not isinstance(app, Application):
            raise TypeError(
                "The first argument should be web.Application "
                "instance, got {!r}".format(app)
            )
        self._app = app

    @property
    def app(self) -> Application:
        """The wrapped Application instance."""
        return self._app

    async def shutdown(self) -> None:
        # Delegate to the application's own shutdown sequence.
        await self._app.shutdown()

    async def _make_server(self) -> Server:
        event_loop = asyncio.get_event_loop()
        self._app._set_loop(event_loop)
        # Freeze startup signals, run them, then freeze the whole app
        # before producing the request handler.
        self._app.on_startup.freeze()
        await self._app.startup()
        self._app.freeze()
        return self._app._make_handler(loop=event_loop, **self._kwargs)

    async def _cleanup_server(self) -> None:
        await self._app.cleanup()
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_server.py
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Low level HTTP server."""
|
| 2 |
+
import asyncio
|
| 3 |
+
from typing import Any, Awaitable, Callable, Dict, List, Optional # noqa
|
| 4 |
+
|
| 5 |
+
from .abc import AbstractStreamWriter
|
| 6 |
+
from .helpers import get_running_loop
|
| 7 |
+
from .http_parser import RawRequestMessage
|
| 8 |
+
from .streams import StreamReader
|
| 9 |
+
from .web_protocol import RequestHandler, _RequestFactory, _RequestHandler
|
| 10 |
+
from .web_request import BaseRequest
|
| 11 |
+
|
| 12 |
+
__all__ = ("Server",)
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class Server:
    """Low-level HTTP server: a factory of RequestHandler protocols that
    share one connection registry and request counter."""

    def __init__(
        self,
        handler: _RequestHandler,
        *,
        request_factory: Optional[_RequestFactory] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        **kwargs: Any
    ) -> None:
        self._loop = get_running_loop(loop)
        # Live protocol -> transport mapping, maintained by the callbacks below.
        self._connections = {}  # type: Dict[RequestHandler, asyncio.Transport]
        # Extra kwargs forwarded to each RequestHandler in __call__.
        self._kwargs = kwargs
        self.requests_count = 0
        self.request_handler = handler
        # Fall back to plain BaseRequest construction when no factory is given.
        if request_factory is None:
            request_factory = self._make_request
        self.request_factory = request_factory

    @property
    def connections(self) -> List[RequestHandler]:
        """Snapshot of currently connected protocol instances."""
        return [*self._connections]

    def connection_made(
        self, handler: RequestHandler, transport: asyncio.Transport
    ) -> None:
        self._connections[handler] = transport

    def connection_lost(
        self, handler: RequestHandler, exc: Optional[BaseException] = None
    ) -> None:
        # Tolerate handlers that were never (or already) removed.
        self._connections.pop(handler, None)

    def _make_request(
        self,
        message: RawRequestMessage,
        payload: StreamReader,
        protocol: RequestHandler,
        writer: AbstractStreamWriter,
        task: "asyncio.Task[None]",
    ) -> BaseRequest:
        """Default request factory: build a BaseRequest on this loop."""
        return BaseRequest(message, payload, protocol, writer, task, self._loop)

    async def shutdown(self, timeout: Optional[float] = None) -> None:
        """Shut down every live connection concurrently, then forget them."""
        await asyncio.gather(
            *(conn.shutdown(timeout) for conn in self._connections)
        )
        self._connections.clear()

    def __call__(self) -> RequestHandler:
        # Protocol factory interface used by loop.create_server().
        return RequestHandler(self, loop=self._loop, **self._kwargs)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/aiohttp/web_ws.py
ADDED
|
@@ -0,0 +1,487 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import base64
|
| 3 |
+
import binascii
|
| 4 |
+
import hashlib
|
| 5 |
+
import json
|
| 6 |
+
from typing import Any, Iterable, Optional, Tuple, cast
|
| 7 |
+
|
| 8 |
+
import async_timeout
|
| 9 |
+
import attr
|
| 10 |
+
from multidict import CIMultiDict
|
| 11 |
+
|
| 12 |
+
from . import hdrs
|
| 13 |
+
from .abc import AbstractStreamWriter
|
| 14 |
+
from .helpers import call_later, set_result
|
| 15 |
+
from .http import (
|
| 16 |
+
WS_CLOSED_MESSAGE,
|
| 17 |
+
WS_CLOSING_MESSAGE,
|
| 18 |
+
WS_KEY,
|
| 19 |
+
WebSocketError,
|
| 20 |
+
WebSocketReader,
|
| 21 |
+
WebSocketWriter,
|
| 22 |
+
WSCloseCode,
|
| 23 |
+
WSMessage,
|
| 24 |
+
WSMsgType as WSMsgType,
|
| 25 |
+
ws_ext_gen,
|
| 26 |
+
ws_ext_parse,
|
| 27 |
+
)
|
| 28 |
+
from .log import ws_logger
|
| 29 |
+
from .streams import EofStream, FlowControlDataQueue
|
| 30 |
+
from .typedefs import Final, JSONDecoder, JSONEncoder
|
| 31 |
+
from .web_exceptions import HTTPBadRequest, HTTPException
|
| 32 |
+
from .web_request import BaseRequest
|
| 33 |
+
from .web_response import StreamResponse
|
| 34 |
+
|
| 35 |
+
__all__ = (
|
| 36 |
+
"WebSocketResponse",
|
| 37 |
+
"WebSocketReady",
|
| 38 |
+
"WSMsgType",
|
| 39 |
+
)
|
| 40 |
+
|
| 41 |
+
THRESHOLD_CONNLOST_ACCESS: Final[int] = 5
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class WebSocketReady:
    """Result of ``WebSocketResponse.can_prepare()``.

    Truthy when the request can be upgraded to a WebSocket; ``protocol``
    carries the negotiated sub-protocol (or None when none matched or the
    handshake is not possible).
    """

    ok: bool
    protocol: Optional[str]

    def __bool__(self) -> bool:
        # Allows `if ws_ready:` checks on the handshake result.
        return self.ok
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class WebSocketResponse(StreamResponse):
|
| 54 |
+
|
| 55 |
+
_length_check = False
|
| 56 |
+
|
| 57 |
+
def __init__(
|
| 58 |
+
self,
|
| 59 |
+
*,
|
| 60 |
+
timeout: float = 10.0,
|
| 61 |
+
receive_timeout: Optional[float] = None,
|
| 62 |
+
autoclose: bool = True,
|
| 63 |
+
autoping: bool = True,
|
| 64 |
+
heartbeat: Optional[float] = None,
|
| 65 |
+
protocols: Iterable[str] = (),
|
| 66 |
+
compress: bool = True,
|
| 67 |
+
max_msg_size: int = 4 * 1024 * 1024,
|
| 68 |
+
) -> None:
|
| 69 |
+
super().__init__(status=101)
|
| 70 |
+
self._protocols = protocols
|
| 71 |
+
self._ws_protocol = None # type: Optional[str]
|
| 72 |
+
self._writer = None # type: Optional[WebSocketWriter]
|
| 73 |
+
self._reader = None # type: Optional[FlowControlDataQueue[WSMessage]]
|
| 74 |
+
self._closed = False
|
| 75 |
+
self._closing = False
|
| 76 |
+
self._conn_lost = 0
|
| 77 |
+
self._close_code = None # type: Optional[int]
|
| 78 |
+
self._loop = None # type: Optional[asyncio.AbstractEventLoop]
|
| 79 |
+
self._waiting = None # type: Optional[asyncio.Future[bool]]
|
| 80 |
+
self._exception = None # type: Optional[BaseException]
|
| 81 |
+
self._timeout = timeout
|
| 82 |
+
self._receive_timeout = receive_timeout
|
| 83 |
+
self._autoclose = autoclose
|
| 84 |
+
self._autoping = autoping
|
| 85 |
+
self._heartbeat = heartbeat
|
| 86 |
+
self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
|
| 87 |
+
if heartbeat is not None:
|
| 88 |
+
self._pong_heartbeat = heartbeat / 2.0
|
| 89 |
+
self._pong_response_cb: Optional[asyncio.TimerHandle] = None
|
| 90 |
+
self._compress = compress
|
| 91 |
+
self._max_msg_size = max_msg_size
|
| 92 |
+
|
| 93 |
+
def _cancel_heartbeat(self) -> None:
|
| 94 |
+
if self._pong_response_cb is not None:
|
| 95 |
+
self._pong_response_cb.cancel()
|
| 96 |
+
self._pong_response_cb = None
|
| 97 |
+
|
| 98 |
+
if self._heartbeat_cb is not None:
|
| 99 |
+
self._heartbeat_cb.cancel()
|
| 100 |
+
self._heartbeat_cb = None
|
| 101 |
+
|
| 102 |
+
def _reset_heartbeat(self) -> None:
|
| 103 |
+
self._cancel_heartbeat()
|
| 104 |
+
|
| 105 |
+
if self._heartbeat is not None:
|
| 106 |
+
assert self._loop is not None
|
| 107 |
+
self._heartbeat_cb = call_later(
|
| 108 |
+
self._send_heartbeat, self._heartbeat, self._loop
|
| 109 |
+
)
|
| 110 |
+
|
| 111 |
+
def _send_heartbeat(self) -> None:
|
| 112 |
+
if self._heartbeat is not None and not self._closed:
|
| 113 |
+
assert self._loop is not None
|
| 114 |
+
# fire-and-forget a task is not perfect but maybe ok for
|
| 115 |
+
# sending ping. Otherwise we need a long-living heartbeat
|
| 116 |
+
# task in the class.
|
| 117 |
+
self._loop.create_task(self._writer.ping()) # type: ignore[union-attr]
|
| 118 |
+
|
| 119 |
+
if self._pong_response_cb is not None:
|
| 120 |
+
self._pong_response_cb.cancel()
|
| 121 |
+
self._pong_response_cb = call_later(
|
| 122 |
+
self._pong_not_received, self._pong_heartbeat, self._loop
|
| 123 |
+
)
|
| 124 |
+
|
| 125 |
+
def _pong_not_received(self) -> None:
|
| 126 |
+
if self._req is not None and self._req.transport is not None:
|
| 127 |
+
self._closed = True
|
| 128 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 129 |
+
self._exception = asyncio.TimeoutError()
|
| 130 |
+
self._req.transport.close()
|
| 131 |
+
|
| 132 |
+
async def prepare(self, request: BaseRequest) -> AbstractStreamWriter:
|
| 133 |
+
# make pre-check to don't hide it by do_handshake() exceptions
|
| 134 |
+
if self._payload_writer is not None:
|
| 135 |
+
return self._payload_writer
|
| 136 |
+
|
| 137 |
+
protocol, writer = self._pre_start(request)
|
| 138 |
+
payload_writer = await super().prepare(request)
|
| 139 |
+
assert payload_writer is not None
|
| 140 |
+
self._post_start(request, protocol, writer)
|
| 141 |
+
await payload_writer.drain()
|
| 142 |
+
return payload_writer
|
| 143 |
+
|
| 144 |
+
def _handshake(
|
| 145 |
+
self, request: BaseRequest
|
| 146 |
+
) -> Tuple["CIMultiDict[str]", str, bool, bool]:
|
| 147 |
+
headers = request.headers
|
| 148 |
+
if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip():
|
| 149 |
+
raise HTTPBadRequest(
|
| 150 |
+
text=(
|
| 151 |
+
"No WebSocket UPGRADE hdr: {}\n Can "
|
| 152 |
+
'"Upgrade" only to "WebSocket".'
|
| 153 |
+
).format(headers.get(hdrs.UPGRADE))
|
| 154 |
+
)
|
| 155 |
+
|
| 156 |
+
if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower():
|
| 157 |
+
raise HTTPBadRequest(
|
| 158 |
+
text="No CONNECTION upgrade hdr: {}".format(
|
| 159 |
+
headers.get(hdrs.CONNECTION)
|
| 160 |
+
)
|
| 161 |
+
)
|
| 162 |
+
|
| 163 |
+
# find common sub-protocol between client and server
|
| 164 |
+
protocol = None
|
| 165 |
+
if hdrs.SEC_WEBSOCKET_PROTOCOL in headers:
|
| 166 |
+
req_protocols = [
|
| 167 |
+
str(proto.strip())
|
| 168 |
+
for proto in headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
|
| 169 |
+
]
|
| 170 |
+
|
| 171 |
+
for proto in req_protocols:
|
| 172 |
+
if proto in self._protocols:
|
| 173 |
+
protocol = proto
|
| 174 |
+
break
|
| 175 |
+
else:
|
| 176 |
+
# No overlap found: Return no protocol as per spec
|
| 177 |
+
ws_logger.warning(
|
| 178 |
+
"Client protocols %r don’t overlap server-known ones %r",
|
| 179 |
+
req_protocols,
|
| 180 |
+
self._protocols,
|
| 181 |
+
)
|
| 182 |
+
|
| 183 |
+
# check supported version
|
| 184 |
+
version = headers.get(hdrs.SEC_WEBSOCKET_VERSION, "")
|
| 185 |
+
if version not in ("13", "8", "7"):
|
| 186 |
+
raise HTTPBadRequest(text=f"Unsupported version: {version}")
|
| 187 |
+
|
| 188 |
+
# check client handshake for validity
|
| 189 |
+
key = headers.get(hdrs.SEC_WEBSOCKET_KEY)
|
| 190 |
+
try:
|
| 191 |
+
if not key or len(base64.b64decode(key)) != 16:
|
| 192 |
+
raise HTTPBadRequest(text=f"Handshake error: {key!r}")
|
| 193 |
+
except binascii.Error:
|
| 194 |
+
raise HTTPBadRequest(text=f"Handshake error: {key!r}") from None
|
| 195 |
+
|
| 196 |
+
accept_val = base64.b64encode(
|
| 197 |
+
hashlib.sha1(key.encode() + WS_KEY).digest()
|
| 198 |
+
).decode()
|
| 199 |
+
response_headers = CIMultiDict( # type: ignore[var-annotated]
|
| 200 |
+
{
|
| 201 |
+
hdrs.UPGRADE: "websocket", # type: ignore[arg-type]
|
| 202 |
+
hdrs.CONNECTION: "upgrade",
|
| 203 |
+
hdrs.SEC_WEBSOCKET_ACCEPT: accept_val,
|
| 204 |
+
}
|
| 205 |
+
)
|
| 206 |
+
|
| 207 |
+
notakeover = False
|
| 208 |
+
compress = 0
|
| 209 |
+
if self._compress:
|
| 210 |
+
extensions = headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
|
| 211 |
+
# Server side always get return with no exception.
|
| 212 |
+
# If something happened, just drop compress extension
|
| 213 |
+
compress, notakeover = ws_ext_parse(extensions, isserver=True)
|
| 214 |
+
if compress:
|
| 215 |
+
enabledext = ws_ext_gen(
|
| 216 |
+
compress=compress, isserver=True, server_notakeover=notakeover
|
| 217 |
+
)
|
| 218 |
+
response_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = enabledext
|
| 219 |
+
|
| 220 |
+
if protocol:
|
| 221 |
+
response_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = protocol
|
| 222 |
+
return (
|
| 223 |
+
response_headers,
|
| 224 |
+
protocol,
|
| 225 |
+
compress,
|
| 226 |
+
notakeover,
|
| 227 |
+
) # type: ignore[return-value]
|
| 228 |
+
|
| 229 |
+
def _pre_start(self, request: BaseRequest) -> Tuple[str, WebSocketWriter]:
|
| 230 |
+
self._loop = request._loop
|
| 231 |
+
|
| 232 |
+
headers, protocol, compress, notakeover = self._handshake(request)
|
| 233 |
+
|
| 234 |
+
self.set_status(101)
|
| 235 |
+
self.headers.update(headers)
|
| 236 |
+
self.force_close()
|
| 237 |
+
self._compress = compress
|
| 238 |
+
transport = request._protocol.transport
|
| 239 |
+
assert transport is not None
|
| 240 |
+
writer = WebSocketWriter(
|
| 241 |
+
request._protocol, transport, compress=compress, notakeover=notakeover
|
| 242 |
+
)
|
| 243 |
+
|
| 244 |
+
return protocol, writer
|
| 245 |
+
|
| 246 |
+
def _post_start(
|
| 247 |
+
self, request: BaseRequest, protocol: str, writer: WebSocketWriter
|
| 248 |
+
) -> None:
|
| 249 |
+
self._ws_protocol = protocol
|
| 250 |
+
self._writer = writer
|
| 251 |
+
|
| 252 |
+
self._reset_heartbeat()
|
| 253 |
+
|
| 254 |
+
loop = self._loop
|
| 255 |
+
assert loop is not None
|
| 256 |
+
self._reader = FlowControlDataQueue(request._protocol, 2 ** 16, loop=loop)
|
| 257 |
+
request.protocol.set_parser(
|
| 258 |
+
WebSocketReader(self._reader, self._max_msg_size, compress=self._compress)
|
| 259 |
+
)
|
| 260 |
+
# disable HTTP keepalive for WebSocket
|
| 261 |
+
request.protocol.keep_alive(False)
|
| 262 |
+
|
| 263 |
+
def can_prepare(self, request: BaseRequest) -> WebSocketReady:
|
| 264 |
+
if self._writer is not None:
|
| 265 |
+
raise RuntimeError("Already started")
|
| 266 |
+
try:
|
| 267 |
+
_, protocol, _, _ = self._handshake(request)
|
| 268 |
+
except HTTPException:
|
| 269 |
+
return WebSocketReady(False, None)
|
| 270 |
+
else:
|
| 271 |
+
return WebSocketReady(True, protocol)
|
| 272 |
+
|
| 273 |
+
@property
|
| 274 |
+
def closed(self) -> bool:
|
| 275 |
+
return self._closed
|
| 276 |
+
|
| 277 |
+
@property
|
| 278 |
+
def close_code(self) -> Optional[int]:
|
| 279 |
+
return self._close_code
|
| 280 |
+
|
| 281 |
+
@property
|
| 282 |
+
def ws_protocol(self) -> Optional[str]:
|
| 283 |
+
return self._ws_protocol
|
| 284 |
+
|
| 285 |
+
@property
|
| 286 |
+
def compress(self) -> bool:
|
| 287 |
+
return self._compress
|
| 288 |
+
|
| 289 |
+
def exception(self) -> Optional[BaseException]:
|
| 290 |
+
return self._exception
|
| 291 |
+
|
| 292 |
+
async def ping(self, message: bytes = b"") -> None:
|
| 293 |
+
if self._writer is None:
|
| 294 |
+
raise RuntimeError("Call .prepare() first")
|
| 295 |
+
await self._writer.ping(message)
|
| 296 |
+
|
| 297 |
+
async def pong(self, message: bytes = b"") -> None:
|
| 298 |
+
# unsolicited pong
|
| 299 |
+
if self._writer is None:
|
| 300 |
+
raise RuntimeError("Call .prepare() first")
|
| 301 |
+
await self._writer.pong(message)
|
| 302 |
+
|
| 303 |
+
async def send_str(self, data: str, compress: Optional[bool] = None) -> None:
|
| 304 |
+
if self._writer is None:
|
| 305 |
+
raise RuntimeError("Call .prepare() first")
|
| 306 |
+
if not isinstance(data, str):
|
| 307 |
+
raise TypeError("data argument must be str (%r)" % type(data))
|
| 308 |
+
await self._writer.send(data, binary=False, compress=compress)
|
| 309 |
+
|
| 310 |
+
async def send_bytes(self, data: bytes, compress: Optional[bool] = None) -> None:
|
| 311 |
+
if self._writer is None:
|
| 312 |
+
raise RuntimeError("Call .prepare() first")
|
| 313 |
+
if not isinstance(data, (bytes, bytearray, memoryview)):
|
| 314 |
+
raise TypeError("data argument must be byte-ish (%r)" % type(data))
|
| 315 |
+
await self._writer.send(data, binary=True, compress=compress)
|
| 316 |
+
|
| 317 |
+
async def send_json(
|
| 318 |
+
self,
|
| 319 |
+
data: Any,
|
| 320 |
+
compress: Optional[bool] = None,
|
| 321 |
+
*,
|
| 322 |
+
dumps: JSONEncoder = json.dumps,
|
| 323 |
+
) -> None:
|
| 324 |
+
await self.send_str(dumps(data), compress=compress)
|
| 325 |
+
|
| 326 |
+
async def write_eof(self) -> None: # type: ignore[override]
|
| 327 |
+
if self._eof_sent:
|
| 328 |
+
return
|
| 329 |
+
if self._payload_writer is None:
|
| 330 |
+
raise RuntimeError("Response has not been started")
|
| 331 |
+
|
| 332 |
+
await self.close()
|
| 333 |
+
self._eof_sent = True
|
| 334 |
+
|
| 335 |
+
async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
|
| 336 |
+
if self._writer is None:
|
| 337 |
+
raise RuntimeError("Call .prepare() first")
|
| 338 |
+
|
| 339 |
+
self._cancel_heartbeat()
|
| 340 |
+
reader = self._reader
|
| 341 |
+
assert reader is not None
|
| 342 |
+
|
| 343 |
+
# we need to break `receive()` cycle first,
|
| 344 |
+
# `close()` may be called from different task
|
| 345 |
+
if self._waiting is not None and not self._closed:
|
| 346 |
+
reader.feed_data(WS_CLOSING_MESSAGE, 0)
|
| 347 |
+
await self._waiting
|
| 348 |
+
|
| 349 |
+
if not self._closed:
|
| 350 |
+
self._closed = True
|
| 351 |
+
try:
|
| 352 |
+
await self._writer.close(code, message)
|
| 353 |
+
writer = self._payload_writer
|
| 354 |
+
assert writer is not None
|
| 355 |
+
await writer.drain()
|
| 356 |
+
except (asyncio.CancelledError, asyncio.TimeoutError):
|
| 357 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 358 |
+
raise
|
| 359 |
+
except Exception as exc:
|
| 360 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 361 |
+
self._exception = exc
|
| 362 |
+
return True
|
| 363 |
+
|
| 364 |
+
if self._closing:
|
| 365 |
+
return True
|
| 366 |
+
|
| 367 |
+
reader = self._reader
|
| 368 |
+
assert reader is not None
|
| 369 |
+
try:
|
| 370 |
+
async with async_timeout.timeout(self._timeout):
|
| 371 |
+
msg = await reader.read()
|
| 372 |
+
except asyncio.CancelledError:
|
| 373 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 374 |
+
raise
|
| 375 |
+
except Exception as exc:
|
| 376 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 377 |
+
self._exception = exc
|
| 378 |
+
return True
|
| 379 |
+
|
| 380 |
+
if msg.type == WSMsgType.CLOSE:
|
| 381 |
+
self._close_code = msg.data
|
| 382 |
+
return True
|
| 383 |
+
|
| 384 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 385 |
+
self._exception = asyncio.TimeoutError()
|
| 386 |
+
return True
|
| 387 |
+
else:
|
| 388 |
+
return False
|
| 389 |
+
|
| 390 |
+
async def receive(self, timeout: Optional[float] = None) -> WSMessage:
|
| 391 |
+
if self._reader is None:
|
| 392 |
+
raise RuntimeError("Call .prepare() first")
|
| 393 |
+
|
| 394 |
+
loop = self._loop
|
| 395 |
+
assert loop is not None
|
| 396 |
+
while True:
|
| 397 |
+
if self._waiting is not None:
|
| 398 |
+
raise RuntimeError("Concurrent call to receive() is not allowed")
|
| 399 |
+
|
| 400 |
+
if self._closed:
|
| 401 |
+
self._conn_lost += 1
|
| 402 |
+
if self._conn_lost >= THRESHOLD_CONNLOST_ACCESS:
|
| 403 |
+
raise RuntimeError("WebSocket connection is closed.")
|
| 404 |
+
return WS_CLOSED_MESSAGE
|
| 405 |
+
elif self._closing:
|
| 406 |
+
return WS_CLOSING_MESSAGE
|
| 407 |
+
|
| 408 |
+
try:
|
| 409 |
+
self._waiting = loop.create_future()
|
| 410 |
+
try:
|
| 411 |
+
async with async_timeout.timeout(timeout or self._receive_timeout):
|
| 412 |
+
msg = await self._reader.read()
|
| 413 |
+
self._reset_heartbeat()
|
| 414 |
+
finally:
|
| 415 |
+
waiter = self._waiting
|
| 416 |
+
set_result(waiter, True)
|
| 417 |
+
self._waiting = None
|
| 418 |
+
except (asyncio.CancelledError, asyncio.TimeoutError):
|
| 419 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 420 |
+
raise
|
| 421 |
+
except EofStream:
|
| 422 |
+
self._close_code = WSCloseCode.OK
|
| 423 |
+
await self.close()
|
| 424 |
+
return WSMessage(WSMsgType.CLOSED, None, None)
|
| 425 |
+
except WebSocketError as exc:
|
| 426 |
+
self._close_code = exc.code
|
| 427 |
+
await self.close(code=exc.code)
|
| 428 |
+
return WSMessage(WSMsgType.ERROR, exc, None)
|
| 429 |
+
except Exception as exc:
|
| 430 |
+
self._exception = exc
|
| 431 |
+
self._closing = True
|
| 432 |
+
self._close_code = WSCloseCode.ABNORMAL_CLOSURE
|
| 433 |
+
await self.close()
|
| 434 |
+
return WSMessage(WSMsgType.ERROR, exc, None)
|
| 435 |
+
|
| 436 |
+
if msg.type == WSMsgType.CLOSE:
|
| 437 |
+
self._closing = True
|
| 438 |
+
self._close_code = msg.data
|
| 439 |
+
if not self._closed and self._autoclose:
|
| 440 |
+
await self.close()
|
| 441 |
+
elif msg.type == WSMsgType.CLOSING:
|
| 442 |
+
self._closing = True
|
| 443 |
+
elif msg.type == WSMsgType.PING and self._autoping:
|
| 444 |
+
await self.pong(msg.data)
|
| 445 |
+
continue
|
| 446 |
+
elif msg.type == WSMsgType.PONG and self._autoping:
|
| 447 |
+
continue
|
| 448 |
+
|
| 449 |
+
return msg
|
| 450 |
+
|
| 451 |
+
async def receive_str(self, *, timeout: Optional[float] = None) -> str:
|
| 452 |
+
msg = await self.receive(timeout)
|
| 453 |
+
if msg.type != WSMsgType.TEXT:
|
| 454 |
+
raise TypeError(
|
| 455 |
+
"Received message {}:{!r} is not WSMsgType.TEXT".format(
|
| 456 |
+
msg.type, msg.data
|
| 457 |
+
)
|
| 458 |
+
)
|
| 459 |
+
return cast(str, msg.data)
|
| 460 |
+
|
| 461 |
+
async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
|
| 462 |
+
msg = await self.receive(timeout)
|
| 463 |
+
if msg.type != WSMsgType.BINARY:
|
| 464 |
+
raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
|
| 465 |
+
return cast(bytes, msg.data)
|
| 466 |
+
|
| 467 |
+
async def receive_json(
|
| 468 |
+
self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None
|
| 469 |
+
) -> Any:
|
| 470 |
+
data = await self.receive_str(timeout=timeout)
|
| 471 |
+
return loads(data)
|
| 472 |
+
|
| 473 |
+
async def write(self, data: bytes) -> None:
|
| 474 |
+
raise RuntimeError("Cannot call .write() for websocket")
|
| 475 |
+
|
| 476 |
+
def __aiter__(self) -> "WebSocketResponse":
|
| 477 |
+
return self
|
| 478 |
+
|
| 479 |
+
async def __anext__(self) -> WSMessage:
|
| 480 |
+
msg = await self.receive()
|
| 481 |
+
if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
|
| 482 |
+
raise StopAsyncIteration
|
| 483 |
+
return msg
|
| 484 |
+
|
| 485 |
+
def _cancel(self, exc: BaseException) -> None:
|
| 486 |
+
if self._reader is not None:
|
| 487 |
+
self._reader.set_exception(exc)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/INSTALLER
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
pip
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/METADATA
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Metadata-Version: 2.1
|
| 2 |
+
Name: antlr4-python3-runtime
|
| 3 |
+
Version: 4.9.3
|
| 4 |
+
Summary: ANTLR 4.9.3 runtime for Python 3.7
|
| 5 |
+
Home-page: http://www.antlr.org
|
| 6 |
+
Author: Eric Vergnaud, Terence Parr, Sam Harwell
|
| 7 |
+
Author-email: eric.vergnaud@wanadoo.fr
|
| 8 |
+
License: BSD
|
| 9 |
+
Requires-Dist: typing ; python_version < "3.5"
|
| 10 |
+
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/RECORD
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
../../../bin/pygrun,sha256=MBVjE9upRGiOwLc7_ldLSW9L_KPHA_5riogxOL9AcJU,6139
|
| 2 |
+
antlr4/BufferedTokenStream.py,sha256=_BwmzOH1TO6yL2yC_ZaUzkghq8wzc0UPHfI3UpnZUwM,10780
|
| 3 |
+
antlr4/CommonTokenFactory.py,sha256=Tv16zg_pWD1Dv3IphsxFu8nwWdLeXYcqJ8CC5yHwjH8,2110
|
| 4 |
+
antlr4/CommonTokenStream.py,sha256=NNJHXwRg2_Zn46ZhJyDxZtvZzsPWhb6JjXa7BjM45eg,2770
|
| 5 |
+
antlr4/FileStream.py,sha256=-ZR_-jl_If9IIBYLINIwlQrlTSmu5k1VUKDc3ie7WR4,868
|
| 6 |
+
antlr4/InputStream.py,sha256=sggjE2jEGvSgQmxFvqeeuT3aOVgcH5tS7mMybW8wKS4,2334
|
| 7 |
+
antlr4/IntervalSet.py,sha256=Cd0WKhd_kYbiLYKkDNncgSM19GAuS7OaTOC4-5Yubs4,5965
|
| 8 |
+
antlr4/LL1Analyzer.py,sha256=oJBvO7_S8cAlb_D4qWNxd2IlK0qP4ka-oeoDxx16CZ4,7752
|
| 9 |
+
antlr4/Lexer.py,sha256=C72hqayfkympxb46AcSnhPD9kVZ0quWgboGxa6gcIcg,11542
|
| 10 |
+
antlr4/ListTokenSource.py,sha256=IffLMo7YQnD_CjKryrrgNWSk0q5QSYd7puZyyUk7vOk,5356
|
| 11 |
+
antlr4/Parser.py,sha256=F2Q25z0-__KHfa354KQhDu3ZOVzLFfag3s2ixJ4dl_o,22883
|
| 12 |
+
antlr4/ParserInterpreter.py,sha256=-QU9kn4x3WCQ-LSA99R231HoicTqakiHZ5KM72l-hIo,7206
|
| 13 |
+
antlr4/ParserRuleContext.py,sha256=wHAVdOxMAO5jkUqloTXVzn_xYnJhiHbvvuhZpth0ZF8,6762
|
| 14 |
+
antlr4/PredictionContext.py,sha256=cb4KI6EGpS7sRzJ8UvPEkxphINZuWhyiZ95752g3prI,22977
|
| 15 |
+
antlr4/Recognizer.py,sha256=vmKAtSjIgR9LQr5YzuK5OmPZWMJ3x69OuVZQ_FTzQHE,5383
|
| 16 |
+
antlr4/RuleContext.py,sha256=GiviRv2k_al1IBgdJOEEoD0ohJaVd-_h5T_CPG_Bsmg,8099
|
| 17 |
+
antlr4/StdinStream.py,sha256=MMSH4zN8T6i_nu-3_TlN-3E4nPM4b5KgK4GT6n_FUQA,303
|
| 18 |
+
antlr4/Token.py,sha256=OtWCab4Ut52X_nLLAA-8x4Zl6xaF6TEN-0033uaoaEo,5206
|
| 19 |
+
antlr4/TokenStreamRewriter.py,sha256=cuErQTrXwC_0kqVv3MsTWGZSm-E1Vy1yzA-3SOhKd_s,10324
|
| 20 |
+
antlr4/Utils.py,sha256=Oyg8CJCRL1TrF_QSB_LLlVdWOB4loVcKOgFNT-icO7c,931
|
| 21 |
+
antlr4/__init__.py,sha256=g8UGpflnlMWcAyLtihejzrgAP1Uo3b9GhwfI8QnZjtw,1125
|
| 22 |
+
antlr4/__pycache__/BufferedTokenStream.cpython-38.pyc,,
|
| 23 |
+
antlr4/__pycache__/CommonTokenFactory.cpython-38.pyc,,
|
| 24 |
+
antlr4/__pycache__/CommonTokenStream.cpython-38.pyc,,
|
| 25 |
+
antlr4/__pycache__/FileStream.cpython-38.pyc,,
|
| 26 |
+
antlr4/__pycache__/InputStream.cpython-38.pyc,,
|
| 27 |
+
antlr4/__pycache__/IntervalSet.cpython-38.pyc,,
|
| 28 |
+
antlr4/__pycache__/LL1Analyzer.cpython-38.pyc,,
|
| 29 |
+
antlr4/__pycache__/Lexer.cpython-38.pyc,,
|
| 30 |
+
antlr4/__pycache__/ListTokenSource.cpython-38.pyc,,
|
| 31 |
+
antlr4/__pycache__/Parser.cpython-38.pyc,,
|
| 32 |
+
antlr4/__pycache__/ParserInterpreter.cpython-38.pyc,,
|
| 33 |
+
antlr4/__pycache__/ParserRuleContext.cpython-38.pyc,,
|
| 34 |
+
antlr4/__pycache__/PredictionContext.cpython-38.pyc,,
|
| 35 |
+
antlr4/__pycache__/Recognizer.cpython-38.pyc,,
|
| 36 |
+
antlr4/__pycache__/RuleContext.cpython-38.pyc,,
|
| 37 |
+
antlr4/__pycache__/StdinStream.cpython-38.pyc,,
|
| 38 |
+
antlr4/__pycache__/Token.cpython-38.pyc,,
|
| 39 |
+
antlr4/__pycache__/TokenStreamRewriter.cpython-38.pyc,,
|
| 40 |
+
antlr4/__pycache__/Utils.cpython-38.pyc,,
|
| 41 |
+
antlr4/__pycache__/__init__.cpython-38.pyc,,
|
| 42 |
+
antlr4/atn/ATN.py,sha256=LYE8kT-D8FpUd5fpOtyOLqvXLFkUSa83TVFowhCWAiY,5789
|
| 43 |
+
antlr4/atn/ATNConfig.py,sha256=tNdIC6_GrxXllHBx3npAWyDh6KrohLZDV_XyPrydRMY,6565
|
| 44 |
+
antlr4/atn/ATNConfigSet.py,sha256=qRzVsBeMqk2txjG3DrGptwF6Vb2hHC5w3umkSL0GNJw,8312
|
| 45 |
+
antlr4/atn/ATNDeserializationOptions.py,sha256=lUV_bGW6mxj7t20esda5Yv-X9m-U_x1-0xaLifhXIPo,1010
|
| 46 |
+
antlr4/atn/ATNDeserializer.py,sha256=aYLDDtQ-wyo3gId6A-wD1E3QmpfrPZlXxj4_IDm-mUY,22252
|
| 47 |
+
antlr4/atn/ATNSimulator.py,sha256=mDc-G3GF3kSeqpfGDabUOLJ0WLVTqibxZlkvXQYmBRk,2298
|
| 48 |
+
antlr4/atn/ATNState.py,sha256=NbndISWUwFDF_vuBfbTiZZ8GPHoQa6UXdqbD-yjJE7c,7663
|
| 49 |
+
antlr4/atn/ATNType.py,sha256=xgv8AMVU7tc07U73_hRTm1AiZ7MvGhoaP5fTiOrrCGg,422
|
| 50 |
+
antlr4/atn/LexerATNSimulator.py,sha256=kYXRwUvHptSRU8T_K9pSrGlCk9YypWeHlAcjgry1VVo,25465
|
| 51 |
+
antlr4/atn/LexerAction.py,sha256=KUeJwKekBch0m1poSPskHIh-15dcKAG4lR7zlq98tzc,10014
|
| 52 |
+
antlr4/atn/LexerActionExecutor.py,sha256=7rlg17THcwLsuTmh7NsLrTbRH4DTrm8qIdW9_235CEc,6420
|
| 53 |
+
antlr4/atn/ParserATNSimulator.py,sha256=IKCzsDLcznROSVojU-daAygKr3svl0DmK5DhkUllASY,80365
|
| 54 |
+
antlr4/atn/PredictionMode.py,sha256=i8B7MULA7v-qbXeCY_xp6sgi21kHM6kybqIrG6rSrro,22486
|
| 55 |
+
antlr4/atn/SemanticContext.py,sha256=ds0TmM4qenb0LN-rl2Fp_N_xB959abN67I19EF6rs8o,10495
|
| 56 |
+
antlr4/atn/Transition.py,sha256=ZAsEFpa5I_n-zxD6U-DauM5_33jFK65x3PWu6-NW0RA,8762
|
| 57 |
+
antlr4/atn/__init__.py,sha256=gsnQdtTH8IUgCiVUpQfzhxx2pFRvksW76SjwIk3fYSk,28
|
| 58 |
+
antlr4/atn/__pycache__/ATN.cpython-38.pyc,,
|
| 59 |
+
antlr4/atn/__pycache__/ATNConfig.cpython-38.pyc,,
|
| 60 |
+
antlr4/atn/__pycache__/ATNConfigSet.cpython-38.pyc,,
|
| 61 |
+
antlr4/atn/__pycache__/ATNDeserializationOptions.cpython-38.pyc,,
|
| 62 |
+
antlr4/atn/__pycache__/ATNDeserializer.cpython-38.pyc,,
|
| 63 |
+
antlr4/atn/__pycache__/ATNSimulator.cpython-38.pyc,,
|
| 64 |
+
antlr4/atn/__pycache__/ATNState.cpython-38.pyc,,
|
| 65 |
+
antlr4/atn/__pycache__/ATNType.cpython-38.pyc,,
|
| 66 |
+
antlr4/atn/__pycache__/LexerATNSimulator.cpython-38.pyc,,
|
| 67 |
+
antlr4/atn/__pycache__/LexerAction.cpython-38.pyc,,
|
| 68 |
+
antlr4/atn/__pycache__/LexerActionExecutor.cpython-38.pyc,,
|
| 69 |
+
antlr4/atn/__pycache__/ParserATNSimulator.cpython-38.pyc,,
|
| 70 |
+
antlr4/atn/__pycache__/PredictionMode.cpython-38.pyc,,
|
| 71 |
+
antlr4/atn/__pycache__/SemanticContext.cpython-38.pyc,,
|
| 72 |
+
antlr4/atn/__pycache__/Transition.cpython-38.pyc,,
|
| 73 |
+
antlr4/atn/__pycache__/__init__.cpython-38.pyc,,
|
| 74 |
+
antlr4/dfa/DFA.py,sha256=weIh0uaRfakP12mFvHo7U0tqO3GONV3-nHFkc2xk-ZE,5388
|
| 75 |
+
antlr4/dfa/DFASerializer.py,sha256=1st_HO85yXLYy7gInTEnkztgA6am4CT-yReh-mazp9E,2518
|
| 76 |
+
antlr4/dfa/DFAState.py,sha256=R7JwKf0GtAEs9J_MD_Y0WKcuzdt0BVX1sow-uv9yFYc,5583
|
| 77 |
+
antlr4/dfa/__init__.py,sha256=gsnQdtTH8IUgCiVUpQfzhxx2pFRvksW76SjwIk3fYSk,28
|
| 78 |
+
antlr4/dfa/__pycache__/DFA.cpython-38.pyc,,
|
| 79 |
+
antlr4/dfa/__pycache__/DFASerializer.cpython-38.pyc,,
|
| 80 |
+
antlr4/dfa/__pycache__/DFAState.cpython-38.pyc,,
|
| 81 |
+
antlr4/dfa/__pycache__/__init__.cpython-38.pyc,,
|
| 82 |
+
antlr4/error/DiagnosticErrorListener.py,sha256=EwS2D_Ox6CmvCa16NPJ9ud4QYPHmlPXt6-Wdn1h5Kg8,4430
|
| 83 |
+
antlr4/error/ErrorListener.py,sha256=yP_MDguol4Cj0_pEPyNzeH3v4ZvUjW5iwDjhYTVAHbE,2722
|
| 84 |
+
antlr4/error/ErrorStrategy.py,sha256=0mhzFL57ZVnjKkGrtadta93Zm3NXdF-HW10DVD07VXs,30391
|
| 85 |
+
antlr4/error/Errors.py,sha256=hlKngclBfXdkDiAymhYsvh2OCXlvmHM2kTl_A1vgp-w,6759
|
| 86 |
+
antlr4/error/__init__.py,sha256=gsnQdtTH8IUgCiVUpQfzhxx2pFRvksW76SjwIk3fYSk,28
|
| 87 |
+
antlr4/error/__pycache__/DiagnosticErrorListener.cpython-38.pyc,,
|
| 88 |
+
antlr4/error/__pycache__/ErrorListener.cpython-38.pyc,,
|
| 89 |
+
antlr4/error/__pycache__/ErrorStrategy.cpython-38.pyc,,
|
| 90 |
+
antlr4/error/__pycache__/Errors.cpython-38.pyc,,
|
| 91 |
+
antlr4/error/__pycache__/__init__.cpython-38.pyc,,
|
| 92 |
+
antlr4/tree/Chunk.py,sha256=oCIZjolLq9xkxtVDROEDxfUGgndcEnsDW0eUmLM7Gpk,695
|
| 93 |
+
antlr4/tree/ParseTreeMatch.py,sha256=Dc6GVWSUqoIAFXUaUZqUwCUlZfTcgUbGLGzNf6QxQvE,4485
|
| 94 |
+
antlr4/tree/ParseTreePattern.py,sha256=ASBNaQORh3f7f8KnFeZJC2yWFFx4uQlxvC2Y55ifhY0,2825
|
| 95 |
+
antlr4/tree/ParseTreePatternMatcher.py,sha256=HtE9yi1Urr2QPLGLJDBvr0lxv6bjuj9CHl-4clahSe8,16388
|
| 96 |
+
antlr4/tree/RuleTagToken.py,sha256=n4zXcmrrfsGyl91pj5ZYcc_CeKMhPrvYkUdppgMBpbY,2022
|
| 97 |
+
antlr4/tree/TokenTagToken.py,sha256=S3o3DJhfzL5kpClxsKyI-Il-xvuuZQiBAIsLCKFjRHo,1576
|
| 98 |
+
antlr4/tree/Tree.py,sha256=ZI7U_5IxBLm_IrnfJOtb12BCPIWyzfeZtLnhHKVVZIw,5572
|
| 99 |
+
antlr4/tree/Trees.py,sha256=JtQ7cYWmKwI9TIBP6y9XIgjlNS4mYjv3ARwOfwWc5Vg,3968
|
| 100 |
+
antlr4/tree/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 101 |
+
antlr4/tree/__pycache__/Chunk.cpython-38.pyc,,
|
| 102 |
+
antlr4/tree/__pycache__/ParseTreeMatch.cpython-38.pyc,,
|
| 103 |
+
antlr4/tree/__pycache__/ParseTreePattern.cpython-38.pyc,,
|
| 104 |
+
antlr4/tree/__pycache__/ParseTreePatternMatcher.cpython-38.pyc,,
|
| 105 |
+
antlr4/tree/__pycache__/RuleTagToken.cpython-38.pyc,,
|
| 106 |
+
antlr4/tree/__pycache__/TokenTagToken.cpython-38.pyc,,
|
| 107 |
+
antlr4/tree/__pycache__/Tree.cpython-38.pyc,,
|
| 108 |
+
antlr4/tree/__pycache__/Trees.cpython-38.pyc,,
|
| 109 |
+
antlr4/tree/__pycache__/__init__.cpython-38.pyc,,
|
| 110 |
+
antlr4/xpath/XPath.py,sha256=O9s4-EDvLbAbYytP_bae9Z2khLl0iAtRzPAtVbuWUM4,13015
|
| 111 |
+
antlr4/xpath/__init__.py,sha256=gsnQdtTH8IUgCiVUpQfzhxx2pFRvksW76SjwIk3fYSk,28
|
| 112 |
+
antlr4/xpath/__pycache__/XPath.cpython-38.pyc,,
|
| 113 |
+
antlr4/xpath/__pycache__/__init__.cpython-38.pyc,,
|
| 114 |
+
antlr4_python3_runtime-4.9.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
| 115 |
+
antlr4_python3_runtime-4.9.3.dist-info/METADATA,sha256=dTjDfhCFn6_28g1-_VewZKwBUrfG-ttt11anMui4jRU,291
|
| 116 |
+
antlr4_python3_runtime-4.9.3.dist-info/RECORD,,
|
| 117 |
+
antlr4_python3_runtime-4.9.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
| 118 |
+
antlr4_python3_runtime-4.9.3.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
|
| 119 |
+
antlr4_python3_runtime-4.9.3.dist-info/top_level.txt,sha256=OsoZsh9bb30wgXb2zBUjdDwYg46MfV-RVZA6Pk8pcB0,7
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/REQUESTED
ADDED
|
File without changes
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/WHEEL
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Wheel-Version: 1.0
|
| 2 |
+
Generator: bdist_wheel (0.36.2)
|
| 3 |
+
Root-Is-Purelib: true
|
| 4 |
+
Tag: py3-none-any
|
| 5 |
+
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/antlr4_python3_runtime-4.9.3.dist-info/top_level.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
antlr4
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/__init__.pyi
ADDED
|
@@ -0,0 +1,484 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
|
| 3 |
+
from typing import (
|
| 4 |
+
Any,
|
| 5 |
+
Callable,
|
| 6 |
+
Dict,
|
| 7 |
+
Generic,
|
| 8 |
+
List,
|
| 9 |
+
Mapping,
|
| 10 |
+
Optional,
|
| 11 |
+
Sequence,
|
| 12 |
+
Tuple,
|
| 13 |
+
Type,
|
| 14 |
+
TypeVar,
|
| 15 |
+
Union,
|
| 16 |
+
overload,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
# `import X as X` is required to make these public
|
| 20 |
+
from . import converters as converters
|
| 21 |
+
from . import exceptions as exceptions
|
| 22 |
+
from . import filters as filters
|
| 23 |
+
from . import setters as setters
|
| 24 |
+
from . import validators as validators
|
| 25 |
+
from ._version_info import VersionInfo
|
| 26 |
+
|
| 27 |
+
__version__: str
|
| 28 |
+
__version_info__: VersionInfo
|
| 29 |
+
__title__: str
|
| 30 |
+
__description__: str
|
| 31 |
+
__url__: str
|
| 32 |
+
__uri__: str
|
| 33 |
+
__author__: str
|
| 34 |
+
__email__: str
|
| 35 |
+
__license__: str
|
| 36 |
+
__copyright__: str
|
| 37 |
+
|
| 38 |
+
_T = TypeVar("_T")
|
| 39 |
+
_C = TypeVar("_C", bound=type)
|
| 40 |
+
|
| 41 |
+
_EqOrderType = Union[bool, Callable[[Any], Any]]
|
| 42 |
+
_ValidatorType = Callable[[Any, Attribute[_T], _T], Any]
|
| 43 |
+
_ConverterType = Callable[[Any], Any]
|
| 44 |
+
_FilterType = Callable[[Attribute[_T], _T], bool]
|
| 45 |
+
_ReprType = Callable[[Any], str]
|
| 46 |
+
_ReprArgType = Union[bool, _ReprType]
|
| 47 |
+
_OnSetAttrType = Callable[[Any, Attribute[Any], Any], Any]
|
| 48 |
+
_OnSetAttrArgType = Union[
|
| 49 |
+
_OnSetAttrType, List[_OnSetAttrType], setters._NoOpType
|
| 50 |
+
]
|
| 51 |
+
_FieldTransformer = Callable[
|
| 52 |
+
[type, List[Attribute[Any]]], List[Attribute[Any]]
|
| 53 |
+
]
|
| 54 |
+
_CompareWithType = Callable[[Any, Any], bool]
|
| 55 |
+
# FIXME: in reality, if multiple validators are passed they must be in a list
|
| 56 |
+
# or tuple, but those are invariant and so would prevent subtypes of
|
| 57 |
+
# _ValidatorType from working when passed in a list or tuple.
|
| 58 |
+
_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]]
|
| 59 |
+
|
| 60 |
+
# _make --
|
| 61 |
+
|
| 62 |
+
NOTHING: object
|
| 63 |
+
|
| 64 |
+
# NOTE: Factory lies about its return type to make this possible:
|
| 65 |
+
# `x: List[int] # = Factory(list)`
|
| 66 |
+
# Work around mypy issue #4554 in the common case by using an overload.
|
| 67 |
+
if sys.version_info >= (3, 8):
|
| 68 |
+
from typing import Literal
|
| 69 |
+
@overload
|
| 70 |
+
def Factory(factory: Callable[[], _T]) -> _T: ...
|
| 71 |
+
@overload
|
| 72 |
+
def Factory(
|
| 73 |
+
factory: Callable[[Any], _T],
|
| 74 |
+
takes_self: Literal[True],
|
| 75 |
+
) -> _T: ...
|
| 76 |
+
@overload
|
| 77 |
+
def Factory(
|
| 78 |
+
factory: Callable[[], _T],
|
| 79 |
+
takes_self: Literal[False],
|
| 80 |
+
) -> _T: ...
|
| 81 |
+
|
| 82 |
+
else:
|
| 83 |
+
@overload
|
| 84 |
+
def Factory(factory: Callable[[], _T]) -> _T: ...
|
| 85 |
+
@overload
|
| 86 |
+
def Factory(
|
| 87 |
+
factory: Union[Callable[[Any], _T], Callable[[], _T]],
|
| 88 |
+
takes_self: bool = ...,
|
| 89 |
+
) -> _T: ...
|
| 90 |
+
|
| 91 |
+
# Static type inference support via __dataclass_transform__ implemented as per:
|
| 92 |
+
# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md
|
| 93 |
+
# This annotation must be applied to all overloads of "define" and "attrs"
|
| 94 |
+
#
|
| 95 |
+
# NOTE: This is a typing construct and does not exist at runtime. Extensions
|
| 96 |
+
# wrapping attrs decorators should declare a separate __dataclass_transform__
|
| 97 |
+
# signature in the extension module using the specification linked above to
|
| 98 |
+
# provide pyright support.
|
| 99 |
+
def __dataclass_transform__(
|
| 100 |
+
*,
|
| 101 |
+
eq_default: bool = True,
|
| 102 |
+
order_default: bool = False,
|
| 103 |
+
kw_only_default: bool = False,
|
| 104 |
+
field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()),
|
| 105 |
+
) -> Callable[[_T], _T]: ...
|
| 106 |
+
|
| 107 |
+
class Attribute(Generic[_T]):
|
| 108 |
+
name: str
|
| 109 |
+
default: Optional[_T]
|
| 110 |
+
validator: Optional[_ValidatorType[_T]]
|
| 111 |
+
repr: _ReprArgType
|
| 112 |
+
cmp: _EqOrderType
|
| 113 |
+
eq: _EqOrderType
|
| 114 |
+
order: _EqOrderType
|
| 115 |
+
hash: Optional[bool]
|
| 116 |
+
init: bool
|
| 117 |
+
converter: Optional[_ConverterType]
|
| 118 |
+
metadata: Dict[Any, Any]
|
| 119 |
+
type: Optional[Type[_T]]
|
| 120 |
+
kw_only: bool
|
| 121 |
+
on_setattr: _OnSetAttrType
|
| 122 |
+
def evolve(self, **changes: Any) -> "Attribute[Any]": ...
|
| 123 |
+
|
| 124 |
+
# NOTE: We had several choices for the annotation to use for type arg:
|
| 125 |
+
# 1) Type[_T]
|
| 126 |
+
# - Pros: Handles simple cases correctly
|
| 127 |
+
# - Cons: Might produce less informative errors in the case of conflicting
|
| 128 |
+
# TypeVars e.g. `attr.ib(default='bad', type=int)`
|
| 129 |
+
# 2) Callable[..., _T]
|
| 130 |
+
# - Pros: Better error messages than #1 for conflicting TypeVars
|
| 131 |
+
# - Cons: Terrible error messages for validator checks.
|
| 132 |
+
# e.g. attr.ib(type=int, validator=validate_str)
|
| 133 |
+
# -> error: Cannot infer function type argument
|
| 134 |
+
# 3) type (and do all of the work in the mypy plugin)
|
| 135 |
+
# - Pros: Simple here, and we could customize the plugin with our own errors.
|
| 136 |
+
# - Cons: Would need to write mypy plugin code to handle all the cases.
|
| 137 |
+
# We chose option #1.
|
| 138 |
+
|
| 139 |
+
# `attr` lies about its return type to make the following possible:
|
| 140 |
+
# attr() -> Any
|
| 141 |
+
# attr(8) -> int
|
| 142 |
+
# attr(validator=<some callable>) -> Whatever the callable expects.
|
| 143 |
+
# This makes this type of assignments possible:
|
| 144 |
+
# x: int = attr(8)
|
| 145 |
+
#
|
| 146 |
+
# This form catches explicit None or no default but with no other arguments
|
| 147 |
+
# returns Any.
|
| 148 |
+
@overload
|
| 149 |
+
def attrib(
|
| 150 |
+
default: None = ...,
|
| 151 |
+
validator: None = ...,
|
| 152 |
+
repr: _ReprArgType = ...,
|
| 153 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 154 |
+
hash: Optional[bool] = ...,
|
| 155 |
+
init: bool = ...,
|
| 156 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 157 |
+
type: None = ...,
|
| 158 |
+
converter: None = ...,
|
| 159 |
+
factory: None = ...,
|
| 160 |
+
kw_only: bool = ...,
|
| 161 |
+
eq: Optional[_EqOrderType] = ...,
|
| 162 |
+
order: Optional[_EqOrderType] = ...,
|
| 163 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 164 |
+
) -> Any: ...
|
| 165 |
+
|
| 166 |
+
# This form catches an explicit None or no default and infers the type from the
|
| 167 |
+
# other arguments.
|
| 168 |
+
@overload
|
| 169 |
+
def attrib(
|
| 170 |
+
default: None = ...,
|
| 171 |
+
validator: Optional[_ValidatorArgType[_T]] = ...,
|
| 172 |
+
repr: _ReprArgType = ...,
|
| 173 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 174 |
+
hash: Optional[bool] = ...,
|
| 175 |
+
init: bool = ...,
|
| 176 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 177 |
+
type: Optional[Type[_T]] = ...,
|
| 178 |
+
converter: Optional[_ConverterType] = ...,
|
| 179 |
+
factory: Optional[Callable[[], _T]] = ...,
|
| 180 |
+
kw_only: bool = ...,
|
| 181 |
+
eq: Optional[_EqOrderType] = ...,
|
| 182 |
+
order: Optional[_EqOrderType] = ...,
|
| 183 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 184 |
+
) -> _T: ...
|
| 185 |
+
|
| 186 |
+
# This form catches an explicit default argument.
|
| 187 |
+
@overload
|
| 188 |
+
def attrib(
|
| 189 |
+
default: _T,
|
| 190 |
+
validator: Optional[_ValidatorArgType[_T]] = ...,
|
| 191 |
+
repr: _ReprArgType = ...,
|
| 192 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 193 |
+
hash: Optional[bool] = ...,
|
| 194 |
+
init: bool = ...,
|
| 195 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 196 |
+
type: Optional[Type[_T]] = ...,
|
| 197 |
+
converter: Optional[_ConverterType] = ...,
|
| 198 |
+
factory: Optional[Callable[[], _T]] = ...,
|
| 199 |
+
kw_only: bool = ...,
|
| 200 |
+
eq: Optional[_EqOrderType] = ...,
|
| 201 |
+
order: Optional[_EqOrderType] = ...,
|
| 202 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 203 |
+
) -> _T: ...
|
| 204 |
+
|
| 205 |
+
# This form covers type=non-Type: e.g. forward references (str), Any
|
| 206 |
+
@overload
|
| 207 |
+
def attrib(
|
| 208 |
+
default: Optional[_T] = ...,
|
| 209 |
+
validator: Optional[_ValidatorArgType[_T]] = ...,
|
| 210 |
+
repr: _ReprArgType = ...,
|
| 211 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 212 |
+
hash: Optional[bool] = ...,
|
| 213 |
+
init: bool = ...,
|
| 214 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 215 |
+
type: object = ...,
|
| 216 |
+
converter: Optional[_ConverterType] = ...,
|
| 217 |
+
factory: Optional[Callable[[], _T]] = ...,
|
| 218 |
+
kw_only: bool = ...,
|
| 219 |
+
eq: Optional[_EqOrderType] = ...,
|
| 220 |
+
order: Optional[_EqOrderType] = ...,
|
| 221 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 222 |
+
) -> Any: ...
|
| 223 |
+
@overload
|
| 224 |
+
def field(
|
| 225 |
+
*,
|
| 226 |
+
default: None = ...,
|
| 227 |
+
validator: None = ...,
|
| 228 |
+
repr: _ReprArgType = ...,
|
| 229 |
+
hash: Optional[bool] = ...,
|
| 230 |
+
init: bool = ...,
|
| 231 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 232 |
+
converter: None = ...,
|
| 233 |
+
factory: None = ...,
|
| 234 |
+
kw_only: bool = ...,
|
| 235 |
+
eq: Optional[bool] = ...,
|
| 236 |
+
order: Optional[bool] = ...,
|
| 237 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 238 |
+
) -> Any: ...
|
| 239 |
+
|
| 240 |
+
# This form catches an explicit None or no default and infers the type from the
|
| 241 |
+
# other arguments.
|
| 242 |
+
@overload
|
| 243 |
+
def field(
|
| 244 |
+
*,
|
| 245 |
+
default: None = ...,
|
| 246 |
+
validator: Optional[_ValidatorArgType[_T]] = ...,
|
| 247 |
+
repr: _ReprArgType = ...,
|
| 248 |
+
hash: Optional[bool] = ...,
|
| 249 |
+
init: bool = ...,
|
| 250 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 251 |
+
converter: Optional[_ConverterType] = ...,
|
| 252 |
+
factory: Optional[Callable[[], _T]] = ...,
|
| 253 |
+
kw_only: bool = ...,
|
| 254 |
+
eq: Optional[_EqOrderType] = ...,
|
| 255 |
+
order: Optional[_EqOrderType] = ...,
|
| 256 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 257 |
+
) -> _T: ...
|
| 258 |
+
|
| 259 |
+
# This form catches an explicit default argument.
|
| 260 |
+
@overload
|
| 261 |
+
def field(
|
| 262 |
+
*,
|
| 263 |
+
default: _T,
|
| 264 |
+
validator: Optional[_ValidatorArgType[_T]] = ...,
|
| 265 |
+
repr: _ReprArgType = ...,
|
| 266 |
+
hash: Optional[bool] = ...,
|
| 267 |
+
init: bool = ...,
|
| 268 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 269 |
+
converter: Optional[_ConverterType] = ...,
|
| 270 |
+
factory: Optional[Callable[[], _T]] = ...,
|
| 271 |
+
kw_only: bool = ...,
|
| 272 |
+
eq: Optional[_EqOrderType] = ...,
|
| 273 |
+
order: Optional[_EqOrderType] = ...,
|
| 274 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 275 |
+
) -> _T: ...
|
| 276 |
+
|
| 277 |
+
# This form covers type=non-Type: e.g. forward references (str), Any
|
| 278 |
+
@overload
|
| 279 |
+
def field(
|
| 280 |
+
*,
|
| 281 |
+
default: Optional[_T] = ...,
|
| 282 |
+
validator: Optional[_ValidatorArgType[_T]] = ...,
|
| 283 |
+
repr: _ReprArgType = ...,
|
| 284 |
+
hash: Optional[bool] = ...,
|
| 285 |
+
init: bool = ...,
|
| 286 |
+
metadata: Optional[Mapping[Any, Any]] = ...,
|
| 287 |
+
converter: Optional[_ConverterType] = ...,
|
| 288 |
+
factory: Optional[Callable[[], _T]] = ...,
|
| 289 |
+
kw_only: bool = ...,
|
| 290 |
+
eq: Optional[_EqOrderType] = ...,
|
| 291 |
+
order: Optional[_EqOrderType] = ...,
|
| 292 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 293 |
+
) -> Any: ...
|
| 294 |
+
@overload
|
| 295 |
+
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
|
| 296 |
+
def attrs(
|
| 297 |
+
maybe_cls: _C,
|
| 298 |
+
these: Optional[Dict[str, Any]] = ...,
|
| 299 |
+
repr_ns: Optional[str] = ...,
|
| 300 |
+
repr: bool = ...,
|
| 301 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 302 |
+
hash: Optional[bool] = ...,
|
| 303 |
+
init: bool = ...,
|
| 304 |
+
slots: bool = ...,
|
| 305 |
+
frozen: bool = ...,
|
| 306 |
+
weakref_slot: bool = ...,
|
| 307 |
+
str: bool = ...,
|
| 308 |
+
auto_attribs: bool = ...,
|
| 309 |
+
kw_only: bool = ...,
|
| 310 |
+
cache_hash: bool = ...,
|
| 311 |
+
auto_exc: bool = ...,
|
| 312 |
+
eq: Optional[_EqOrderType] = ...,
|
| 313 |
+
order: Optional[_EqOrderType] = ...,
|
| 314 |
+
auto_detect: bool = ...,
|
| 315 |
+
collect_by_mro: bool = ...,
|
| 316 |
+
getstate_setstate: Optional[bool] = ...,
|
| 317 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 318 |
+
field_transformer: Optional[_FieldTransformer] = ...,
|
| 319 |
+
match_args: bool = ...,
|
| 320 |
+
) -> _C: ...
|
| 321 |
+
@overload
|
| 322 |
+
@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field))
|
| 323 |
+
def attrs(
|
| 324 |
+
maybe_cls: None = ...,
|
| 325 |
+
these: Optional[Dict[str, Any]] = ...,
|
| 326 |
+
repr_ns: Optional[str] = ...,
|
| 327 |
+
repr: bool = ...,
|
| 328 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 329 |
+
hash: Optional[bool] = ...,
|
| 330 |
+
init: bool = ...,
|
| 331 |
+
slots: bool = ...,
|
| 332 |
+
frozen: bool = ...,
|
| 333 |
+
weakref_slot: bool = ...,
|
| 334 |
+
str: bool = ...,
|
| 335 |
+
auto_attribs: bool = ...,
|
| 336 |
+
kw_only: bool = ...,
|
| 337 |
+
cache_hash: bool = ...,
|
| 338 |
+
auto_exc: bool = ...,
|
| 339 |
+
eq: Optional[_EqOrderType] = ...,
|
| 340 |
+
order: Optional[_EqOrderType] = ...,
|
| 341 |
+
auto_detect: bool = ...,
|
| 342 |
+
collect_by_mro: bool = ...,
|
| 343 |
+
getstate_setstate: Optional[bool] = ...,
|
| 344 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 345 |
+
field_transformer: Optional[_FieldTransformer] = ...,
|
| 346 |
+
match_args: bool = ...,
|
| 347 |
+
) -> Callable[[_C], _C]: ...
|
| 348 |
+
@overload
|
| 349 |
+
@__dataclass_transform__(field_descriptors=(attrib, field))
|
| 350 |
+
def define(
|
| 351 |
+
maybe_cls: _C,
|
| 352 |
+
*,
|
| 353 |
+
these: Optional[Dict[str, Any]] = ...,
|
| 354 |
+
repr: bool = ...,
|
| 355 |
+
hash: Optional[bool] = ...,
|
| 356 |
+
init: bool = ...,
|
| 357 |
+
slots: bool = ...,
|
| 358 |
+
frozen: bool = ...,
|
| 359 |
+
weakref_slot: bool = ...,
|
| 360 |
+
str: bool = ...,
|
| 361 |
+
auto_attribs: bool = ...,
|
| 362 |
+
kw_only: bool = ...,
|
| 363 |
+
cache_hash: bool = ...,
|
| 364 |
+
auto_exc: bool = ...,
|
| 365 |
+
eq: Optional[bool] = ...,
|
| 366 |
+
order: Optional[bool] = ...,
|
| 367 |
+
auto_detect: bool = ...,
|
| 368 |
+
getstate_setstate: Optional[bool] = ...,
|
| 369 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 370 |
+
field_transformer: Optional[_FieldTransformer] = ...,
|
| 371 |
+
match_args: bool = ...,
|
| 372 |
+
) -> _C: ...
|
| 373 |
+
@overload
|
| 374 |
+
@__dataclass_transform__(field_descriptors=(attrib, field))
|
| 375 |
+
def define(
|
| 376 |
+
maybe_cls: None = ...,
|
| 377 |
+
*,
|
| 378 |
+
these: Optional[Dict[str, Any]] = ...,
|
| 379 |
+
repr: bool = ...,
|
| 380 |
+
hash: Optional[bool] = ...,
|
| 381 |
+
init: bool = ...,
|
| 382 |
+
slots: bool = ...,
|
| 383 |
+
frozen: bool = ...,
|
| 384 |
+
weakref_slot: bool = ...,
|
| 385 |
+
str: bool = ...,
|
| 386 |
+
auto_attribs: bool = ...,
|
| 387 |
+
kw_only: bool = ...,
|
| 388 |
+
cache_hash: bool = ...,
|
| 389 |
+
auto_exc: bool = ...,
|
| 390 |
+
eq: Optional[bool] = ...,
|
| 391 |
+
order: Optional[bool] = ...,
|
| 392 |
+
auto_detect: bool = ...,
|
| 393 |
+
getstate_setstate: Optional[bool] = ...,
|
| 394 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 395 |
+
field_transformer: Optional[_FieldTransformer] = ...,
|
| 396 |
+
match_args: bool = ...,
|
| 397 |
+
) -> Callable[[_C], _C]: ...
|
| 398 |
+
|
| 399 |
+
mutable = define
|
| 400 |
+
frozen = define # they differ only in their defaults
|
| 401 |
+
|
| 402 |
+
# TODO: add support for returning NamedTuple from the mypy plugin
|
| 403 |
+
class _Fields(Tuple[Attribute[Any], ...]):
|
| 404 |
+
def __getattr__(self, name: str) -> Attribute[Any]: ...
|
| 405 |
+
|
| 406 |
+
def fields(cls: type) -> _Fields: ...
|
| 407 |
+
def fields_dict(cls: type) -> Dict[str, Attribute[Any]]: ...
|
| 408 |
+
def validate(inst: Any) -> None: ...
|
| 409 |
+
def resolve_types(
|
| 410 |
+
cls: _C,
|
| 411 |
+
globalns: Optional[Dict[str, Any]] = ...,
|
| 412 |
+
localns: Optional[Dict[str, Any]] = ...,
|
| 413 |
+
attribs: Optional[List[Attribute[Any]]] = ...,
|
| 414 |
+
) -> _C: ...
|
| 415 |
+
|
| 416 |
+
# TODO: add support for returning a proper attrs class from the mypy plugin
|
| 417 |
+
# we use Any instead of _CountingAttr so that e.g. `make_class('Foo',
|
| 418 |
+
# [attr.ib()])` is valid
|
| 419 |
+
def make_class(
|
| 420 |
+
name: str,
|
| 421 |
+
attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]],
|
| 422 |
+
bases: Tuple[type, ...] = ...,
|
| 423 |
+
repr_ns: Optional[str] = ...,
|
| 424 |
+
repr: bool = ...,
|
| 425 |
+
cmp: Optional[_EqOrderType] = ...,
|
| 426 |
+
hash: Optional[bool] = ...,
|
| 427 |
+
init: bool = ...,
|
| 428 |
+
slots: bool = ...,
|
| 429 |
+
frozen: bool = ...,
|
| 430 |
+
weakref_slot: bool = ...,
|
| 431 |
+
str: bool = ...,
|
| 432 |
+
auto_attribs: bool = ...,
|
| 433 |
+
kw_only: bool = ...,
|
| 434 |
+
cache_hash: bool = ...,
|
| 435 |
+
auto_exc: bool = ...,
|
| 436 |
+
eq: Optional[_EqOrderType] = ...,
|
| 437 |
+
order: Optional[_EqOrderType] = ...,
|
| 438 |
+
collect_by_mro: bool = ...,
|
| 439 |
+
on_setattr: Optional[_OnSetAttrArgType] = ...,
|
| 440 |
+
field_transformer: Optional[_FieldTransformer] = ...,
|
| 441 |
+
) -> type: ...
|
| 442 |
+
|
| 443 |
+
# _funcs --
|
| 444 |
+
|
| 445 |
+
# TODO: add support for returning TypedDict from the mypy plugin
|
| 446 |
+
# FIXME: asdict/astuple do not honor their factory args. Waiting on one of
|
| 447 |
+
# these:
|
| 448 |
+
# https://github.com/python/mypy/issues/4236
|
| 449 |
+
# https://github.com/python/typing/issues/253
|
| 450 |
+
# XXX: remember to fix attrs.asdict/astuple too!
|
| 451 |
+
def asdict(
|
| 452 |
+
inst: Any,
|
| 453 |
+
recurse: bool = ...,
|
| 454 |
+
filter: Optional[_FilterType[Any]] = ...,
|
| 455 |
+
dict_factory: Type[Mapping[Any, Any]] = ...,
|
| 456 |
+
retain_collection_types: bool = ...,
|
| 457 |
+
value_serializer: Optional[
|
| 458 |
+
Callable[[type, Attribute[Any], Any], Any]
|
| 459 |
+
] = ...,
|
| 460 |
+
tuple_keys: Optional[bool] = ...,
|
| 461 |
+
) -> Dict[str, Any]: ...
|
| 462 |
+
|
| 463 |
+
# TODO: add support for returning NamedTuple from the mypy plugin
|
| 464 |
+
def astuple(
|
| 465 |
+
inst: Any,
|
| 466 |
+
recurse: bool = ...,
|
| 467 |
+
filter: Optional[_FilterType[Any]] = ...,
|
| 468 |
+
tuple_factory: Type[Sequence[Any]] = ...,
|
| 469 |
+
retain_collection_types: bool = ...,
|
| 470 |
+
) -> Tuple[Any, ...]: ...
|
| 471 |
+
def has(cls: type) -> bool: ...
|
| 472 |
+
def assoc(inst: _T, **changes: Any) -> _T: ...
|
| 473 |
+
def evolve(inst: _T, **changes: Any) -> _T: ...
|
| 474 |
+
|
| 475 |
+
# _config --
|
| 476 |
+
|
| 477 |
+
def set_run_validators(run: bool) -> None: ...
|
| 478 |
+
def get_run_validators() -> bool: ...
|
| 479 |
+
|
| 480 |
+
# aliases --
|
| 481 |
+
|
| 482 |
+
s = attributes = attrs
|
| 483 |
+
ib = attr = attrib
|
| 484 |
+
dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;)
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/_cmp.pyi
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Fix: the signature below uses `Optional` but only `Type` was imported,
# leaving `Optional` an unresolved name for stub consumers.
from typing import Optional, Type

from . import _CompareWithType

def cmp_using(
    eq: Optional[_CompareWithType],
    lt: Optional[_CompareWithType],
    le: Optional[_CompareWithType],
    gt: Optional[_CompareWithType],
    ge: Optional[_CompareWithType],
    require_same_type: bool,
    class_name: str,
) -> Type: ...
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/_config.py
ADDED
|
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
from __future__ import absolute_import, division, print_function
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
__all__ = ["set_run_validators", "get_run_validators"]
|
| 7 |
+
|
| 8 |
+
_run_validators = True
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def set_run_validators(run):
    """
    Set whether or not validators are run. By default, they are run.

    :param bool run: ``True`` to enable validators, ``False`` to disable them.
    :raises TypeError: if *run* is not exactly a :class:`bool`.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
        moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()`
        instead.
    """
    global _run_validators
    # Require a real bool -- truthy values like 1 are deliberately rejected.
    if isinstance(run, bool):
        _run_validators = run
    else:
        raise TypeError("'run' must be bool.")
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def get_run_validators():
    """
    Return whether or not validators are run.

    :return: the current module-level flag set by `set_run_validators`;
        ``True`` by default.

    .. deprecated:: 21.3.0 It will not be removed, but it also will not be
       moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()`
       instead.
    """
    return _run_validators
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/_make.py
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/converters.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# SPDX-License-Identifier: MIT
|
| 2 |
+
|
| 3 |
+
"""
|
| 4 |
+
Commonly useful converters.
|
| 5 |
+
"""
|
| 6 |
+
|
| 7 |
+
from __future__ import absolute_import, division, print_function
|
| 8 |
+
|
| 9 |
+
from ._compat import PY2
|
| 10 |
+
from ._make import NOTHING, Factory, pipe
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
if not PY2:
|
| 14 |
+
import inspect
|
| 15 |
+
import typing
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
__all__ = [
|
| 19 |
+
"default_if_none",
|
| 20 |
+
"optional",
|
| 21 |
+
"pipe",
|
| 22 |
+
"to_bool",
|
| 23 |
+
]
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def optional(converter):
    """
    A converter that allows an attribute to be optional. An optional attribute
    is one which can be set to ``None``.

    Type annotations will be inferred from the wrapped converter's, if it
    has any.

    :param callable converter: the converter that is used for non-``None``
        values.

    .. versionadded:: 17.1.0
    """

    def optional_converter(val):
        return None if val is None else converter(val)

    if not PY2:
        # Mirror the wrapped converter's annotations, widened with Optional.
        try:
            sig = inspect.signature(converter)
        except (ValueError, TypeError):  # builtins etc. may not be introspectable
            sig = None
        if sig:
            params = list(sig.parameters.values())
            first = params[0] if params else None
            if first is not None and first.annotation is not inspect.Parameter.empty:
                optional_converter.__annotations__["val"] = typing.Optional[
                    first.annotation
                ]
            if sig.return_annotation is not inspect.Signature.empty:
                optional_converter.__annotations__["return"] = typing.Optional[
                    sig.return_annotation
                ]

    return optional_converter
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def default_if_none(default=NOTHING, factory=None):
    """
    A converter that allows to replace ``None`` values by *default* or the
    result of *factory*.

    :param default: Value to be used if ``None`` is passed. Passing an instance
        of `attrs.Factory` is supported, however the ``takes_self`` option
        is *not*.
    :param callable factory: A callable that takes no parameters whose result
        is used if ``None`` is passed.

    :raises TypeError: If **neither** *default* or *factory* is passed.
    :raises TypeError: If **both** *default* and *factory* are passed.
    :raises ValueError: If an instance of `attrs.Factory` is passed with
        ``takes_self=True``.

    .. versionadded:: 18.2.0
    """
    have_default = default is not NOTHING
    have_factory = factory is not None

    # Exactly one of the two sources must be supplied.
    if not have_default and not have_factory:
        raise TypeError("Must pass either `default` or `factory`.")
    if have_default and have_factory:
        raise TypeError(
            "Must pass either `default` or `factory` but not both."
        )

    if have_factory:
        default = Factory(factory)

    if isinstance(default, Factory):
        if default.takes_self:
            raise ValueError(
                "`takes_self` is not supported by default_if_none."
            )

        def default_if_none_converter(val):
            # Call the factory anew for every None so instances aren't shared.
            return val if val is not None else default.factory()

    else:

        def default_if_none_converter(val):
            return val if val is not None else default

    return default_if_none_converter
|
| 115 |
+
|
| 116 |
+
|
| 117 |
+
def to_bool(val):
    """
    Convert "boolean" strings (e.g., from env. vars.) to real booleans.

    Values mapping to :code:`True`:

    - :code:`True`
    - :code:`"true"` / :code:`"t"`
    - :code:`"yes"` / :code:`"y"`
    - :code:`"on"`
    - :code:`"1"`
    - :code:`1`

    Values mapping to :code:`False`:

    - :code:`False`
    - :code:`"false"` / :code:`"f"`
    - :code:`"no"` / :code:`"n"`
    - :code:`"off"`
    - :code:`"0"`
    - :code:`0`

    :raises ValueError: for any other value.

    .. versionadded:: 21.3.0
    """
    # Comparison is case-insensitive for strings.
    lowered = val.lower() if isinstance(val, str) else val
    try:
        if lowered in {True, "true", "t", "yes", "y", "on", "1", 1}:
            return True
        if lowered in {False, "false", "f", "no", "n", "off", "0", 0}:
            return False
    except TypeError:
        # Unhashable values (e.g. lists) can't be members of either set;
        # fall through to the ValueError below.
        pass
    raise ValueError("Cannot convert value to bool: {}".format(lowered))
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/attr/exceptions.pyi
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from typing import Any

# Stub declarations for attr's exception hierarchy; bodies live in the
# runtime module, only the public shapes are declared here.

class FrozenError(AttributeError):
    # Shared human-readable message for frozen-related errors.
    msg: str = ...

class FrozenInstanceError(FrozenError): ...
class FrozenAttributeError(FrozenError): ...
class AttrsAttributeNotFoundError(ValueError): ...
class NotAnAttrsClassError(ValueError): ...
class DefaultAlreadySetError(RuntimeError): ...
class UnannotatedAttributeError(RuntimeError): ...
class PythonTooOldError(RuntimeError): ...

class NotCallableError(TypeError):
    # Raised when a non-callable is supplied where a callable is required.
    msg: str = ...
    value: Any = ...
    def __init__(self, msg: str, value: Any) -> None: ...
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/__init__.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
"""Initialize the object database module"""
|
| 6 |
+
|
| 7 |
+
import sys
|
| 8 |
+
import os
|
| 9 |
+
|
| 10 |
+
#{ Initialization
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def _init_externals():
    """Initialize external projects by putting them into the path"""
    ext_root = os.path.join(os.path.dirname(__file__), 'ext')
    for module in ('smmap',):
        # Under PyOxidizer the vendored copy is not on disk, so don't
        # extend sys.path there.
        if 'PYOXIDIZER' not in os.environ:
            sys.path.append(os.path.join(ext_root, module))

        try:
            __import__(module)
        except ImportError as e:
            raise ImportError("'%s' could not be imported, assure it is located in your PYTHONPATH" % module) from e
|
| 25 |
+
|
| 26 |
+
#} END initialization
|
| 27 |
+
|
| 28 |
+
_init_externals()
|
| 29 |
+
|
| 30 |
+
__author__ = "Sebastian Thiel"
|
| 31 |
+
__contact__ = "byronimo@gmail.com"
|
| 32 |
+
__homepage__ = "https://github.com/gitpython-developers/gitdb"
|
| 33 |
+
version_info = (4, 0, 9)
|
| 34 |
+
__version__ = '.'.join(str(i) for i in version_info)
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
# default imports
|
| 38 |
+
from gitdb.base import *
|
| 39 |
+
from gitdb.db import *
|
| 40 |
+
from gitdb.stream import *
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/base.py
ADDED
|
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
"""Module with basic data structures - they are designed to be lightweight and fast"""
|
| 6 |
+
from gitdb.util import bin_to_hex
|
| 7 |
+
|
| 8 |
+
from gitdb.fun import (
|
| 9 |
+
type_id_to_type_map,
|
| 10 |
+
type_to_type_id_map
|
| 11 |
+
)
|
| 12 |
+
|
| 13 |
+
__all__ = ('OInfo', 'OPackInfo', 'ODeltaPackInfo',
|
| 14 |
+
'OStream', 'OPackStream', 'ODeltaPackStream',
|
| 15 |
+
'IStream', 'InvalidOInfo', 'InvalidOStream')
|
| 16 |
+
|
| 17 |
+
#{ ODB Bases
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class OInfo(tuple):

    """Carries information about an object in an ODB, providing information
    about the binary sha of the object, the type_string as well as the uncompressed size
    in bytes.

    It can be accessed using tuple notation and using attribute access notation::

        assert dbi[0] == dbi.binsha
        assert dbi[1] == dbi.type
        assert dbi[2] == dbi.size

    The type is designed to be as lightweight as possible."""
    # Empty __slots__ keeps instances free of a per-instance __dict__;
    # all state lives in the tuple itself.
    __slots__ = tuple()

    def __new__(cls, sha, type, size):
        # Tuple layout: (binsha, type_string, size_in_bytes)
        return tuple.__new__(cls, (sha, type, size))

    def __init__(self, *args):
        tuple.__init__(self)

    #{ Interface
    @property
    def binsha(self):
        """:return: our sha as binary, 20 bytes"""
        return self[0]

    @property
    def hexsha(self):
        """:return: our sha, hex encoded, 40 bytes"""
        return bin_to_hex(self[0])

    @property
    def type(self):
        # Object type string as stored in slot 1.
        return self[1]

    @property
    def type_id(self):
        # Numerical type id resolved through the module-level mapping.
        return type_to_type_id_map[self[1]]

    @property
    def size(self):
        # Uncompressed size in bytes.
        return self[2]
    #} END interface
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
class OPackInfo(tuple):

    """As OInfo, but provides a type_id property to retrieve the numerical type id, and
    does not include a sha.

    Additionally, the pack_offset is the absolute offset into the packfile at which
    all object information is located. The data_offset property points to the absolute
    location in the pack at which that actual data stream can be found."""
    # No per-instance __dict__; the tuple carries all state.
    __slots__ = tuple()

    def __new__(cls, packoffset, type, size):
        # Tuple layout: (pack_offset, type_id, size) -- note type is stored
        # numerically here, unlike OInfo which stores the type string.
        return tuple.__new__(cls, (packoffset, type, size))

    def __init__(self, *args):
        tuple.__init__(self)

    #{ Interface

    @property
    def pack_offset(self):
        """:return: absolute offset of this object's entry within the pack"""
        return self[0]

    @property
    def type(self):
        """:return: type string resolved from the stored numerical type id"""
        return type_id_to_type_map[self[1]]

    @property
    def type_id(self):
        """:return: numerical type id as stored in slot 1"""
        return self[1]

    @property
    def size(self):
        """:return: uncompressed size in bytes"""
        return self[2]

    #} END interface
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
class ODeltaPackInfo(OPackInfo):

    """Adds delta specific information,
    Either the 20 byte sha which points to some object in the database,
    or the negative offset from the pack_offset, so that pack_offset - delta_info yields
    the pack offset of the base object"""
    __slots__ = tuple()

    def __new__(cls, packoffset, type, size, delta_info):
        # Extends OPackInfo's layout with a fourth slot:
        # (pack_offset, type_id, size, delta_info)
        return tuple.__new__(cls, (packoffset, type, size, delta_info))

    #{ Interface
    @property
    def delta_info(self):
        # Either a base-object sha or a negative pack offset; see class docstring.
        return self[3]
    #} END interface
|
| 119 |
+
|
| 120 |
+
|
| 121 |
+
class OStream(OInfo):

    """Base for object streams retrieved from the database, providing additional
    information about the stream.
    Generally, ODB streams are read-only as objects are immutable"""
    __slots__ = tuple()

    def __new__(cls, sha, type, size, stream, *args, **kwargs):
        """Helps with the initialization of subclasses"""
        # Tuple layout extends OInfo with the stream: (binsha, type, size, stream)
        return tuple.__new__(cls, (sha, type, size, stream))

    def __init__(self, *args, **kwargs):
        tuple.__init__(self)

    #{ Stream Reader Interface

    def read(self, size=-1):
        # Delegate to the wrapped stream; size=-1 reads to the end.
        return self[3].read(size)

    @property
    def stream(self):
        # The underlying readable object stored in slot 3.
        return self[3]

    #} END stream reader interface
|
| 145 |
+
|
| 146 |
+
|
| 147 |
+
class ODeltaStream(OStream):

    """Uses size info of its stream, delaying reads"""

    # Consistency fix: every sibling O*Info/O*Stream type in this module
    # declares empty __slots__ so instances stay dict-free; this class
    # previously omitted it and paid for an unused per-instance __dict__.
    __slots__ = tuple()

    def __new__(cls, sha, type, size, stream, *args, **kwargs):
        """Helps with the initialization of subclasses"""
        # Same tuple layout as OStream: (binsha, type, size, stream)
        return tuple.__new__(cls, (sha, type, size, stream))

    #{ Stream Reader Interface

    @property
    def size(self):
        # Defer to the wrapped stream's own size attribute rather than the
        # value stored in slot 2 (the stream knows the effective size).
        return self[3].size

    #} END stream reader interface
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
class OPackStream(OPackInfo):

    """Next to pack object information, a stream outputting an undeltified base object
    is provided"""
    __slots__ = tuple()

    def __new__(cls, packoffset, type, size, stream, *args):
        """Helps with the initialization of subclasses"""
        # Tuple layout: (pack_offset, type_id, size, stream)
        return tuple.__new__(cls, (packoffset, type, size, stream))

    #{ Stream Reader Interface
    def read(self, size=-1):
        # Delegate reads to the wrapped stream in slot 3.
        return self[3].read(size)

    @property
    def stream(self):
        return self[3]
    #} END stream reader interface
|
| 182 |
+
|
| 183 |
+
|
| 184 |
+
class ODeltaPackStream(ODeltaPackInfo):

    """Provides a stream outputting the uncompressed offset delta information"""
    __slots__ = tuple()

    def __new__(cls, packoffset, type, size, delta_info, stream):
        # Layout: (pack_offset, type_id, size, delta_info, stream);
        # the stream lives in slot 4, after the delta info.
        return tuple.__new__(cls, (packoffset, type, size, delta_info, stream))

    #{ Stream Reader Interface
    def read(self, size=-1):
        return self[4].read(size)

    @property
    def stream(self):
        return self[4]
    #} END stream reader interface
|
| 200 |
+
|
| 201 |
+
|
| 202 |
+
class IStream(list):

    """Represents an input content stream to be fed into the ODB. It is mutable to allow
    the ODB to record information about the operations outcome right in this instance.

    It provides interfaces for the OStream and a StreamReader to allow the instance
    to blend in without prior conversion.

    The only method your content stream must support is 'read'"""
    # __slots__ is empty but the underlying list provides the mutable
    # storage: [binsha, type, size, stream, error]
    __slots__ = tuple()

    def __new__(cls, type, size, stream, sha=None):
        return list.__new__(cls, (sha, type, size, stream, None))

    def __init__(self, type, size, stream, sha=None):
        # sha defaults to None; it is filled in (via the binsha property)
        # once the object has been written and hashed.
        list.__init__(self, (sha, type, size, stream, None))

    #{ Interface
    @property
    def hexsha(self):
        """:return: our sha, hex encoded, 40 bytes"""
        return bin_to_hex(self[0])

    def _error(self):
        """:return: the error that occurred when processing the stream, or None"""
        return self[4]

    def _set_error(self, exc):
        """Set this input stream to the given exc, may be None to reset the error"""
        self[4] = exc

    # Read/write access to the error slot recorded by the ODB.
    error = property(_error, _set_error)

    #} END interface

    #{ Stream Reader Interface

    def read(self, size=-1):
        """Implements a simple stream reader interface, passing the read call on
        to our internal stream"""
        return self[3].read(size)

    #} END stream reader interface

    #{ interface

    # The following accessor pairs expose the list slots as mutable
    # attributes, mirroring the read-only OStream interface.

    def _set_binsha(self, binsha):
        self[0] = binsha

    def _binsha(self):
        return self[0]

    binsha = property(_binsha, _set_binsha)

    def _type(self):
        return self[1]

    def _set_type(self, type):
        self[1] = type

    type = property(_type, _set_type)

    def _size(self):
        return self[2]

    def _set_size(self, size):
        self[2] = size

    size = property(_size, _set_size)

    def _stream(self):
        return self[3]

    def _set_stream(self, stream):
        self[3] = stream

    stream = property(_stream, _set_stream)

    #} END odb info interface
|
| 281 |
+
|
| 282 |
+
|
| 283 |
+
class InvalidOInfo(tuple):
|
| 284 |
+
|
| 285 |
+
"""Carries information about a sha identifying an object which is invalid in
|
| 286 |
+
the queried database. The exception attribute provides more information about
|
| 287 |
+
the cause of the issue"""
|
| 288 |
+
__slots__ = tuple()
|
| 289 |
+
|
| 290 |
+
def __new__(cls, sha, exc):
|
| 291 |
+
return tuple.__new__(cls, (sha, exc))
|
| 292 |
+
|
| 293 |
+
def __init__(self, sha, exc):
|
| 294 |
+
tuple.__init__(self, (sha, exc))
|
| 295 |
+
|
| 296 |
+
@property
|
| 297 |
+
def binsha(self):
|
| 298 |
+
return self[0]
|
| 299 |
+
|
| 300 |
+
@property
|
| 301 |
+
def hexsha(self):
|
| 302 |
+
return bin_to_hex(self[0])
|
| 303 |
+
|
| 304 |
+
@property
|
| 305 |
+
def error(self):
|
| 306 |
+
""":return: exception instance explaining the failure"""
|
| 307 |
+
return self[1]
|
| 308 |
+
|
| 309 |
+
|
| 310 |
+
class InvalidOStream(InvalidOInfo):

    """Carries information about an invalid ODB stream"""
    # behaves exactly like InvalidOInfo; the distinct type lets stream-oriented
    # callers distinguish failed streams from failed info lookups
    __slots__ = tuple()
|
| 314 |
+
|
| 315 |
+
#} END ODB Bases
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/const.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
BYTE_SPACE = b' '              # single ASCII space, separates type and size in object headers
NULL_BYTE = b'\0'              # terminates the loose object header
NULL_HEX_SHA = "0" * 40        # hex form of the all-zero sha
NULL_BIN_SHA = NULL_BYTE * 20  # binary form of the all-zero sha
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/exc.py
ADDED
|
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
"""Module with common exceptions"""
|
| 6 |
+
from gitdb.util import to_hex_sha
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class ODBError(Exception):

    """All errors thrown by the object database"""
    # root of the gitdb exception hierarchy; catch this to handle any gitdb failure
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class InvalidDBRoot(ODBError):

    """Thrown if an object database cannot be initialized at the given path"""
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class BadObject(ODBError):

    """Raised when an object addressed by its SHA is not present in the
    database. Instantiate with the sha that failed to resolve."""

    def __str__(self):
        failed_sha = to_hex_sha(self.args[0])
        return "BadObject: %s" % failed_sha
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class BadName(ODBError):

    """Raised when a name handed to rev_parse could not be understood"""

    def __str__(self):
        unresolved = self.args[0]
        return "Ref '%s' did not resolve to an object" % unresolved
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class ParseError(ODBError):

    """Thrown if the parsing of a file failed due to an invalid format"""
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class AmbiguousObjectName(ODBError):

    """Thrown if a possibly shortened name does not uniquely represent a single object
    in the database"""
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class BadObjectType(ODBError):

    """The object had an unsupported type"""
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
class UnsupportedOperation(ODBError):

    """Thrown if the given operation cannot be supported by the object database"""
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/fun.py
ADDED
|
@@ -0,0 +1,704 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
"""Contains basic c-functions which usually contain performance critical code
|
| 6 |
+
Keeping this code separate from the beginning makes it easier to out-source
|
| 7 |
+
it into c later, if required"""
|
| 8 |
+
|
| 9 |
+
import mmap
import zlib
from functools import reduce
from io import BytesIO, StringIO
from itertools import islice

from gitdb.const import NULL_BYTE, BYTE_SPACE
from gitdb.typ import (
    str_blob_type,
    str_commit_type,
    str_tree_type,
    str_tag_type,
)
from gitdb.util import byte_ord
from gitdb.utils.encoding import force_text

decompressobj = zlib.decompressobj
|
| 27 |
+
|
| 28 |
+
# INVARIANTS
OFS_DELTA = 6   # pack entry type: delta against an object at a relative offset
REF_DELTA = 7   # pack entry type: delta against an object named by its sha
delta_types = (OFS_DELTA, REF_DELTA)

# maps numeric pack type ids to the canonical type names used elsewhere
type_id_to_type_map = {
    0: b'',  # EXT 1
    1: str_commit_type,
    2: str_tree_type,
    3: str_blob_type,
    4: str_tag_type,
    5: b'',  # EXT 2
    OFS_DELTA: "OFS_DELTA",  # OFFSET DELTA
    REF_DELTA: "REF_DELTA"  # REFERENCE DELTA
}

# inverse of the map above
type_to_type_id_map = {
    str_commit_type: 1,
    str_tree_type: 2,
    str_blob_type: 3,
    str_tag_type: 4,
    "OFS_DELTA": OFS_DELTA,
    "REF_DELTA": REF_DELTA,
}

# used when dealing with larger streams
chunk_size = 1000 * mmap.PAGESIZE

__all__ = ('is_loose_object', 'loose_object_header_info', 'msb_size', 'pack_object_header_info',
           'write_object', 'loose_object_header', 'stream_copy', 'apply_delta_data',
           'is_equal_canonical_sha', 'connect_deltas', 'DeltaChunkList', 'create_pack_object_header')
|
| 59 |
+
|
| 60 |
+
|
| 61 |
+
#{ Structures
|
| 62 |
+
|
| 63 |
+
def _set_delta_rbound(d, size):
    """Truncate the given delta to the given size
    :param d: DeltaChunk to truncate in place
    :param size: size relative to our target offset, may not be 0, must be smaller or equal
        to our size
    :return: d"""
    d.ts = size

    # NOTE: data is truncated automatically when applying the delta
    # MUST NOT DO THIS HERE
    return d
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
def _move_delta_lbound(d, bytes):
|
| 76 |
+
"""Move the delta by the given amount of bytes, reducing its size so that its
|
| 77 |
+
right bound stays static
|
| 78 |
+
:param bytes: amount of bytes to move, must be smaller than delta size
|
| 79 |
+
:return: d"""
|
| 80 |
+
if bytes == 0:
|
| 81 |
+
return
|
| 82 |
+
|
| 83 |
+
d.to += bytes
|
| 84 |
+
d.so += bytes
|
| 85 |
+
d.ts -= bytes
|
| 86 |
+
if d.data is not None:
|
| 87 |
+
d.data = d.data[bytes:]
|
| 88 |
+
# END handle data
|
| 89 |
+
|
| 90 |
+
return d
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
def delta_duplicate(src):
    """:return: a new DeltaChunk with the same fields as *src* (shallow copy)"""
    return DeltaChunk(src.to, src.ts, src.so, src.data)
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
def delta_chunk_apply(dc, bbuf, write):
    """Materialize a single delta chunk into the target stream.

    :param dc: DeltaChunk to apply
    :param bbuf: base buffer providing source bytes for copy operations
    :param write: write method to call with the produced bytes"""
    data = dc.data
    if data is None:
        # copy chunk: take ts bytes from the base buffer at the source offset
        write(bbuf[dc.so:dc.so + dc.ts])
    elif dc.ts < len(data):
        # add chunk whose payload exceeds its target size: emit a truncated view
        write(data[:dc.ts])
    else:
        # add chunk emitted in full
        write(data)
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
class DeltaChunk(object):

    """One piece of a delta: it either inserts its own data into the target
    buffer, or copies a range of bytes from a source buffer."""
    __slots__ = (
        'to',    # start offset in the target buffer in bytes
        'ts',    # size of this chunk in the target buffer in bytes
        'so',    # start offset in the source buffer in bytes or None
        'data',  # bytes to be added to the target buffer,
                 # DeltaChunkList to use as base, or None
    )

    def __init__(self, to, ts, so, data):
        self.to = to
        self.ts = ts
        self.so = so
        self.data = data

    def __repr__(self):
        return "DeltaChunk(%i, %i, %s, %s)" % (self.to, self.ts, self.so, self.data or "")

    #{ Interface

    def rbound(self):
        """:return: one-past-the-end offset of this chunk in the target buffer"""
        return self.ts + self.to

    def has_data(self):
        """:return: True if the instance has data to add to the target stream"""
        return self.data is not None

    #} END interface
|
| 147 |
+
|
| 148 |
+
|
| 149 |
+
def _closest_index(dcl, absofs):
|
| 150 |
+
""":return: index at which the given absofs should be inserted. The index points
|
| 151 |
+
to the DeltaChunk with a target buffer absofs that equals or is greater than
|
| 152 |
+
absofs.
|
| 153 |
+
**Note:** global method for performance only, it belongs to DeltaChunkList"""
|
| 154 |
+
lo = 0
|
| 155 |
+
hi = len(dcl)
|
| 156 |
+
while lo < hi:
|
| 157 |
+
mid = (lo + hi) / 2
|
| 158 |
+
dc = dcl[mid]
|
| 159 |
+
if dc.to > absofs:
|
| 160 |
+
hi = mid
|
| 161 |
+
elif dc.rbound() > absofs or dc.to == absofs:
|
| 162 |
+
return mid
|
| 163 |
+
else:
|
| 164 |
+
lo = mid + 1
|
| 165 |
+
# END handle bound
|
| 166 |
+
# END for each delta absofs
|
| 167 |
+
return len(dcl) - 1
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
def delta_list_apply(dcl, bbuf, write):
    """Apply the chain's changes and write the final result using the passed
    write function.
    :param dcl: iterable of DeltaChunks, applied in order
    :param bbuf: base buffer containing the base of all deltas contained in this
        list. It will only be used if the chunk in question does not have a base
        chain.
    :param write: function taking a string of bytes to write to the output"""
    for dc in dcl:
        delta_chunk_apply(dc, bbuf, write)
    # END for each dc
|
| 180 |
+
|
| 181 |
+
|
| 182 |
+
def delta_list_slice(dcl, absofs, size, ndcl):
    """Copy the subsection of *dcl* starting at absolute target offset *absofs*,
    with the given size in bytes, into the list *ndcl*. Boundary chunks are
    duplicated and clipped so the originals stay untouched.
    :param dcl: source chunk list, sorted by target offset
    :param absofs: absolute target-buffer offset at which the slice begins
    :param size: length of the slice in bytes
    :param ndcl: destination list receiving the (possibly truncated) chunk copies
    :return: None"""
    cdi = _closest_index(dcl, absofs)  # delta start index
    cd = dcl[cdi]
    slen = len(dcl)
    lappend = ndcl.append

    if cd.to != absofs:
        # the first chunk starts before absofs: copy it and clip its left side
        tcd = DeltaChunk(cd.to, cd.ts, cd.so, cd.data)
        _move_delta_lbound(tcd, absofs - cd.to)
        tcd.ts = min(tcd.ts, size)
        lappend(tcd)
        size -= tcd.ts
        cdi += 1
    # END lbound overlap handling

    while cdi < slen and size:
        # are we larger than the current block
        cd = dcl[cdi]
        if cd.ts <= size:
            lappend(DeltaChunk(cd.to, cd.ts, cd.so, cd.data))
            size -= cd.ts
        else:
            # final chunk only partially covered: copy and truncate on the right
            tcd = DeltaChunk(cd.to, cd.ts, cd.so, cd.data)
            tcd.ts = size
            lappend(tcd)
            size -= tcd.ts
            break
        # END hadle size
        cdi += 1
    # END for each chunk
|
| 215 |
+
|
| 216 |
+
|
| 217 |
+
class DeltaChunkList(list):

    """List with special functionality to deal with DeltaChunks.
    There are two types of lists we represent. The one was created bottom-up, working
    towards the latest delta, the other kind was created top-down, working from the
    latest delta down to the earliest ancestor. This attribute is queryable
    after all processing with is_reversed."""

    __slots__ = tuple()

    def rbound(self):
        """:return: rightmost extend in bytes, absolute"""
        if len(self) == 0:
            return 0
        return self[-1].rbound()

    def lbound(self):
        """:return: leftmost byte at which this chunklist starts"""
        if len(self) == 0:
            return 0
        return self[0].to

    def size(self):
        """:return: size of bytes as measured by our delta chunks"""
        return self.rbound() - self.lbound()

    def apply(self, bbuf, write):
        """Only used by public clients, internally we only use the global routines
        for performance"""
        return delta_list_apply(self, bbuf, write)

    def compress(self):
        """Alter the list to reduce the amount of nodes. Currently we concatenate
        add-chunks
        :return: self"""
        slen = len(self)
        if slen < 2:
            return self
        i = 0

        first_data_index = None
        while i < slen:
            dc = self[i]
            i += 1
            if dc.data is None:
                if first_data_index is not None and i - 2 - first_data_index > 1:
                    # concatenate the run of add-chunks that just ended.
                    # fix: chunk data is bytes, so a BytesIO accumulator is
                    # required - StringIO.write(bytes) raises TypeError on Python 3
                    nd = BytesIO()  # new data
                    so = self[first_data_index].to  # start offset in target buffer
                    for x in range(first_data_index, i - 1):
                        xdc = self[x]
                        nd.write(xdc.data[:xdc.ts])
                    # END collect data

                    del(self[first_data_index:i - 1])
                    buf = nd.getvalue()
                    self.insert(first_data_index, DeltaChunk(so, len(buf), 0, buf))

                    slen = len(self)
                    i = first_data_index + 1

                # END concatenate data
                first_data_index = None
                continue
            # END skip non-data chunks

            if first_data_index is None:
                first_data_index = i - 1
        # END iterate list

        return self

    def check_integrity(self, target_size=-1):
        """Verify the list has non-overlapping chunks only, and the total size matches
        target_size
        :param target_size: if not -1, the total size of the chain must be target_size
        :raise AssertionError: if the size doesn't match"""
        if target_size > -1:
            assert self[-1].rbound() == target_size
            assert reduce(lambda x, y: x + y, (d.ts for d in self), 0) == target_size
        # END target size verification

        if len(self) < 2:
            return

        # check data
        for dc in self:
            assert dc.ts > 0
            if dc.has_data():
                assert len(dc.data) >= dc.ts
        # END for each dc

        left = islice(self, 0, len(self) - 1)
        right = iter(self)
        # fix: iterators are advanced with the next() builtin on Python 3;
        # the original called the Python-2-only method right.next()
        next(right)
        # this is very pythonic - we might have just use index based access here,
        # but this could actually be faster
        for lft, rgt in zip(left, right):
            assert lft.rbound() == rgt.to
            assert lft.to + lft.ts == rgt.to
        # END for each pair
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
class TopdownDeltaChunkList(DeltaChunkList):

    """Represents a list which is generated by feeding its ancestor streams one by
    one"""
    __slots__ = tuple()

    def connect_with_next_base(self, bdcl):
        """Connect this chain with the next level of our base delta chunklist.
        The goal in this game is to mark as many of our chunks rigid, hence they
        cannot be changed by any of the upcoming bases anymore. Once all our
        chunks are marked like that, we can stop all processing
        :param bdcl: data chunk list being one of our bases. They must be fed in
            consequtively and in order, towards the earliest ancestor delta
        :return: True if processing was done. Use it to abort processing of
            remaining streams if False is returned"""
        nfc = 0  # number of frozen chunks
        dci = 0  # delta chunk index
        slen = len(self)  # len of self
        ccl = list()  # temporary list
        while dci < slen:
            dc = self[dci]
            dci += 1

            # all add-chunks which are already topmost don't need additional processing
            if dc.data is not None:
                nfc += 1
                continue
            # END skip add chunks

            # copy chunks
            # integrate the portion of the base list into ourselves. Lists
            # dont support efficient insertion ( just one at a time ), but for now
            # we live with it. Internally, its all just a 32/64bit pointer, and
            # the portions of moved memory should be smallish. Maybe we just rebuild
            # ourselves in order to reduce the amount of insertions ...
            del(ccl[:])
            delta_list_slice(bdcl, dc.so, dc.ts, ccl)

            # move the target bounds into place to match with our chunk
            ofs = dc.to - dc.so
            for cdc in ccl:
                cdc.to += ofs
            # END update target bounds

            if len(ccl) == 1:
                # single replacement chunk: overwrite our chunk in place
                self[dci - 1] = ccl[0]
            else:
                # maybe try to compute the expenses here, and pick the right algorithm
                # It would normally be faster than copying everything physically though
                # TODO: Use a deque here, and decide by the index whether to extend
                # or extend left !
                post_dci = self[dci:]
                del(self[dci - 1:])  # include deletion of dc
                self.extend(ccl)
                self.extend(post_dci)

                slen = len(self)
                dci += len(ccl) - 1  # deleted dc, added rest

            # END handle chunk replacement
        # END for each chunk

        # NOTE(review): False is returned only when every chunk was already an
        # add-chunk, i.e. no further base can influence the result
        if nfc == slen:
            return False
        # END handle completeness
        return True
|
| 388 |
+
|
| 389 |
+
|
| 390 |
+
#} END structures
|
| 391 |
+
|
| 392 |
+
#{ Routines
|
| 393 |
+
|
| 394 |
+
def is_loose_object(m):
    """
    :return: True if the file contained in memory map m appears to be a loose object.
        Only the first two bytes are needed
    :param m: bytes-like random access buffer, e.g. an mmap or bytes"""
    # fix: map(ord, m[:2]) raised TypeError for bytes/mmap input on Python 3,
    # because iterating bytes yields ints. Index directly instead, and keep a
    # fallback for str-like buffers whose items are length-1 strings.
    b0, b1 = m[0], m[1]
    if not isinstance(b0, int):
        b0, b1 = ord(b0), ord(b1)
    word = (b0 << 8) + b1
    # 0x78 is the common zlib/deflate header byte; a valid zlib stream's first
    # 16-bit word is a multiple of 31 (FCHECK field, RFC 1950)
    return b0 == 0x78 and (word % 31) == 0
|
| 401 |
+
|
| 402 |
+
|
| 403 |
+
def loose_object_header_info(m):
    """Parse the header of a loose object.

    :param m: memory map or buffer holding the compressed loose object data
    :return: tuple(type_string, uncompressed_size_in_bytes) - the object's type
        as bytes and its uncompressed content size in bytes"""
    # 8192 decompressed bytes are plenty for the short '<type> <size>\0'
    # header; cgit uses the same figure
    header_chunk = decompressobj().decompress(m, 8192)
    type_name, size = header_chunk[:header_chunk.find(NULL_BYTE)].split(BYTE_SPACE)
    return type_name, int(size)
|
| 413 |
+
|
| 414 |
+
|
| 415 |
+
def pack_object_header_info(data):
    """Parse the header of a single entry inside a pack file.

    :param data: random-access bytes-like data starting at the entry header
    :return: tuple(type_id, uncompressed_size_in_bytes, byte_offset) where
        type_id maps through ``type_id_to_type_map`` and byte_offset marks the
        start of the zlib compressed datastream that follows"""
    c = byte_ord(data[0])
    i = 1
    type_id = (c >> 4) & 7   # object type lives in bits 4-6 of the first byte
    size = c & 15            # the low 4 bits seed the size
    shift = 4
    # while the MSB is set, each continuation byte contributes 7 more size bits
    while c & 0x80:
        c = byte_ord(data[i])
        i += 1
        size += (c & 0x7f) << shift
        shift += 7
    # END character loop
    return (type_id, size, i)
|
| 434 |
+
|
| 435 |
+
|
| 436 |
+
def create_pack_object_header(obj_type, obj_size):
    """Serialize a pack entry header for an object of the given type and size.

    :param obj_type: pack type_id of the object
    :param obj_size: uncompressed size in bytes of the following object stream
    :return: bytearray defining the variable-length pack header"""
    header = bytearray()
    # first byte: type in bits 4-6, the lowest 4 bits of the size
    current = (obj_type << 4) | (obj_size & 0xf)
    remaining = obj_size >> 4
    # continuation bytes carry 7 size bits each; the MSB flags "more follows"
    while remaining:
        header.append(current | 0x80)
        current = remaining & 0x7f
        remaining >>= 7
    header.append(current)
    return header
|
| 456 |
+
|
| 457 |
+
|
| 458 |
+
def msb_size(data, offset=0):
    """Decode a variable-length size whose bytes use the MSB as continuation flag.

    :param data: bytes-like random access buffer
    :param offset: byte offset at which decoding starts
    :return: tuple(read_bytes, size) - the offset one past the last consumed
        byte, and the decoded size
    :raise AssertionError: if the buffer ends before a terminating byte"""
    size = 0
    consumed = 0
    buf_len = len(data)
    while consumed < buf_len:
        byte = data[consumed + offset]
        size |= (byte & 0x7f) << (consumed * 7)
        consumed += 1
        if not byte & 0x80:
            # highest bit cleared: this was the terminating byte
            return consumed + offset, size
    # ran off the end of the buffer without seeing a terminator
    raise AssertionError("Could not find terminating MSB byte in data stream")
|
| 479 |
+
|
| 480 |
+
|
| 481 |
+
def loose_object_header(type, size):
    """Serialize a loose object header.

    :param type: object type name, bytes or str
    :param size: uncompressed content size in bytes
    :return: bytes of the form '<type> <size>\\0', which is immediately
        followed in the object file by the content stream of size 'size'"""
    header_text = '%s %i\0' % (force_text(type), size)
    return header_text.encode('ascii')
|
| 486 |
+
|
| 487 |
+
|
| 488 |
+
def write_object(type, size, read, write, chunk_size=chunk_size):
    """
    Write the object as identified by type, size and source_stream into the
    target_stream

    :param type: type string of the object
    :param size: amount of bytes to write from source_stream
    :param read: read method of a stream providing the content data
    :param write: write method of the output stream
    :param chunk_size: maximum number of bytes transferred per copy step
    :return: The actual amount of bytes written to stream, which includes the header and a trailing newline"""
    tbw = 0  # total num bytes written

    # WRITE HEADER: type SP size NULL
    tbw += write(loose_object_header(type, size))
    tbw += stream_copy(read, write, size, chunk_size)

    return tbw
|
| 507 |
+
|
| 508 |
+
|
| 509 |
+
def stream_copy(read, write, size, chunk_size):
    """Copy up to *size* bytes from *read* to *write* in chunks of at most
    *chunk_size* bytes.

    :param read: read method of the source stream
    :param write: write method of the destination stream
    :param size: maximum number of bytes to transfer
    :param chunk_size: size of each individual read request
    :return: number of bytes actually copied; may be less than *size* if the
        source stream ended early

    **Note:** its much like stream_copy utility, but operates just using methods"""
    copied = 0
    while True:
        request = min(chunk_size, size - copied)
        # not every write method reports the amount written (mmap.write
        # returns None), so progress is tracked via the data that was read
        chunk = read(request)
        copied += len(chunk)
        write(chunk)
        if len(chunk) < request or copied == size:
            break
    return copied
|
| 534 |
+
|
| 535 |
+
|
| 536 |
+
def connect_deltas(dstreams):
    """
    Read the condensed delta chunk information from dstream and merge its information
    into a list of existing delta chunks

    :param dstreams: iterable of delta stream objects, the delta to be applied last
        comes first, then all its ancestors in order
    :return: DeltaChunkList, containing all operations to apply"""
    tdcl = None  # topmost dcl

    dcl = tdcl = TopdownDeltaChunkList()
    for dsi, ds in enumerate(dstreams):
        db = ds.read()
        delta_buf_size = ds.size

        # read header: sizes of the base and the target buffer
        i, base_size = msb_size(db)
        i, target_size = msb_size(db, i)

        # interpret opcodes
        tbw = 0  # amount of target bytes written
        while i < delta_buf_size:
            # fix: indexing bytes yields ints on Python 3, so ord(db[i]) raised
            # TypeError; index directly, matching apply_delta_data below
            c = db[i]
            i += 1
            if c & 0x80:
                # copy-from-base command: optional offset/size bytes follow,
                # selected by the low 7 bits of the opcode
                cp_off, cp_size = 0, 0
                if (c & 0x01):
                    cp_off = db[i]
                    i += 1
                if (c & 0x02):
                    cp_off |= (db[i] << 8)
                    i += 1
                if (c & 0x04):
                    cp_off |= (db[i] << 16)
                    i += 1
                if (c & 0x08):
                    cp_off |= (db[i] << 24)
                    i += 1
                if (c & 0x10):
                    cp_size = db[i]
                    i += 1
                if (c & 0x20):
                    cp_size |= (db[i] << 8)
                    i += 1
                if (c & 0x40):
                    cp_size |= (db[i] << 16)
                    i += 1

                if not cp_size:
                    cp_size = 0x10000

                rbound = cp_off + cp_size
                if (rbound < cp_size or
                        rbound > base_size):
                    # overflow or copy beyond the base buffer - stop parsing
                    break

                dcl.append(DeltaChunk(tbw, cp_size, cp_off, None))
                tbw += cp_size
            elif c:
                # NOTE: in C, the data chunks should probably be concatenated here.
                # In python, we do it as a post-process
                dcl.append(DeltaChunk(tbw, c, 0, db[i:i + c]))
                i += c
                tbw += c
            else:
                raise ValueError("unexpected delta opcode 0")
            # END handle command byte
        # END while processing delta data

        dcl.compress()

        # merge the lists !
        if dsi > 0:
            if not tdcl.connect_with_next_base(dcl):
                break
        # END handle merge

        # prepare next base
        dcl = DeltaChunkList()
    # END for each delta stream

    return tdcl
|
| 619 |
+
|
| 620 |
+
|
| 621 |
+
def apply_delta_data(src_buf, src_buf_size, delta_buf, delta_buf_size, write):
    """Stream the target object described by a delta onto ``write``.

    :param src_buf: random-access data from which the delta was created
    :param src_buf_size: size of the source buffer in bytes
    :param delta_buf: random-access delta data
    :param delta_buf_size: size of the delta buffer in bytes
    :param write: write method taking a chunk of bytes

    **Note:** transcribed to python from the similar routine in patch-delta.c"""
    buf = delta_buf
    pos = 0
    while pos < delta_buf_size:
        opcode = buf[pos]
        pos += 1
        if opcode & 0x80:
            # copy-from-source command: the low seven opcode bits select which
            # little-endian offset/size bytes follow the opcode
            copy_off = 0
            copy_size = 0
            for bit, shift in ((0x01, 0), (0x02, 8), (0x04, 16), (0x08, 24)):
                if opcode & bit:
                    copy_off |= buf[pos] << shift
                    pos += 1
            # END assemble copy offset
            for bit, shift in ((0x10, 0), (0x20, 8), (0x40, 16)):
                if opcode & bit:
                    copy_size |= buf[pos] << shift
                    pos += 1
            # END assemble copy size

            # a zero size encodes the maximum copy length of 64 KiB
            if not copy_size:
                copy_size = 0x10000

            end = copy_off + copy_size
            # bail out on overflow or a copy reaching past the source buffer
            if end < copy_size or end > src_buf_size:
                break
            write(src_buf[copy_off:end])
        elif opcode:
            # literal insertion: the opcode itself is the byte count
            write(buf[pos:pos + opcode])
            pos += opcode
        else:
            raise ValueError("unexpected delta opcode 0")
        # END handle command byte
    # END while processing delta data

    # yes, lets use the exact same error message that git uses :)
    assert pos == delta_buf_size, "delta replay has gone wild"
|
| 679 |
+
|
| 680 |
+
|
| 681 |
+
def is_equal_canonical_sha(canonical_length, match, sha1):
    """Check whether a partial binary sha matches the start of a full sha.

    :return: True if ``match`` is a prefix of ``sha1``, taking the canonical
        (hexadecimal) length of ``match`` into account. For uneven canonical
        representations only the high nibble of the last byte takes part in
        the comparison.
    :param canonical_length: length of the hexadecimal representation of ``match``
    :param match: less than 20 byte sha
    :param sha1: 20 byte sha"""
    full_bytes = canonical_length // 2
    if match[:full_bytes] != sha1[:full_bytes]:
        return False

    # For uneven canonical lengths the trailing byte only carries its high
    # nibble - compare it with the low nibble masked away. For even lengths
    # that byte was part of the prefix above, making this check vacuous.
    remainder = canonical_length - full_bytes
    if remainder and \
            (byte_ord(match[-1]) ^ byte_ord(sha1[len(match) - 1])) & 0xf0:
        return False
    # END handle uneven canonical length
    return True
|
| 697 |
+
|
| 698 |
+
#} END routines
|
| 699 |
+
|
| 700 |
+
|
| 701 |
+
try:
|
| 702 |
+
from gitdb_speedups._perf import connect_deltas
|
| 703 |
+
except ImportError:
|
| 704 |
+
pass
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/pack.py
ADDED
|
@@ -0,0 +1,1031 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
"""Contains PackIndexFile and PackFile implementations"""
|
| 6 |
+
import zlib
|
| 7 |
+
|
| 8 |
+
from gitdb.exc import (
|
| 9 |
+
BadObject,
|
| 10 |
+
AmbiguousObjectName,
|
| 11 |
+
UnsupportedOperation,
|
| 12 |
+
ParseError
|
| 13 |
+
)
|
| 14 |
+
|
| 15 |
+
from gitdb.util import (
|
| 16 |
+
mman,
|
| 17 |
+
LazyMixin,
|
| 18 |
+
unpack_from,
|
| 19 |
+
bin_to_hex,
|
| 20 |
+
byte_ord,
|
| 21 |
+
)
|
| 22 |
+
|
| 23 |
+
from gitdb.fun import (
|
| 24 |
+
create_pack_object_header,
|
| 25 |
+
pack_object_header_info,
|
| 26 |
+
is_equal_canonical_sha,
|
| 27 |
+
type_id_to_type_map,
|
| 28 |
+
write_object,
|
| 29 |
+
stream_copy,
|
| 30 |
+
chunk_size,
|
| 31 |
+
delta_types,
|
| 32 |
+
OFS_DELTA,
|
| 33 |
+
REF_DELTA,
|
| 34 |
+
msb_size
|
| 35 |
+
)
|
| 36 |
+
|
| 37 |
+
try:
|
| 38 |
+
from gitdb_speedups._perf import PackIndexFile_sha_to_index
|
| 39 |
+
except ImportError:
|
| 40 |
+
pass
|
| 41 |
+
# END try c module
|
| 42 |
+
|
| 43 |
+
from gitdb.base import ( # Amazing !
|
| 44 |
+
OInfo,
|
| 45 |
+
OStream,
|
| 46 |
+
OPackInfo,
|
| 47 |
+
OPackStream,
|
| 48 |
+
ODeltaStream,
|
| 49 |
+
ODeltaPackInfo,
|
| 50 |
+
ODeltaPackStream,
|
| 51 |
+
)
|
| 52 |
+
|
| 53 |
+
from gitdb.stream import (
|
| 54 |
+
DecompressMemMapReader,
|
| 55 |
+
DeltaApplyReader,
|
| 56 |
+
Sha1Writer,
|
| 57 |
+
NullStream,
|
| 58 |
+
FlexibleSha1Writer
|
| 59 |
+
)
|
| 60 |
+
|
| 61 |
+
from struct import pack
|
| 62 |
+
from binascii import crc32
|
| 63 |
+
|
| 64 |
+
from gitdb.const import NULL_BYTE
|
| 65 |
+
|
| 66 |
+
import tempfile
|
| 67 |
+
import array
|
| 68 |
+
import os
|
| 69 |
+
import sys
|
| 70 |
+
|
| 71 |
+
__all__ = ('PackIndexFile', 'PackFile', 'PackEntity')
|
| 72 |
+
|
| 73 |
+
|
| 74 |
+
#{ Utilities
|
| 75 |
+
|
| 76 |
+
def pack_object_at(cursor, offset, as_stream):
    """Parse the pack object header found at ``offset`` within the pack data.

    :return: Tuple(abs_data_offset, PackInfo|PackStream)
        an object of the correct type according to the type_id of the object.
        If as_stream is True, the object will contain a stream, allowing the
        data to be read decompressed.
    :param cursor: memory cursor providing random accessible data containing
        all required information
    :param offset: offset in to the data at which the object information is located
    :param as_stream: if True, a stream object will be returned that can read
        the data, otherwise you receive an info object only"""
    data = cursor.use_region(offset).buffer()
    type_id, uncomp_size, data_rela_offset = pack_object_header_info(data)
    total_rela_offset = None # set later, actual offset until data stream begins
    delta_info = None

    # OFFSET DELTA
    if type_id == OFS_DELTA:
        # decode the variable-length, big-endian base offset that follows the
        # object header; the msb of each byte marks a continuation
        i = data_rela_offset
        c = byte_ord(data[i])
        i += 1
        delta_offset = c & 0x7f
        while c & 0x80:
            c = byte_ord(data[i])
            i += 1
            # the +1 before shifting matches git's ofs-delta encoding, which
            # biases longer encodings so every byte count has a distinct range
            delta_offset += 1
            delta_offset = (delta_offset << 7) + (c & 0x7f)
        # END character loop
        # delta_info is the (positive) distance back to the base object
        delta_info = delta_offset
        total_rela_offset = i
    # REF DELTA
    elif type_id == REF_DELTA:
        # a 20 byte binary sha of the base object follows the header
        total_rela_offset = data_rela_offset + 20
        delta_info = data[data_rela_offset:total_rela_offset]
    # BASE OBJECT
    else:
        # assume its a base object
        total_rela_offset = data_rela_offset
    # END handle type id
    abs_data_offset = offset + total_rela_offset
    if as_stream:
        # the compressed object data starts right after the (delta) header
        stream = DecompressMemMapReader(data[total_rela_offset:], False, uncomp_size)
        if delta_info is None:
            return abs_data_offset, OPackStream(offset, type_id, uncomp_size, stream)
        else:
            return abs_data_offset, ODeltaPackStream(offset, type_id, uncomp_size, delta_info, stream)
    else:
        if delta_info is None:
            return abs_data_offset, OPackInfo(offset, type_id, uncomp_size)
        else:
            return abs_data_offset, ODeltaPackInfo(offset, type_id, uncomp_size, delta_info)
        # END handle info
    # END handle stream
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def write_stream_to_pack(read, write, zstream, base_crc=None):
    """Deflate everything obtained from ``read`` and emit it through ``write``.

    Count the number of bytes read and written and return both.
    :param read: callable returning up to ``chunk_size`` bytes per call
    :param write: callable consuming each compressed chunk
    :param zstream: zlib compression object used for all chunks
    :param base_crc: if not None, the crc will be the base for all compressed data
        we consecutively write and generate a crc32 from. If None, no crc will be generated
    :return: tuple(no bytes read, no bytes written, crc32) crc might be 0 if base_crc
        was false"""
    bytes_read = 0
    bytes_written = 0
    want_crc = base_crc is not None
    crc = base_crc if want_crc else 0

    while True:
        data = read(chunk_size)
        bytes_read += len(data)
        out = zstream.compress(data)
        bytes_written += len(out)
        write(out)  # cannot assume return value

        if want_crc:
            crc = crc32(out, crc)
        # END handle crc

        # a short read marks the end of the input stream
        if len(data) != chunk_size:
            break
    # END copy loop

    # emit whatever the compressor still buffers internally
    tail = zstream.flush()
    bytes_written += len(tail)
    write(tail)
    if want_crc:
        crc = crc32(tail, crc)
    # END handle crc

    return (bytes_read, bytes_written, crc)
|
| 168 |
+
|
| 169 |
+
|
| 170 |
+
#} END utilities
|
| 171 |
+
|
| 172 |
+
|
| 173 |
+
class IndexWriter(object):

    """Utility to cache index information, allowing to write all information later
    in one go to the given stream
    **Note:** currently only writes v2 indices"""
    # single cached list of (binsha, crc, offset) tuples
    __slots__ = '_objs'

    def __init__(self):
        self._objs = list()

    def append(self, binsha, crc, offset):
        """Append one piece of object information

        :param binsha: 20 byte binary sha of the object
        :param crc: crc32 over the object's compressed pack data
        :param offset: offset of the object within the pack file"""
        self._objs.append((binsha, crc, offset))

    def write(self, pack_sha, write):
        """Write the index file using the given write method
        :param pack_sha: binary sha over the whole pack that we index
        :param write: write method consuming chunks of bytes
        :return: sha1 binary sha over all index file contents"""
        # sort for sha1 hash
        self._objs.sort(key=lambda o: o[0])

        # everything written through sha_write is folded into the index sha
        sha_writer = FlexibleSha1Writer(write)
        sha_write = sha_writer.write
        sha_write(PackIndexFile.index_v2_signature)
        sha_write(pack(">L", PackIndexFile.index_version_default))

        # fanout
        # count objects per leading sha byte ...
        tmplist = list((0,) * 256) # fanout or list with 64 bit offsets
        for t in self._objs:
            tmplist[byte_ord(t[0][0])] += 1
        # END prepare fanout
        # ... then write the counts as a running (cumulative) total
        for i in range(255):
            v = tmplist[i]
            sha_write(pack('>L', v))
            tmplist[i + 1] += v
        # END write each fanout entry
        sha_write(pack('>L', tmplist[255]))

        # sha1 ordered
        # save calls, that is push them into c
        sha_write(b''.join(t[0] for t in self._objs))

        # crc32
        for t in self._objs:
            sha_write(pack('>L', t[1] & 0xffffffff))
        # END for each crc

        # tmplist is reused here to collect offsets needing 64 bit storage
        tmplist = list()
        # offset 32
        for t in self._objs:
            ofs = t[2]
            if ofs > 0x7fffffff:
                # too large for 31 bits: store the real offset in the 64 bit
                # table and write its index with the high bit set instead
                tmplist.append(ofs)
                ofs = 0x80000000 + len(tmplist) - 1
            # END hande 64 bit offsets
            sha_write(pack('>L', ofs & 0xffffffff))
        # END for each offset

        # offset 64
        for ofs in tmplist:
            sha_write(pack(">Q", ofs))
        # END for each offset

        # trailer
        assert(len(pack_sha) == 20)
        sha_write(pack_sha)
        sha = sha_writer.sha(as_hex=False)
        # the index file's own sha is written raw, outside the digest
        write(sha)
        return sha
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
class PackIndexFile(LazyMixin):

    """A pack index provides offsets into the corresponding pack, allowing to find
    locations for offsets faster."""

    # Dont use slots as we dynamically bind functions for each version, need a dict for this
    # The slots you see here are just to keep track of our instance variables
    # __slots__ = ('_indexpath', '_fanout_table', '_cursor', '_version',
    # '_sha_list_offset', '_crc_list_offset', '_pack_offset', '_pack_64_offset')

    # used in v2 indices
    _sha_list_offset = 8 + 1024
    index_v2_signature = b'\xfftOc'
    index_version_default = 2

    def __init__(self, indexpath):
        super(PackIndexFile, self).__init__()
        self._indexpath = indexpath

    def close(self):
        """Release the memory map backing this index"""
        mman.force_map_handle_removal_win(self._indexpath)
        self._cursor = None

    def _set_cache_(self, attr):
        """Lazily initialize the attribute ``attr`` (LazyMixin protocol)"""
        if attr == "_packfile_checksum":
            self._packfile_checksum = self._cursor.map()[-40:-20]
        elif attr == "_indexfile_checksum":
            # BUGFIX: this branch previously tested "_packfile_checksum" a second
            # time and assigned to it, making the branch unreachable dead code and
            # leaving the lazy '_indexfile_checksum' attribute uninitializable.
            self._indexfile_checksum = self._cursor.map()[-20:]
        elif attr == "_cursor":
            # Note: We don't lock the file when reading as we cannot be sure
            # that we can actually write to the location - it could be a read-only
            # alternate for instance
            self._cursor = mman.make_cursor(self._indexpath).use_region()
            # We will assume that the index will always fully fit into memory !
            if mman.window_size() > 0 and self._cursor.file_size() > mman.window_size():
                raise AssertionError("The index file at %s is too large to fit into a mapped window (%i > %i). This is a limitation of the implementation" % (
                    self._indexpath, self._cursor.file_size(), mman.window_size()))
            # END assert window size
        else:
            # now its time to initialize everything - if we are here, someone wants
            # to access the fanout table or related properties

            # CHECK VERSION
            mmap = self._cursor.map()
            self._version = (mmap[:4] == self.index_v2_signature and 2) or 1
            if self._version == 2:
                version_id = unpack_from(">L", mmap, 4)[0]
                assert version_id == self._version, "Unsupported index version: %i" % version_id
            # END assert version

            # SETUP FUNCTIONS
            # setup our functions according to the actual version
            for fname in ('entry', 'offset', 'sha', 'crc'):
                setattr(self, fname, getattr(self, "_%s_v%i" % (fname, self._version)))
            # END for each function to initialize

            # INITIALIZE DATA
            # byte offset is 8 if version is 2, 0 otherwise
            self._initialize()
        # END handle attributes

    #{ Access V1

    def _entry_v1(self, i):
        """:return: tuple(offset, binsha, 0)"""
        return unpack_from(">L20s", self._cursor.map(), 1024 + i * 24) + (0, )

    def _offset_v1(self, i):
        """see ``_offset_v2``"""
        return unpack_from(">L", self._cursor.map(), 1024 + i * 24)[0]

    def _sha_v1(self, i):
        """see ``_sha_v2``"""
        base = 1024 + (i * 24) + 4
        return self._cursor.map()[base:base + 20]

    def _crc_v1(self, i):
        """unsupported - v1 indices do not store crc values"""
        return 0

    #} END access V1

    #{ Access V2
    def _entry_v2(self, i):
        """:return: tuple(offset, binsha, crc)"""
        return (self._offset_v2(i), self._sha_v2(i), self._crc_v2(i))

    def _offset_v2(self, i):
        """:return: 32 or 64 byte offset into pack files. 64 byte offsets will only
            be returned if the pack is larger than 4 GiB, or 2^32"""
        offset = unpack_from(">L", self._cursor.map(), self._pack_offset + i * 4)[0]

        # if the high-bit is set, this indicates that we have to lookup the offset
        # in the 64 bit region of the file. The current offset ( lower 31 bits )
        # are the index into it
        if offset & 0x80000000:
            offset = unpack_from(">Q", self._cursor.map(), self._pack_64_offset + (offset & ~0x80000000) * 8)[0]
        # END handle 64 bit offset

        return offset

    def _sha_v2(self, i):
        """:return: sha at the given index of this file index instance"""
        base = self._sha_list_offset + i * 20
        return self._cursor.map()[base:base + 20]

    def _crc_v2(self, i):
        """:return: 4 bytes crc for the object at index i"""
        return unpack_from(">L", self._cursor.map(), self._crc_list_offset + i * 4)[0]

    #} END access V2

    #{ Initialization

    def _initialize(self):
        """initialize base data: fanout table and, for v2, the section offsets
        (sha list, crc list, 32 bit offsets, 64 bit offsets) derived from it"""
        self._fanout_table = self._read_fanout((self._version == 2) * 8)

        if self._version == 2:
            self._crc_list_offset = self._sha_list_offset + self.size() * 20
            self._pack_offset = self._crc_list_offset + self.size() * 4
            self._pack_64_offset = self._pack_offset + self.size() * 4
        # END setup base

    def _read_fanout(self, byte_offset):
        """Generate a fanout table from our data, starting at ``byte_offset``"""
        d = self._cursor.map()
        out = list()
        append = out.append
        for i in range(256):
            append(unpack_from('>L', d, byte_offset + i * 4)[0])
        # END for each entry
        return out

    #} END initialization

    #{ Properties
    def version(self):
        """:return: version of this index file, 1 or 2"""
        return self._version

    def size(self):
        """:return: amount of objects referred to by this index"""
        # the fanout table is cumulative, its last entry is the total count
        return self._fanout_table[255]

    def path(self):
        """:return: path to the packindexfile"""
        return self._indexpath

    def packfile_checksum(self):
        """:return: 20 byte sha representing the sha1 hash of the pack file"""
        return self._cursor.map()[-40:-20]

    def indexfile_checksum(self):
        """:return: 20 byte sha representing the sha1 hash of this index file"""
        return self._cursor.map()[-20:]

    def offsets(self):
        """:return: sequence of all offsets in the order in which they were written

        **Note:** return value can be random accessed, but may be immmutable"""
        if self._version == 2:
            # read stream to array, convert to tuple
            a = array.array('I') # 4 byte unsigned int, long are 8 byte on 64 bit it appears
            a.frombytes(self._cursor.map()[self._pack_offset:self._pack_64_offset])

            # networkbyteorder to something array likes more
            if sys.byteorder == 'little':
                a.byteswap()
            return a
        else:
            return tuple(self.offset(index) for index in range(self.size()))
        # END handle version

    def sha_to_index(self, sha):
        """
        :return: index usable with the ``offset`` or ``entry`` method, or None
            if the sha was not found in this pack index
        :param sha: 20 byte sha to lookup"""
        first_byte = byte_ord(sha[0])
        get_sha = self.sha
        lo = 0 # lower index, the left bound of the bisection
        if first_byte != 0:
            lo = self._fanout_table[first_byte - 1]
        hi = self._fanout_table[first_byte] # the upper, right bound of the bisection

        # bisect until we have the sha
        while lo < hi:
            mid = (lo + hi) // 2
            mid_sha = get_sha(mid)
            if sha < mid_sha:
                hi = mid
            elif sha == mid_sha:
                return mid
            else:
                lo = mid + 1
            # END handle midpoint
        # END bisect
        return None

    def partial_sha_to_index(self, partial_bin_sha, canonical_length):
        """
        :return: index as in `sha_to_index` or None if the sha was not found in this
            index file
        :param partial_bin_sha: an at least two bytes of a partial binary sha as bytes
        :param canonical_length: length of the original hexadecimal representation of the
            given partial binary sha
        :raise AmbiguousObjectName: if more than one object matches the partial sha"""
        if len(partial_bin_sha) < 2:
            raise ValueError("Require at least 2 bytes of partial sha")

        assert isinstance(partial_bin_sha, bytes), "partial_bin_sha must be bytes"
        first_byte = byte_ord(partial_bin_sha[0])

        get_sha = self.sha
        lo = 0 # lower index, the left bound of the bisection
        if first_byte != 0:
            lo = self._fanout_table[first_byte - 1]
        hi = self._fanout_table[first_byte] # the upper, right bound of the bisection

        # fill the partial to full 20 bytes
        filled_sha = partial_bin_sha + NULL_BYTE * (20 - len(partial_bin_sha))

        # find lowest
        while lo < hi:
            mid = (lo + hi) // 2
            mid_sha = get_sha(mid)
            if filled_sha < mid_sha:
                hi = mid
            elif filled_sha == mid_sha:
                # perfect match
                lo = mid
                break
            else:
                lo = mid + 1
            # END handle midpoint
        # END bisect

        if lo < self.size():
            cur_sha = get_sha(lo)
            if is_equal_canonical_sha(canonical_length, partial_bin_sha, cur_sha):
                next_sha = None
                if lo + 1 < self.size():
                    next_sha = get_sha(lo + 1)
                # a second, adjacent match means the partial sha is ambiguous
                if next_sha and next_sha == cur_sha:
                    raise AmbiguousObjectName(partial_bin_sha)
                return lo
            # END if we have a match
        # END if we found something
        return None

    if 'PackIndexFile_sha_to_index' in globals():
        # NOTE: Its just about 25% faster, the major bottleneck might be the attr
        # accesses
        def sha_to_index(self, sha):
            return PackIndexFile_sha_to_index(self, sha)
        # END redefine heavy-hitter with c version

    #} END properties
|
| 502 |
+
|
| 503 |
+
|
| 504 |
+
class PackFile(LazyMixin):
|
| 505 |
+
|
| 506 |
+
"""A pack is a file written according to the Version 2 for git packs
|
| 507 |
+
|
| 508 |
+
As we currently use memory maps, it could be assumed that the maximum size of
|
| 509 |
+
packs therefor is 32 bit on 32 bit systems. On 64 bit systems, this should be
|
| 510 |
+
fine though.
|
| 511 |
+
|
| 512 |
+
**Note:** at some point, this might be implemented using streams as well, or
|
| 513 |
+
streams are an alternate path in the case memory maps cannot be created
|
| 514 |
+
for some reason - one clearly doesn't want to read 10GB at once in that
|
| 515 |
+
case"""
|
| 516 |
+
|
| 517 |
+
__slots__ = ('_packpath', '_cursor', '_size', '_version')
|
| 518 |
+
pack_signature = 0x5041434b # 'PACK'
|
| 519 |
+
pack_version_default = 2
|
| 520 |
+
|
| 521 |
+
# offset into our data at which the first object starts
|
| 522 |
+
first_object_offset = 3 * 4 # header bytes
|
| 523 |
+
footer_size = 20 # final sha
|
| 524 |
+
|
| 525 |
+
def __init__(self, packpath):
|
| 526 |
+
self._packpath = packpath
|
| 527 |
+
|
| 528 |
+
def close(self):
|
| 529 |
+
mman.force_map_handle_removal_win(self._packpath)
|
| 530 |
+
self._cursor = None
|
| 531 |
+
|
| 532 |
+
def _set_cache_(self, attr):
|
| 533 |
+
# we fill the whole cache, whichever attribute gets queried first
|
| 534 |
+
self._cursor = mman.make_cursor(self._packpath).use_region()
|
| 535 |
+
|
| 536 |
+
# read the header information
|
| 537 |
+
type_id, self._version, self._size = unpack_from(">LLL", self._cursor.map(), 0)
|
| 538 |
+
|
| 539 |
+
# TODO: figure out whether we should better keep the lock, or maybe
|
| 540 |
+
# add a .keep file instead ?
|
| 541 |
+
if type_id != self.pack_signature:
|
| 542 |
+
raise ParseError("Invalid pack signature: %i" % type_id)
|
| 543 |
+
|
| 544 |
+
def _iter_objects(self, start_offset, as_stream=True):
|
| 545 |
+
"""Handle the actual iteration of objects within this pack"""
|
| 546 |
+
c = self._cursor
|
| 547 |
+
content_size = c.file_size() - self.footer_size
|
| 548 |
+
cur_offset = start_offset or self.first_object_offset
|
| 549 |
+
|
| 550 |
+
null = NullStream()
|
| 551 |
+
while cur_offset < content_size:
|
| 552 |
+
data_offset, ostream = pack_object_at(c, cur_offset, True)
|
| 553 |
+
# scrub the stream to the end - this decompresses the object, but yields
|
| 554 |
+
# the amount of compressed bytes we need to get to the next offset
|
| 555 |
+
|
| 556 |
+
stream_copy(ostream.read, null.write, ostream.size, chunk_size)
|
| 557 |
+
assert ostream.stream._br == ostream.size
|
| 558 |
+
cur_offset += (data_offset - ostream.pack_offset) + ostream.stream.compressed_bytes_read()
|
| 559 |
+
|
| 560 |
+
# if a stream is requested, reset it beforehand
|
| 561 |
+
# Otherwise return the Stream object directly, its derived from the
|
| 562 |
+
# info object
|
| 563 |
+
if as_stream:
|
| 564 |
+
ostream.stream.seek(0)
|
| 565 |
+
yield ostream
|
| 566 |
+
# END until we have read everything
|
| 567 |
+
|
| 568 |
+
#{ Pack Information
|
| 569 |
+
|
| 570 |
+
def size(self):
    """:return: number of objects contained in this pack file"""
    return self._size
|
| 573 |
+
|
| 574 |
+
def version(self):
    """:return: the pack format version recorded in this pack's header"""
    return self._version
|
| 577 |
+
|
| 578 |
+
def data(self):
    """
    :return: read-only data of this pack. It provides random access and usually
        is a memory map.
    :note: This method is unsafe as it returns a window into a file which might
        be larger than the actual window size"""
    # starting at offset 0 lets us use map() directly; otherwise buffer()
    # would be required
    region = self._cursor.use_region()
    return region.map()
|
| 585 |
+
|
| 586 |
+
def checksum(self):
    """:return: 20 byte sha1 hash over all object shas contained in this file"""
    # the checksum occupies the final 20 bytes of the pack file
    trailer_offset = self._cursor.file_size() - 20
    return self._cursor.use_region(trailer_offset).buffer()[:]
|
| 589 |
+
|
| 590 |
+
def path(self):
    """:return: filesystem path of the pack file we operate on"""
    return self._packpath
|
| 593 |
+
#} END pack information
|
| 594 |
+
|
| 595 |
+
#{ Pack Specific
|
| 596 |
+
|
| 597 |
+
def collect_streams(self, offset):
    """
    :return: list of pack streams which are required to build the object
        at the given offset. The first entry of the list is the object at offset,
        the last one is either a full object, or a REF_Delta stream. The latter
        type needs its reference object to be looked up in an ODB to form a valid
        delta chain.
        If the object at offset is no delta, the size of the list is 1.
    :param offset: specifies the first byte of the object within this pack"""
    streams = []
    cursor = self._cursor
    while True:
        stream = pack_object_at(cursor, offset, True)[1]
        streams.append(stream)
        if stream.type_id != OFS_DELTA:
            # only OFFSET deltas can be chased inside this pack; anything
            # else is a full object or a REF delta someone else must resolve
            break
        # END handle type
        offset = stream.pack_offset - stream.delta_info
    # END while chaining streams
    return streams
|
| 621 |
+
|
| 622 |
+
#} END pack specific
|
| 623 |
+
|
| 624 |
+
#{ Read-Database like Interface
|
| 625 |
+
|
| 626 |
+
def info(self, offset):
    """Retrieve information about the object at the given file-absolute offset

    :param offset: byte offset; falsy values address the first object
    :return: OPackInfo instance, the actual type differs depending on the
        type_id attribute"""
    actual_offset = offset or self.first_object_offset
    return pack_object_at(self._cursor, actual_offset, False)[1]
|
| 632 |
+
|
| 633 |
+
def stream(self, offset):
    """Retrieve an object at the given file-relative offset as stream along
    with its information

    :param offset: byte offset; falsy values address the first object
    :return: OPackStream instance, the actual type differs depending on the
        type_id attribute"""
    actual_offset = offset or self.first_object_offset
    return pack_object_at(self._cursor, actual_offset, True)[1]
|
| 639 |
+
|
| 640 |
+
def stream_iter(self, start_offset=0):
    """
    :return: iterator yielding OPackStream compatible instances, allowing
        direct access to the data in the pack.
    :param start_offset: offset to the first object to iterate. If 0, iteration
        starts at the very first object in the pack.

    **Note:** Iterating a pack directly is costly as the datastream has to be
    decompressed to determine the bounds between the objects"""
    return self._iter_objects(start_offset, as_stream=True)
|
| 650 |
+
|
| 651 |
+
#} END Read-Database like Interface
|
| 652 |
+
|
| 653 |
+
|
| 654 |
+
class PackEntity(LazyMixin):

    """Combines the PackIndexFile and the PackFile into one, allowing the
    actual objects to be resolved and iterated"""

    __slots__ = ('_index',        # our index file
                 '_pack',         # our pack file
                 '_offset_map'    # on demand dict mapping one offset to the next consecutive one
                 )

    # classes used to instantiate the index and pack members; subclasses may
    # override these to plug in specialized implementations
    IndexFileCls = PackIndexFile
    PackFileCls = PackFile
|
| 666 |
+
|
| 667 |
+
def __init__(self, pack_or_index_path):
    """Initialize ourselves with the path to the respective pack or index file"""
    # either extension is accepted - only the shared basename matters
    base = os.path.splitext(pack_or_index_path)[0]
    self._index = self.IndexFileCls("%s.idx" % base)    # PackIndexFile instance
    self._pack = self.PackFileCls("%s.pack" % base)     # corresponding PackFile instance
|
| 672 |
+
|
| 673 |
+
def close(self):
    """Close the index file first, then the pack file, releasing their resources."""
    for resource in (self._index, self._pack):
        resource.close()
|
| 676 |
+
|
| 677 |
+
def _set_cache_(self, attr):
    """Lazily build ``_offset_map``, mapping each object offset to the offset
    of the next consecutive object."""
    # TODO: a simple sorted offset array which can be bisected would find the
    # respective entry (and its +1 neighbour) with far less memory.
    sorted_offsets = sorted(self._index.offsets())
    # objects end where the 20 byte pack trailer begins
    end_of_objects = len(self._pack.data()) - self._pack.footer_size
    assert sorted_offsets, "Cannot handle empty indices"

    # pair every offset with its successor; the last object has no successor
    # and ends at the trailer instead
    offset_map = dict(zip(sorted_offsets, sorted_offsets[1:]))
    offset_map[sorted_offsets[-1]] = end_of_objects
    self._offset_map = offset_map
|
| 701 |
+
|
| 702 |
+
def _sha_to_index(self, sha):
    """:return: index of *sha* within our index file
    :raise BadObject: if the sha is not contained in this pack"""
    found = self._index.sha_to_index(sha)
    if found is None:
        raise BadObject(sha)
    return found
|
| 708 |
+
|
| 709 |
+
def _iter_objects(self, as_stream):
    """Yield an OInfo or OStream instance for every object in our index."""
    # bind lookups once - this runs once per packed object
    sha_of = self._index.sha
    make_object = self._object
    for index in range(self._index.size()):
        yield make_object(sha_of(index), as_stream, index)
|
| 716 |
+
|
| 717 |
+
def _object(self, sha, as_stream, index=-1):
    """:return: OInfo or OStream object providing information about the given sha
    :param sha: binary sha of the object, or None to derive it from *index*
    :param as_stream: if True, return a stream object; otherwise info only
    :param index: if not -1, its assumed to be the sha's index in the IndexFile
    :raise BadObject: if a delta chain cannot be resolved to a base object"""
    # its a little bit redundant here, but it needs to be efficient
    if index < 0:
        index = self._sha_to_index(sha)
    if sha is None:
        sha = self._index.sha(index)
    # END assure sha is present ( in output )
    offset = self._index.offset(index)
    type_id, uncomp_size, data_rela_offset = pack_object_header_info(self._pack._cursor.use_region(offset).buffer())
    if as_stream:
        if type_id not in delta_types:
            packstream = self._pack.stream(offset)
            return OStream(sha, packstream.type, packstream.size, packstream.stream)
        # END handle non-deltas

        # produce a delta stream containing all info
        # To prevent it from applying the deltas when querying the size,
        # we extract it from the delta stream ourselves
        streams = self.collect_streams_at_offset(offset)
        dstream = DeltaApplyReader.new(streams)

        return ODeltaStream(sha, dstream.type, None, dstream)
    else:
        if type_id not in delta_types:
            return OInfo(sha, type_id_to_type_map[type_id], uncomp_size)
        # END handle non-deltas

        # deltas are a little tougher - unpack the first bytes to obtain
        # the actual target size, as opposed to the size of the delta data
        streams = self.collect_streams_at_offset(offset)
        buf = streams[0].read(512)
        # NOTE: `offset` is deliberately reused here for the parse position
        # within the delta header, shadowing the pack offset above
        offset, src_size = msb_size(buf)
        offset, target_size = msb_size(buf, offset)

        # collect the streams to obtain the actual object type
        if streams[-1].type_id in delta_types:
            raise BadObject(sha, "Could not resolve delta object")
        return OInfo(sha, streams[-1].type, target_size)
    # END handle stream
|
| 758 |
+
|
| 759 |
+
#{ Read-Database like Interface
|
| 760 |
+
|
| 761 |
+
def info(self, sha):
    """Retrieve information about the object identified by the given sha

    :param sha: 20 byte sha1
    :raise BadObject:
    :return: OInfo instance, with 20 byte sha"""
    as_stream = False
    return self._object(sha, as_stream)
|
| 768 |
+
|
| 769 |
+
def stream(self, sha):
    """Retrieve an object stream along with its information as identified by the
    given sha

    :param sha: 20 byte sha1
    :raise BadObject:
    :return: OStream instance, with 20 byte sha"""
    as_stream = True
    return self._object(sha, as_stream)
|
| 776 |
+
|
| 777 |
+
def info_at_index(self, index):
    """As ``info``, but addresses the object by its PackIndexFile compatible index."""
    return self._object(None, False, index)
|
| 780 |
+
|
| 781 |
+
def stream_at_index(self, index):
    """As ``stream``, but addresses the object by its PackIndexFile compatible index."""
    return self._object(None, True, index)
|
| 785 |
+
|
| 786 |
+
#} END Read-Database like Interface
|
| 787 |
+
|
| 788 |
+
#{ Interface
|
| 789 |
+
|
| 790 |
+
def pack(self):
    """:return: the PackFile instance we delegate to"""
    return self._pack
|
| 793 |
+
|
| 794 |
+
def index(self):
    """:return: the PackIndexFile instance we delegate to"""
    return self._index
|
| 797 |
+
|
| 798 |
+
def is_valid_stream(self, sha, use_crc=False):
    """
    Verify that the stream at the given sha is valid.

    :param use_crc: if True, the index' crc is run over the compressed stream of
        the object, which is much faster than checking the sha1. It is also
        more prone to unnoticed corruption or manipulation.
    :param sha: 20 byte sha1 of the object whose stream to verify
        whether the compressed stream of the object is valid. If it is
        a delta, this only verifies that the delta's data is valid, not the
        data of the actual undeltified object, as it depends on more than
        just this stream.
        If False, the object will be decompressed and the sha generated. It must
        match the given sha

    :return: True if the stream is valid
    :raise UnsupportedOperation: If the index is version 1 only
    :raise BadObject: sha was not found"""
    if use_crc:
        if self._index.version() < 2:
            raise UnsupportedOperation("Version 1 indices do not contain crc's, verify by sha instead")
        # END handle index version

        index = self._sha_to_index(sha)
        offset = self._index.offset(index)
        next_offset = self._offset_map[offset]
        crc_value = self._index.crc(index)

        # create the current crc value, on the compressed object data
        # Read it in chunks, without copying the data
        crc_update = zlib.crc32
        pack_data = self._pack.data()
        cur_pos = offset
        this_crc_value = 0
        while cur_pos < next_offset:
            rbound = min(cur_pos + chunk_size, next_offset)
            size = rbound - cur_pos
            this_crc_value = crc_update(pack_data[cur_pos:cur_pos + size], this_crc_value)
            cur_pos += size
        # END window size loop

        # crc returns signed 32 bit numbers, the AND op forces it into unsigned
        # mode ... wow, sneaky, from dulwich.
        return (this_crc_value & 0xffffffff) == crc_value
    else:
        shawriter = Sha1Writer()
        stream = self._object(sha, as_stream=True)
        # write a loose object, which is the basis for the sha
        write_object(stream.type, stream.size, stream.read, shawriter.write)

        # BUGFIX: previously this comparison was additionally asserted, which
        # raised AssertionError instead of returning False on a mismatch (and
        # was silently stripped under `python -O`). Returning the comparison
        # honors the documented ":return: True if the stream is valid".
        return shawriter.sha(as_hex=False) == sha
    # END handle crc/sha verification
|
| 852 |
+
|
| 853 |
+
def info_iter(self):
    """
    :return: Iterator over all objects in this pack, yielding OInfo instances"""
    return self._iter_objects(as_stream=False)
|
| 858 |
+
|
| 859 |
+
def stream_iter(self):
    """
    :return: iterator over all objects in this pack, yielding OStream instances"""
    return self._iter_objects(as_stream=True)
|
| 864 |
+
|
| 865 |
+
def collect_streams_at_offset(self, offset):
    """
    As the version in the PackFile, but can resolve REF deltas within this pack
    For more info, see ``collect_streams``

    :param offset: offset into the pack file at which the object can be found
    :return: list of streams; the last entry is either a full base object or an
        unresolvable REF delta"""
    streams = self._pack.collect_streams(offset)

    # try to resolve the last one if needed. It is assumed to be either
    # a REF delta, or a base object, as OFFSET deltas are resolved by the pack
    if streams[-1].type_id == REF_DELTA:
        stream = streams[-1]
        while stream.type_id in delta_types:
            if stream.type_id == REF_DELTA:
                # smmap can return memory view objects, which can't be compared as buffers/bytes can ...
                if isinstance(stream.delta_info, memoryview):
                    sindex = self._index.sha_to_index(stream.delta_info.tobytes())
                else:
                    sindex = self._index.sha_to_index(stream.delta_info)
                if sindex is None:
                    # the referenced base lives outside this pack - leave the
                    # unresolved REF delta as the final stream
                    break
                stream = self._pack.stream(self._index.offset(sindex))
                streams.append(stream)
            else:
                # must be another OFS DELTA - this could happen if a REF
                # delta we resolve previously points to an OFS delta. Who
                # would do that ;) ? We can handle it though
                stream = self._pack.stream(stream.delta_info)
                streams.append(stream)
            # END handle ref delta
        # END resolve ref streams
    # END resolve streams

    return streams
|
| 899 |
+
|
| 900 |
+
def collect_streams(self, sha):
    """
    As ``PackFile.collect_streams``, but takes a sha instead of an offset.
    Additionally, ref_delta streams will be resolved within this pack.
    If this is not possible, the stream will be left alone, hence it is advised
    to check for unresolved ref-deltas and resolve them before attempting to
    construct a delta stream.

    :param sha: 20 byte sha1 specifying the object whose related streams you want to collect
    :return: list of streams, first being the actual object delta, the last being
        a possibly unresolved base object.
    :raise BadObject:"""
    index = self._sha_to_index(sha)
    return self.collect_streams_at_offset(self._index.offset(index))
|
| 913 |
+
|
| 914 |
+
@classmethod
def write_pack(cls, object_iter, pack_write, index_write=None,
               object_count=None, zlib_compression=zlib.Z_BEST_SPEED):
    """
    Create a new pack by putting all objects obtained by the object_iterator
    into a pack which is written using the pack_write method.
    The respective index is produced as well if index_write is not None.

    :param object_iter: iterator yielding odb output objects
    :param pack_write: function to receive strings to write into the pack stream
    :param index_write: if not None, the function writes the index file corresponding
        to the pack.
    :param object_count: if you can provide the amount of objects in your iteration,
        this would be the place to put it. Otherwise we have to pre-iterate and store
        all items into a list to get the number, which uses more memory than necessary.
    :param zlib_compression: the zlib compression level to use
    :return: tuple(pack_sha, index_binsha) binary sha over all the contents of the pack
        and over all contents of the index. If index_write was None, index_binsha will be None
    :raise ValueError: if the iterator yielded fewer objects than object_count promised

    **Note:** The destination of the write functions is up to the user. It could
    be a socket, or a file for instance

    **Note:** writes only undeltified objects"""
    objs = object_iter
    if not object_count:
        if not isinstance(object_iter, (tuple, list)):
            objs = list(object_iter)
        # END handle list type
        object_count = len(objs)
    # END handle object

    pack_writer = FlexibleSha1Writer(pack_write)
    pwrite = pack_writer.write
    ofs = 0                             # current offset into the pack file
    index = None
    wants_index = index_write is not None

    # write header: signature, version, object count
    pwrite(pack('>LLL', PackFile.pack_signature, PackFile.pack_version_default, object_count))
    ofs += 12

    if wants_index:
        index = IndexWriter()
    # END handle index header

    actual_count = 0
    for obj in objs:
        actual_count += 1
        crc = 0

        # object header
        hdr = create_pack_object_header(obj.type_id, obj.size)
        if index_write:
            crc = crc32(hdr)
        else:
            # crc=None tells write_stream_to_pack not to track a crc at all
            crc = None
        # END handle crc
        pwrite(hdr)

        # data stream
        zstream = zlib.compressobj(zlib_compression)
        ostream = obj.stream
        br, bw, crc = write_stream_to_pack(ostream.read, pwrite, zstream, base_crc=crc)
        assert(br == obj.size)
        if wants_index:
            index.append(obj.binsha, crc, ofs)
        # END handle index

        ofs += len(hdr) + bw
        if actual_count == object_count:
            break
        # END abort once we are done
    # END for each object

    if actual_count != object_count:
        raise ValueError(
            "Expected to write %i objects into pack, but received only %i from iterators" % (object_count, actual_count))
    # END count assertion

    # write footer: sha1 over everything written so far
    pack_sha = pack_writer.sha(as_hex=False)
    assert len(pack_sha) == 20
    pack_write(pack_sha)
    ofs += len(pack_sha)                # just for completeness ;)

    index_sha = None
    if wants_index:
        index_sha = index.write(pack_sha, index_write)
    # END handle index

    return pack_sha, index_sha
|
| 1005 |
+
|
| 1006 |
+
@classmethod
def create(cls, object_iter, base_dir, object_count=None, zlib_compression=zlib.Z_BEST_SPEED):
    """Create a new on-disk entity comprised of a properly named pack file and a
    properly named and corresponding index file. The pack contains all OStream
    objects contained in object iter.
    :param base_dir: directory which is to contain the files
    :return: PackEntity instance initialized with the new pack

    **Note:** for more information on the other parameters see the write_pack method"""
    pack_fd, pack_path = tempfile.mkstemp('', 'pack', base_dir)
    index_fd, index_path = tempfile.mkstemp('', 'index', base_dir)

    def pack_write(d):
        return os.write(pack_fd, d)

    def index_write(d):
        return os.write(index_fd, d)

    pack_binsha, index_binsha = cls.write_pack(object_iter, pack_write, index_write, object_count, zlib_compression)
    os.close(pack_fd)
    os.close(index_fd)

    # rename the temporary files into their canonical, sha-derived names
    fmt = "pack-%s.%s"
    new_pack_path = os.path.join(base_dir, fmt % (bin_to_hex(pack_binsha), 'pack'))
    new_index_path = os.path.join(base_dir, fmt % (bin_to_hex(pack_binsha), 'idx'))
    os.rename(pack_path, new_pack_path)
    os.rename(index_path, new_index_path)

    return cls(new_pack_path)
|
| 1030 |
+
|
| 1031 |
+
#} END interface
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/stream.py
ADDED
|
@@ -0,0 +1,730 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
|
| 6 |
+
from io import BytesIO
|
| 7 |
+
|
| 8 |
+
import mmap
|
| 9 |
+
import os
|
| 10 |
+
import sys
|
| 11 |
+
import zlib
|
| 12 |
+
|
| 13 |
+
from gitdb.fun import (
|
| 14 |
+
msb_size,
|
| 15 |
+
stream_copy,
|
| 16 |
+
apply_delta_data,
|
| 17 |
+
connect_deltas,
|
| 18 |
+
delta_types
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
from gitdb.util import (
|
| 22 |
+
allocate_memory,
|
| 23 |
+
LazyMixin,
|
| 24 |
+
make_sha,
|
| 25 |
+
write,
|
| 26 |
+
close,
|
| 27 |
+
)
|
| 28 |
+
|
| 29 |
+
from gitdb.const import NULL_BYTE, BYTE_SPACE
|
| 30 |
+
from gitdb.utils.encoding import force_bytes
|
| 31 |
+
|
| 32 |
+
has_perf_mod = False
|
| 33 |
+
try:
|
| 34 |
+
from gitdb_speedups._perf import apply_delta as c_apply_delta
|
| 35 |
+
has_perf_mod = True
|
| 36 |
+
except ImportError:
|
| 37 |
+
pass
|
| 38 |
+
|
| 39 |
+
__all__ = ('DecompressMemMapReader', 'FDCompressedSha1Writer', 'DeltaApplyReader',
|
| 40 |
+
'Sha1Writer', 'FlexibleSha1Writer', 'ZippedStoreShaWriter', 'FDCompressedSha1Writer',
|
| 41 |
+
'FDStream', 'NullStream')
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
#{ RO Streams
|
| 45 |
+
|
| 46 |
+
class DecompressMemMapReader(LazyMixin):

    """Reads data in chunks from a memory map and decompresses it. The client sees
    only the uncompressed data, respective file-like read calls are handling on-demand
    buffered decompression accordingly

    A constraint on the total size of bytes is activated, simulating
    a logical file within a possibly larger physical memory area

    To read efficiently, you clearly don't want to read individual bytes, instead,
    read a few kilobytes at least.

    **Note:** The chunk-size should be carefully selected as it will involve quite a bit
    of string copying due to the way the zlib is implemented. Its very wasteful,
    hence we try to find a good tradeoff between allocation time and number of
    times we actually allocate. An own zlib implementation would be good here
    to better support streamed reading - it would only need to keep the mmap
    and decompress it into chunks, that's all ... """
    __slots__ = ('_m', '_zip', '_buf', '_buflen', '_br', '_cws', '_cwe', '_s', '_close',
                 '_cbr', '_phi')

    max_read_size = 512 * 1024  # currently unused
|
| 68 |
+
|
| 69 |
+
def __init__(self, m, close_on_deletion, size=None):
    """Initialize with mmap for stream reading
    :param m: must be content data - use new if you have object data and no size
    :param close_on_deletion: if True, close the memory map when we are deleted
    :param size: total uncompressed size; if None, ``_s`` stays unset so that
        LazyMixin triggers ``_set_cache_`` to parse it from the object header
        on first access"""
    self._m = m
    self._zip = zlib.decompressobj()
    self._buf = None                        # buffer of decompressed bytes
    self._buflen = 0                        # length of bytes in buffer
    if size is not None:
        self._s = size                      # size of uncompressed data to read in total
    self._br = 0                            # num uncompressed bytes read
    self._cws = 0                           # start byte of compression window
    self._cwe = 0                           # end byte of compression window
    self._cbr = 0                           # number of compressed bytes read
    self._phi = False                       # is True if we parsed the header info
    self._close = close_on_deletion         # close the memmap on deletion ?
|
| 84 |
+
|
| 85 |
+
def _set_cache_(self, attr):
    """Lazily derive the uncompressed size by parsing the object header."""
    assert attr == '_s'
    # only happens for size, which is a marker to indicate we still
    # have to parse the header from the stream
    self._parse_header_info()
|
| 90 |
+
|
| 91 |
+
def __del__(self):
    # best-effort cleanup; close() is a no-op unless closing was requested
    # at construction time, so calling it from the finalizer is safe
    self.close()
|
| 93 |
+
|
| 94 |
+
def _parse_header_info(self):
    """If this stream contains object data, parse the header info and skip the
    stream to a point where each read will yield object content

    :return: parsed type_string, size"""
    # read header
    # should really be enough, cgit uses 8192 I believe
    # And for good reason !! This needs to be that high for the header to be read correctly in all cases
    maxb = 8192
    # temporarily pretend the stream is maxb long so read() will hand out bytes
    self._s = maxb
    hdr = self.read(maxb)
    # header layout: b"<type> <size>\0<payload...>"
    hdrend = hdr.find(NULL_BYTE)
    typ, size = hdr[:hdrend].split(BYTE_SPACE)
    size = int(size)
    self._s = size

    # adjust internal state to match actual header length that we ignore
    # The buffer will be depleted first on future reads
    self._br = 0
    hdrend += 1
    # payload bytes already decompressed past the header are kept for later reads
    self._buf = BytesIO(hdr[hdrend:])
    self._buflen = len(hdr) - hdrend

    self._phi = True

    return typ, size
|
| 120 |
+
|
| 121 |
+
#{ Interface
|
| 122 |
+
|
| 123 |
+
@classmethod
def new(cls, m, close_on_deletion=False):
    """Create a new DecompressMemMapReader instance for acting as a read-only stream
    This method parses the object header from m and returns the parsed
    type and size, as well as the created stream instance.

    :param m: memory map on which to operate. It must be object data ( header + contents )
    :param close_on_deletion: if True, the memory map will be closed once we are
        being deleted
    :return: tuple(type_string, size, stream_instance)"""
    # BUGFIX: this classmethod previously named its first parameter `self` and
    # hard-coded DecompressMemMapReader; using `cls` lets subclasses produce
    # instances of their own type while keeping identical behavior here.
    inst = cls(m, close_on_deletion, 0)
    typ, size = inst._parse_header_info()
    return typ, size, inst
|
| 135 |
+
|
| 136 |
+
def data(self):
    """:return: random access compatible data we are working on"""
    return self._m
|
| 139 |
+
|
| 140 |
+
def close(self):
    """Close our underlying stream of compressed bytes if this was allowed during
    initialization
    :return: True if we closed the underlying stream
    :note: can be called safely"""
    if not self._close:
        return
    # the underlying object may not offer close() at all (e.g. plain buffers)
    if hasattr(self._m, 'close'):
        self._m.close()
    self._close = False
|
| 150 |
+
|
| 151 |
+
def compressed_bytes_read(self):
    """
    :return: number of compressed bytes read. This includes the bytes it
        took to decompress the header ( if there was one )"""
    # ABSTRACT: When decompressing a byte stream, it can be that the first
    # x bytes which were requested match the first x bytes in the loosely
    # compressed datastream. This is the worst-case assumption that the reader
    # does, it assumes that it will get at least X bytes from X compressed bytes
    # in call cases.
    # The caveat is that the object, according to our known uncompressed size,
    # is already complete, but there are still some bytes left in the compressed
    # stream that contribute to the amount of compressed bytes.
    # How can we know that we are truly done, and have read all bytes we need
    # to read ?
    # Without help, we cannot know, as we need to obtain the status of the
    # decompression. If it is not finished, we need to decompress more data
    # until it is finished, to yield the actual number of compressed bytes
    # belonging to the decompressed object
    # We are using a custom zlib module for this, if its not present,
    # we try to put in additional bytes up for decompression if feasible
    # and check for the unused_data.

    # Only scrub the stream forward if we are officially done with the
    # bytes we were to have.
    if self._br == self._s and not self._zip.unused_data:
        # manipulate the bytes-read to allow our own read method to continue
        # but keep the window at its current position
        self._br = 0
        if hasattr(self._zip, 'status'):
            # custom zlib build exposes the decompressor status directly
            while self._zip.status == zlib.Z_OK:
                self.read(mmap.PAGESIZE)
            # END scrub-loop custom zlib
        else:
            # pass in additional pages, until we have unused data
            # (or until the whole map was consumed)
            while not self._zip.unused_data and self._cbr != len(self._m):
                self.read(mmap.PAGESIZE)
            # END scrub-loop default zlib
        # END handle stream scrubbing

        # reset bytes read, just to be sure
        self._br = self._s
    # END handle stream scrubbing

    # unused data ends up in the unconsumed tail, which was removed
    # from the count already
    return self._cbr
|
| 197 |
+
|
| 198 |
+
#} END interface
|
| 199 |
+
|
| 200 |
+
def seek(self, offset, whence=getattr(os, 'SEEK_SET', 0)):
    """Reset the stream so reading restarts from the very beginning.

    Only a full rewind is supported.
    :raise ValueError: if offset or whence is not 0"""
    seek_set = getattr(os, 'SEEK_SET', 0)
    if offset != 0 or whence != seek_set:
        raise ValueError("Can only seek to position 0")

    # fresh decompressor, zeroed byte counters and window boundaries
    self._zip = zlib.decompressobj()
    self._br = self._cws = self._cwe = self._cbr = 0
    if self._phi:
        # forget the parsed size so the header is re-parsed on first access
        self._phi = False
        del self._s
|
| 213 |
+
|
| 214 |
+
def read(self, size=-1):
    """Read up to ``size`` uncompressed bytes from the stream.

    :param size: number of uncompressed bytes to read; if < 1, everything
        remaining up to the parsed object size is read
    :return: bytes object with the decompressed data"""
    if size < 1:
        size = self._s - self._br
    else:
        size = min(size, self._s - self._br)
    # END clamp size

    if size == 0:
        return bytes()
    # END handle depletion

    # deplete the buffer, then just continue using the decompress object
    # which has an own buffer. We just need this to transparently parse the
    # header from the zlib stream
    dat = bytes()
    if self._buf:
        if self._buflen >= size:
            # have enough data
            dat = self._buf.read(size)
            self._buflen -= size
            self._br += size
            return dat
        else:
            dat = self._buf.read()        # ouch, duplicates data
            size -= self._buflen
            self._br += self._buflen

            self._buflen = 0
            self._buf = None
        # END handle buffer len
    # END handle buffer

    # decompress some data
    # Abstract: zlib needs to operate on chunks of our memory map ( which may
    # be large ), as it will otherwise and always fill in the 'unconsumed_tail'
    # attribute which possible reads our whole map to the end, forcing
    # everything to be read from disk even though just a portion was requested.
    # As this would be a nogo, we workaround it by passing only chunks of data,
    # moving the window into the memory map along as we decompress, which keeps
    # the tail smaller than our chunk-size. This causes 'only' the chunk to be
    # copied once, and another copy of a part of it when it creates the unconsumed
    # tail. We have to use it to hand in the appropriate amount of bytes during
    # the next read.
    tail = self._zip.unconsumed_tail
    if tail:
        # move the window, make it as large as size demands. For code-clarity,
        # we just take the chunk from our map again instead of reusing the unconsumed
        # tail. The latter one would safe some memory copying, but we could end up
        # with not getting enough data uncompressed, so we had to sort that out as well.
        # Now we just assume the worst case, hence the data is uncompressed and the window
        # needs to be as large as the uncompressed bytes we want to read.
        self._cws = self._cwe - len(tail)
        self._cwe = self._cws + size
    else:
        cws = self._cws
        self._cws = self._cwe
        self._cwe = cws + size
    # END handle tail

    # if window is too small, make it larger so zip can decompress something
    if self._cwe - self._cws < 8:
        self._cwe = self._cws + 8
    # END adjust winsize

    # takes a slice, but doesn't copy the data, it says ...
    indata = self._m[self._cws:self._cwe]

    # get the actual window end to be sure we don't use it for computations
    self._cwe = self._cws + len(indata)
    dcompdat = self._zip.decompress(indata, size)
    # update the amount of compressed bytes read
    # We feed possibly overlapping chunks, which is why the unconsumed tail
    # has to be taken into consideration, as well as the unused data
    # if we hit the end of the stream
    # NOTE: Behavior changed in PY2.7 onward, which requires special handling to make the tests work properly.
    # They are thorough, and I assume it is truly working.
    # Why is this logic as convoluted as it is ? Please look at the table in
    # https://github.com/gitpython-developers/gitdb/issues/19 to learn about the test-results.
    # Bascially, on py2.6, you want to use branch 1, whereas on all other python version, the second branch
    # will be the one that works.
    # However, the zlib VERSIONs as well as the platform check is used to further match the entries in the
    # table in the github issue. This is it ... it was the only way I could make this work everywhere.
    # IT's CERTAINLY GOING TO BITE US IN THE FUTURE ... .
    if zlib.ZLIB_VERSION in ('1.2.7', '1.2.5') and not sys.platform == 'darwin':
        unused_datalen = len(self._zip.unconsumed_tail)
    else:
        unused_datalen = len(self._zip.unconsumed_tail) + len(self._zip.unused_data)
    # # end handle very special case ...

    self._cbr += len(indata) - unused_datalen
    self._br += len(dcompdat)

    if dat:
        dcompdat = dat + dcompdat
    # END prepend our cached data

    # it can happen, depending on the compression, that we get less bytes
    # than ordered as it needs the final portion of the data as well.
    # Recursively resolve that.
    # Note: dcompdat can be empty even though we still appear to have bytes
    # to read, if we are called by compressed_bytes_read - it manipulates
    # us to empty the stream
    if dcompdat and (len(dcompdat) - len(dat)) < size and self._br < self._s:
        dcompdat += self.read(size - len(dcompdat))
    # END handle special case
    return dcompdat
|
| 320 |
+
|
| 321 |
+
|
| 322 |
+
class DeltaApplyReader(LazyMixin):

    """A reader which dynamically applies pack deltas to a base object, keeping the
    memory demands to a minimum.

    The size of the final object is only obtainable once all deltas have been
    applied, unless it is retrieved from a pack index.

    The uncompressed Delta has the following layout (MSB being a most significant
    bit encoded dynamic size):

    * MSB Source Size - the size of the base against which the delta was created
    * MSB Target Size - the size of the resulting data after the delta was applied
    * A list of one byte commands (cmd) which are followed by a specific protocol:

     * cmd & 0x80 - copy delta_data[offset:offset+size]

      * Followed by an encoded offset into the delta data
      * Followed by an encoded size of the chunk to copy

     * cmd & 0x7f - insert

      * insert cmd bytes from the delta buffer into the output stream

     * cmd == 0 - invalid operation ( or error in delta stream )
    """
    __slots__ = (
        "_bstream",                 # base stream to which to apply the deltas
        "_dstreams",                # tuple of delta stream readers
        "_mm_target",               # memory map of the delta-applied data
        "_size",                    # actual number of bytes in _mm_target
        "_br"                       # number of bytes read
    )

    #{ Configuration
    k_max_memory_move = 250 * 1000 * 1000
    #} END configuration

    def __init__(self, stream_list):
        """Initialize this instance with a list of streams, the first stream being
        the delta to apply on top of all following deltas, the last stream being the
        base object onto which to apply the deltas"""
        assert len(stream_list) > 1, "Need at least one delta and one base stream"

        self._bstream = stream_list[-1]
        self._dstreams = tuple(stream_list[:-1])
        self._br = 0

    def _set_cache_too_slow_without_c(self, attr):
        # Lazy initializer: builds _mm_target/_size by merging all deltas
        # into one and applying it once against the base stream.
        # the direct algorithm is fastest and most direct if there is only one
        # delta. Also, the extra overhead might not be worth it for items smaller
        # than X - definitely the case in python, every function call costs
        # huge amounts of time
        # if len(self._dstreams) * self._bstream.size < self.k_max_memory_move:
        if len(self._dstreams) == 1:
            return self._set_cache_brute_(attr)

        # Aggregate all deltas into one delta in reverse order. Hence we take
        # the last delta, and reverse-merge its ancestor delta, until we receive
        # the final delta data stream.
        dcl = connect_deltas(self._dstreams)

        # call len directly, as the (optional) c version doesn't implement the sequence
        # protocol
        if dcl.rbound() == 0:
            self._size = 0
            self._mm_target = allocate_memory(0)
            return
        # END handle empty list

        self._size = dcl.rbound()
        self._mm_target = allocate_memory(self._size)

        bbuf = allocate_memory(self._bstream.size)
        stream_copy(self._bstream.read, bbuf.write, self._bstream.size, 256 * mmap.PAGESIZE)

        # APPLY CHUNKS
        write = self._mm_target.write
        dcl.apply(bbuf, write)

        self._mm_target.seek(0)

    def _set_cache_brute_(self, attr):
        """If we are here, we apply the actual deltas"""
        # TODO: There should be a special case if there is only one stream
        # Then the default-git algorithm should perform a tad faster, as the
        # delta is not peaked into, causing less overhead.
        buffer_info_list = list()
        max_target_size = 0
        for dstream in self._dstreams:
            buf = dstream.read(512)            # read the header information + X
            offset, src_size = msb_size(buf)
            offset, target_size = msb_size(buf, offset)
            buffer_info_list.append((buf[offset:], offset, src_size, target_size))
            max_target_size = max(max_target_size, target_size)
        # END for each delta stream

        # sanity check - the first delta to apply should have the same source
        # size as our actual base stream
        base_size = self._bstream.size
        target_size = max_target_size

        # if we have more than 1 delta to apply, we will swap buffers, hence we must
        # assure that all buffers we use are large enough to hold all the results
        if len(self._dstreams) > 1:
            base_size = target_size = max(base_size, max_target_size)
        # END adjust buffer sizes

        # Allocate private memory map big enough to hold the first base buffer
        # We need random access to it
        bbuf = allocate_memory(base_size)
        stream_copy(self._bstream.read, bbuf.write, base_size, 256 * mmap.PAGESIZE)

        # allocate memory map large enough for the largest (intermediate) target
        # We will use it as scratch space for all delta ops. If the final
        # target buffer is smaller than our allocated space, we just use parts
        # of it upon return.
        tbuf = allocate_memory(target_size)

        # for each delta to apply, memory map the decompressed delta and
        # work on the op-codes to reconstruct everything.
        # For the actual copying, we use a seek and write pattern of buffer
        # slices.
        final_target_size = None
        for (dbuf, offset, src_size, target_size), dstream in zip(reversed(buffer_info_list), reversed(self._dstreams)):
            # allocate a buffer to hold all delta data - fill in the data for
            # fast access. We do this as we know that reading individual bytes
            # from our stream would be slower than necessary ( although possible )
            # The dbuf buffer contains commands after the first two MSB sizes, the
            # offset specifies the amount of bytes read to get the sizes.
            ddata = allocate_memory(dstream.size - offset)
            ddata.write(dbuf)
            # read the rest from the stream. The size we give is larger than necessary
            stream_copy(dstream.read, ddata.write, dstream.size, 256 * mmap.PAGESIZE)

            #######################################################################
            if 'c_apply_delta' in globals():
                c_apply_delta(bbuf, ddata, tbuf)
            else:
                apply_delta_data(bbuf, src_size, ddata, len(ddata), tbuf.write)
            #######################################################################

            # finally, swap out source and target buffers. The target is now the
            # base for the next delta to apply
            bbuf, tbuf = tbuf, bbuf
            bbuf.seek(0)
            tbuf.seek(0)
            final_target_size = target_size
        # END for each delta to apply

        # its already seeked to 0, constrain it to the actual size
        # NOTE: in the end of the loop, it swaps buffers, hence our target buffer
        # is not tbuf, but bbuf !
        self._mm_target = bbuf
        self._size = final_target_size

    #{ Configuration
    if not has_perf_mod:
        _set_cache_ = _set_cache_brute_
    else:
        _set_cache_ = _set_cache_too_slow_without_c

    #} END configuration

    def read(self, count=0):
        """Read up to ``count`` bytes of the delta-applied result.
        :param count: if < 1 or larger than what is left, read everything left"""
        bl = self._size - self._br      # bytes left
        if count < 1 or count > bl:
            count = bl
        # NOTE: we could check for certain size limits, and possibly
        # return buffers instead of strings to prevent byte copying
        data = self._mm_target.read(count)
        self._br += len(data)
        return data

    def seek(self, offset, whence=getattr(os, 'SEEK_SET', 0)):
        """Allows to reset the stream to restart reading

        :raise ValueError: If offset and whence are not 0"""
        if offset != 0 or whence != getattr(os, 'SEEK_SET', 0):
            raise ValueError("Can only seek to position 0")
        # END handle offset
        self._br = 0
        self._mm_target.seek(0)

    #{ Interface

    @classmethod
    def new(cls, stream_list):
        """
        Convert the given list of streams into a stream which resolves deltas
        when reading from it.

        :param stream_list: two or more stream objects, first stream is a Delta
            to the object that you want to resolve, followed by N additional delta
            streams. The list's last stream must be a non-delta stream.

        :return: Non-Delta OPackStream object whose stream can be used to obtain
            the decompressed resolved data
        :raise ValueError: if the stream list cannot be handled"""
        if len(stream_list) < 2:
            raise ValueError("Need at least two streams")
        # END single object special handling

        if stream_list[-1].type_id in delta_types:
            raise ValueError(
                "Cannot resolve deltas if there is no base object stream, last one was type: %s" % stream_list[-1].type)
        # END check stream
        return cls(stream_list)

    #} END interface

    #{ OInfo like Interface

    @property
    def type(self):
        # type string of the base object (deltas resolve to the base's type)
        return self._bstream.type

    @property
    def type_id(self):
        # numeric type id of the base object
        return self._bstream.type_id

    @property
    def size(self):
        """:return: number of uncompressed bytes in the stream"""
        return self._size

    #} END oinfo like interface
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
#} END RO streams
|
| 552 |
+
|
| 553 |
+
|
| 554 |
+
#{ W Streams
|
| 555 |
+
|
| 556 |
+
class Sha1Writer(object):

    """Simple stream writer which produces a sha whenever you like as it digests
    everything it is supposed to write"""
    __slots__ = "sha1"

    def __init__(self):
        self.sha1 = make_sha()

    #{ Stream Interface

    def write(self, data):
        """Feed *data* into the running digest.
        :param data: byte object
        :return: length of incoming data
        :raise IOError: If not all bytes could be written"""
        self.sha1.update(data)
        return len(data)

    # END stream interface

    #{ Interface

    def sha(self, as_hex=False):
        """:return: sha over everything written so far
        :param as_hex: if True, sha will be hex-encoded, binary otherwise"""
        return self.sha1.hexdigest() if as_hex else self.sha1.digest()

    #} END interface
|
| 588 |
+
|
| 589 |
+
|
| 590 |
+
class FlexibleSha1Writer(Sha1Writer):

    """Sha1Writer that additionally forwards every chunk it digests to a
    user-supplied write callable"""
    __slots__ = 'writer'

    def __init__(self, writer):
        """:param writer: callable receiving the raw bytes that were written"""
        Sha1Writer.__init__(self)
        self.writer = writer

    def write(self, data):
        # digest first, then pass the untouched bytes on to the sink
        Sha1Writer.write(self, data)
        self.writer(data)
|
| 603 |
+
|
| 604 |
+
|
| 605 |
+
class ZippedStoreShaWriter(Sha1Writer):

    """Accumulates everything written to it in zlib-compressed form in memory,
    while computing the sha1 of the uncompressed data on the fly"""
    __slots__ = ('buf', 'zip')

    def __init__(self):
        Sha1Writer.__init__(self)
        self.buf = BytesIO()
        self.zip = zlib.compressobj(zlib.Z_BEST_SPEED)

    def __getattr__(self, attr):
        # delegate anything we don't implement (tell, read, ...) to the buffer
        return getattr(self.buf, attr)

    def write(self, data):
        """Digest *data* and append its compressed form to the internal buffer
        :return: length of the incoming, uncompressed data"""
        alen = Sha1Writer.write(self, data)
        self.buf.write(self.zip.compress(data))
        return alen

    def close(self):
        # flush the compressor so the stored stream is complete and valid
        self.buf.write(self.zip.flush())

    def seek(self, offset, whence=getattr(os, 'SEEK_SET', 0)):
        """Rewind to the start of the written data - other targets are unsupported.
        Multiple writes are not supported.
        :raise ValueError: unless offset and whence are both 0"""
        if offset != 0 or whence != getattr(os, 'SEEK_SET', 0):
            raise ValueError("Can only seek to position 0")
        # END handle offset
        self.buf.seek(0)

    def getvalue(self):
        """:return: string value from the current stream position to the end"""
        return self.buf.getvalue()
|
| 638 |
+
|
| 639 |
+
|
| 640 |
+
class FDCompressedSha1Writer(Sha1Writer):

    """Digests data written to it, making the sha available, then compress the
    data and write it to the file descriptor

    **Note:** operates on raw file descriptors
    **Note:** for this to work, you have to use the close-method of this instance"""
    __slots__ = ("fd", "sha1", "zip")

    # default exception raised when the descriptor accepted fewer bytes than given
    exc = IOError("Failed to write all bytes to filedescriptor")

    def __init__(self, fd):
        super(FDCompressedSha1Writer, self).__init__()
        self.fd = fd
        self.zip = zlib.compressobj(zlib.Z_BEST_SPEED)

    #{ Stream Interface

    def write(self, data):
        """Digest *data*, compress it and push it to the descriptor.
        :return: length of incoming data
        :raise IOError: If not all bytes could be written"""
        self.sha1.update(data)
        compressed = self.zip.compress(data)
        # 'write' is the module-level os.write alias
        if write(self.fd, compressed) != len(compressed):
            raise self.exc
        return len(data)

    def close(self):
        """Flush the remaining compressed bytes and close the descriptor.
        :raise IOError: if the flushed tail could not be written entirely"""
        remainder = self.zip.flush()
        if write(self.fd, remainder) != len(remainder):
            raise self.exc
        return close(self.fd)

    #} END stream interface
|
| 678 |
+
|
| 679 |
+
|
| 680 |
+
class FDStream(object):

    """A simple wrapper providing the most basic functions on a file descriptor
    with the fileobject interface. Cannot use os.fdopen as the resulting stream
    takes ownership"""
    __slots__ = ("_fd", '_pos')

    def __init__(self, fd):
        """:param fd: file descriptor opened for reading and/or writing; this
            wrapper does not take ownership of it"""
        self._fd = fd
        self._pos = 0

    def write(self, data):
        """Write *data* to the descriptor, advancing our logical position"""
        self._pos += len(data)
        os.write(self._fd, data)

    def read(self, count=0):
        """Read *count* bytes from the descriptor.

        :param count: if 0, read up to the full size of the underlying file
        :return: bytes read (may be shorter than requested at EOF)"""
        if count == 0:
            # BUGFIX: this used the never-assigned attribute 'self._filepath'
            # (not even in __slots__), raising AttributeError for count == 0.
            # Derive the size from the descriptor itself instead.
            count = os.fstat(self._fd).st_size
        # END handle read everything

        data = os.read(self._fd, count)
        self._pos += len(data)
        return data

    def fileno(self):
        """:return: the wrapped file descriptor"""
        return self._fd

    def tell(self):
        """:return: number of bytes read/written through this wrapper so far"""
        return self._pos

    def close(self):
        # uses the module-level 'close' alias (os.close)
        close(self._fd)
|
| 712 |
+
|
| 713 |
+
|
| 714 |
+
class NullStream(object):

    """Stream lookalike that behaves like /dev/null: reads deliver nothing
    and writes are discarded while reporting full success."""
    __slots__ = tuple()

    def read(self, size=0):
        # there is never anything to deliver
        return ''

    def write(self, data):
        # pretend every byte was consumed
        return len(data)

    def close(self):
        # nothing to release
        pass
|
| 728 |
+
|
| 729 |
+
|
| 730 |
+
#} END W streams
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/typ.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
"""Module containing information about types known to the database"""
|
| 6 |
+
|
| 7 |
+
str_blob_type = b'blob'
|
| 8 |
+
str_commit_type = b'commit'
|
| 9 |
+
str_tree_type = b'tree'
|
| 10 |
+
str_tag_type = b'tag'
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/gitdb/util.py
ADDED
|
@@ -0,0 +1,398 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Copyright (C) 2010, 2011 Sebastian Thiel (byronimo@gmail.com) and contributors
|
| 2 |
+
#
|
| 3 |
+
# This module is part of GitDB and is released under
|
| 4 |
+
# the New BSD License: http://www.opensource.org/licenses/bsd-license.php
|
| 5 |
+
import binascii
|
| 6 |
+
import os
|
| 7 |
+
import mmap
|
| 8 |
+
import sys
|
| 9 |
+
import time
|
| 10 |
+
import errno
|
| 11 |
+
|
| 12 |
+
from io import BytesIO
|
| 13 |
+
|
| 14 |
+
from smmap import (
|
| 15 |
+
StaticWindowMapManager,
|
| 16 |
+
SlidingWindowMapManager,
|
| 17 |
+
SlidingWindowMapBuffer
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
# initialize our global memory manager instance
|
| 21 |
+
# Use it to free cached (and unused) resources.
|
| 22 |
+
mman = SlidingWindowMapManager()
|
| 23 |
+
# END handle mman
|
| 24 |
+
|
| 25 |
+
import hashlib
|
| 26 |
+
|
| 27 |
+
# Use the C implementation if available; otherwise (very old pythons)
# emulate struct.unpack_from with a cached-format-size slice + unpack.
try:
    from struct import unpack_from
except ImportError:
    from struct import unpack, calcsize
    __calcsize_cache = dict()      # format string -> byte size, avoids recomputing

    def unpack_from(fmt, data, offset=0):
        """Fallback struct.unpack_from: unpack *fmt* from *data* at *offset*."""
        try:
            size = __calcsize_cache[fmt]
        except KeyError:
            size = calcsize(fmt)
            __calcsize_cache[fmt] = size
        # END exception handling
        return unpack(fmt, data[offset: offset + size])
    # END own unpack_from implementation
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
#{ Aliases

# hex <-> binary sha conversion shortcuts
hex_to_bin = binascii.a2b_hex
bin_to_hex = binascii.b2a_hex

# errors
ENOENT = errno.ENOENT

# os shortcuts - these module-level names are imported by sibling modules
# (e.g. stream.py uses the bare 'write'/'close' aliases on raw descriptors)
exists = os.path.exists
mkdir = os.mkdir
chmod = os.chmod
isdir = os.path.isdir
isfile = os.path.isfile
rename = os.rename
dirname = os.path.dirname
basename = os.path.basename
join = os.path.join
read = os.read
write = os.write
close = os.close
fsync = os.fsync
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
def _retry(func, *args, **kwargs):
|
| 69 |
+
# Wrapper around functions, that are problematic on "Windows". Sometimes
|
| 70 |
+
# the OS or someone else has still a handle to the file
|
| 71 |
+
if sys.platform == "win32":
|
| 72 |
+
for _ in range(10):
|
| 73 |
+
try:
|
| 74 |
+
return func(*args, **kwargs)
|
| 75 |
+
except Exception:
|
| 76 |
+
time.sleep(0.1)
|
| 77 |
+
return func(*args, **kwargs)
|
| 78 |
+
else:
|
| 79 |
+
return func(*args, **kwargs)
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
def remove(*args, **kwargs):
    """os.remove with Windows retry semantics - see :func:`_retry`."""
    return _retry(os.remove, *args, **kwargs)
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
# Backwards compatibility imports
|
| 87 |
+
from gitdb.const import (
|
| 88 |
+
NULL_BIN_SHA,
|
| 89 |
+
NULL_HEX_SHA
|
| 90 |
+
)
|
| 91 |
+
|
| 92 |
+
#} END Aliases
|
| 93 |
+
|
| 94 |
+
#{ compatibility stuff ...
|
| 95 |
+
|
| 96 |
+
|
| 97 |
+
class _RandomAccessBytesIO(object):
|
| 98 |
+
|
| 99 |
+
"""Wrapper to provide required functionality in case memory maps cannot or may
|
| 100 |
+
not be used. This is only really required in python 2.4"""
|
| 101 |
+
__slots__ = '_sio'
|
| 102 |
+
|
| 103 |
+
def __init__(self, buf=''):
|
| 104 |
+
self._sio = BytesIO(buf)
|
| 105 |
+
|
| 106 |
+
def __getattr__(self, attr):
|
| 107 |
+
return getattr(self._sio, attr)
|
| 108 |
+
|
| 109 |
+
def __len__(self):
|
| 110 |
+
return len(self.getvalue())
|
| 111 |
+
|
| 112 |
+
def __getitem__(self, i):
|
| 113 |
+
return self.getvalue()[i]
|
| 114 |
+
|
| 115 |
+
def __getslice__(self, start, end):
|
| 116 |
+
return self.getvalue()[start:end]
|
| 117 |
+
|
| 118 |
+
|
| 119 |
+
def byte_ord(b):
    """
    Return the integer representation of the byte string. This supports Python
    3 byte arrays as well as standard strings.
    """
    try:
        value = ord(b)
    except TypeError:
        # indexing a py3 bytes object already yields an int - pass it through
        value = b
    return value
|
| 128 |
+
|
| 129 |
+
#} END compatibility stuff ...
|
| 130 |
+
|
| 131 |
+
#{ Routines
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
def make_sha(source=b''):
    """Create a sha1 hash object primed with *source*.

    Historically (**Note** from the dulwich project) this worked around the
    python 2.4 sha/hashlib module fiasco by falling back to the removed
    ``sha`` module on NameError. On python 3 that fallback was dead code:
    ``hashlib`` is always available, and ``import sha`` would itself raise
    ImportError, which the old handler did not even catch. The hash object
    is therefore created directly now.

    :param source: initial bytes fed into the hash, defaults to ``b''``
    :return: a ``hashlib.sha1`` hash object"""
    return hashlib.sha1(source)
|
| 144 |
+
|
| 145 |
+
|
| 146 |
+
def allocate_memory(size):
    """:return: a file-protocol accessible memory block of the given size"""
    if not size:
        # zero-length maps are not universally supported - hand out an
        # empty in-memory buffer instead
        return _RandomAccessBytesIO(b'')

    try:
        # anonymous map, read-write by default
        return mmap.mmap(-1, size)
    except EnvironmentError:
        # fall back to real memory; this may of course fail if the amount
        # is not available in one chunk - would only be the case in
        # python 2.4, being more likely on 32 bit systems
        return _RandomAccessBytesIO(b"\0" * size)
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def file_contents_ro(fd, stream=False, allow_mmap=True):
    """:return: read-only contents of the file represented by the file descriptor fd

    :param fd: file descriptor opened for reading
    :param stream: if False, random access is provided, otherwise the stream interface
        is provided.
    :param allow_mmap: if True, its allowed to map the contents into memory, which
        allows large files to be handled and accessed efficiently. The file-descriptor
        will change its position if this is False"""
    if allow_mmap:
        # a map supports both the stream and the random access interface
        try:
            try:
                return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
            except EnvironmentError:
                # python 2.4 wanted the actual size instead of 0
                return mmap.mmap(fd, os.fstat(fd).st_size, access=mmap.ACCESS_READ)
            # END handle python 2.4
        except OSError:
            # mapping failed entirely - fall through to the manual read
            pass
    # END exception handling

    # read manually
    contents = os.read(fd, os.fstat(fd).st_size)
    return _RandomAccessBytesIO(contents) if stream else contents
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
def file_contents_ro_filepath(filepath, stream=False, allow_mmap=True, flags=0):
    """Get the file contents at filepath as fast as possible

    :return: random access compatible memory of the given filepath
    :param stream: see ``file_contents_ro``
    :param allow_mmap: see ``file_contents_ro``
    :param flags: additional flags to pass to os.open
    :raise OSError: If the file could not be opened

    **Note** for now we don't try to use O_NOATIME directly as the right value needs to be
    shared per database in fact. It only makes a real difference for loose object
    databases anyway, and they use it with the help of the ``flags`` parameter"""
    # O_BINARY only exists on win32 - default to 0 everywhere else
    open_mode = os.O_RDONLY | getattr(os, 'O_BINARY', 0) | flags
    fd = os.open(filepath, open_mode)
    try:
        return file_contents_ro(fd, stream, allow_mmap)
    finally:
        # release the descriptor no matter what happened above
        close(fd)
    # END assure file is closed
|
| 210 |
+
|
| 211 |
+
|
| 212 |
+
def sliding_ro_buffer(filepath, flags=0):
    """
    :return: a buffer compatible object which uses our mapped memory manager internally
        ready to read the whole given filepath
    :param filepath: path of the file to map
    :param flags: additional flags forwarded to SlidingWindowMapBuffer
        (presumably passed on when opening the file - TODO confirm against
        the smmap implementation)"""
    return SlidingWindowMapBuffer(mman.make_cursor(filepath), flags=flags)
|
| 217 |
+
|
| 218 |
+
|
| 219 |
+
def to_hex_sha(sha):
    """:return: hexified version of sha

    A value of length 40 is assumed to be hexadecimal already and is
    returned unchanged; anything else is treated as a binary sha and
    converted."""
    return sha if len(sha) == 40 else bin_to_hex(sha)
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
def to_bin_sha(sha):
    """:return: binary version of sha

    A value of length 20 is assumed to be binary already and is returned
    unchanged; anything else is treated as hexadecimal and converted."""
    return sha if len(sha) == 20 else hex_to_bin(sha)
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
#} END routines
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
#{ Utilities
|
| 236 |
+
|
| 237 |
+
class LazyMixin(object):

    """
    Mixin for types whose attribute values are computed lazily upon first
    access.

    A lookup that misses the instance dict/slots is routed to ``_set_cache_``,
    which may create and store the attribute; afterwards the value is read
    straight from the instance, so all future accesses are plain attribute
    lookups returning the cached value. If slots are used, memory will only
    be reserved once the attribute is actually accessed and retrieved the
    first time.
    """

    __slots__ = tuple()

    def __getattr__(self, attr):
        """Called only when *attr* was not found the normal way: give the
        subclass a chance to create and cache it, then read it back. The
        lookup below raises AttributeError in case the cache was not
        created."""
        self._set_cache_(attr)
        return object.__getattribute__(self, attr)

    def _set_cache_(self, attr):
        """Override in a derived class: check whether the attribute named by
        *attr* can be created and cached, and do nothing if you do not know
        it (or call your superclass).

        The derived class may create as many additional attributes as it
        deems necessary in case a git command returns more information than
        represented in the single attribute."""
        pass
|
| 267 |
+
|
| 268 |
+
|
| 269 |
+
class LockedFD(object):

    """
    This class facilitates a safe read and write operation to a file on disk.
    If we write to 'file', we obtain a lock file at 'file.lock' and write to
    that instead. If we succeed, the lock file will be renamed to overwrite
    the original file.

    When reading, we obtain a lock file as well, but only to prevent other
    writers from succeeding while we are reading the file.

    This type handles errors correctly in that it will assure a consistent state
    on destruction.

    **note** with this setup, parallel reading is not possible"""
    # _filepath: path of the file being guarded
    # _fd: open descriptor while an operation is in progress - the lockfile's
    #      descriptor when writing, the actual file's when reading
    # _write: None until open() was called, afterwards the write flag
    __slots__ = ("_filepath", '_fd', '_write')

    def __init__(self, filepath):
        """Initialize an instance with the given filepath"""
        self._filepath = filepath
        self._fd = None
        self._write = None          # if True, we write a file

    def __del__(self):
        # will do nothing if the file descriptor is already closed
        if self._fd is not None:
            self.rollback()

    def _lockfilepath(self):
        # the lock is a sibling file carrying a '.lock' suffix
        return "%s.lock" % self._filepath

    def open(self, write=False, stream=False):
        """
        Open the file descriptor for reading or writing, both in binary mode.

        :param write: if True, the file descriptor will be opened for writing. Other
            wise it will be opened read-only.
        :param stream: if True, the file descriptor will be wrapped into a simple stream
            object which supports only reading or writing
        :return: fd to read from or write to. It is still maintained by this instance
            and must not be closed directly
        :raise IOError: if the lock could not be retrieved
        :raise OSError: If the actual file could not be opened for reading

        **note** must only be called once"""
        if self._write is not None:
            raise AssertionError("Called %s multiple times" % self.open)

        self._write = write

        # try to open the lock file; O_EXCL makes creation fail if the
        # lockfile already exists, which is what provides mutual exclusion
        binary = getattr(os, 'O_BINARY', 0)
        lockmode = os.O_WRONLY | os.O_CREAT | os.O_EXCL | binary
        try:
            # lockfile is created with mode 0o600 - owner read/write only
            fd = os.open(self._lockfilepath(), lockmode, int("600", 8))
            if not write:
                # readers only need the lockfile to exist; close it right away
                os.close(fd)
            else:
                # writers keep it open and write into it, renaming on commit
                self._fd = fd
            # END handle file descriptor
        except OSError as e:
            raise IOError("Lock at %r could not be obtained" % self._lockfilepath()) from e
        # END handle lock retrieval

        # open actual file if required (read mode: _fd was not set above)
        if self._fd is None:
            # we could specify exclusive here, as we obtained the lock anyway
            try:
                self._fd = os.open(self._filepath, os.O_RDONLY | binary)
            except:
                # assure we release our lockfile
                remove(self._lockfilepath())
                raise
            # END handle lockfile
        # END open descriptor for reading

        if stream:
            # need delayed import
            from gitdb.stream import FDStream
            return FDStream(self._fd)
        else:
            return self._fd
        # END handle stream

    def commit(self):
        """When done writing, call this function to commit your changes into the
        actual file.
        The file descriptor will be closed, and the lockfile handled.

        **Note** can be called multiple times"""
        self._end_writing(successful=True)

    def rollback(self):
        """Abort your operation without any changes. The file descriptor will be
        closed, and the lock released.

        **Note** can be called multiple times"""
        self._end_writing(successful=False)

    def _end_writing(self, successful=True):
        """Handle the lock according to the write mode """
        if self._write is None:
            raise AssertionError("Cannot end operation if it wasn't started yet")

        # already finished - subsequent commit()/rollback() calls are no-ops
        if self._fd is None:
            return

        os.close(self._fd)
        self._fd = None

        lockfile = self._lockfilepath()
        if self._write and successful:
            # on windows, rename does not silently overwrite the existing one
            if sys.platform == "win32":
                if isfile(self._filepath):
                    remove(self._filepath)
                # END remove if exists
            # END win32 special handling
            os.rename(lockfile, self._filepath)

            # assure others can at least read the file - the tmpfile left it at rw--
            # We may also write that file, on windows that boils down to a remove-
            # protection as well
            chmod(self._filepath, int("644", 8))
        else:
            # just delete the file so far, we failed
            remove(lockfile)
        # END successful handling
|
| 397 |
+
|
| 398 |
+
#} END utilities
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/_proxy.cpython-38-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:67f77a59361389216643d54c6f9bf38e83b4583cf45d1f96599af196e22db881
|
| 3 |
+
size 113016
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/h5py/h5z.cpython-38-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:99b3d967ef4ae35b57e6797e8079d43c71a0aa146fa76e592f2b05499905d4b9
|
| 3 |
+
size 104872
|
my_container_sandbox/workspace/anaconda3/lib/python3.8/site-packages/matplotlib/_qhull.cpython-38-x86_64-linux-gnu.so
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:7a1dcc9c9db6d1bd1713b8d40b4b3211af3a7bb2be660e2740aba98aad37750d
|
| 3 |
+
size 1935422
|