repo
stringlengths
7
55
path
stringlengths
4
223
func_name
stringlengths
1
134
original_string
stringlengths
75
104k
language
stringclasses
1 value
code
stringlengths
75
104k
code_tokens
listlengths
19
28.4k
docstring
stringlengths
1
46.9k
docstring_tokens
listlengths
1
1.97k
sha
stringlengths
40
40
url
stringlengths
87
315
partition
stringclasses
1 value
tornadoweb/tornado
tornado/queues.py
Queue.get_nowait
def get_nowait(self) -> _T: """Remove and return an item from the queue without blocking. Return an item if one is immediately available, else raise `QueueEmpty`. """ self._consume_expired() if self._putters: assert self.full(), "queue not full, why are putters waiting?" item, putter = self._putters.popleft() self.__put_internal(item) future_set_result_unless_cancelled(putter, None) return self._get() elif self.qsize(): return self._get() else: raise QueueEmpty
python
def get_nowait(self) -> _T:
    """Take one item off the queue immediately, never waiting.

    If an item can be produced right away it is returned; otherwise
    `QueueEmpty` is raised.
    """
    self._consume_expired()
    if self._putters:
        # A putter can only be parked when the queue is at capacity.
        assert self.full(), "queue not full, why are putters waiting?"
        item, putter = self._putters.popleft()
        self.__put_internal(item)
        future_set_result_unless_cancelled(putter, None)
        return self._get()
    if not self.qsize():
        raise QueueEmpty
    return self._get()
[ "def", "get_nowait", "(", "self", ")", "->", "_T", ":", "self", ".", "_consume_expired", "(", ")", "if", "self", ".", "_putters", ":", "assert", "self", ".", "full", "(", ")", ",", "\"queue not full, why are putters waiting?\"", "item", ",", "putter", "=", "self", ".", "_putters", ".", "popleft", "(", ")", "self", ".", "__put_internal", "(", "item", ")", "future_set_result_unless_cancelled", "(", "putter", ",", "None", ")", "return", "self", ".", "_get", "(", ")", "elif", "self", ".", "qsize", "(", ")", ":", "return", "self", ".", "_get", "(", ")", "else", ":", "raise", "QueueEmpty" ]
Remove and return an item from the queue without blocking. Return an item if one is immediately available, else raise `QueueEmpty`.
[ "Remove", "and", "return", "an", "item", "from", "the", "queue", "without", "blocking", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/queues.py#L254-L270
train
tornadoweb/tornado
tornado/queues.py
Queue.task_done
def task_done(self) -> None: """Indicate that a formerly enqueued task is complete. Used by queue consumers. For each `.get` used to fetch a task, a subsequent call to `.task_done` tells the queue that the processing on the task is complete. If a `.join` is blocking, it resumes when all items have been processed; that is, when every `.put` is matched by a `.task_done`. Raises `ValueError` if called more times than `.put`. """ if self._unfinished_tasks <= 0: raise ValueError("task_done() called too many times") self._unfinished_tasks -= 1 if self._unfinished_tasks == 0: self._finished.set()
python
def task_done(self) -> None:
    """Mark one previously fetched task as finished.

    Queue consumers call this once per `.get`: it tells the queue that
    processing of that item is complete.  When the count of outstanding
    items drops to zero, any blocked `.join` calls resume; that is, a
    `.join` resumes once every `.put` has been matched by a `.task_done`.

    Raises `ValueError` if called more times than `.put`.
    """
    if self._unfinished_tasks <= 0:
        raise ValueError("task_done() called too many times")
    remaining = self._unfinished_tasks - 1
    self._unfinished_tasks = remaining
    if remaining == 0:
        self._finished.set()
[ "def", "task_done", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_unfinished_tasks", "<=", "0", ":", "raise", "ValueError", "(", "\"task_done() called too many times\"", ")", "self", ".", "_unfinished_tasks", "-=", "1", "if", "self", ".", "_unfinished_tasks", "==", "0", ":", "self", ".", "_finished", ".", "set", "(", ")" ]
Indicate that a formerly enqueued task is complete. Used by queue consumers. For each `.get` used to fetch a task, a subsequent call to `.task_done` tells the queue that the processing on the task is complete. If a `.join` is blocking, it resumes when all items have been processed; that is, when every `.put` is matched by a `.task_done`. Raises `ValueError` if called more times than `.put`.
[ "Indicate", "that", "a", "formerly", "enqueued", "task", "is", "complete", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/queues.py#L272-L288
train
tornadoweb/tornado
tornado/queues.py
Queue.join
def join(self, timeout: Union[float, datetime.timedelta] = None) -> Awaitable[None]: """Block until all items in the queue are processed. Returns an awaitable, which raises `tornado.util.TimeoutError` after a timeout. """ return self._finished.wait(timeout)
python
def join(self, timeout: Optional[Union[float, datetime.timedelta]] = None) -> Awaitable[None]:
    """Block until all items in the queue are processed.

    Returns an awaitable, which raises `tornado.util.TimeoutError` after a
    timeout.

    ``timeout`` is optional; when omitted the returned awaitable waits
    indefinitely.  (The annotation previously read ``Union[...] = None``,
    an implicit Optional that is invalid under PEP 484; it is now
    ``Optional[Union[...]]``.)
    """
    # Delegates entirely to the internal "all tasks done" event.
    return self._finished.wait(timeout)
[ "def", "join", "(", "self", ",", "timeout", ":", "Union", "[", "float", ",", "datetime", ".", "timedelta", "]", "=", "None", ")", "->", "Awaitable", "[", "None", "]", ":", "return", "self", ".", "_finished", ".", "wait", "(", "timeout", ")" ]
Block until all items in the queue are processed. Returns an awaitable, which raises `tornado.util.TimeoutError` after a timeout.
[ "Block", "until", "all", "items", "in", "the", "queue", "are", "processed", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/queues.py#L290-L296
train
tornadoweb/tornado
tornado/process.py
cpu_count
def cpu_count() -> int: """Returns the number of processors on this machine.""" if multiprocessing is None: return 1 try: return multiprocessing.cpu_count() except NotImplementedError: pass try: return os.sysconf("SC_NPROCESSORS_CONF") except (AttributeError, ValueError): pass gen_log.error("Could not detect number of processors; assuming 1") return 1
python
def cpu_count() -> int:
    """Returns the number of processors on this machine."""
    if multiprocessing is None:
        # multiprocessing could not be imported; assume a single core.
        return 1
    # Try each detection strategy in order, ignoring its expected failures.
    probes = (
        (multiprocessing.cpu_count, (NotImplementedError,)),
        (lambda: os.sysconf("SC_NPROCESSORS_CONF"), (AttributeError, ValueError)),
    )
    for probe, expected_errors in probes:
        try:
            return probe()
        except expected_errors:
            pass
    gen_log.error("Could not detect number of processors; assuming 1")
    return 1
[ "def", "cpu_count", "(", ")", "->", "int", ":", "if", "multiprocessing", "is", "None", ":", "return", "1", "try", ":", "return", "multiprocessing", ".", "cpu_count", "(", ")", "except", "NotImplementedError", ":", "pass", "try", ":", "return", "os", ".", "sysconf", "(", "\"SC_NPROCESSORS_CONF\"", ")", "except", "(", "AttributeError", ",", "ValueError", ")", ":", "pass", "gen_log", ".", "error", "(", "\"Could not detect number of processors; assuming 1\"", ")", "return", "1" ]
Returns the number of processors on this machine.
[ "Returns", "the", "number", "of", "processors", "on", "this", "machine", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/process.py#L51-L64
train
tornadoweb/tornado
tornado/process.py
fork_processes
def fork_processes(num_processes: Optional[int], max_restarts: int = None) -> int: """Starts multiple worker processes. If ``num_processes`` is None or <= 0, we detect the number of cores available on this machine and fork that number of child processes. If ``num_processes`` is given and > 0, we fork that specific number of sub-processes. Since we use processes and not threads, there is no shared memory between any server code. Note that multiple processes are not compatible with the autoreload module (or the ``autoreload=True`` option to `tornado.web.Application` which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``fork_processes``. In each child process, ``fork_processes`` returns its *task id*, a number between 0 and ``num_processes``. Processes that exit abnormally (due to a signal or non-zero exit status) are restarted with the same id (up to ``max_restarts`` times). In the parent process, ``fork_processes`` returns None if all child processes have exited normally, but will otherwise only exit by throwing an exception. max_restarts defaults to 100. 
Availability: Unix """ if max_restarts is None: max_restarts = 100 global _task_id assert _task_id is None if num_processes is None or num_processes <= 0: num_processes = cpu_count() gen_log.info("Starting %d processes", num_processes) children = {} def start_child(i: int) -> Optional[int]: pid = os.fork() if pid == 0: # child process _reseed_random() global _task_id _task_id = i return i else: children[pid] = i return None for i in range(num_processes): id = start_child(i) if id is not None: return id num_restarts = 0 while children: try: pid, status = os.wait() except OSError as e: if errno_from_exception(e) == errno.EINTR: continue raise if pid not in children: continue id = children.pop(pid) if os.WIFSIGNALED(status): gen_log.warning( "child %d (pid %d) killed by signal %d, restarting", id, pid, os.WTERMSIG(status), ) elif os.WEXITSTATUS(status) != 0: gen_log.warning( "child %d (pid %d) exited with status %d, restarting", id, pid, os.WEXITSTATUS(status), ) else: gen_log.info("child %d (pid %d) exited normally", id, pid) continue num_restarts += 1 if num_restarts > max_restarts: raise RuntimeError("Too many child restarts, giving up") new_id = start_child(id) if new_id is not None: return new_id # All child processes exited cleanly, so exit the master process # instead of just returning to right after the call to # fork_processes (which will probably just start up another IOLoop # unless the caller checks the return value). sys.exit(0)
python
def fork_processes(num_processes: Optional[int], max_restarts: Optional[int] = None) -> int:
    """Starts multiple worker processes.

    If ``num_processes`` is None or <= 0, we detect the number of cores
    available on this machine and fork that number of child
    processes. If ``num_processes`` is given and > 0, we fork that
    specific number of sub-processes.

    Since we use processes and not threads, there is no shared memory
    between any server code.

    Note that multiple processes are not compatible with the autoreload
    module (or the ``autoreload=True`` option to `tornado.web.Application`
    which defaults to True when ``debug=True``).
    When using multiple processes, no IOLoops can be created or
    referenced until after the call to ``fork_processes``.

    In each child process, ``fork_processes`` returns its *task id*, a
    number between 0 and ``num_processes``.  Processes that exit
    abnormally (due to a signal or non-zero exit status) are restarted
    with the same id (up to ``max_restarts`` times).  In the parent
    process, ``fork_processes`` calls ``sys.exit(0)`` once all child
    processes have exited normally, and otherwise only exits by
    throwing an exception.

    max_restarts defaults to 100.

    Availability: Unix
    """
    if max_restarts is None:
        max_restarts = 100

    global _task_id
    assert _task_id is None
    if num_processes is None or num_processes <= 0:
        num_processes = cpu_count()
    gen_log.info("Starting %d processes", num_processes)
    children = {}

    def start_child(i: int) -> Optional[int]:
        # Fork one worker; returns the task id in the child, None in the parent.
        pid = os.fork()
        if pid == 0:
            # child process
            _reseed_random()
            global _task_id
            _task_id = i
            return i
        else:
            children[pid] = i
            return None

    for i in range(num_processes):
        id = start_child(i)
        if id is not None:
            return id
    num_restarts = 0
    while children:
        try:
            pid, status = os.wait()
        except OSError as e:
            if errno_from_exception(e) == errno.EINTR:
                # os.wait was interrupted by a signal; retry.
                continue
            raise
        if pid not in children:
            continue
        id = children.pop(pid)
        if os.WIFSIGNALED(status):
            gen_log.warning(
                "child %d (pid %d) killed by signal %d, restarting",
                id,
                pid,
                os.WTERMSIG(status),
            )
        elif os.WEXITSTATUS(status) != 0:
            gen_log.warning(
                "child %d (pid %d) exited with status %d, restarting",
                id,
                pid,
                os.WEXITSTATUS(status),
            )
        else:
            gen_log.info("child %d (pid %d) exited normally", id, pid)
            continue
        num_restarts += 1
        if num_restarts > max_restarts:
            raise RuntimeError("Too many child restarts, giving up")
        new_id = start_child(id)
        if new_id is not None:
            return new_id
    # All child processes exited cleanly, so exit the master process
    # instead of just returning to right after the call to
    # fork_processes (which will probably just start up another IOLoop
    # unless the caller checks the return value).
    sys.exit(0)
[ "def", "fork_processes", "(", "num_processes", ":", "Optional", "[", "int", "]", ",", "max_restarts", ":", "int", "=", "None", ")", "->", "int", ":", "if", "max_restarts", "is", "None", ":", "max_restarts", "=", "100", "global", "_task_id", "assert", "_task_id", "is", "None", "if", "num_processes", "is", "None", "or", "num_processes", "<=", "0", ":", "num_processes", "=", "cpu_count", "(", ")", "gen_log", ".", "info", "(", "\"Starting %d processes\"", ",", "num_processes", ")", "children", "=", "{", "}", "def", "start_child", "(", "i", ":", "int", ")", "->", "Optional", "[", "int", "]", ":", "pid", "=", "os", ".", "fork", "(", ")", "if", "pid", "==", "0", ":", "# child process", "_reseed_random", "(", ")", "global", "_task_id", "_task_id", "=", "i", "return", "i", "else", ":", "children", "[", "pid", "]", "=", "i", "return", "None", "for", "i", "in", "range", "(", "num_processes", ")", ":", "id", "=", "start_child", "(", "i", ")", "if", "id", "is", "not", "None", ":", "return", "id", "num_restarts", "=", "0", "while", "children", ":", "try", ":", "pid", ",", "status", "=", "os", ".", "wait", "(", ")", "except", "OSError", "as", "e", ":", "if", "errno_from_exception", "(", "e", ")", "==", "errno", ".", "EINTR", ":", "continue", "raise", "if", "pid", "not", "in", "children", ":", "continue", "id", "=", "children", ".", "pop", "(", "pid", ")", "if", "os", ".", "WIFSIGNALED", "(", "status", ")", ":", "gen_log", ".", "warning", "(", "\"child %d (pid %d) killed by signal %d, restarting\"", ",", "id", ",", "pid", ",", "os", ".", "WTERMSIG", "(", "status", ")", ",", ")", "elif", "os", ".", "WEXITSTATUS", "(", "status", ")", "!=", "0", ":", "gen_log", ".", "warning", "(", "\"child %d (pid %d) exited with status %d, restarting\"", ",", "id", ",", "pid", ",", "os", ".", "WEXITSTATUS", "(", "status", ")", ",", ")", "else", ":", "gen_log", ".", "info", "(", "\"child %d (pid %d) exited normally\"", ",", "id", ",", "pid", ")", "continue", "num_restarts", "+=", "1", "if", 
"num_restarts", ">", "max_restarts", ":", "raise", "RuntimeError", "(", "\"Too many child restarts, giving up\"", ")", "new_id", "=", "start_child", "(", "id", ")", "if", "new_id", "is", "not", "None", ":", "return", "new_id", "# All child processes exited cleanly, so exit the master process", "# instead of just returning to right after the call to", "# fork_processes (which will probably just start up another IOLoop", "# unless the caller checks the return value).", "sys", ".", "exit", "(", "0", ")" ]
Starts multiple worker processes. If ``num_processes`` is None or <= 0, we detect the number of cores available on this machine and fork that number of child processes. If ``num_processes`` is given and > 0, we fork that specific number of sub-processes. Since we use processes and not threads, there is no shared memory between any server code. Note that multiple processes are not compatible with the autoreload module (or the ``autoreload=True`` option to `tornado.web.Application` which defaults to True when ``debug=True``). When using multiple processes, no IOLoops can be created or referenced until after the call to ``fork_processes``. In each child process, ``fork_processes`` returns its *task id*, a number between 0 and ``num_processes``. Processes that exit abnormally (due to a signal or non-zero exit status) are restarted with the same id (up to ``max_restarts`` times). In the parent process, ``fork_processes`` returns None if all child processes have exited normally, but will otherwise only exit by throwing an exception. max_restarts defaults to 100. Availability: Unix
[ "Starts", "multiple", "worker", "processes", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/process.py#L92-L185
train
tornadoweb/tornado
tornado/process.py
Subprocess.set_exit_callback
def set_exit_callback(self, callback: Callable[[int], None]) -> None: """Runs ``callback`` when this process exits. The callback takes one argument, the return code of the process. This method uses a ``SIGCHLD`` handler, which is a global setting and may conflict if you have other libraries trying to handle the same signal. If you are using more than one ``IOLoop`` it may be necessary to call `Subprocess.initialize` first to designate one ``IOLoop`` to run the signal handlers. In many cases a close callback on the stdout or stderr streams can be used as an alternative to an exit callback if the signal handler is causing a problem. Availability: Unix """ self._exit_callback = callback Subprocess.initialize() Subprocess._waiting[self.pid] = self Subprocess._try_cleanup_process(self.pid)
python
def set_exit_callback(self, callback: Callable[[int], None]) -> None:
    """Runs ``callback`` when this process exits.

    The callback takes one argument, the return code of the process.

    This method uses a ``SIGCHLD`` handler, which is a global setting
    and may conflict if you have other libraries trying to handle the
    same signal.  If you are using more than one ``IOLoop`` it may
    be necessary to call `Subprocess.initialize` first to designate
    one ``IOLoop`` to run the signal handlers.

    In many cases a close callback on the stdout or stderr streams
    can be used as an alternative to an exit callback if the
    signal handler is causing a problem.

    Availability: Unix
    """
    self._exit_callback = callback
    # Ensure the global SIGCHLD machinery is installed, then register
    # this process and attempt a cleanup pass right away — presumably
    # to catch a child that has already exited (verify in _cleanup).
    Subprocess.initialize()
    Subprocess._waiting[self.pid] = self
    Subprocess._try_cleanup_process(self.pid)
[ "def", "set_exit_callback", "(", "self", ",", "callback", ":", "Callable", "[", "[", "int", "]", ",", "None", "]", ")", "->", "None", ":", "self", ".", "_exit_callback", "=", "callback", "Subprocess", ".", "initialize", "(", ")", "Subprocess", ".", "_waiting", "[", "self", ".", "pid", "]", "=", "self", "Subprocess", ".", "_try_cleanup_process", "(", "self", ".", "pid", ")" ]
Runs ``callback`` when this process exits. The callback takes one argument, the return code of the process. This method uses a ``SIGCHLD`` handler, which is a global setting and may conflict if you have other libraries trying to handle the same signal. If you are using more than one ``IOLoop`` it may be necessary to call `Subprocess.initialize` first to designate one ``IOLoop`` to run the signal handlers. In many cases a close callback on the stdout or stderr streams can be used as an alternative to an exit callback if the signal handler is causing a problem. Availability: Unix
[ "Runs", "callback", "when", "this", "process", "exits", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/process.py#L264-L284
train
tornadoweb/tornado
tornado/process.py
Subprocess.wait_for_exit
def wait_for_exit(self, raise_error: bool = True) -> "Future[int]": """Returns a `.Future` which resolves when the process exits. Usage:: ret = yield proc.wait_for_exit() This is a coroutine-friendly alternative to `set_exit_callback` (and a replacement for the blocking `subprocess.Popen.wait`). By default, raises `subprocess.CalledProcessError` if the process has a non-zero exit status. Use ``wait_for_exit(raise_error=False)`` to suppress this behavior and return the exit status without raising. .. versionadded:: 4.2 Availability: Unix """ future = Future() # type: Future[int] def callback(ret: int) -> None: if ret != 0 and raise_error: # Unfortunately we don't have the original args any more. future_set_exception_unless_cancelled( future, CalledProcessError(ret, "unknown") ) else: future_set_result_unless_cancelled(future, ret) self.set_exit_callback(callback) return future
python
def wait_for_exit(self, raise_error: bool = True) -> "Future[int]":
    """Returns a `.Future` which resolves when the process exits.

    Usage::

        ret = yield proc.wait_for_exit()

    This is a coroutine-friendly alternative to `set_exit_callback`
    (and a replacement for the blocking `subprocess.Popen.wait`).

    By default, raises `subprocess.CalledProcessError` if the process
    has a non-zero exit status. Use ``wait_for_exit(raise_error=False)``
    to suppress this behavior and return the exit status without raising.

    .. versionadded:: 4.2

    Availability: Unix
    """
    future = Future()  # type: Future[int]

    def on_exit(status: int) -> None:
        if raise_error and status != 0:
            # Unfortunately we don't have the original args any more.
            future_set_exception_unless_cancelled(
                future, CalledProcessError(status, "unknown")
            )
        else:
            future_set_result_unless_cancelled(future, status)

    self.set_exit_callback(on_exit)
    return future
[ "def", "wait_for_exit", "(", "self", ",", "raise_error", ":", "bool", "=", "True", ")", "->", "\"Future[int]\"", ":", "future", "=", "Future", "(", ")", "# type: Future[int]", "def", "callback", "(", "ret", ":", "int", ")", "->", "None", ":", "if", "ret", "!=", "0", "and", "raise_error", ":", "# Unfortunately we don't have the original args any more.", "future_set_exception_unless_cancelled", "(", "future", ",", "CalledProcessError", "(", "ret", ",", "\"unknown\"", ")", ")", "else", ":", "future_set_result_unless_cancelled", "(", "future", ",", "ret", ")", "self", ".", "set_exit_callback", "(", "callback", ")", "return", "future" ]
Returns a `.Future` which resolves when the process exits. Usage:: ret = yield proc.wait_for_exit() This is a coroutine-friendly alternative to `set_exit_callback` (and a replacement for the blocking `subprocess.Popen.wait`). By default, raises `subprocess.CalledProcessError` if the process has a non-zero exit status. Use ``wait_for_exit(raise_error=False)`` to suppress this behavior and return the exit status without raising. .. versionadded:: 4.2 Availability: Unix
[ "Returns", "a", ".", "Future", "which", "resolves", "when", "the", "process", "exits", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/process.py#L286-L316
train
tornadoweb/tornado
tornado/process.py
Subprocess.initialize
def initialize(cls) -> None: """Initializes the ``SIGCHLD`` handler. The signal handler is run on an `.IOLoop` to avoid locking issues. Note that the `.IOLoop` used for signal handling need not be the same one used by individual Subprocess objects (as long as the ``IOLoops`` are each running in separate threads). .. versionchanged:: 5.0 The ``io_loop`` argument (deprecated since version 4.1) has been removed. Availability: Unix """ if cls._initialized: return io_loop = ioloop.IOLoop.current() cls._old_sigchld = signal.signal( signal.SIGCHLD, lambda sig, frame: io_loop.add_callback_from_signal(cls._cleanup), ) cls._initialized = True
python
def initialize(cls) -> None:
    """Initializes the ``SIGCHLD`` handler.

    The signal handler is run on an `.IOLoop` to avoid locking issues.
    Note that the `.IOLoop` used for signal handling need not be the
    same one used by individual Subprocess objects (as long as the
    ``IOLoops`` are each running in separate threads).

    .. versionchanged:: 5.0
       The ``io_loop`` argument (deprecated since version 4.1) has been
       removed.

    Availability: Unix
    """
    if cls._initialized:
        return
    loop = ioloop.IOLoop.current()

    def on_sigchld(sig, frame):
        # Defer the real work onto the IOLoop; signal handlers must stay tiny.
        loop.add_callback_from_signal(cls._cleanup)

    cls._old_sigchld = signal.signal(signal.SIGCHLD, on_sigchld)
    cls._initialized = True
[ "def", "initialize", "(", "cls", ")", "->", "None", ":", "if", "cls", ".", "_initialized", ":", "return", "io_loop", "=", "ioloop", ".", "IOLoop", ".", "current", "(", ")", "cls", ".", "_old_sigchld", "=", "signal", ".", "signal", "(", "signal", ".", "SIGCHLD", ",", "lambda", "sig", ",", "frame", ":", "io_loop", ".", "add_callback_from_signal", "(", "cls", ".", "_cleanup", ")", ",", ")", "cls", ".", "_initialized", "=", "True" ]
Initializes the ``SIGCHLD`` handler. The signal handler is run on an `.IOLoop` to avoid locking issues. Note that the `.IOLoop` used for signal handling need not be the same one used by individual Subprocess objects (as long as the ``IOLoops`` are each running in separate threads). .. versionchanged:: 5.0 The ``io_loop`` argument (deprecated since version 4.1) has been removed. Availability: Unix
[ "Initializes", "the", "SIGCHLD", "handler", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/process.py#L319-L340
train
tornadoweb/tornado
tornado/process.py
Subprocess.uninitialize
def uninitialize(cls) -> None: """Removes the ``SIGCHLD`` handler.""" if not cls._initialized: return signal.signal(signal.SIGCHLD, cls._old_sigchld) cls._initialized = False
python
def uninitialize(cls) -> None:
    """Removes the ``SIGCHLD`` handler."""
    if cls._initialized:
        # Restore whatever handler was in place before initialize().
        signal.signal(signal.SIGCHLD, cls._old_sigchld)
        cls._initialized = False
[ "def", "uninitialize", "(", "cls", ")", "->", "None", ":", "if", "not", "cls", ".", "_initialized", ":", "return", "signal", ".", "signal", "(", "signal", ".", "SIGCHLD", ",", "cls", ".", "_old_sigchld", ")", "cls", ".", "_initialized", "=", "False" ]
Removes the ``SIGCHLD`` handler.
[ "Removes", "the", "SIGCHLD", "handler", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/process.py#L343-L348
train
tornadoweb/tornado
tornado/curl_httpclient.py
CurlAsyncHTTPClient._handle_socket
def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None: """Called by libcurl when it wants to change the file descriptors it cares about. """ event_map = { pycurl.POLL_NONE: ioloop.IOLoop.NONE, pycurl.POLL_IN: ioloop.IOLoop.READ, pycurl.POLL_OUT: ioloop.IOLoop.WRITE, pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE, } if event == pycurl.POLL_REMOVE: if fd in self._fds: self.io_loop.remove_handler(fd) del self._fds[fd] else: ioloop_event = event_map[event] # libcurl sometimes closes a socket and then opens a new # one using the same FD without giving us a POLL_NONE in # between. This is a problem with the epoll IOLoop, # because the kernel can tell when a socket is closed and # removes it from the epoll automatically, causing future # update_handler calls to fail. Since we can't tell when # this has happened, always use remove and re-add # instead of update. if fd in self._fds: self.io_loop.remove_handler(fd) self.io_loop.add_handler(fd, self._handle_events, ioloop_event) self._fds[fd] = ioloop_event
python
def _handle_socket(self, event: int, fd: int, multi: Any, data: bytes) -> None:
    """Callback from libcurl: update which fds we watch and for what events."""
    if event == pycurl.POLL_REMOVE:
        if fd in self._fds:
            self.io_loop.remove_handler(fd)
            del self._fds[fd]
        return

    event_map = {
        pycurl.POLL_NONE: ioloop.IOLoop.NONE,
        pycurl.POLL_IN: ioloop.IOLoop.READ,
        pycurl.POLL_OUT: ioloop.IOLoop.WRITE,
        pycurl.POLL_INOUT: ioloop.IOLoop.READ | ioloop.IOLoop.WRITE,
    }
    ioloop_event = event_map[event]
    # libcurl sometimes closes a socket and then opens a new one using
    # the same FD without giving us a POLL_NONE in between.  This is a
    # problem with the epoll IOLoop, because the kernel can tell when a
    # socket is closed and removes it from the epoll automatically,
    # causing future update_handler calls to fail.  Since we can't tell
    # when this has happened, always use remove and re-add instead of
    # update.
    if fd in self._fds:
        self.io_loop.remove_handler(fd)
    self.io_loop.add_handler(fd, self._handle_events, ioloop_event)
    self._fds[fd] = ioloop_event
[ "def", "_handle_socket", "(", "self", ",", "event", ":", "int", ",", "fd", ":", "int", ",", "multi", ":", "Any", ",", "data", ":", "bytes", ")", "->", "None", ":", "event_map", "=", "{", "pycurl", ".", "POLL_NONE", ":", "ioloop", ".", "IOLoop", ".", "NONE", ",", "pycurl", ".", "POLL_IN", ":", "ioloop", ".", "IOLoop", ".", "READ", ",", "pycurl", ".", "POLL_OUT", ":", "ioloop", ".", "IOLoop", ".", "WRITE", ",", "pycurl", ".", "POLL_INOUT", ":", "ioloop", ".", "IOLoop", ".", "READ", "|", "ioloop", ".", "IOLoop", ".", "WRITE", ",", "}", "if", "event", "==", "pycurl", ".", "POLL_REMOVE", ":", "if", "fd", "in", "self", ".", "_fds", ":", "self", ".", "io_loop", ".", "remove_handler", "(", "fd", ")", "del", "self", ".", "_fds", "[", "fd", "]", "else", ":", "ioloop_event", "=", "event_map", "[", "event", "]", "# libcurl sometimes closes a socket and then opens a new", "# one using the same FD without giving us a POLL_NONE in", "# between. This is a problem with the epoll IOLoop,", "# because the kernel can tell when a socket is closed and", "# removes it from the epoll automatically, causing future", "# update_handler calls to fail. Since we can't tell when", "# this has happened, always use remove and re-add", "# instead of update.", "if", "fd", "in", "self", ".", "_fds", ":", "self", ".", "io_loop", ".", "remove_handler", "(", "fd", ")", "self", ".", "io_loop", ".", "add_handler", "(", "fd", ",", "self", ".", "_handle_events", ",", "ioloop_event", ")", "self", ".", "_fds", "[", "fd", "]", "=", "ioloop_event" ]
Called by libcurl when it wants to change the file descriptors it cares about.
[ "Called", "by", "libcurl", "when", "it", "wants", "to", "change", "the", "file", "descriptors", "it", "cares", "about", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/curl_httpclient.py#L104-L131
train
tornadoweb/tornado
tornado/curl_httpclient.py
CurlAsyncHTTPClient._set_timeout
def _set_timeout(self, msecs: int) -> None: """Called by libcurl to schedule a timeout.""" if self._timeout is not None: self.io_loop.remove_timeout(self._timeout) self._timeout = self.io_loop.add_timeout( self.io_loop.time() + msecs / 1000.0, self._handle_timeout )
python
def _set_timeout(self, msecs: int) -> None: """Called by libcurl to schedule a timeout.""" if self._timeout is not None: self.io_loop.remove_timeout(self._timeout) self._timeout = self.io_loop.add_timeout( self.io_loop.time() + msecs / 1000.0, self._handle_timeout )
[ "def", "_set_timeout", "(", "self", ",", "msecs", ":", "int", ")", "->", "None", ":", "if", "self", ".", "_timeout", "is", "not", "None", ":", "self", ".", "io_loop", ".", "remove_timeout", "(", "self", ".", "_timeout", ")", "self", ".", "_timeout", "=", "self", ".", "io_loop", ".", "add_timeout", "(", "self", ".", "io_loop", ".", "time", "(", ")", "+", "msecs", "/", "1000.0", ",", "self", ".", "_handle_timeout", ")" ]
Called by libcurl to schedule a timeout.
[ "Called", "by", "libcurl", "to", "schedule", "a", "timeout", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/curl_httpclient.py#L133-L139
train
tornadoweb/tornado
tornado/curl_httpclient.py
CurlAsyncHTTPClient._handle_events
def _handle_events(self, fd: int, events: int) -> None: """Called by IOLoop when there is activity on one of our file descriptors. """ action = 0 if events & ioloop.IOLoop.READ: action |= pycurl.CSELECT_IN if events & ioloop.IOLoop.WRITE: action |= pycurl.CSELECT_OUT while True: try: ret, num_handles = self._multi.socket_action(fd, action) except pycurl.error as e: ret = e.args[0] if ret != pycurl.E_CALL_MULTI_PERFORM: break self._finish_pending_requests()
python
def _handle_events(self, fd: int, events: int) -> None: """Called by IOLoop when there is activity on one of our file descriptors. """ action = 0 if events & ioloop.IOLoop.READ: action |= pycurl.CSELECT_IN if events & ioloop.IOLoop.WRITE: action |= pycurl.CSELECT_OUT while True: try: ret, num_handles = self._multi.socket_action(fd, action) except pycurl.error as e: ret = e.args[0] if ret != pycurl.E_CALL_MULTI_PERFORM: break self._finish_pending_requests()
[ "def", "_handle_events", "(", "self", ",", "fd", ":", "int", ",", "events", ":", "int", ")", "->", "None", ":", "action", "=", "0", "if", "events", "&", "ioloop", ".", "IOLoop", ".", "READ", ":", "action", "|=", "pycurl", ".", "CSELECT_IN", "if", "events", "&", "ioloop", ".", "IOLoop", ".", "WRITE", ":", "action", "|=", "pycurl", ".", "CSELECT_OUT", "while", "True", ":", "try", ":", "ret", ",", "num_handles", "=", "self", ".", "_multi", ".", "socket_action", "(", "fd", ",", "action", ")", "except", "pycurl", ".", "error", "as", "e", ":", "ret", "=", "e", ".", "args", "[", "0", "]", "if", "ret", "!=", "pycurl", ".", "E_CALL_MULTI_PERFORM", ":", "break", "self", ".", "_finish_pending_requests", "(", ")" ]
Called by IOLoop when there is activity on one of our file descriptors.
[ "Called", "by", "IOLoop", "when", "there", "is", "activity", "on", "one", "of", "our", "file", "descriptors", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/curl_httpclient.py#L141-L157
train
tornadoweb/tornado
tornado/curl_httpclient.py
CurlAsyncHTTPClient._handle_timeout
def _handle_timeout(self) -> None: """Called by IOLoop when the requested timeout has passed.""" self._timeout = None while True: try: ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0) except pycurl.error as e: ret = e.args[0] if ret != pycurl.E_CALL_MULTI_PERFORM: break self._finish_pending_requests() # In theory, we shouldn't have to do this because curl will # call _set_timeout whenever the timeout changes. However, # sometimes after _handle_timeout we will need to reschedule # immediately even though nothing has changed from curl's # perspective. This is because when socket_action is # called with SOCKET_TIMEOUT, libcurl decides internally which # timeouts need to be processed by using a monotonic clock # (where available) while tornado uses python's time.time() # to decide when timeouts have occurred. When those clocks # disagree on elapsed time (as they will whenever there is an # NTP adjustment), tornado might call _handle_timeout before # libcurl is ready. After each timeout, resync the scheduled # timeout with libcurl's current state. new_timeout = self._multi.timeout() if new_timeout >= 0: self._set_timeout(new_timeout)
python
def _handle_timeout(self) -> None: """Called by IOLoop when the requested timeout has passed.""" self._timeout = None while True: try: ret, num_handles = self._multi.socket_action(pycurl.SOCKET_TIMEOUT, 0) except pycurl.error as e: ret = e.args[0] if ret != pycurl.E_CALL_MULTI_PERFORM: break self._finish_pending_requests() # In theory, we shouldn't have to do this because curl will # call _set_timeout whenever the timeout changes. However, # sometimes after _handle_timeout we will need to reschedule # immediately even though nothing has changed from curl's # perspective. This is because when socket_action is # called with SOCKET_TIMEOUT, libcurl decides internally which # timeouts need to be processed by using a monotonic clock # (where available) while tornado uses python's time.time() # to decide when timeouts have occurred. When those clocks # disagree on elapsed time (as they will whenever there is an # NTP adjustment), tornado might call _handle_timeout before # libcurl is ready. After each timeout, resync the scheduled # timeout with libcurl's current state. new_timeout = self._multi.timeout() if new_timeout >= 0: self._set_timeout(new_timeout)
[ "def", "_handle_timeout", "(", "self", ")", "->", "None", ":", "self", ".", "_timeout", "=", "None", "while", "True", ":", "try", ":", "ret", ",", "num_handles", "=", "self", ".", "_multi", ".", "socket_action", "(", "pycurl", ".", "SOCKET_TIMEOUT", ",", "0", ")", "except", "pycurl", ".", "error", "as", "e", ":", "ret", "=", "e", ".", "args", "[", "0", "]", "if", "ret", "!=", "pycurl", ".", "E_CALL_MULTI_PERFORM", ":", "break", "self", ".", "_finish_pending_requests", "(", ")", "# In theory, we shouldn't have to do this because curl will", "# call _set_timeout whenever the timeout changes. However,", "# sometimes after _handle_timeout we will need to reschedule", "# immediately even though nothing has changed from curl's", "# perspective. This is because when socket_action is", "# called with SOCKET_TIMEOUT, libcurl decides internally which", "# timeouts need to be processed by using a monotonic clock", "# (where available) while tornado uses python's time.time()", "# to decide when timeouts have occurred. When those clocks", "# disagree on elapsed time (as they will whenever there is an", "# NTP adjustment), tornado might call _handle_timeout before", "# libcurl is ready. After each timeout, resync the scheduled", "# timeout with libcurl's current state.", "new_timeout", "=", "self", ".", "_multi", ".", "timeout", "(", ")", "if", "new_timeout", ">=", "0", ":", "self", ".", "_set_timeout", "(", "new_timeout", ")" ]
Called by IOLoop when the requested timeout has passed.
[ "Called", "by", "IOLoop", "when", "the", "requested", "timeout", "has", "passed", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/curl_httpclient.py#L159-L186
train
tornadoweb/tornado
tornado/curl_httpclient.py
CurlAsyncHTTPClient._handle_force_timeout
def _handle_force_timeout(self) -> None: """Called by IOLoop periodically to ask libcurl to process any events it may have forgotten about. """ while True: try: ret, num_handles = self._multi.socket_all() except pycurl.error as e: ret = e.args[0] if ret != pycurl.E_CALL_MULTI_PERFORM: break self._finish_pending_requests()
python
def _handle_force_timeout(self) -> None: """Called by IOLoop periodically to ask libcurl to process any events it may have forgotten about. """ while True: try: ret, num_handles = self._multi.socket_all() except pycurl.error as e: ret = e.args[0] if ret != pycurl.E_CALL_MULTI_PERFORM: break self._finish_pending_requests()
[ "def", "_handle_force_timeout", "(", "self", ")", "->", "None", ":", "while", "True", ":", "try", ":", "ret", ",", "num_handles", "=", "self", ".", "_multi", ".", "socket_all", "(", ")", "except", "pycurl", ".", "error", "as", "e", ":", "ret", "=", "e", ".", "args", "[", "0", "]", "if", "ret", "!=", "pycurl", ".", "E_CALL_MULTI_PERFORM", ":", "break", "self", ".", "_finish_pending_requests", "(", ")" ]
Called by IOLoop periodically to ask libcurl to process any events it may have forgotten about.
[ "Called", "by", "IOLoop", "periodically", "to", "ask", "libcurl", "to", "process", "any", "events", "it", "may", "have", "forgotten", "about", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/curl_httpclient.py#L188-L199
train
tornadoweb/tornado
tornado/curl_httpclient.py
CurlAsyncHTTPClient._finish_pending_requests
def _finish_pending_requests(self) -> None: """Process any requests that were completed by the last call to multi.socket_action. """ while True: num_q, ok_list, err_list = self._multi.info_read() for curl in ok_list: self._finish(curl) for curl, errnum, errmsg in err_list: self._finish(curl, errnum, errmsg) if num_q == 0: break self._process_queue()
python
def _finish_pending_requests(self) -> None: """Process any requests that were completed by the last call to multi.socket_action. """ while True: num_q, ok_list, err_list = self._multi.info_read() for curl in ok_list: self._finish(curl) for curl, errnum, errmsg in err_list: self._finish(curl, errnum, errmsg) if num_q == 0: break self._process_queue()
[ "def", "_finish_pending_requests", "(", "self", ")", "->", "None", ":", "while", "True", ":", "num_q", ",", "ok_list", ",", "err_list", "=", "self", ".", "_multi", ".", "info_read", "(", ")", "for", "curl", "in", "ok_list", ":", "self", ".", "_finish", "(", "curl", ")", "for", "curl", ",", "errnum", ",", "errmsg", "in", "err_list", ":", "self", ".", "_finish", "(", "curl", ",", "errnum", ",", "errmsg", ")", "if", "num_q", "==", "0", ":", "break", "self", ".", "_process_queue", "(", ")" ]
Process any requests that were completed by the last call to multi.socket_action.
[ "Process", "any", "requests", "that", "were", "completed", "by", "the", "last", "call", "to", "multi", ".", "socket_action", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/curl_httpclient.py#L201-L213
train
tornadoweb/tornado
demos/s3server/s3server.py
start
def start(port, root_directory, bucket_depth): """Starts the mock S3 server on the given port at the given path.""" application = S3Application(root_directory, bucket_depth) http_server = httpserver.HTTPServer(application) http_server.listen(port) ioloop.IOLoop.current().start()
python
def start(port, root_directory, bucket_depth): """Starts the mock S3 server on the given port at the given path.""" application = S3Application(root_directory, bucket_depth) http_server = httpserver.HTTPServer(application) http_server.listen(port) ioloop.IOLoop.current().start()
[ "def", "start", "(", "port", ",", "root_directory", ",", "bucket_depth", ")", ":", "application", "=", "S3Application", "(", "root_directory", ",", "bucket_depth", ")", "http_server", "=", "httpserver", ".", "HTTPServer", "(", "application", ")", "http_server", ".", "listen", "(", "port", ")", "ioloop", ".", "IOLoop", ".", "current", "(", ")", ".", "start", "(", ")" ]
Starts the mock S3 server on the given port at the given path.
[ "Starts", "the", "mock", "S3", "server", "on", "the", "given", "port", "at", "the", "given", "path", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/demos/s3server/s3server.py#L57-L62
train
tornadoweb/tornado
tornado/httpclient.py
HTTPClient.close
def close(self) -> None: """Closes the HTTPClient, freeing any resources used.""" if not self._closed: self._async_client.close() self._io_loop.close() self._closed = True
python
def close(self) -> None: """Closes the HTTPClient, freeing any resources used.""" if not self._closed: self._async_client.close() self._io_loop.close() self._closed = True
[ "def", "close", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "_closed", ":", "self", ".", "_async_client", ".", "close", "(", ")", "self", ".", "_io_loop", ".", "close", "(", ")", "self", ".", "_closed", "=", "True" ]
Closes the HTTPClient, freeing any resources used.
[ "Closes", "the", "HTTPClient", "freeing", "any", "resources", "used", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/httpclient.py#L113-L118
train
tornadoweb/tornado
tornado/httpclient.py
HTTPClient.fetch
def fetch( self, request: Union["HTTPRequest", str], **kwargs: Any ) -> "HTTPResponse": """Executes a request, returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` If an error occurs during the fetch, we raise an `HTTPError` unless the ``raise_error`` keyword argument is set to False. """ response = self._io_loop.run_sync( functools.partial(self._async_client.fetch, request, **kwargs) ) return response
python
def fetch( self, request: Union["HTTPRequest", str], **kwargs: Any ) -> "HTTPResponse": """Executes a request, returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` If an error occurs during the fetch, we raise an `HTTPError` unless the ``raise_error`` keyword argument is set to False. """ response = self._io_loop.run_sync( functools.partial(self._async_client.fetch, request, **kwargs) ) return response
[ "def", "fetch", "(", "self", ",", "request", ":", "Union", "[", "\"HTTPRequest\"", ",", "str", "]", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "\"HTTPResponse\"", ":", "response", "=", "self", ".", "_io_loop", ".", "run_sync", "(", "functools", ".", "partial", "(", "self", ".", "_async_client", ".", "fetch", ",", "request", ",", "*", "*", "kwargs", ")", ")", "return", "response" ]
Executes a request, returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` If an error occurs during the fetch, we raise an `HTTPError` unless the ``raise_error`` keyword argument is set to False.
[ "Executes", "a", "request", "returning", "an", "HTTPResponse", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/httpclient.py#L120-L135
train
tornadoweb/tornado
tornado/httpclient.py
AsyncHTTPClient.close
def close(self) -> None: """Destroys this HTTP client, freeing any file descriptors used. This method is **not needed in normal use** due to the way that `AsyncHTTPClient` objects are transparently reused. ``close()`` is generally only necessary when either the `.IOLoop` is also being closed, or the ``force_instance=True`` argument was used when creating the `AsyncHTTPClient`. No other methods may be called on the `AsyncHTTPClient` after ``close()``. """ if self._closed: return self._closed = True if self._instance_cache is not None: cached_val = self._instance_cache.pop(self.io_loop, None) # If there's an object other than self in the instance # cache for our IOLoop, something has gotten mixed up. A # value of None appears to be possible when this is called # from a destructor (HTTPClient.__del__) as the weakref # gets cleared before the destructor runs. if cached_val is not None and cached_val is not self: raise RuntimeError("inconsistent AsyncHTTPClient cache")
python
def close(self) -> None: """Destroys this HTTP client, freeing any file descriptors used. This method is **not needed in normal use** due to the way that `AsyncHTTPClient` objects are transparently reused. ``close()`` is generally only necessary when either the `.IOLoop` is also being closed, or the ``force_instance=True`` argument was used when creating the `AsyncHTTPClient`. No other methods may be called on the `AsyncHTTPClient` after ``close()``. """ if self._closed: return self._closed = True if self._instance_cache is not None: cached_val = self._instance_cache.pop(self.io_loop, None) # If there's an object other than self in the instance # cache for our IOLoop, something has gotten mixed up. A # value of None appears to be possible when this is called # from a destructor (HTTPClient.__del__) as the weakref # gets cleared before the destructor runs. if cached_val is not None and cached_val is not self: raise RuntimeError("inconsistent AsyncHTTPClient cache")
[ "def", "close", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_closed", ":", "return", "self", ".", "_closed", "=", "True", "if", "self", ".", "_instance_cache", "is", "not", "None", ":", "cached_val", "=", "self", ".", "_instance_cache", ".", "pop", "(", "self", ".", "io_loop", ",", "None", ")", "# If there's an object other than self in the instance", "# cache for our IOLoop, something has gotten mixed up. A", "# value of None appears to be possible when this is called", "# from a destructor (HTTPClient.__del__) as the weakref", "# gets cleared before the destructor runs.", "if", "cached_val", "is", "not", "None", "and", "cached_val", "is", "not", "self", ":", "raise", "RuntimeError", "(", "\"inconsistent AsyncHTTPClient cache\"", ")" ]
Destroys this HTTP client, freeing any file descriptors used. This method is **not needed in normal use** due to the way that `AsyncHTTPClient` objects are transparently reused. ``close()`` is generally only necessary when either the `.IOLoop` is also being closed, or the ``force_instance=True`` argument was used when creating the `AsyncHTTPClient`. No other methods may be called on the `AsyncHTTPClient` after ``close()``.
[ "Destroys", "this", "HTTP", "client", "freeing", "any", "file", "descriptors", "used", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/httpclient.py#L221-L245
train
tornadoweb/tornado
tornado/httpclient.py
AsyncHTTPClient.fetch
def fetch( self, request: Union[str, "HTTPRequest"], raise_error: bool = True, **kwargs: Any ) -> Awaitable["HTTPResponse"]: """Executes a request, asynchronously returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` This method returns a `.Future` whose result is an `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError` if the request returned a non-200 response code (other errors may also be raised if the server could not be contacted). Instead, if ``raise_error`` is set to False, the response will always be returned regardless of the response code. If a ``callback`` is given, it will be invoked with the `HTTPResponse`. In the callback interface, `HTTPError` is not automatically raised. Instead, you must check the response's ``error`` attribute or call its `~HTTPResponse.rethrow` method. .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned `.Future` instead. The ``raise_error=False`` argument only affects the `HTTPError` raised when a non-200 response code is used, instead of suppressing all errors. """ if self._closed: raise RuntimeError("fetch() called on closed AsyncHTTPClient") if not isinstance(request, HTTPRequest): request = HTTPRequest(url=request, **kwargs) else: if kwargs: raise ValueError( "kwargs can't be used if request is an HTTPRequest object" ) # We may modify this (to add Host, Accept-Encoding, etc), # so make sure we don't modify the caller's object. This is also # where normal dicts get converted to HTTPHeaders objects. 
request.headers = httputil.HTTPHeaders(request.headers) request_proxy = _RequestProxy(request, self.defaults) future = Future() # type: Future[HTTPResponse] def handle_response(response: "HTTPResponse") -> None: if response.error: if raise_error or not response._error_is_response_code: future_set_exception_unless_cancelled(future, response.error) return future_set_result_unless_cancelled(future, response) self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response) return future
python
def fetch( self, request: Union[str, "HTTPRequest"], raise_error: bool = True, **kwargs: Any ) -> Awaitable["HTTPResponse"]: """Executes a request, asynchronously returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` This method returns a `.Future` whose result is an `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError` if the request returned a non-200 response code (other errors may also be raised if the server could not be contacted). Instead, if ``raise_error`` is set to False, the response will always be returned regardless of the response code. If a ``callback`` is given, it will be invoked with the `HTTPResponse`. In the callback interface, `HTTPError` is not automatically raised. Instead, you must check the response's ``error`` attribute or call its `~HTTPResponse.rethrow` method. .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned `.Future` instead. The ``raise_error=False`` argument only affects the `HTTPError` raised when a non-200 response code is used, instead of suppressing all errors. """ if self._closed: raise RuntimeError("fetch() called on closed AsyncHTTPClient") if not isinstance(request, HTTPRequest): request = HTTPRequest(url=request, **kwargs) else: if kwargs: raise ValueError( "kwargs can't be used if request is an HTTPRequest object" ) # We may modify this (to add Host, Accept-Encoding, etc), # so make sure we don't modify the caller's object. This is also # where normal dicts get converted to HTTPHeaders objects. 
request.headers = httputil.HTTPHeaders(request.headers) request_proxy = _RequestProxy(request, self.defaults) future = Future() # type: Future[HTTPResponse] def handle_response(response: "HTTPResponse") -> None: if response.error: if raise_error or not response._error_is_response_code: future_set_exception_unless_cancelled(future, response.error) return future_set_result_unless_cancelled(future, response) self.fetch_impl(cast(HTTPRequest, request_proxy), handle_response) return future
[ "def", "fetch", "(", "self", ",", "request", ":", "Union", "[", "str", ",", "\"HTTPRequest\"", "]", ",", "raise_error", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "Awaitable", "[", "\"HTTPResponse\"", "]", ":", "if", "self", ".", "_closed", ":", "raise", "RuntimeError", "(", "\"fetch() called on closed AsyncHTTPClient\"", ")", "if", "not", "isinstance", "(", "request", ",", "HTTPRequest", ")", ":", "request", "=", "HTTPRequest", "(", "url", "=", "request", ",", "*", "*", "kwargs", ")", "else", ":", "if", "kwargs", ":", "raise", "ValueError", "(", "\"kwargs can't be used if request is an HTTPRequest object\"", ")", "# We may modify this (to add Host, Accept-Encoding, etc),", "# so make sure we don't modify the caller's object. This is also", "# where normal dicts get converted to HTTPHeaders objects.", "request", ".", "headers", "=", "httputil", ".", "HTTPHeaders", "(", "request", ".", "headers", ")", "request_proxy", "=", "_RequestProxy", "(", "request", ",", "self", ".", "defaults", ")", "future", "=", "Future", "(", ")", "# type: Future[HTTPResponse]", "def", "handle_response", "(", "response", ":", "\"HTTPResponse\"", ")", "->", "None", ":", "if", "response", ".", "error", ":", "if", "raise_error", "or", "not", "response", ".", "_error_is_response_code", ":", "future_set_exception_unless_cancelled", "(", "future", ",", "response", ".", "error", ")", "return", "future_set_result_unless_cancelled", "(", "future", ",", "response", ")", "self", ".", "fetch_impl", "(", "cast", "(", "HTTPRequest", ",", "request_proxy", ")", ",", "handle_response", ")", "return", "future" ]
Executes a request, asynchronously returning an `HTTPResponse`. The request may be either a string URL or an `HTTPRequest` object. If it is a string, we construct an `HTTPRequest` using any additional kwargs: ``HTTPRequest(request, **kwargs)`` This method returns a `.Future` whose result is an `HTTPResponse`. By default, the ``Future`` will raise an `HTTPError` if the request returned a non-200 response code (other errors may also be raised if the server could not be contacted). Instead, if ``raise_error`` is set to False, the response will always be returned regardless of the response code. If a ``callback`` is given, it will be invoked with the `HTTPResponse`. In the callback interface, `HTTPError` is not automatically raised. Instead, you must check the response's ``error`` attribute or call its `~HTTPResponse.rethrow` method. .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned `.Future` instead. The ``raise_error=False`` argument only affects the `HTTPError` raised when a non-200 response code is used, instead of suppressing all errors.
[ "Executes", "a", "request", "asynchronously", "returning", "an", "HTTPResponse", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/httpclient.py#L247-L305
train
tornadoweb/tornado
tornado/httpclient.py
AsyncHTTPClient.configure
def configure( cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any ) -> None: """Configures the `AsyncHTTPClient` subclass to use. ``AsyncHTTPClient()`` actually creates an instance of a subclass. This method may be called with either a class object or the fully-qualified name of such a class (or ``None`` to use the default, ``SimpleAsyncHTTPClient``) If additional keyword arguments are given, they will be passed to the constructor of each subclass instance created. The keyword argument ``max_clients`` determines the maximum number of simultaneous `~AsyncHTTPClient.fetch()` operations that can execute in parallel on each `.IOLoop`. Additional arguments may be supported depending on the implementation class in use. Example:: AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") """ super(AsyncHTTPClient, cls).configure(impl, **kwargs)
python
def configure( cls, impl: "Union[None, str, Type[Configurable]]", **kwargs: Any ) -> None: """Configures the `AsyncHTTPClient` subclass to use. ``AsyncHTTPClient()`` actually creates an instance of a subclass. This method may be called with either a class object or the fully-qualified name of such a class (or ``None`` to use the default, ``SimpleAsyncHTTPClient``) If additional keyword arguments are given, they will be passed to the constructor of each subclass instance created. The keyword argument ``max_clients`` determines the maximum number of simultaneous `~AsyncHTTPClient.fetch()` operations that can execute in parallel on each `.IOLoop`. Additional arguments may be supported depending on the implementation class in use. Example:: AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient") """ super(AsyncHTTPClient, cls).configure(impl, **kwargs)
[ "def", "configure", "(", "cls", ",", "impl", ":", "\"Union[None, str, Type[Configurable]]\"", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "super", "(", "AsyncHTTPClient", ",", "cls", ")", ".", "configure", "(", "impl", ",", "*", "*", "kwargs", ")" ]
Configures the `AsyncHTTPClient` subclass to use. ``AsyncHTTPClient()`` actually creates an instance of a subclass. This method may be called with either a class object or the fully-qualified name of such a class (or ``None`` to use the default, ``SimpleAsyncHTTPClient``) If additional keyword arguments are given, they will be passed to the constructor of each subclass instance created. The keyword argument ``max_clients`` determines the maximum number of simultaneous `~AsyncHTTPClient.fetch()` operations that can execute in parallel on each `.IOLoop`. Additional arguments may be supported depending on the implementation class in use. Example:: AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
[ "Configures", "the", "AsyncHTTPClient", "subclass", "to", "use", "." ]
b8b481770bcdb333a69afde5cce7eaa449128326
https://github.com/tornadoweb/tornado/blob/b8b481770bcdb333a69afde5cce7eaa449128326/tornado/httpclient.py#L313-L334
train
aio-libs/aiohttp
aiohttp/connector.py
BaseConnector._cleanup
def _cleanup(self) -> None: """Cleanup unused transports.""" if self._cleanup_handle: self._cleanup_handle.cancel() now = self._loop.time() timeout = self._keepalive_timeout if self._conns: connections = {} deadline = now - timeout for key, conns in self._conns.items(): alive = [] for proto, use_time in conns: if proto.is_connected(): if use_time - deadline < 0: transport = proto.transport proto.close() if (key.is_ssl and not self._cleanup_closed_disabled): self._cleanup_closed_transports.append( transport) else: alive.append((proto, use_time)) if alive: connections[key] = alive self._conns = connections if self._conns: self._cleanup_handle = helpers.weakref_handle( self, '_cleanup', timeout, self._loop)
python
def _cleanup(self) -> None: """Cleanup unused transports.""" if self._cleanup_handle: self._cleanup_handle.cancel() now = self._loop.time() timeout = self._keepalive_timeout if self._conns: connections = {} deadline = now - timeout for key, conns in self._conns.items(): alive = [] for proto, use_time in conns: if proto.is_connected(): if use_time - deadline < 0: transport = proto.transport proto.close() if (key.is_ssl and not self._cleanup_closed_disabled): self._cleanup_closed_transports.append( transport) else: alive.append((proto, use_time)) if alive: connections[key] = alive self._conns = connections if self._conns: self._cleanup_handle = helpers.weakref_handle( self, '_cleanup', timeout, self._loop)
[ "def", "_cleanup", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_cleanup_handle", ":", "self", ".", "_cleanup_handle", ".", "cancel", "(", ")", "now", "=", "self", ".", "_loop", ".", "time", "(", ")", "timeout", "=", "self", ".", "_keepalive_timeout", "if", "self", ".", "_conns", ":", "connections", "=", "{", "}", "deadline", "=", "now", "-", "timeout", "for", "key", ",", "conns", "in", "self", ".", "_conns", ".", "items", "(", ")", ":", "alive", "=", "[", "]", "for", "proto", ",", "use_time", "in", "conns", ":", "if", "proto", ".", "is_connected", "(", ")", ":", "if", "use_time", "-", "deadline", "<", "0", ":", "transport", "=", "proto", ".", "transport", "proto", ".", "close", "(", ")", "if", "(", "key", ".", "is_ssl", "and", "not", "self", ".", "_cleanup_closed_disabled", ")", ":", "self", ".", "_cleanup_closed_transports", ".", "append", "(", "transport", ")", "else", ":", "alive", ".", "append", "(", "(", "proto", ",", "use_time", ")", ")", "if", "alive", ":", "connections", "[", "key", "]", "=", "alive", "self", ".", "_conns", "=", "connections", "if", "self", ".", "_conns", ":", "self", ".", "_cleanup_handle", "=", "helpers", ".", "weakref_handle", "(", "self", ",", "'_cleanup'", ",", "timeout", ",", "self", ".", "_loop", ")" ]
Cleanup unused transports.
[ "Cleanup", "unused", "transports", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L327-L359
train
aio-libs/aiohttp
aiohttp/connector.py
BaseConnector._cleanup_closed
def _cleanup_closed(self) -> None: """Double confirmation for transport close. Some broken ssl servers may leave socket open without proper close. """ if self._cleanup_closed_handle: self._cleanup_closed_handle.cancel() for transport in self._cleanup_closed_transports: if transport is not None: transport.abort() self._cleanup_closed_transports = [] if not self._cleanup_closed_disabled: self._cleanup_closed_handle = helpers.weakref_handle( self, '_cleanup_closed', self._cleanup_closed_period, self._loop)
python
def _cleanup_closed(self) -> None: """Double confirmation for transport close. Some broken ssl servers may leave socket open without proper close. """ if self._cleanup_closed_handle: self._cleanup_closed_handle.cancel() for transport in self._cleanup_closed_transports: if transport is not None: transport.abort() self._cleanup_closed_transports = [] if not self._cleanup_closed_disabled: self._cleanup_closed_handle = helpers.weakref_handle( self, '_cleanup_closed', self._cleanup_closed_period, self._loop)
[ "def", "_cleanup_closed", "(", "self", ")", "->", "None", ":", "if", "self", ".", "_cleanup_closed_handle", ":", "self", ".", "_cleanup_closed_handle", ".", "cancel", "(", ")", "for", "transport", "in", "self", ".", "_cleanup_closed_transports", ":", "if", "transport", "is", "not", "None", ":", "transport", ".", "abort", "(", ")", "self", ".", "_cleanup_closed_transports", "=", "[", "]", "if", "not", "self", ".", "_cleanup_closed_disabled", ":", "self", ".", "_cleanup_closed_handle", "=", "helpers", ".", "weakref_handle", "(", "self", ",", "'_cleanup_closed'", ",", "self", ".", "_cleanup_closed_period", ",", "self", ".", "_loop", ")" ]
Double confirmation for transport close. Some broken ssl servers may leave socket open without proper close.
[ "Double", "confirmation", "for", "transport", "close", ".", "Some", "broken", "ssl", "servers", "may", "leave", "socket", "open", "without", "proper", "close", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L371-L387
train
aio-libs/aiohttp
aiohttp/connector.py
BaseConnector._available_connections
def _available_connections(self, key: 'ConnectionKey') -> int: """ Return number of available connections taking into account the limit, limit_per_host and the connection key. If it returns less than 1 means that there is no connections availables. """ if self._limit: # total calc available connections available = self._limit - len(self._acquired) # check limit per host if (self._limit_per_host and available > 0 and key in self._acquired_per_host): acquired = self._acquired_per_host.get(key) assert acquired is not None available = self._limit_per_host - len(acquired) elif self._limit_per_host and key in self._acquired_per_host: # check limit per host acquired = self._acquired_per_host.get(key) assert acquired is not None available = self._limit_per_host - len(acquired) else: available = 1 return available
python
def _available_connections(self, key: 'ConnectionKey') -> int: """ Return number of available connections taking into account the limit, limit_per_host and the connection key. If it returns less than 1 means that there is no connections availables. """ if self._limit: # total calc available connections available = self._limit - len(self._acquired) # check limit per host if (self._limit_per_host and available > 0 and key in self._acquired_per_host): acquired = self._acquired_per_host.get(key) assert acquired is not None available = self._limit_per_host - len(acquired) elif self._limit_per_host and key in self._acquired_per_host: # check limit per host acquired = self._acquired_per_host.get(key) assert acquired is not None available = self._limit_per_host - len(acquired) else: available = 1 return available
[ "def", "_available_connections", "(", "self", ",", "key", ":", "'ConnectionKey'", ")", "->", "int", ":", "if", "self", ".", "_limit", ":", "# total calc available connections", "available", "=", "self", ".", "_limit", "-", "len", "(", "self", ".", "_acquired", ")", "# check limit per host", "if", "(", "self", ".", "_limit_per_host", "and", "available", ">", "0", "and", "key", "in", "self", ".", "_acquired_per_host", ")", ":", "acquired", "=", "self", ".", "_acquired_per_host", ".", "get", "(", "key", ")", "assert", "acquired", "is", "not", "None", "available", "=", "self", ".", "_limit_per_host", "-", "len", "(", "acquired", ")", "elif", "self", ".", "_limit_per_host", "and", "key", "in", "self", ".", "_acquired_per_host", ":", "# check limit per host", "acquired", "=", "self", ".", "_acquired_per_host", ".", "get", "(", "key", ")", "assert", "acquired", "is", "not", "None", "available", "=", "self", ".", "_limit_per_host", "-", "len", "(", "acquired", ")", "else", ":", "available", "=", "1", "return", "available" ]
Return number of available connections taking into account the limit, limit_per_host and the connection key. If it returns less than 1 means that there is no connections availables.
[ "Return", "number", "of", "available", "connections", "taking", "into", "account", "the", "limit", "limit_per_host", "and", "the", "connection", "key", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L439-L467
train
aio-libs/aiohttp
aiohttp/connector.py
BaseConnector.connect
async def connect(self, req: 'ClientRequest', traces: List['Trace'], timeout: 'ClientTimeout') -> Connection: """Get from pool or create new connection.""" key = req.connection_key available = self._available_connections(key) # Wait if there are no available connections. if available <= 0: fut = self._loop.create_future() # This connection will now count towards the limit. waiters = self._waiters[key] waiters.append(fut) if traces: for trace in traces: await trace.send_connection_queued_start() try: await fut except BaseException as e: # remove a waiter even if it was cancelled, normally it's # removed when it's notified try: waiters.remove(fut) except ValueError: # fut may no longer be in list pass raise e finally: if not waiters: try: del self._waiters[key] except KeyError: # the key was evicted before. pass if traces: for trace in traces: await trace.send_connection_queued_end() proto = self._get(key) if proto is None: placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) if traces: for trace in traces: await trace.send_connection_create_start() try: proto = await self._create_connection(req, traces, timeout) if self._closed: proto.close() raise ClientConnectionError("Connector is closed.") except BaseException: if not self._closed: self._acquired.remove(placeholder) self._drop_acquired_per_host(key, placeholder) self._release_waiter() raise else: if not self._closed: self._acquired.remove(placeholder) self._drop_acquired_per_host(key, placeholder) if traces: for trace in traces: await trace.send_connection_create_end() else: if traces: for trace in traces: await trace.send_connection_reuseconn() self._acquired.add(proto) self._acquired_per_host[key].add(proto) return Connection(self, key, proto, self._loop)
python
async def connect(self, req: 'ClientRequest', traces: List['Trace'], timeout: 'ClientTimeout') -> Connection: """Get from pool or create new connection.""" key = req.connection_key available = self._available_connections(key) # Wait if there are no available connections. if available <= 0: fut = self._loop.create_future() # This connection will now count towards the limit. waiters = self._waiters[key] waiters.append(fut) if traces: for trace in traces: await trace.send_connection_queued_start() try: await fut except BaseException as e: # remove a waiter even if it was cancelled, normally it's # removed when it's notified try: waiters.remove(fut) except ValueError: # fut may no longer be in list pass raise e finally: if not waiters: try: del self._waiters[key] except KeyError: # the key was evicted before. pass if traces: for trace in traces: await trace.send_connection_queued_end() proto = self._get(key) if proto is None: placeholder = cast(ResponseHandler, _TransportPlaceholder()) self._acquired.add(placeholder) self._acquired_per_host[key].add(placeholder) if traces: for trace in traces: await trace.send_connection_create_start() try: proto = await self._create_connection(req, traces, timeout) if self._closed: proto.close() raise ClientConnectionError("Connector is closed.") except BaseException: if not self._closed: self._acquired.remove(placeholder) self._drop_acquired_per_host(key, placeholder) self._release_waiter() raise else: if not self._closed: self._acquired.remove(placeholder) self._drop_acquired_per_host(key, placeholder) if traces: for trace in traces: await trace.send_connection_create_end() else: if traces: for trace in traces: await trace.send_connection_reuseconn() self._acquired.add(proto) self._acquired_per_host[key].add(proto) return Connection(self, key, proto, self._loop)
[ "async", "def", "connect", "(", "self", ",", "req", ":", "'ClientRequest'", ",", "traces", ":", "List", "[", "'Trace'", "]", ",", "timeout", ":", "'ClientTimeout'", ")", "->", "Connection", ":", "key", "=", "req", ".", "connection_key", "available", "=", "self", ".", "_available_connections", "(", "key", ")", "# Wait if there are no available connections.", "if", "available", "<=", "0", ":", "fut", "=", "self", ".", "_loop", ".", "create_future", "(", ")", "# This connection will now count towards the limit.", "waiters", "=", "self", ".", "_waiters", "[", "key", "]", "waiters", ".", "append", "(", "fut", ")", "if", "traces", ":", "for", "trace", "in", "traces", ":", "await", "trace", ".", "send_connection_queued_start", "(", ")", "try", ":", "await", "fut", "except", "BaseException", "as", "e", ":", "# remove a waiter even if it was cancelled, normally it's", "# removed when it's notified", "try", ":", "waiters", ".", "remove", "(", "fut", ")", "except", "ValueError", ":", "# fut may no longer be in list", "pass", "raise", "e", "finally", ":", "if", "not", "waiters", ":", "try", ":", "del", "self", ".", "_waiters", "[", "key", "]", "except", "KeyError", ":", "# the key was evicted before.", "pass", "if", "traces", ":", "for", "trace", "in", "traces", ":", "await", "trace", ".", "send_connection_queued_end", "(", ")", "proto", "=", "self", ".", "_get", "(", "key", ")", "if", "proto", "is", "None", ":", "placeholder", "=", "cast", "(", "ResponseHandler", ",", "_TransportPlaceholder", "(", ")", ")", "self", ".", "_acquired", ".", "add", "(", "placeholder", ")", "self", ".", "_acquired_per_host", "[", "key", "]", ".", "add", "(", "placeholder", ")", "if", "traces", ":", "for", "trace", "in", "traces", ":", "await", "trace", ".", "send_connection_create_start", "(", ")", "try", ":", "proto", "=", "await", "self", ".", "_create_connection", "(", "req", ",", "traces", ",", "timeout", ")", "if", "self", ".", "_closed", ":", "proto", ".", "close", "(", ")", "raise", 
"ClientConnectionError", "(", "\"Connector is closed.\"", ")", "except", "BaseException", ":", "if", "not", "self", ".", "_closed", ":", "self", ".", "_acquired", ".", "remove", "(", "placeholder", ")", "self", ".", "_drop_acquired_per_host", "(", "key", ",", "placeholder", ")", "self", ".", "_release_waiter", "(", ")", "raise", "else", ":", "if", "not", "self", ".", "_closed", ":", "self", ".", "_acquired", ".", "remove", "(", "placeholder", ")", "self", ".", "_drop_acquired_per_host", "(", "key", ",", "placeholder", ")", "if", "traces", ":", "for", "trace", "in", "traces", ":", "await", "trace", ".", "send_connection_create_end", "(", ")", "else", ":", "if", "traces", ":", "for", "trace", "in", "traces", ":", "await", "trace", ".", "send_connection_reuseconn", "(", ")", "self", ".", "_acquired", ".", "add", "(", "proto", ")", "self", ".", "_acquired_per_host", "[", "key", "]", ".", "add", "(", "proto", ")", "return", "Connection", "(", "self", ",", "key", ",", "proto", ",", "self", ".", "_loop", ")" ]
Get from pool or create new connection.
[ "Get", "from", "pool", "or", "create", "new", "connection", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L469-L547
train
aio-libs/aiohttp
aiohttp/connector.py
BaseConnector._release_waiter
def _release_waiter(self) -> None: """ Iterates over all waiters till found one that is not finsihed and belongs to a host that has available connections. """ if not self._waiters: return # Having the dict keys ordered this avoids to iterate # at the same order at each call. queues = list(self._waiters.keys()) random.shuffle(queues) for key in queues: if self._available_connections(key) < 1: continue waiters = self._waiters[key] while waiters: waiter = waiters.popleft() if not waiter.done(): waiter.set_result(None) return
python
def _release_waiter(self) -> None: """ Iterates over all waiters till found one that is not finsihed and belongs to a host that has available connections. """ if not self._waiters: return # Having the dict keys ordered this avoids to iterate # at the same order at each call. queues = list(self._waiters.keys()) random.shuffle(queues) for key in queues: if self._available_connections(key) < 1: continue waiters = self._waiters[key] while waiters: waiter = waiters.popleft() if not waiter.done(): waiter.set_result(None) return
[ "def", "_release_waiter", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "_waiters", ":", "return", "# Having the dict keys ordered this avoids to iterate", "# at the same order at each call.", "queues", "=", "list", "(", "self", ".", "_waiters", ".", "keys", "(", ")", ")", "random", ".", "shuffle", "(", "queues", ")", "for", "key", "in", "queues", ":", "if", "self", ".", "_available_connections", "(", "key", ")", "<", "1", ":", "continue", "waiters", "=", "self", ".", "_waiters", "[", "key", "]", "while", "waiters", ":", "waiter", "=", "waiters", ".", "popleft", "(", ")", "if", "not", "waiter", ".", "done", "(", ")", ":", "waiter", ".", "set_result", "(", "None", ")", "return" ]
Iterates over all waiters till found one that is not finsihed and belongs to a host that has available connections.
[ "Iterates", "over", "all", "waiters", "till", "found", "one", "that", "is", "not", "finsihed", "and", "belongs", "to", "a", "host", "that", "has", "available", "connections", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L575-L597
train
aio-libs/aiohttp
aiohttp/connector.py
TCPConnector.close
def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" for ev in self._throttle_dns_events.values(): ev.cancel() return super().close()
python
def close(self) -> Awaitable[None]: """Close all ongoing DNS calls.""" for ev in self._throttle_dns_events.values(): ev.cancel() return super().close()
[ "def", "close", "(", "self", ")", "->", "Awaitable", "[", "None", "]", ":", "for", "ev", "in", "self", ".", "_throttle_dns_events", ".", "values", "(", ")", ":", "ev", ".", "cancel", "(", ")", "return", "super", "(", ")", ".", "close", "(", ")" ]
Close all ongoing DNS calls.
[ "Close", "all", "ongoing", "DNS", "calls", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L744-L749
train
aio-libs/aiohttp
aiohttp/connector.py
TCPConnector.clear_dns_cache
def clear_dns_cache(self, host: Optional[str]=None, port: Optional[int]=None) -> None: """Remove specified host/port or clear all dns local cache.""" if host is not None and port is not None: self._cached_hosts.remove((host, port)) elif host is not None or port is not None: raise ValueError("either both host and port " "or none of them are allowed") else: self._cached_hosts.clear()
python
def clear_dns_cache(self, host: Optional[str]=None, port: Optional[int]=None) -> None: """Remove specified host/port or clear all dns local cache.""" if host is not None and port is not None: self._cached_hosts.remove((host, port)) elif host is not None or port is not None: raise ValueError("either both host and port " "or none of them are allowed") else: self._cached_hosts.clear()
[ "def", "clear_dns_cache", "(", "self", ",", "host", ":", "Optional", "[", "str", "]", "=", "None", ",", "port", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "None", ":", "if", "host", "is", "not", "None", "and", "port", "is", "not", "None", ":", "self", ".", "_cached_hosts", ".", "remove", "(", "(", "host", ",", "port", ")", ")", "elif", "host", "is", "not", "None", "or", "port", "is", "not", "None", ":", "raise", "ValueError", "(", "\"either both host and port \"", "\"or none of them are allowed\"", ")", "else", ":", "self", ".", "_cached_hosts", ".", "clear", "(", ")" ]
Remove specified host/port or clear all dns local cache.
[ "Remove", "specified", "host", "/", "port", "or", "clear", "all", "dns", "local", "cache", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L761-L771
train
aio-libs/aiohttp
aiohttp/connector.py
TCPConnector._create_connection
async def _create_connection(self, req: 'ClientRequest', traces: List['Trace'], timeout: 'ClientTimeout') -> ResponseHandler: """Create connection. Has same keyword arguments as BaseEventLoop.create_connection. """ if req.proxy: _, proto = await self._create_proxy_connection( req, traces, timeout) else: _, proto = await self._create_direct_connection( req, traces, timeout) return proto
python
async def _create_connection(self, req: 'ClientRequest', traces: List['Trace'], timeout: 'ClientTimeout') -> ResponseHandler: """Create connection. Has same keyword arguments as BaseEventLoop.create_connection. """ if req.proxy: _, proto = await self._create_proxy_connection( req, traces, timeout) else: _, proto = await self._create_direct_connection( req, traces, timeout) return proto
[ "async", "def", "_create_connection", "(", "self", ",", "req", ":", "'ClientRequest'", ",", "traces", ":", "List", "[", "'Trace'", "]", ",", "timeout", ":", "'ClientTimeout'", ")", "->", "ResponseHandler", ":", "if", "req", ".", "proxy", ":", "_", ",", "proto", "=", "await", "self", ".", "_create_proxy_connection", "(", "req", ",", "traces", ",", "timeout", ")", "else", ":", "_", ",", "proto", "=", "await", "self", ".", "_create_direct_connection", "(", "req", ",", "traces", ",", "timeout", ")", "return", "proto" ]
Create connection. Has same keyword arguments as BaseEventLoop.create_connection.
[ "Create", "connection", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L842-L856
train
aio-libs/aiohttp
aiohttp/connector.py
TCPConnector._get_ssl_context
def _get_ssl_context(self, req: 'ClientRequest') -> Optional[SSLContext]: """Logic to get the correct SSL context 0. if req.ssl is false, return None 1. if ssl_context is specified in req, use it 2. if _ssl_context is specified in self, use it 3. otherwise: 1. if verify_ssl is not specified in req, use self.ssl_context (will generate a default context according to self.verify_ssl) 2. if verify_ssl is True in req, generate a default SSL context 3. if verify_ssl is False in req, generate a SSL context that won't verify """ if req.is_ssl(): if ssl is None: # pragma: no cover raise RuntimeError('SSL is not supported.') sslcontext = req.ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not None: # not verified or fingerprinted return self._make_ssl_context(False) sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not None: # not verified or fingerprinted return self._make_ssl_context(False) return self._make_ssl_context(True) else: return None
python
def _get_ssl_context(self, req: 'ClientRequest') -> Optional[SSLContext]: """Logic to get the correct SSL context 0. if req.ssl is false, return None 1. if ssl_context is specified in req, use it 2. if _ssl_context is specified in self, use it 3. otherwise: 1. if verify_ssl is not specified in req, use self.ssl_context (will generate a default context according to self.verify_ssl) 2. if verify_ssl is True in req, generate a default SSL context 3. if verify_ssl is False in req, generate a SSL context that won't verify """ if req.is_ssl(): if ssl is None: # pragma: no cover raise RuntimeError('SSL is not supported.') sslcontext = req.ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not None: # not verified or fingerprinted return self._make_ssl_context(False) sslcontext = self._ssl if isinstance(sslcontext, ssl.SSLContext): return sslcontext if sslcontext is not None: # not verified or fingerprinted return self._make_ssl_context(False) return self._make_ssl_context(True) else: return None
[ "def", "_get_ssl_context", "(", "self", ",", "req", ":", "'ClientRequest'", ")", "->", "Optional", "[", "SSLContext", "]", ":", "if", "req", ".", "is_ssl", "(", ")", ":", "if", "ssl", "is", "None", ":", "# pragma: no cover", "raise", "RuntimeError", "(", "'SSL is not supported.'", ")", "sslcontext", "=", "req", ".", "ssl", "if", "isinstance", "(", "sslcontext", ",", "ssl", ".", "SSLContext", ")", ":", "return", "sslcontext", "if", "sslcontext", "is", "not", "None", ":", "# not verified or fingerprinted", "return", "self", ".", "_make_ssl_context", "(", "False", ")", "sslcontext", "=", "self", ".", "_ssl", "if", "isinstance", "(", "sslcontext", ",", "ssl", ".", "SSLContext", ")", ":", "return", "sslcontext", "if", "sslcontext", "is", "not", "None", ":", "# not verified or fingerprinted", "return", "self", ".", "_make_ssl_context", "(", "False", ")", "return", "self", ".", "_make_ssl_context", "(", "True", ")", "else", ":", "return", "None" ]
Logic to get the correct SSL context 0. if req.ssl is false, return None 1. if ssl_context is specified in req, use it 2. if _ssl_context is specified in self, use it 3. otherwise: 1. if verify_ssl is not specified in req, use self.ssl_context (will generate a default context according to self.verify_ssl) 2. if verify_ssl is True in req, generate a default SSL context 3. if verify_ssl is False in req, generate a SSL context that won't verify
[ "Logic", "to", "get", "the", "correct", "SSL", "context" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/connector.py#L871-L902
train
aio-libs/aiohttp
aiohttp/http_websocket.py
_websocket_mask_python
def _websocket_mask_python(mask: bytes, data: bytearray) -> None: """Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytearray` object of any length. The contents of `data` are masked with `mask`, as specified in section 5.3 of RFC 6455. Note that this function mutates the `data` argument. This pure-python implementation may be replaced by an optimized version when available. """ assert isinstance(data, bytearray), data assert len(mask) == 4, mask if data: a, b, c, d = (_XOR_TABLE[n] for n in mask) data[::4] = data[::4].translate(a) data[1::4] = data[1::4].translate(b) data[2::4] = data[2::4].translate(c) data[3::4] = data[3::4].translate(d)
python
def _websocket_mask_python(mask: bytes, data: bytearray) -> None: """Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytearray` object of any length. The contents of `data` are masked with `mask`, as specified in section 5.3 of RFC 6455. Note that this function mutates the `data` argument. This pure-python implementation may be replaced by an optimized version when available. """ assert isinstance(data, bytearray), data assert len(mask) == 4, mask if data: a, b, c, d = (_XOR_TABLE[n] for n in mask) data[::4] = data[::4].translate(a) data[1::4] = data[1::4].translate(b) data[2::4] = data[2::4].translate(c) data[3::4] = data[3::4].translate(d)
[ "def", "_websocket_mask_python", "(", "mask", ":", "bytes", ",", "data", ":", "bytearray", ")", "->", "None", ":", "assert", "isinstance", "(", "data", ",", "bytearray", ")", ",", "data", "assert", "len", "(", "mask", ")", "==", "4", ",", "mask", "if", "data", ":", "a", ",", "b", ",", "c", ",", "d", "=", "(", "_XOR_TABLE", "[", "n", "]", "for", "n", "in", "mask", ")", "data", "[", ":", ":", "4", "]", "=", "data", "[", ":", ":", "4", "]", ".", "translate", "(", "a", ")", "data", "[", "1", ":", ":", "4", "]", "=", "data", "[", "1", ":", ":", "4", "]", ".", "translate", "(", "b", ")", "data", "[", "2", ":", ":", "4", "]", "=", "data", "[", "2", ":", ":", "4", "]", ".", "translate", "(", "c", ")", "data", "[", "3", ":", ":", "4", "]", "=", "data", "[", "3", ":", ":", "4", "]", ".", "translate", "(", "d", ")" ]
Websocket masking function. `mask` is a `bytes` object of length 4; `data` is a `bytearray` object of any length. The contents of `data` are masked with `mask`, as specified in section 5.3 of RFC 6455. Note that this function mutates the `data` argument. This pure-python implementation may be replaced by an optimized version when available.
[ "Websocket", "masking", "function", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L117-L138
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WSMessage.json
def json(self, *, # type: ignore loads: Callable[[Any], Any]=json.loads) -> None: """Return parsed JSON data. .. versionadded:: 0.22 """ return loads(self.data)
python
def json(self, *, # type: ignore loads: Callable[[Any], Any]=json.loads) -> None: """Return parsed JSON data. .. versionadded:: 0.22 """ return loads(self.data)
[ "def", "json", "(", "self", ",", "*", ",", "# type: ignore", "loads", ":", "Callable", "[", "[", "Any", "]", ",", "Any", "]", "=", "json", ".", "loads", ")", "->", "None", ":", "return", "loads", "(", "self", ".", "data", ")" ]
Return parsed JSON data. .. versionadded:: 0.22
[ "Return", "parsed", "JSON", "data", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L85-L91
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WebSocketReader.parse_frame
def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: """Return the next frame from the socket.""" frames = [] if self._tail: buf, self._tail = self._tail + buf, b'' start_pos = 0 buf_length = len(buf) while True: # read header if self._state == WSParserState.READ_HEADER: if buf_length - start_pos >= 2: data = buf[start_pos:start_pos+2] start_pos += 2 first_byte, second_byte = data fin = (first_byte >> 7) & 1 rsv1 = (first_byte >> 6) & 1 rsv2 = (first_byte >> 5) & 1 rsv3 = (first_byte >> 4) & 1 opcode = first_byte & 0xf # frame-fin = %x0 ; more frames of this message follow # / %x1 ; final frame of this message # frame-rsv1 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # frame-rsv2 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # frame-rsv3 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # # Remove rsv1 from this test for deflate development if rsv2 or rsv3 or (rsv1 and not self._compress): raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received frame with non-zero reserved bits') if opcode > 0x7 and fin == 0: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received fragmented control frame') has_mask = (second_byte >> 7) & 1 length = second_byte & 0x7f # Control frames MUST have a payload # length of 125 bytes or less if opcode > 0x7 and length > 125: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Control frame payload cannot be ' 'larger than 125 bytes') # Set compress status if last package is FIN # OR set compress status if this is first fragment # Raise error if not first fragment with rsv1 = 0x1 if self._frame_fin or self._compressed is None: self._compressed = True if rsv1 else False elif rsv1: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received frame with non-zero reserved bits') self._frame_fin = bool(fin) self._frame_opcode = opcode self._has_mask = bool(has_mask) self._payload_length_flag = length self._state = WSParserState.READ_PAYLOAD_LENGTH else: break # read 
payload length if self._state == WSParserState.READ_PAYLOAD_LENGTH: length = self._payload_length_flag if length == 126: if buf_length - start_pos >= 2: data = buf[start_pos:start_pos+2] start_pos += 2 length = UNPACK_LEN2(data)[0] self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) else: break elif length > 126: if buf_length - start_pos >= 8: data = buf[start_pos:start_pos+8] start_pos += 8 length = UNPACK_LEN3(data)[0] self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) else: break else: self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) # read payload mask if self._state == WSParserState.READ_PAYLOAD_MASK: if buf_length - start_pos >= 4: self._frame_mask = buf[start_pos:start_pos+4] start_pos += 4 self._state = WSParserState.READ_PAYLOAD else: break if self._state == WSParserState.READ_PAYLOAD: length = self._payload_length payload = self._frame_payload chunk_len = buf_length - start_pos if length >= chunk_len: self._payload_length = length - chunk_len payload.extend(buf[start_pos:]) start_pos = buf_length else: self._payload_length = 0 payload.extend(buf[start_pos:start_pos+length]) start_pos = start_pos + length if self._payload_length == 0: if self._has_mask: assert self._frame_mask is not None _websocket_mask(self._frame_mask, payload) frames.append(( self._frame_fin, self._frame_opcode, payload, self._compressed)) self._frame_payload = bytearray() self._state = WSParserState.READ_HEADER else: break self._tail = buf[start_pos:] return frames
python
def parse_frame(self, buf: bytes) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]: """Return the next frame from the socket.""" frames = [] if self._tail: buf, self._tail = self._tail + buf, b'' start_pos = 0 buf_length = len(buf) while True: # read header if self._state == WSParserState.READ_HEADER: if buf_length - start_pos >= 2: data = buf[start_pos:start_pos+2] start_pos += 2 first_byte, second_byte = data fin = (first_byte >> 7) & 1 rsv1 = (first_byte >> 6) & 1 rsv2 = (first_byte >> 5) & 1 rsv3 = (first_byte >> 4) & 1 opcode = first_byte & 0xf # frame-fin = %x0 ; more frames of this message follow # / %x1 ; final frame of this message # frame-rsv1 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # frame-rsv2 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # frame-rsv3 = %x0 ; # 1 bit, MUST be 0 unless negotiated otherwise # # Remove rsv1 from this test for deflate development if rsv2 or rsv3 or (rsv1 and not self._compress): raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received frame with non-zero reserved bits') if opcode > 0x7 and fin == 0: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received fragmented control frame') has_mask = (second_byte >> 7) & 1 length = second_byte & 0x7f # Control frames MUST have a payload # length of 125 bytes or less if opcode > 0x7 and length > 125: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Control frame payload cannot be ' 'larger than 125 bytes') # Set compress status if last package is FIN # OR set compress status if this is first fragment # Raise error if not first fragment with rsv1 = 0x1 if self._frame_fin or self._compressed is None: self._compressed = True if rsv1 else False elif rsv1: raise WebSocketError( WSCloseCode.PROTOCOL_ERROR, 'Received frame with non-zero reserved bits') self._frame_fin = bool(fin) self._frame_opcode = opcode self._has_mask = bool(has_mask) self._payload_length_flag = length self._state = WSParserState.READ_PAYLOAD_LENGTH else: break # read 
payload length if self._state == WSParserState.READ_PAYLOAD_LENGTH: length = self._payload_length_flag if length == 126: if buf_length - start_pos >= 2: data = buf[start_pos:start_pos+2] start_pos += 2 length = UNPACK_LEN2(data)[0] self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) else: break elif length > 126: if buf_length - start_pos >= 8: data = buf[start_pos:start_pos+8] start_pos += 8 length = UNPACK_LEN3(data)[0] self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) else: break else: self._payload_length = length self._state = ( WSParserState.READ_PAYLOAD_MASK if self._has_mask else WSParserState.READ_PAYLOAD) # read payload mask if self._state == WSParserState.READ_PAYLOAD_MASK: if buf_length - start_pos >= 4: self._frame_mask = buf[start_pos:start_pos+4] start_pos += 4 self._state = WSParserState.READ_PAYLOAD else: break if self._state == WSParserState.READ_PAYLOAD: length = self._payload_length payload = self._frame_payload chunk_len = buf_length - start_pos if length >= chunk_len: self._payload_length = length - chunk_len payload.extend(buf[start_pos:]) start_pos = buf_length else: self._payload_length = 0 payload.extend(buf[start_pos:start_pos+length]) start_pos = start_pos + length if self._payload_length == 0: if self._has_mask: assert self._frame_mask is not None _websocket_mask(self._frame_mask, payload) frames.append(( self._frame_fin, self._frame_opcode, payload, self._compressed)) self._frame_payload = bytearray() self._state = WSParserState.READ_HEADER else: break self._tail = buf[start_pos:] return frames
[ "def", "parse_frame", "(", "self", ",", "buf", ":", "bytes", ")", "->", "List", "[", "Tuple", "[", "bool", ",", "Optional", "[", "int", "]", ",", "bytearray", ",", "Optional", "[", "bool", "]", "]", "]", ":", "frames", "=", "[", "]", "if", "self", ".", "_tail", ":", "buf", ",", "self", ".", "_tail", "=", "self", ".", "_tail", "+", "buf", ",", "b''", "start_pos", "=", "0", "buf_length", "=", "len", "(", "buf", ")", "while", "True", ":", "# read header", "if", "self", ".", "_state", "==", "WSParserState", ".", "READ_HEADER", ":", "if", "buf_length", "-", "start_pos", ">=", "2", ":", "data", "=", "buf", "[", "start_pos", ":", "start_pos", "+", "2", "]", "start_pos", "+=", "2", "first_byte", ",", "second_byte", "=", "data", "fin", "=", "(", "first_byte", ">>", "7", ")", "&", "1", "rsv1", "=", "(", "first_byte", ">>", "6", ")", "&", "1", "rsv2", "=", "(", "first_byte", ">>", "5", ")", "&", "1", "rsv3", "=", "(", "first_byte", ">>", "4", ")", "&", "1", "opcode", "=", "first_byte", "&", "0xf", "# frame-fin = %x0 ; more frames of this message follow", "# / %x1 ; final frame of this message", "# frame-rsv1 = %x0 ;", "# 1 bit, MUST be 0 unless negotiated otherwise", "# frame-rsv2 = %x0 ;", "# 1 bit, MUST be 0 unless negotiated otherwise", "# frame-rsv3 = %x0 ;", "# 1 bit, MUST be 0 unless negotiated otherwise", "#", "# Remove rsv1 from this test for deflate development", "if", "rsv2", "or", "rsv3", "or", "(", "rsv1", "and", "not", "self", ".", "_compress", ")", ":", "raise", "WebSocketError", "(", "WSCloseCode", ".", "PROTOCOL_ERROR", ",", "'Received frame with non-zero reserved bits'", ")", "if", "opcode", ">", "0x7", "and", "fin", "==", "0", ":", "raise", "WebSocketError", "(", "WSCloseCode", ".", "PROTOCOL_ERROR", ",", "'Received fragmented control frame'", ")", "has_mask", "=", "(", "second_byte", ">>", "7", ")", "&", "1", "length", "=", "second_byte", "&", "0x7f", "# Control frames MUST have a payload", "# length of 125 bytes or less", "if", "opcode", ">", "0x7", 
"and", "length", ">", "125", ":", "raise", "WebSocketError", "(", "WSCloseCode", ".", "PROTOCOL_ERROR", ",", "'Control frame payload cannot be '", "'larger than 125 bytes'", ")", "# Set compress status if last package is FIN", "# OR set compress status if this is first fragment", "# Raise error if not first fragment with rsv1 = 0x1", "if", "self", ".", "_frame_fin", "or", "self", ".", "_compressed", "is", "None", ":", "self", ".", "_compressed", "=", "True", "if", "rsv1", "else", "False", "elif", "rsv1", ":", "raise", "WebSocketError", "(", "WSCloseCode", ".", "PROTOCOL_ERROR", ",", "'Received frame with non-zero reserved bits'", ")", "self", ".", "_frame_fin", "=", "bool", "(", "fin", ")", "self", ".", "_frame_opcode", "=", "opcode", "self", ".", "_has_mask", "=", "bool", "(", "has_mask", ")", "self", ".", "_payload_length_flag", "=", "length", "self", ".", "_state", "=", "WSParserState", ".", "READ_PAYLOAD_LENGTH", "else", ":", "break", "# read payload length", "if", "self", ".", "_state", "==", "WSParserState", ".", "READ_PAYLOAD_LENGTH", ":", "length", "=", "self", ".", "_payload_length_flag", "if", "length", "==", "126", ":", "if", "buf_length", "-", "start_pos", ">=", "2", ":", "data", "=", "buf", "[", "start_pos", ":", "start_pos", "+", "2", "]", "start_pos", "+=", "2", "length", "=", "UNPACK_LEN2", "(", "data", ")", "[", "0", "]", "self", ".", "_payload_length", "=", "length", "self", ".", "_state", "=", "(", "WSParserState", ".", "READ_PAYLOAD_MASK", "if", "self", ".", "_has_mask", "else", "WSParserState", ".", "READ_PAYLOAD", ")", "else", ":", "break", "elif", "length", ">", "126", ":", "if", "buf_length", "-", "start_pos", ">=", "8", ":", "data", "=", "buf", "[", "start_pos", ":", "start_pos", "+", "8", "]", "start_pos", "+=", "8", "length", "=", "UNPACK_LEN3", "(", "data", ")", "[", "0", "]", "self", ".", "_payload_length", "=", "length", "self", ".", "_state", "=", "(", "WSParserState", ".", "READ_PAYLOAD_MASK", "if", "self", ".", "_has_mask", "else", 
"WSParserState", ".", "READ_PAYLOAD", ")", "else", ":", "break", "else", ":", "self", ".", "_payload_length", "=", "length", "self", ".", "_state", "=", "(", "WSParserState", ".", "READ_PAYLOAD_MASK", "if", "self", ".", "_has_mask", "else", "WSParserState", ".", "READ_PAYLOAD", ")", "# read payload mask", "if", "self", ".", "_state", "==", "WSParserState", ".", "READ_PAYLOAD_MASK", ":", "if", "buf_length", "-", "start_pos", ">=", "4", ":", "self", ".", "_frame_mask", "=", "buf", "[", "start_pos", ":", "start_pos", "+", "4", "]", "start_pos", "+=", "4", "self", ".", "_state", "=", "WSParserState", ".", "READ_PAYLOAD", "else", ":", "break", "if", "self", ".", "_state", "==", "WSParserState", ".", "READ_PAYLOAD", ":", "length", "=", "self", ".", "_payload_length", "payload", "=", "self", ".", "_frame_payload", "chunk_len", "=", "buf_length", "-", "start_pos", "if", "length", ">=", "chunk_len", ":", "self", ".", "_payload_length", "=", "length", "-", "chunk_len", "payload", ".", "extend", "(", "buf", "[", "start_pos", ":", "]", ")", "start_pos", "=", "buf_length", "else", ":", "self", ".", "_payload_length", "=", "0", "payload", ".", "extend", "(", "buf", "[", "start_pos", ":", "start_pos", "+", "length", "]", ")", "start_pos", "=", "start_pos", "+", "length", "if", "self", ".", "_payload_length", "==", "0", ":", "if", "self", ".", "_has_mask", ":", "assert", "self", ".", "_frame_mask", "is", "not", "None", "_websocket_mask", "(", "self", ".", "_frame_mask", ",", "payload", ")", "frames", ".", "append", "(", "(", "self", ".", "_frame_fin", ",", "self", ".", "_frame_opcode", ",", "payload", ",", "self", ".", "_compressed", ")", ")", "self", ".", "_frame_payload", "=", "bytearray", "(", ")", "self", ".", "_state", "=", "WSParserState", ".", "READ_HEADER", "else", ":", "break", "self", ".", "_tail", "=", "buf", "[", "start_pos", ":", "]", "return", "frames" ]
Return the next frame from the socket.
[ "Return", "the", "next", "frame", "from", "the", "socket", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L392-L541
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WebSocketWriter._send_frame
async def _send_frame(self, message: bytes, opcode: int, compress: Optional[int]=None) -> None: """Send a frame over the websocket with message as its payload.""" if self._closing: ws_logger.warning('websocket connection is closing.') rsv = 0 # Only compress larger packets (disabled) # Does small packet needs to be compressed? # if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: if compress: # Do not set self._compress if compressing is for this frame compressobj = zlib.compressobj(wbits=-compress) else: # self.compress if not self._compressobj: self._compressobj = zlib.compressobj(wbits=-self.compress) compressobj = self._compressobj message = compressobj.compress(message) message = message + compressobj.flush( zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask if use_mask: mask_bit = 0x80 else: mask_bit = 0 if msg_length < 126: header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) elif msg_length < (1 << 16): header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) else: header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: mask = self.randrange(0, 0xffffffff) mask = mask.to_bytes(4, 'big') message = bytearray(message) _websocket_mask(mask, message) self.transport.write(header + mask + message) self._output_size += len(header) + len(mask) + len(message) else: if len(message) > MSG_SIZE: self.transport.write(header) self.transport.write(message) else: self.transport.write(header + message) self._output_size += len(header) + len(message) if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper()
python
async def _send_frame(self, message: bytes, opcode: int, compress: Optional[int]=None) -> None: """Send a frame over the websocket with message as its payload.""" if self._closing: ws_logger.warning('websocket connection is closing.') rsv = 0 # Only compress larger packets (disabled) # Does small packet needs to be compressed? # if self.compress and opcode < 8 and len(message) > 124: if (compress or self.compress) and opcode < 8: if compress: # Do not set self._compress if compressing is for this frame compressobj = zlib.compressobj(wbits=-compress) else: # self.compress if not self._compressobj: self._compressobj = zlib.compressobj(wbits=-self.compress) compressobj = self._compressobj message = compressobj.compress(message) message = message + compressobj.flush( zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH) if message.endswith(_WS_DEFLATE_TRAILING): message = message[:-4] rsv = rsv | 0x40 msg_length = len(message) use_mask = self.use_mask if use_mask: mask_bit = 0x80 else: mask_bit = 0 if msg_length < 126: header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit) elif msg_length < (1 << 16): header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length) else: header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length) if use_mask: mask = self.randrange(0, 0xffffffff) mask = mask.to_bytes(4, 'big') message = bytearray(message) _websocket_mask(mask, message) self.transport.write(header + mask + message) self._output_size += len(header) + len(mask) + len(message) else: if len(message) > MSG_SIZE: self.transport.write(header) self.transport.write(message) else: self.transport.write(header + message) self._output_size += len(header) + len(message) if self._output_size > self._limit: self._output_size = 0 await self.protocol._drain_helper()
[ "async", "def", "_send_frame", "(", "self", ",", "message", ":", "bytes", ",", "opcode", ":", "int", ",", "compress", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "None", ":", "if", "self", ".", "_closing", ":", "ws_logger", ".", "warning", "(", "'websocket connection is closing.'", ")", "rsv", "=", "0", "# Only compress larger packets (disabled)", "# Does small packet needs to be compressed?", "# if self.compress and opcode < 8 and len(message) > 124:", "if", "(", "compress", "or", "self", ".", "compress", ")", "and", "opcode", "<", "8", ":", "if", "compress", ":", "# Do not set self._compress if compressing is for this frame", "compressobj", "=", "zlib", ".", "compressobj", "(", "wbits", "=", "-", "compress", ")", "else", ":", "# self.compress", "if", "not", "self", ".", "_compressobj", ":", "self", ".", "_compressobj", "=", "zlib", ".", "compressobj", "(", "wbits", "=", "-", "self", ".", "compress", ")", "compressobj", "=", "self", ".", "_compressobj", "message", "=", "compressobj", ".", "compress", "(", "message", ")", "message", "=", "message", "+", "compressobj", ".", "flush", "(", "zlib", ".", "Z_FULL_FLUSH", "if", "self", ".", "notakeover", "else", "zlib", ".", "Z_SYNC_FLUSH", ")", "if", "message", ".", "endswith", "(", "_WS_DEFLATE_TRAILING", ")", ":", "message", "=", "message", "[", ":", "-", "4", "]", "rsv", "=", "rsv", "|", "0x40", "msg_length", "=", "len", "(", "message", ")", "use_mask", "=", "self", ".", "use_mask", "if", "use_mask", ":", "mask_bit", "=", "0x80", "else", ":", "mask_bit", "=", "0", "if", "msg_length", "<", "126", ":", "header", "=", "PACK_LEN1", "(", "0x80", "|", "rsv", "|", "opcode", ",", "msg_length", "|", "mask_bit", ")", "elif", "msg_length", "<", "(", "1", "<<", "16", ")", ":", "header", "=", "PACK_LEN2", "(", "0x80", "|", "rsv", "|", "opcode", ",", "126", "|", "mask_bit", ",", "msg_length", ")", "else", ":", "header", "=", "PACK_LEN3", "(", "0x80", "|", "rsv", "|", "opcode", ",", "127", "|", "mask_bit", ",", 
"msg_length", ")", "if", "use_mask", ":", "mask", "=", "self", ".", "randrange", "(", "0", ",", "0xffffffff", ")", "mask", "=", "mask", ".", "to_bytes", "(", "4", ",", "'big'", ")", "message", "=", "bytearray", "(", "message", ")", "_websocket_mask", "(", "mask", ",", "message", ")", "self", ".", "transport", ".", "write", "(", "header", "+", "mask", "+", "message", ")", "self", ".", "_output_size", "+=", "len", "(", "header", ")", "+", "len", "(", "mask", ")", "+", "len", "(", "message", ")", "else", ":", "if", "len", "(", "message", ")", ">", "MSG_SIZE", ":", "self", ".", "transport", ".", "write", "(", "header", ")", "self", ".", "transport", ".", "write", "(", "message", ")", "else", ":", "self", ".", "transport", ".", "write", "(", "header", "+", "message", ")", "self", ".", "_output_size", "+=", "len", "(", "header", ")", "+", "len", "(", "message", ")", "if", "self", ".", "_output_size", ">", "self", ".", "_limit", ":", "self", ".", "_output_size", "=", "0", "await", "self", ".", "protocol", ".", "_drain_helper", "(", ")" ]
Send a frame over the websocket with message as its payload.
[ "Send", "a", "frame", "over", "the", "websocket", "with", "message", "as", "its", "payload", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L561-L620
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WebSocketWriter.pong
async def pong(self, message: bytes=b'') -> None: """Send pong message.""" if isinstance(message, str): message = message.encode('utf-8') await self._send_frame(message, WSMsgType.PONG)
python
async def pong(self, message: bytes=b'') -> None: """Send pong message.""" if isinstance(message, str): message = message.encode('utf-8') await self._send_frame(message, WSMsgType.PONG)
[ "async", "def", "pong", "(", "self", ",", "message", ":", "bytes", "=", "b''", ")", "->", "None", ":", "if", "isinstance", "(", "message", ",", "str", ")", ":", "message", "=", "message", ".", "encode", "(", "'utf-8'", ")", "await", "self", ".", "_send_frame", "(", "message", ",", "WSMsgType", ".", "PONG", ")" ]
Send pong message.
[ "Send", "pong", "message", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L622-L626
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WebSocketWriter.ping
async def ping(self, message: bytes=b'') -> None: """Send ping message.""" if isinstance(message, str): message = message.encode('utf-8') await self._send_frame(message, WSMsgType.PING)
python
async def ping(self, message: bytes=b'') -> None: """Send ping message.""" if isinstance(message, str): message = message.encode('utf-8') await self._send_frame(message, WSMsgType.PING)
[ "async", "def", "ping", "(", "self", ",", "message", ":", "bytes", "=", "b''", ")", "->", "None", ":", "if", "isinstance", "(", "message", ",", "str", ")", ":", "message", "=", "message", ".", "encode", "(", "'utf-8'", ")", "await", "self", ".", "_send_frame", "(", "message", ",", "WSMsgType", ".", "PING", ")" ]
Send ping message.
[ "Send", "ping", "message", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L628-L632
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WebSocketWriter.send
async def send(self, message: Union[str, bytes], binary: bool=False, compress: Optional[int]=None) -> None: """Send a frame over the websocket with message as its payload.""" if isinstance(message, str): message = message.encode('utf-8') if binary: await self._send_frame(message, WSMsgType.BINARY, compress) else: await self._send_frame(message, WSMsgType.TEXT, compress)
python
async def send(self, message: Union[str, bytes], binary: bool=False, compress: Optional[int]=None) -> None: """Send a frame over the websocket with message as its payload.""" if isinstance(message, str): message = message.encode('utf-8') if binary: await self._send_frame(message, WSMsgType.BINARY, compress) else: await self._send_frame(message, WSMsgType.TEXT, compress)
[ "async", "def", "send", "(", "self", ",", "message", ":", "Union", "[", "str", ",", "bytes", "]", ",", "binary", ":", "bool", "=", "False", ",", "compress", ":", "Optional", "[", "int", "]", "=", "None", ")", "->", "None", ":", "if", "isinstance", "(", "message", ",", "str", ")", ":", "message", "=", "message", ".", "encode", "(", "'utf-8'", ")", "if", "binary", ":", "await", "self", ".", "_send_frame", "(", "message", ",", "WSMsgType", ".", "BINARY", ",", "compress", ")", "else", ":", "await", "self", ".", "_send_frame", "(", "message", ",", "WSMsgType", ".", "TEXT", ",", "compress", ")" ]
Send a frame over the websocket with message as its payload.
[ "Send", "a", "frame", "over", "the", "websocket", "with", "message", "as", "its", "payload", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L634-L643
train
aio-libs/aiohttp
aiohttp/http_websocket.py
WebSocketWriter.close
async def close(self, code: int=1000, message: bytes=b'') -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): message = message.encode('utf-8') try: await self._send_frame( PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE) finally: self._closing = True
python
async def close(self, code: int=1000, message: bytes=b'') -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): message = message.encode('utf-8') try: await self._send_frame( PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE) finally: self._closing = True
[ "async", "def", "close", "(", "self", ",", "code", ":", "int", "=", "1000", ",", "message", ":", "bytes", "=", "b''", ")", "->", "None", ":", "if", "isinstance", "(", "message", ",", "str", ")", ":", "message", "=", "message", ".", "encode", "(", "'utf-8'", ")", "try", ":", "await", "self", ".", "_send_frame", "(", "PACK_CLOSE_CODE", "(", "code", ")", "+", "message", ",", "opcode", "=", "WSMsgType", ".", "CLOSE", ")", "finally", ":", "self", ".", "_closing", "=", "True" ]
Close the websocket, sending the specified code and message.
[ "Close", "the", "websocket", "sending", "the", "specified", "code", "and", "message", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_websocket.py#L645-L653
train
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar.update_cookies
def update_cookies(self, cookies: LooseCookies, response_url: URL=URL()) -> None: """Update cookies.""" hostname = response_url.raw_host if not self._unsafe and is_ip_address(hostname): # Don't accept cookies from IPs return if isinstance(cookies, Mapping): cookies = cookies.items() # type: ignore for name, cookie in cookies: if not isinstance(cookie, Morsel): tmp = SimpleCookie() tmp[name] = cookie # type: ignore cookie = tmp[name] domain = cookie["domain"] # ignore domains with trailing dots if domain.endswith('.'): domain = "" del cookie["domain"] if not domain and hostname is not None: # Set the cookie's domain to the response hostname # and set its host-only-flag self._host_only_cookies.add((hostname, name)) domain = cookie["domain"] = hostname if domain.startswith("."): # Remove leading dot domain = domain[1:] cookie["domain"] = domain if hostname and not self._is_domain_match(domain, hostname): # Setting cookies for different domains is not allowed continue path = cookie["path"] if not path or not path.startswith("/"): # Set the cookie's path to the response path path = response_url.path if not path.startswith("/"): path = "/" else: # Cut everything from the last slash to the end path = "/" + path[1:path.rfind("/")] cookie["path"] = path max_age = cookie["max-age"] if max_age: try: delta_seconds = int(max_age) self._expire_cookie(self._loop.time() + delta_seconds, domain, name) except ValueError: cookie["max-age"] = "" else: expires = cookie["expires"] if expires: expire_time = self._parse_date(expires) if expire_time: self._expire_cookie(expire_time.timestamp(), domain, name) else: cookie["expires"] = "" self._cookies[domain][name] = cookie self._do_expiration()
python
def update_cookies(self, cookies: LooseCookies, response_url: URL=URL()) -> None: """Update cookies.""" hostname = response_url.raw_host if not self._unsafe and is_ip_address(hostname): # Don't accept cookies from IPs return if isinstance(cookies, Mapping): cookies = cookies.items() # type: ignore for name, cookie in cookies: if not isinstance(cookie, Morsel): tmp = SimpleCookie() tmp[name] = cookie # type: ignore cookie = tmp[name] domain = cookie["domain"] # ignore domains with trailing dots if domain.endswith('.'): domain = "" del cookie["domain"] if not domain and hostname is not None: # Set the cookie's domain to the response hostname # and set its host-only-flag self._host_only_cookies.add((hostname, name)) domain = cookie["domain"] = hostname if domain.startswith("."): # Remove leading dot domain = domain[1:] cookie["domain"] = domain if hostname and not self._is_domain_match(domain, hostname): # Setting cookies for different domains is not allowed continue path = cookie["path"] if not path or not path.startswith("/"): # Set the cookie's path to the response path path = response_url.path if not path.startswith("/"): path = "/" else: # Cut everything from the last slash to the end path = "/" + path[1:path.rfind("/")] cookie["path"] = path max_age = cookie["max-age"] if max_age: try: delta_seconds = int(max_age) self._expire_cookie(self._loop.time() + delta_seconds, domain, name) except ValueError: cookie["max-age"] = "" else: expires = cookie["expires"] if expires: expire_time = self._parse_date(expires) if expire_time: self._expire_cookie(expire_time.timestamp(), domain, name) else: cookie["expires"] = "" self._cookies[domain][name] = cookie self._do_expiration()
[ "def", "update_cookies", "(", "self", ",", "cookies", ":", "LooseCookies", ",", "response_url", ":", "URL", "=", "URL", "(", ")", ")", "->", "None", ":", "hostname", "=", "response_url", ".", "raw_host", "if", "not", "self", ".", "_unsafe", "and", "is_ip_address", "(", "hostname", ")", ":", "# Don't accept cookies from IPs", "return", "if", "isinstance", "(", "cookies", ",", "Mapping", ")", ":", "cookies", "=", "cookies", ".", "items", "(", ")", "# type: ignore", "for", "name", ",", "cookie", "in", "cookies", ":", "if", "not", "isinstance", "(", "cookie", ",", "Morsel", ")", ":", "tmp", "=", "SimpleCookie", "(", ")", "tmp", "[", "name", "]", "=", "cookie", "# type: ignore", "cookie", "=", "tmp", "[", "name", "]", "domain", "=", "cookie", "[", "\"domain\"", "]", "# ignore domains with trailing dots", "if", "domain", ".", "endswith", "(", "'.'", ")", ":", "domain", "=", "\"\"", "del", "cookie", "[", "\"domain\"", "]", "if", "not", "domain", "and", "hostname", "is", "not", "None", ":", "# Set the cookie's domain to the response hostname", "# and set its host-only-flag", "self", ".", "_host_only_cookies", ".", "add", "(", "(", "hostname", ",", "name", ")", ")", "domain", "=", "cookie", "[", "\"domain\"", "]", "=", "hostname", "if", "domain", ".", "startswith", "(", "\".\"", ")", ":", "# Remove leading dot", "domain", "=", "domain", "[", "1", ":", "]", "cookie", "[", "\"domain\"", "]", "=", "domain", "if", "hostname", "and", "not", "self", ".", "_is_domain_match", "(", "domain", ",", "hostname", ")", ":", "# Setting cookies for different domains is not allowed", "continue", "path", "=", "cookie", "[", "\"path\"", "]", "if", "not", "path", "or", "not", "path", ".", "startswith", "(", "\"/\"", ")", ":", "# Set the cookie's path to the response path", "path", "=", "response_url", ".", "path", "if", "not", "path", ".", "startswith", "(", "\"/\"", ")", ":", "path", "=", "\"/\"", "else", ":", "# Cut everything from the last slash to the end", "path", "=", "\"/\"", "+", 
"path", "[", "1", ":", "path", ".", "rfind", "(", "\"/\"", ")", "]", "cookie", "[", "\"path\"", "]", "=", "path", "max_age", "=", "cookie", "[", "\"max-age\"", "]", "if", "max_age", ":", "try", ":", "delta_seconds", "=", "int", "(", "max_age", ")", "self", ".", "_expire_cookie", "(", "self", ".", "_loop", ".", "time", "(", ")", "+", "delta_seconds", ",", "domain", ",", "name", ")", "except", "ValueError", ":", "cookie", "[", "\"max-age\"", "]", "=", "\"\"", "else", ":", "expires", "=", "cookie", "[", "\"expires\"", "]", "if", "expires", ":", "expire_time", "=", "self", ".", "_parse_date", "(", "expires", ")", "if", "expire_time", ":", "self", ".", "_expire_cookie", "(", "expire_time", ".", "timestamp", "(", ")", ",", "domain", ",", "name", ")", "else", ":", "cookie", "[", "\"expires\"", "]", "=", "\"\"", "self", ".", "_cookies", "[", "domain", "]", "[", "name", "]", "=", "cookie", "self", ".", "_do_expiration", "(", ")" ]
Update cookies.
[ "Update", "cookies", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L113-L186
train
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar.filter_cookies
def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]': """Returns this jar's cookies filtered by their attributes.""" self._do_expiration() request_url = URL(request_url) filtered = SimpleCookie() hostname = request_url.raw_host or "" is_not_secure = request_url.scheme not in ("https", "wss") for cookie in self: name = cookie.key domain = cookie["domain"] # Send shared cookies if not domain: filtered[name] = cookie.value continue if not self._unsafe and is_ip_address(hostname): continue if (domain, name) in self._host_only_cookies: if domain != hostname: continue elif not self._is_domain_match(domain, hostname): continue if not self._is_path_match(request_url.path, cookie["path"]): continue if is_not_secure and cookie["secure"]: continue # It's critical we use the Morsel so the coded_value # (based on cookie version) is preserved mrsl_val = cast('Morsel[str]', cookie.get(cookie.key, Morsel())) mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) filtered[name] = mrsl_val return filtered
python
def filter_cookies(self, request_url: URL=URL()) -> 'BaseCookie[str]': """Returns this jar's cookies filtered by their attributes.""" self._do_expiration() request_url = URL(request_url) filtered = SimpleCookie() hostname = request_url.raw_host or "" is_not_secure = request_url.scheme not in ("https", "wss") for cookie in self: name = cookie.key domain = cookie["domain"] # Send shared cookies if not domain: filtered[name] = cookie.value continue if not self._unsafe and is_ip_address(hostname): continue if (domain, name) in self._host_only_cookies: if domain != hostname: continue elif not self._is_domain_match(domain, hostname): continue if not self._is_path_match(request_url.path, cookie["path"]): continue if is_not_secure and cookie["secure"]: continue # It's critical we use the Morsel so the coded_value # (based on cookie version) is preserved mrsl_val = cast('Morsel[str]', cookie.get(cookie.key, Morsel())) mrsl_val.set(cookie.key, cookie.value, cookie.coded_value) filtered[name] = mrsl_val return filtered
[ "def", "filter_cookies", "(", "self", ",", "request_url", ":", "URL", "=", "URL", "(", ")", ")", "->", "'BaseCookie[str]'", ":", "self", ".", "_do_expiration", "(", ")", "request_url", "=", "URL", "(", "request_url", ")", "filtered", "=", "SimpleCookie", "(", ")", "hostname", "=", "request_url", ".", "raw_host", "or", "\"\"", "is_not_secure", "=", "request_url", ".", "scheme", "not", "in", "(", "\"https\"", ",", "\"wss\"", ")", "for", "cookie", "in", "self", ":", "name", "=", "cookie", ".", "key", "domain", "=", "cookie", "[", "\"domain\"", "]", "# Send shared cookies", "if", "not", "domain", ":", "filtered", "[", "name", "]", "=", "cookie", ".", "value", "continue", "if", "not", "self", ".", "_unsafe", "and", "is_ip_address", "(", "hostname", ")", ":", "continue", "if", "(", "domain", ",", "name", ")", "in", "self", ".", "_host_only_cookies", ":", "if", "domain", "!=", "hostname", ":", "continue", "elif", "not", "self", ".", "_is_domain_match", "(", "domain", ",", "hostname", ")", ":", "continue", "if", "not", "self", ".", "_is_path_match", "(", "request_url", ".", "path", ",", "cookie", "[", "\"path\"", "]", ")", ":", "continue", "if", "is_not_secure", "and", "cookie", "[", "\"secure\"", "]", ":", "continue", "# It's critical we use the Morsel so the coded_value", "# (based on cookie version) is preserved", "mrsl_val", "=", "cast", "(", "'Morsel[str]'", ",", "cookie", ".", "get", "(", "cookie", ".", "key", ",", "Morsel", "(", ")", ")", ")", "mrsl_val", ".", "set", "(", "cookie", ".", "key", ",", "cookie", ".", "value", ",", "cookie", ".", "coded_value", ")", "filtered", "[", "name", "]", "=", "mrsl_val", "return", "filtered" ]
Returns this jar's cookies filtered by their attributes.
[ "Returns", "this", "jar", "s", "cookies", "filtered", "by", "their", "attributes", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L188-L226
train
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar._is_domain_match
def _is_domain_match(domain: str, hostname: str) -> bool: """Implements domain matching adhering to RFC 6265.""" if hostname == domain: return True if not hostname.endswith(domain): return False non_matching = hostname[:-len(domain)] if not non_matching.endswith("."): return False return not is_ip_address(hostname)
python
def _is_domain_match(domain: str, hostname: str) -> bool: """Implements domain matching adhering to RFC 6265.""" if hostname == domain: return True if not hostname.endswith(domain): return False non_matching = hostname[:-len(domain)] if not non_matching.endswith("."): return False return not is_ip_address(hostname)
[ "def", "_is_domain_match", "(", "domain", ":", "str", ",", "hostname", ":", "str", ")", "->", "bool", ":", "if", "hostname", "==", "domain", ":", "return", "True", "if", "not", "hostname", ".", "endswith", "(", "domain", ")", ":", "return", "False", "non_matching", "=", "hostname", "[", ":", "-", "len", "(", "domain", ")", "]", "if", "not", "non_matching", ".", "endswith", "(", "\".\"", ")", ":", "return", "False", "return", "not", "is_ip_address", "(", "hostname", ")" ]
Implements domain matching adhering to RFC 6265.
[ "Implements", "domain", "matching", "adhering", "to", "RFC", "6265", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L229-L242
train
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar._is_path_match
def _is_path_match(req_path: str, cookie_path: str) -> bool: """Implements path matching adhering to RFC 6265.""" if not req_path.startswith("/"): req_path = "/" if req_path == cookie_path: return True if not req_path.startswith(cookie_path): return False if cookie_path.endswith("/"): return True non_matching = req_path[len(cookie_path):] return non_matching.startswith("/")
python
def _is_path_match(req_path: str, cookie_path: str) -> bool: """Implements path matching adhering to RFC 6265.""" if not req_path.startswith("/"): req_path = "/" if req_path == cookie_path: return True if not req_path.startswith(cookie_path): return False if cookie_path.endswith("/"): return True non_matching = req_path[len(cookie_path):] return non_matching.startswith("/")
[ "def", "_is_path_match", "(", "req_path", ":", "str", ",", "cookie_path", ":", "str", ")", "->", "bool", ":", "if", "not", "req_path", ".", "startswith", "(", "\"/\"", ")", ":", "req_path", "=", "\"/\"", "if", "req_path", "==", "cookie_path", ":", "return", "True", "if", "not", "req_path", ".", "startswith", "(", "cookie_path", ")", ":", "return", "False", "if", "cookie_path", ".", "endswith", "(", "\"/\"", ")", ":", "return", "True", "non_matching", "=", "req_path", "[", "len", "(", "cookie_path", ")", ":", "]", "return", "non_matching", ".", "startswith", "(", "\"/\"", ")" ]
Implements path matching adhering to RFC 6265.
[ "Implements", "path", "matching", "adhering", "to", "RFC", "6265", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L245-L261
train
aio-libs/aiohttp
aiohttp/cookiejar.py
CookieJar._parse_date
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]: """Implements date string parsing adhering to RFC 6265.""" if not date_str: return None found_time = False found_day = False found_month = False found_year = False hour = minute = second = 0 day = 0 month = 0 year = 0 for token_match in cls.DATE_TOKENS_RE.finditer(date_str): token = token_match.group("token") if not found_time: time_match = cls.DATE_HMS_TIME_RE.match(token) if time_match: found_time = True hour, minute, second = [ int(s) for s in time_match.groups()] continue if not found_day: day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) if day_match: found_day = True day = int(day_match.group()) continue if not found_month: month_match = cls.DATE_MONTH_RE.match(token) if month_match: found_month = True month = month_match.lastindex continue if not found_year: year_match = cls.DATE_YEAR_RE.match(token) if year_match: found_year = True year = int(year_match.group()) if 70 <= year <= 99: year += 1900 elif 0 <= year <= 69: year += 2000 if False in (found_day, found_month, found_year, found_time): return None if not 1 <= day <= 31: return None if year < 1601 or hour > 23 or minute > 59 or second > 59: return None return datetime.datetime(year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc)
python
def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]: """Implements date string parsing adhering to RFC 6265.""" if not date_str: return None found_time = False found_day = False found_month = False found_year = False hour = minute = second = 0 day = 0 month = 0 year = 0 for token_match in cls.DATE_TOKENS_RE.finditer(date_str): token = token_match.group("token") if not found_time: time_match = cls.DATE_HMS_TIME_RE.match(token) if time_match: found_time = True hour, minute, second = [ int(s) for s in time_match.groups()] continue if not found_day: day_match = cls.DATE_DAY_OF_MONTH_RE.match(token) if day_match: found_day = True day = int(day_match.group()) continue if not found_month: month_match = cls.DATE_MONTH_RE.match(token) if month_match: found_month = True month = month_match.lastindex continue if not found_year: year_match = cls.DATE_YEAR_RE.match(token) if year_match: found_year = True year = int(year_match.group()) if 70 <= year <= 99: year += 1900 elif 0 <= year <= 69: year += 2000 if False in (found_day, found_month, found_year, found_time): return None if not 1 <= day <= 31: return None if year < 1601 or hour > 23 or minute > 59 or second > 59: return None return datetime.datetime(year, month, day, hour, minute, second, tzinfo=datetime.timezone.utc)
[ "def", "_parse_date", "(", "cls", ",", "date_str", ":", "str", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "if", "not", "date_str", ":", "return", "None", "found_time", "=", "False", "found_day", "=", "False", "found_month", "=", "False", "found_year", "=", "False", "hour", "=", "minute", "=", "second", "=", "0", "day", "=", "0", "month", "=", "0", "year", "=", "0", "for", "token_match", "in", "cls", ".", "DATE_TOKENS_RE", ".", "finditer", "(", "date_str", ")", ":", "token", "=", "token_match", ".", "group", "(", "\"token\"", ")", "if", "not", "found_time", ":", "time_match", "=", "cls", ".", "DATE_HMS_TIME_RE", ".", "match", "(", "token", ")", "if", "time_match", ":", "found_time", "=", "True", "hour", ",", "minute", ",", "second", "=", "[", "int", "(", "s", ")", "for", "s", "in", "time_match", ".", "groups", "(", ")", "]", "continue", "if", "not", "found_day", ":", "day_match", "=", "cls", ".", "DATE_DAY_OF_MONTH_RE", ".", "match", "(", "token", ")", "if", "day_match", ":", "found_day", "=", "True", "day", "=", "int", "(", "day_match", ".", "group", "(", ")", ")", "continue", "if", "not", "found_month", ":", "month_match", "=", "cls", ".", "DATE_MONTH_RE", ".", "match", "(", "token", ")", "if", "month_match", ":", "found_month", "=", "True", "month", "=", "month_match", ".", "lastindex", "continue", "if", "not", "found_year", ":", "year_match", "=", "cls", ".", "DATE_YEAR_RE", ".", "match", "(", "token", ")", "if", "year_match", ":", "found_year", "=", "True", "year", "=", "int", "(", "year_match", ".", "group", "(", ")", ")", "if", "70", "<=", "year", "<=", "99", ":", "year", "+=", "1900", "elif", "0", "<=", "year", "<=", "69", ":", "year", "+=", "2000", "if", "False", "in", "(", "found_day", ",", "found_month", ",", "found_year", ",", "found_time", ")", ":", "return", "None", "if", "not", "1", "<=", "day", "<=", "31", ":", "return", "None", "if", "year", "<", "1601", "or", "hour", ">", "23", "or", "minute", ">", "59", "or", "second", 
">", "59", ":", "return", "None", "return", "datetime", ".", "datetime", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "minute", ",", "second", ",", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")" ]
Implements date string parsing adhering to RFC 6265.
[ "Implements", "date", "string", "parsing", "adhering", "to", "RFC", "6265", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/cookiejar.py#L264-L327
train
aio-libs/aiohttp
examples/legacy/tcp_protocol_parser.py
my_protocol_parser
def my_protocol_parser(out, buf): """Parser is used with StreamParser for incremental protocol parsing. Parser is a generator function, but it is not a coroutine. Usually parsers are implemented as a state machine. more details in asyncio/parsers.py existing parsers: * HTTP protocol parsers asyncio/http/protocol.py * websocket parser asyncio/http/websocket.py """ while True: tp = yield from buf.read(5) if tp in (MSG_PING, MSG_PONG): # skip line yield from buf.skipuntil(b'\r\n') out.feed_data(Message(tp, None)) elif tp == MSG_STOP: out.feed_data(Message(tp, None)) elif tp == MSG_TEXT: # read text text = yield from buf.readuntil(b'\r\n') out.feed_data(Message(tp, text.strip().decode('utf-8'))) else: raise ValueError('Unknown protocol prefix.')
python
def my_protocol_parser(out, buf): """Parser is used with StreamParser for incremental protocol parsing. Parser is a generator function, but it is not a coroutine. Usually parsers are implemented as a state machine. more details in asyncio/parsers.py existing parsers: * HTTP protocol parsers asyncio/http/protocol.py * websocket parser asyncio/http/websocket.py """ while True: tp = yield from buf.read(5) if tp in (MSG_PING, MSG_PONG): # skip line yield from buf.skipuntil(b'\r\n') out.feed_data(Message(tp, None)) elif tp == MSG_STOP: out.feed_data(Message(tp, None)) elif tp == MSG_TEXT: # read text text = yield from buf.readuntil(b'\r\n') out.feed_data(Message(tp, text.strip().decode('utf-8'))) else: raise ValueError('Unknown protocol prefix.')
[ "def", "my_protocol_parser", "(", "out", ",", "buf", ")", ":", "while", "True", ":", "tp", "=", "yield", "from", "buf", ".", "read", "(", "5", ")", "if", "tp", "in", "(", "MSG_PING", ",", "MSG_PONG", ")", ":", "# skip line", "yield", "from", "buf", ".", "skipuntil", "(", "b'\\r\\n'", ")", "out", ".", "feed_data", "(", "Message", "(", "tp", ",", "None", ")", ")", "elif", "tp", "==", "MSG_STOP", ":", "out", ".", "feed_data", "(", "Message", "(", "tp", ",", "None", ")", ")", "elif", "tp", "==", "MSG_TEXT", ":", "# read text", "text", "=", "yield", "from", "buf", ".", "readuntil", "(", "b'\\r\\n'", ")", "out", ".", "feed_data", "(", "Message", "(", "tp", ",", "text", ".", "strip", "(", ")", ".", "decode", "(", "'utf-8'", ")", ")", ")", "else", ":", "raise", "ValueError", "(", "'Unknown protocol prefix.'", ")" ]
Parser is used with StreamParser for incremental protocol parsing. Parser is a generator function, but it is not a coroutine. Usually parsers are implemented as a state machine. more details in asyncio/parsers.py existing parsers: * HTTP protocol parsers asyncio/http/protocol.py * websocket parser asyncio/http/websocket.py
[ "Parser", "is", "used", "with", "StreamParser", "for", "incremental", "protocol", "parsing", ".", "Parser", "is", "a", "generator", "function", "but", "it", "is", "not", "a", "coroutine", ".", "Usually", "parsers", "are", "implemented", "as", "a", "state", "machine", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/examples/legacy/tcp_protocol_parser.py#L23-L46
train
aio-libs/aiohttp
aiohttp/payload.py
Payload.set_content_disposition
def set_content_disposition(self, disptype: str, quote_fields: bool=True, **params: Any) -> None: """Sets ``Content-Disposition`` header.""" self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( disptype, quote_fields=quote_fields, **params)
python
def set_content_disposition(self, disptype: str, quote_fields: bool=True, **params: Any) -> None: """Sets ``Content-Disposition`` header.""" self._headers[hdrs.CONTENT_DISPOSITION] = content_disposition_header( disptype, quote_fields=quote_fields, **params)
[ "def", "set_content_disposition", "(", "self", ",", "disptype", ":", "str", ",", "quote_fields", ":", "bool", "=", "True", ",", "*", "*", "params", ":", "Any", ")", "->", "None", ":", "self", ".", "_headers", "[", "hdrs", ".", "CONTENT_DISPOSITION", "]", "=", "content_disposition_header", "(", "disptype", ",", "quote_fields", "=", "quote_fields", ",", "*", "*", "params", ")" ]
Sets ``Content-Disposition`` header.
[ "Sets", "Content", "-", "Disposition", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/payload.py#L187-L193
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.clone
def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel, headers: LooseHeaders=sentinel, scheme: str=sentinel, host: str=sentinel, remote: str=sentinel) -> 'BaseRequest': """Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object. """ if self._read_bytes: raise RuntimeError("Cannot clone request " "after reading its content") dct = {} # type: Dict[str, Any] if method is not sentinel: dct['method'] = method if rel_url is not sentinel: new_url = URL(rel_url) dct['url'] = new_url dct['path'] = str(new_url) if headers is not sentinel: # a copy semantic dct['headers'] = CIMultiDictProxy(CIMultiDict(headers)) dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()) message = self._message._replace(**dct) kwargs = {} if scheme is not sentinel: kwargs['scheme'] = scheme if host is not sentinel: kwargs['host'] = host if remote is not sentinel: kwargs['remote'] = remote return self.__class__( message, self._payload, self._protocol, self._payload_writer, self._task, self._loop, client_max_size=self._client_max_size, state=self._state.copy(), **kwargs)
python
def clone(self, *, method: str=sentinel, rel_url: StrOrURL=sentinel, headers: LooseHeaders=sentinel, scheme: str=sentinel, host: str=sentinel, remote: str=sentinel) -> 'BaseRequest': """Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object. """ if self._read_bytes: raise RuntimeError("Cannot clone request " "after reading its content") dct = {} # type: Dict[str, Any] if method is not sentinel: dct['method'] = method if rel_url is not sentinel: new_url = URL(rel_url) dct['url'] = new_url dct['path'] = str(new_url) if headers is not sentinel: # a copy semantic dct['headers'] = CIMultiDictProxy(CIMultiDict(headers)) dct['raw_headers'] = tuple((k.encode('utf-8'), v.encode('utf-8')) for k, v in headers.items()) message = self._message._replace(**dct) kwargs = {} if scheme is not sentinel: kwargs['scheme'] = scheme if host is not sentinel: kwargs['host'] = host if remote is not sentinel: kwargs['remote'] = remote return self.__class__( message, self._payload, self._protocol, self._payload_writer, self._task, self._loop, client_max_size=self._client_max_size, state=self._state.copy(), **kwargs)
[ "def", "clone", "(", "self", ",", "*", ",", "method", ":", "str", "=", "sentinel", ",", "rel_url", ":", "StrOrURL", "=", "sentinel", ",", "headers", ":", "LooseHeaders", "=", "sentinel", ",", "scheme", ":", "str", "=", "sentinel", ",", "host", ":", "str", "=", "sentinel", ",", "remote", ":", "str", "=", "sentinel", ")", "->", "'BaseRequest'", ":", "if", "self", ".", "_read_bytes", ":", "raise", "RuntimeError", "(", "\"Cannot clone request \"", "\"after reading its content\"", ")", "dct", "=", "{", "}", "# type: Dict[str, Any]", "if", "method", "is", "not", "sentinel", ":", "dct", "[", "'method'", "]", "=", "method", "if", "rel_url", "is", "not", "sentinel", ":", "new_url", "=", "URL", "(", "rel_url", ")", "dct", "[", "'url'", "]", "=", "new_url", "dct", "[", "'path'", "]", "=", "str", "(", "new_url", ")", "if", "headers", "is", "not", "sentinel", ":", "# a copy semantic", "dct", "[", "'headers'", "]", "=", "CIMultiDictProxy", "(", "CIMultiDict", "(", "headers", ")", ")", "dct", "[", "'raw_headers'", "]", "=", "tuple", "(", "(", "k", ".", "encode", "(", "'utf-8'", ")", ",", "v", ".", "encode", "(", "'utf-8'", ")", ")", "for", "k", ",", "v", "in", "headers", ".", "items", "(", ")", ")", "message", "=", "self", ".", "_message", ".", "_replace", "(", "*", "*", "dct", ")", "kwargs", "=", "{", "}", "if", "scheme", "is", "not", "sentinel", ":", "kwargs", "[", "'scheme'", "]", "=", "scheme", "if", "host", "is", "not", "sentinel", ":", "kwargs", "[", "'host'", "]", "=", "host", "if", "remote", "is", "not", "sentinel", ":", "kwargs", "[", "'remote'", "]", "=", "remote", "return", "self", ".", "__class__", "(", "message", ",", "self", ".", "_payload", ",", "self", ".", "_protocol", ",", "self", ".", "_payload_writer", ",", "self", ".", "_task", ",", "self", ".", "_loop", ",", "client_max_size", "=", "self", ".", "_client_max_size", ",", "state", "=", "self", ".", "_state", ".", "copy", "(", ")", ",", "*", "*", "kwargs", ")" ]
Clone itself with replacement some attributes. Creates and returns a new instance of Request object. If no parameters are given, an exact copy is returned. If a parameter is not passed, it will reuse the one from the current request object.
[ "Clone", "itself", "with", "replacement", "some", "attributes", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L148-L196
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.forwarded
def forwarded(self) -> Tuple[Mapping[str, str], ...]: """A tuple containing all parsed Forwarded header(s). Makes an effort to parse Forwarded headers as specified by RFC 7239: - It adds one (immutable) dictionary per Forwarded 'field-value', ie per proxy. The element corresponds to the data in the Forwarded field-value added by the first proxy encountered by the client. Each subsequent item corresponds to those added by later proxies. - It checks that every value has valid syntax in general as specified in section 4: either a 'token' or a 'quoted-string'. - It un-escapes found escape sequences. - It does NOT validate 'by' and 'for' contents as specified in section 6. - It does NOT validate 'host' contents (Host ABNF). - It does NOT validate 'proto' contents for valid URI scheme names. Returns a tuple containing one or more immutable dicts """ elems = [] for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): length = len(field_value) pos = 0 need_separator = False elem = {} # type: Dict[str, str] elems.append(types.MappingProxyType(elem)) while 0 <= pos < length: match = _FORWARDED_PAIR_RE.match(field_value, pos) if match is not None: # got a valid forwarded-pair if need_separator: # bad syntax here, skip to next comma pos = field_value.find(',', pos) else: name, value, port = match.groups() if value[0] == '"': # quoted string: remove quotes and unescape value = _QUOTED_PAIR_REPLACE_RE.sub(r'\1', value[1:-1]) if port: value += port elem[name.lower()] = value pos += len(match.group(0)) need_separator = True elif field_value[pos] == ',': # next forwarded-element need_separator = False elem = {} elems.append(types.MappingProxyType(elem)) pos += 1 elif field_value[pos] == ';': # next forwarded-pair need_separator = False pos += 1 elif field_value[pos] in ' \t': # Allow whitespace even between forwarded-pairs, though # RFC 7239 doesn't. This simplifies code and is in line # with Postel's law. 
pos += 1 else: # bad syntax here, skip to next comma pos = field_value.find(',', pos) return tuple(elems)
python
def forwarded(self) -> Tuple[Mapping[str, str], ...]: """A tuple containing all parsed Forwarded header(s). Makes an effort to parse Forwarded headers as specified by RFC 7239: - It adds one (immutable) dictionary per Forwarded 'field-value', ie per proxy. The element corresponds to the data in the Forwarded field-value added by the first proxy encountered by the client. Each subsequent item corresponds to those added by later proxies. - It checks that every value has valid syntax in general as specified in section 4: either a 'token' or a 'quoted-string'. - It un-escapes found escape sequences. - It does NOT validate 'by' and 'for' contents as specified in section 6. - It does NOT validate 'host' contents (Host ABNF). - It does NOT validate 'proto' contents for valid URI scheme names. Returns a tuple containing one or more immutable dicts """ elems = [] for field_value in self._message.headers.getall(hdrs.FORWARDED, ()): length = len(field_value) pos = 0 need_separator = False elem = {} # type: Dict[str, str] elems.append(types.MappingProxyType(elem)) while 0 <= pos < length: match = _FORWARDED_PAIR_RE.match(field_value, pos) if match is not None: # got a valid forwarded-pair if need_separator: # bad syntax here, skip to next comma pos = field_value.find(',', pos) else: name, value, port = match.groups() if value[0] == '"': # quoted string: remove quotes and unescape value = _QUOTED_PAIR_REPLACE_RE.sub(r'\1', value[1:-1]) if port: value += port elem[name.lower()] = value pos += len(match.group(0)) need_separator = True elif field_value[pos] == ',': # next forwarded-element need_separator = False elem = {} elems.append(types.MappingProxyType(elem)) pos += 1 elif field_value[pos] == ';': # next forwarded-pair need_separator = False pos += 1 elif field_value[pos] in ' \t': # Allow whitespace even between forwarded-pairs, though # RFC 7239 doesn't. This simplifies code and is in line # with Postel's law. 
pos += 1 else: # bad syntax here, skip to next comma pos = field_value.find(',', pos) return tuple(elems)
[ "def", "forwarded", "(", "self", ")", "->", "Tuple", "[", "Mapping", "[", "str", ",", "str", "]", ",", "...", "]", ":", "elems", "=", "[", "]", "for", "field_value", "in", "self", ".", "_message", ".", "headers", ".", "getall", "(", "hdrs", ".", "FORWARDED", ",", "(", ")", ")", ":", "length", "=", "len", "(", "field_value", ")", "pos", "=", "0", "need_separator", "=", "False", "elem", "=", "{", "}", "# type: Dict[str, str]", "elems", ".", "append", "(", "types", ".", "MappingProxyType", "(", "elem", ")", ")", "while", "0", "<=", "pos", "<", "length", ":", "match", "=", "_FORWARDED_PAIR_RE", ".", "match", "(", "field_value", ",", "pos", ")", "if", "match", "is", "not", "None", ":", "# got a valid forwarded-pair", "if", "need_separator", ":", "# bad syntax here, skip to next comma", "pos", "=", "field_value", ".", "find", "(", "','", ",", "pos", ")", "else", ":", "name", ",", "value", ",", "port", "=", "match", ".", "groups", "(", ")", "if", "value", "[", "0", "]", "==", "'\"'", ":", "# quoted string: remove quotes and unescape", "value", "=", "_QUOTED_PAIR_REPLACE_RE", ".", "sub", "(", "r'\\1'", ",", "value", "[", "1", ":", "-", "1", "]", ")", "if", "port", ":", "value", "+=", "port", "elem", "[", "name", ".", "lower", "(", ")", "]", "=", "value", "pos", "+=", "len", "(", "match", ".", "group", "(", "0", ")", ")", "need_separator", "=", "True", "elif", "field_value", "[", "pos", "]", "==", "','", ":", "# next forwarded-element", "need_separator", "=", "False", "elem", "=", "{", "}", "elems", ".", "append", "(", "types", ".", "MappingProxyType", "(", "elem", ")", ")", "pos", "+=", "1", "elif", "field_value", "[", "pos", "]", "==", "';'", ":", "# next forwarded-pair", "need_separator", "=", "False", "pos", "+=", "1", "elif", "field_value", "[", "pos", "]", "in", "' \\t'", ":", "# Allow whitespace even between forwarded-pairs, though", "# RFC 7239 doesn't. 
This simplifies code and is in line", "# with Postel's law.", "pos", "+=", "1", "else", ":", "# bad syntax here, skip to next comma", "pos", "=", "field_value", ".", "find", "(", "','", ",", "pos", ")", "return", "tuple", "(", "elems", ")" ]
A tuple containing all parsed Forwarded header(s). Makes an effort to parse Forwarded headers as specified by RFC 7239: - It adds one (immutable) dictionary per Forwarded 'field-value', ie per proxy. The element corresponds to the data in the Forwarded field-value added by the first proxy encountered by the client. Each subsequent item corresponds to those added by later proxies. - It checks that every value has valid syntax in general as specified in section 4: either a 'token' or a 'quoted-string'. - It un-escapes found escape sequences. - It does NOT validate 'by' and 'for' contents as specified in section 6. - It does NOT validate 'host' contents (Host ABNF). - It does NOT validate 'proto' contents for valid URI scheme names. Returns a tuple containing one or more immutable dicts
[ "A", "tuple", "containing", "all", "parsed", "Forwarded", "header", "(", "s", ")", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L259-L318
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.host
def host(self) -> str: """Hostname of the request. Hostname is resolved in this order: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value """ host = self._message.headers.get(hdrs.HOST) if host is not None: return host else: return socket.getfqdn()
python
def host(self) -> str: """Hostname of the request. Hostname is resolved in this order: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value """ host = self._message.headers.get(hdrs.HOST) if host is not None: return host else: return socket.getfqdn()
[ "def", "host", "(", "self", ")", "->", "str", ":", "host", "=", "self", ".", "_message", ".", "headers", ".", "get", "(", "hdrs", ".", "HOST", ")", "if", "host", "is", "not", "None", ":", "return", "host", "else", ":", "return", "socket", ".", "getfqdn", "(", ")" ]
Hostname of the request. Hostname is resolved in this order: - overridden value by .clone(host=new_host) call. - HOST HTTP header - socket.getfqdn() value
[ "Hostname", "of", "the", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L353-L366
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.remote
def remote(self) -> Optional[str]: """Remote IP of client initiated HTTP request. The IP is resolved in this order: - overridden value by .clone(remote=new_remote) call. - peername of opened socket """ if isinstance(self._transport_peername, (list, tuple)): return self._transport_peername[0] else: return self._transport_peername
python
def remote(self) -> Optional[str]: """Remote IP of client initiated HTTP request. The IP is resolved in this order: - overridden value by .clone(remote=new_remote) call. - peername of opened socket """ if isinstance(self._transport_peername, (list, tuple)): return self._transport_peername[0] else: return self._transport_peername
[ "def", "remote", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "isinstance", "(", "self", ".", "_transport_peername", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "self", ".", "_transport_peername", "[", "0", "]", "else", ":", "return", "self", ".", "_transport_peername" ]
Remote IP of client initiated HTTP request. The IP is resolved in this order: - overridden value by .clone(remote=new_remote) call. - peername of opened socket
[ "Remote", "IP", "of", "client", "initiated", "HTTP", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L369-L380
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest._http_date
def _http_date(_date_str: str) -> Optional[datetime.datetime]: """Process a date string, return a datetime object """ if _date_str is not None: timetuple = parsedate(_date_str) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
python
def _http_date(_date_str: str) -> Optional[datetime.datetime]: """Process a date string, return a datetime object """ if _date_str is not None: timetuple = parsedate(_date_str) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
[ "def", "_http_date", "(", "_date_str", ":", "str", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "if", "_date_str", "is", "not", "None", ":", "timetuple", "=", "parsedate", "(", "_date_str", ")", "if", "timetuple", "is", "not", "None", ":", "return", "datetime", ".", "datetime", "(", "*", "timetuple", "[", ":", "6", "]", ",", "tzinfo", "=", "datetime", ".", "timezone", ".", "utc", ")", "return", "None" ]
Process a date string, return a datetime object
[ "Process", "a", "date", "string", "return", "a", "datetime", "object" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L436-L444
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.if_modified_since
def if_modified_since(self) -> Optional[datetime.datetime]: """The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
python
def if_modified_since(self) -> Optional[datetime.datetime]: """The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE))
[ "def", "if_modified_since", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_MODIFIED_SINCE", ")", ")" ]
The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "If", "-", "Modified", "-", "Since", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L447-L452
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.if_unmodified_since
def if_unmodified_since(self) -> Optional[datetime.datetime]: """The value of If-Unmodified-Since HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
python
def if_unmodified_since(self) -> Optional[datetime.datetime]: """The value of If-Unmodified-Since HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_UNMODIFIED_SINCE))
[ "def", "if_unmodified_since", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_UNMODIFIED_SINCE", ")", ")" ]
The value of If-Unmodified-Since HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "If", "-", "Unmodified", "-", "Since", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L455-L460
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.if_range
def if_range(self) -> Optional[datetime.datetime]: """The value of If-Range HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_RANGE))
python
def if_range(self) -> Optional[datetime.datetime]: """The value of If-Range HTTP header, or None. This header is represented as a `datetime` object. """ return self._http_date(self.headers.get(hdrs.IF_RANGE))
[ "def", "if_range", "(", "self", ")", "->", "Optional", "[", "datetime", ".", "datetime", "]", ":", "return", "self", ".", "_http_date", "(", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "IF_RANGE", ")", ")" ]
The value of If-Range HTTP header, or None. This header is represented as a `datetime` object.
[ "The", "value", "of", "If", "-", "Range", "HTTP", "header", "or", "None", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L463-L468
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.cookies
def cookies(self) -> Mapping[str, str]: """Return request cookies. A read-only dictionary-like object. """ raw = self.headers.get(hdrs.COOKIE, '') parsed = SimpleCookie(raw) return MappingProxyType( {key: val.value for key, val in parsed.items()})
python
def cookies(self) -> Mapping[str, str]: """Return request cookies. A read-only dictionary-like object. """ raw = self.headers.get(hdrs.COOKIE, '') parsed = SimpleCookie(raw) return MappingProxyType( {key: val.value for key, val in parsed.items()})
[ "def", "cookies", "(", "self", ")", "->", "Mapping", "[", "str", ",", "str", "]", ":", "raw", "=", "self", ".", "headers", ".", "get", "(", "hdrs", ".", "COOKIE", ",", "''", ")", "parsed", "=", "SimpleCookie", "(", "raw", ")", "return", "MappingProxyType", "(", "{", "key", ":", "val", ".", "value", "for", "key", ",", "val", "in", "parsed", ".", "items", "(", ")", "}", ")" ]
Return request cookies. A read-only dictionary-like object.
[ "Return", "request", "cookies", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L476-L484
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.http_range
def http_range(self) -> slice: """The content of Range HTTP header. Return a slice instance. """ rng = self._headers.get(hdrs.RANGE) start, end = None, None if rng is not None: try: pattern = r'^bytes=(\d*)-(\d*)$' start, end = re.findall(pattern, rng)[0] except IndexError: # pattern was not found in header raise ValueError("range not in acceptable format") end = int(end) if end else None start = int(start) if start else None if start is None and end is not None: # end with no start is to return tail of content start = -end end = None if start is not None and end is not None: # end is inclusive in range header, exclusive for slice end += 1 if start >= end: raise ValueError('start cannot be after end') if start is end is None: # No valid range supplied raise ValueError('No start or end of range specified') return slice(start, end, 1)
python
def http_range(self) -> slice: """The content of Range HTTP header. Return a slice instance. """ rng = self._headers.get(hdrs.RANGE) start, end = None, None if rng is not None: try: pattern = r'^bytes=(\d*)-(\d*)$' start, end = re.findall(pattern, rng)[0] except IndexError: # pattern was not found in header raise ValueError("range not in acceptable format") end = int(end) if end else None start = int(start) if start else None if start is None and end is not None: # end with no start is to return tail of content start = -end end = None if start is not None and end is not None: # end is inclusive in range header, exclusive for slice end += 1 if start >= end: raise ValueError('start cannot be after end') if start is end is None: # No valid range supplied raise ValueError('No start or end of range specified') return slice(start, end, 1)
[ "def", "http_range", "(", "self", ")", "->", "slice", ":", "rng", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "RANGE", ")", "start", ",", "end", "=", "None", ",", "None", "if", "rng", "is", "not", "None", ":", "try", ":", "pattern", "=", "r'^bytes=(\\d*)-(\\d*)$'", "start", ",", "end", "=", "re", ".", "findall", "(", "pattern", ",", "rng", ")", "[", "0", "]", "except", "IndexError", ":", "# pattern was not found in header", "raise", "ValueError", "(", "\"range not in acceptable format\"", ")", "end", "=", "int", "(", "end", ")", "if", "end", "else", "None", "start", "=", "int", "(", "start", ")", "if", "start", "else", "None", "if", "start", "is", "None", "and", "end", "is", "not", "None", ":", "# end with no start is to return tail of content", "start", "=", "-", "end", "end", "=", "None", "if", "start", "is", "not", "None", "and", "end", "is", "not", "None", ":", "# end is inclusive in range header, exclusive for slice", "end", "+=", "1", "if", "start", ">=", "end", ":", "raise", "ValueError", "(", "'start cannot be after end'", ")", "if", "start", "is", "end", "is", "None", ":", "# No valid range supplied", "raise", "ValueError", "(", "'No start or end of range specified'", ")", "return", "slice", "(", "start", ",", "end", ",", "1", ")" ]
The content of Range HTTP header. Return a slice instance.
[ "The", "content", "of", "Range", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L487-L520
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.has_body
def has_body(self) -> bool: """Return True if request's HTTP BODY can be read, False otherwise.""" warnings.warn( "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2) return not self._payload.at_eof()
python
def has_body(self) -> bool: """Return True if request's HTTP BODY can be read, False otherwise.""" warnings.warn( "Deprecated, use .can_read_body #2005", DeprecationWarning, stacklevel=2) return not self._payload.at_eof()
[ "def", "has_body", "(", "self", ")", "->", "bool", ":", "warnings", ".", "warn", "(", "\"Deprecated, use .can_read_body #2005\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "return", "not", "self", ".", "_payload", ".", "at_eof", "(", ")" ]
Return True if request's HTTP BODY can be read, False otherwise.
[ "Return", "True", "if", "request", "s", "HTTP", "BODY", "can", "be", "read", "False", "otherwise", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L528-L533
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.read
async def read(self) -> bytes: """Read request body if present. Returns bytes object with full request content. """ if self._read_bytes is None: body = bytearray() while True: chunk = await self._payload.readany() body.extend(chunk) if self._client_max_size: body_size = len(body) if body_size >= self._client_max_size: raise HTTPRequestEntityTooLarge( max_size=self._client_max_size, actual_size=body_size ) if not chunk: break self._read_bytes = bytes(body) return self._read_bytes
python
async def read(self) -> bytes: """Read request body if present. Returns bytes object with full request content. """ if self._read_bytes is None: body = bytearray() while True: chunk = await self._payload.readany() body.extend(chunk) if self._client_max_size: body_size = len(body) if body_size >= self._client_max_size: raise HTTPRequestEntityTooLarge( max_size=self._client_max_size, actual_size=body_size ) if not chunk: break self._read_bytes = bytes(body) return self._read_bytes
[ "async", "def", "read", "(", "self", ")", "->", "bytes", ":", "if", "self", ".", "_read_bytes", "is", "None", ":", "body", "=", "bytearray", "(", ")", "while", "True", ":", "chunk", "=", "await", "self", ".", "_payload", ".", "readany", "(", ")", "body", ".", "extend", "(", "chunk", ")", "if", "self", ".", "_client_max_size", ":", "body_size", "=", "len", "(", "body", ")", "if", "body_size", ">=", "self", ".", "_client_max_size", ":", "raise", "HTTPRequestEntityTooLarge", "(", "max_size", "=", "self", ".", "_client_max_size", ",", "actual_size", "=", "body_size", ")", "if", "not", "chunk", ":", "break", "self", ".", "_read_bytes", "=", "bytes", "(", "body", ")", "return", "self", ".", "_read_bytes" ]
Read request body if present. Returns bytes object with full request content.
[ "Read", "request", "body", "if", "present", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L553-L573
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.text
async def text(self) -> str: """Return BODY as text using encoding from .charset.""" bytes_body = await self.read() encoding = self.charset or 'utf-8' return bytes_body.decode(encoding)
python
async def text(self) -> str: """Return BODY as text using encoding from .charset.""" bytes_body = await self.read() encoding = self.charset or 'utf-8' return bytes_body.decode(encoding)
[ "async", "def", "text", "(", "self", ")", "->", "str", ":", "bytes_body", "=", "await", "self", ".", "read", "(", ")", "encoding", "=", "self", ".", "charset", "or", "'utf-8'", "return", "bytes_body", ".", "decode", "(", "encoding", ")" ]
Return BODY as text using encoding from .charset.
[ "Return", "BODY", "as", "text", "using", "encoding", "from", ".", "charset", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L575-L579
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.json
async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any: """Return BODY as JSON.""" body = await self.text() return loads(body)
python
async def json(self, *, loads: JSONDecoder=DEFAULT_JSON_DECODER) -> Any: """Return BODY as JSON.""" body = await self.text() return loads(body)
[ "async", "def", "json", "(", "self", ",", "*", ",", "loads", ":", "JSONDecoder", "=", "DEFAULT_JSON_DECODER", ")", "->", "Any", ":", "body", "=", "await", "self", ".", "text", "(", ")", "return", "loads", "(", "body", ")" ]
Return BODY as JSON.
[ "Return", "BODY", "as", "JSON", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L581-L584
train
aio-libs/aiohttp
aiohttp/web_request.py
BaseRequest.post
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]': """Return POST parameters.""" if self._post is not None: return self._post if self._method not in self.POST_METHODS: self._post = MultiDictProxy(MultiDict()) return self._post content_type = self.content_type if (content_type not in ('', 'application/x-www-form-urlencoded', 'multipart/form-data')): self._post = MultiDictProxy(MultiDict()) return self._post out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]] if content_type == 'multipart/form-data': multipart = await self.multipart() max_size = self._client_max_size field = await multipart.next() while field is not None: size = 0 content_type = field.headers.get(hdrs.CONTENT_TYPE) if field.filename: # store file in temp file tmp = tempfile.TemporaryFile() chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) tmp.write(chunk) size += len(chunk) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) tmp.seek(0) ff = FileField(field.name, field.filename, cast(io.BufferedReader, tmp), content_type, field.headers) out.add(field.name, ff) else: value = await field.read(decode=True) if content_type is None or \ content_type.startswith('text/'): charset = field.get_charset(default='utf-8') value = value.decode(charset) out.add(field.name, value) size += len(value) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) field = await multipart.next() else: data = await self.read() if data: charset = self.charset or 'utf-8' out.extend( parse_qsl( data.rstrip().decode(charset), keep_blank_values=True, encoding=charset)) self._post = MultiDictProxy(out) return self._post
python
async def post(self) -> 'MultiDictProxy[Union[str, bytes, FileField]]': """Return POST parameters.""" if self._post is not None: return self._post if self._method not in self.POST_METHODS: self._post = MultiDictProxy(MultiDict()) return self._post content_type = self.content_type if (content_type not in ('', 'application/x-www-form-urlencoded', 'multipart/form-data')): self._post = MultiDictProxy(MultiDict()) return self._post out = MultiDict() # type: MultiDict[Union[str, bytes, FileField]] if content_type == 'multipart/form-data': multipart = await self.multipart() max_size = self._client_max_size field = await multipart.next() while field is not None: size = 0 content_type = field.headers.get(hdrs.CONTENT_TYPE) if field.filename: # store file in temp file tmp = tempfile.TemporaryFile() chunk = await field.read_chunk(size=2**16) while chunk: chunk = field.decode(chunk) tmp.write(chunk) size += len(chunk) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) chunk = await field.read_chunk(size=2**16) tmp.seek(0) ff = FileField(field.name, field.filename, cast(io.BufferedReader, tmp), content_type, field.headers) out.add(field.name, ff) else: value = await field.read(decode=True) if content_type is None or \ content_type.startswith('text/'): charset = field.get_charset(default='utf-8') value = value.decode(charset) out.add(field.name, value) size += len(value) if 0 < max_size < size: raise HTTPRequestEntityTooLarge( max_size=max_size, actual_size=size ) field = await multipart.next() else: data = await self.read() if data: charset = self.charset or 'utf-8' out.extend( parse_qsl( data.rstrip().decode(charset), keep_blank_values=True, encoding=charset)) self._post = MultiDictProxy(out) return self._post
[ "async", "def", "post", "(", "self", ")", "->", "'MultiDictProxy[Union[str, bytes, FileField]]'", ":", "if", "self", ".", "_post", "is", "not", "None", ":", "return", "self", ".", "_post", "if", "self", ".", "_method", "not", "in", "self", ".", "POST_METHODS", ":", "self", ".", "_post", "=", "MultiDictProxy", "(", "MultiDict", "(", ")", ")", "return", "self", ".", "_post", "content_type", "=", "self", ".", "content_type", "if", "(", "content_type", "not", "in", "(", "''", ",", "'application/x-www-form-urlencoded'", ",", "'multipart/form-data'", ")", ")", ":", "self", ".", "_post", "=", "MultiDictProxy", "(", "MultiDict", "(", ")", ")", "return", "self", ".", "_post", "out", "=", "MultiDict", "(", ")", "# type: MultiDict[Union[str, bytes, FileField]]", "if", "content_type", "==", "'multipart/form-data'", ":", "multipart", "=", "await", "self", ".", "multipart", "(", ")", "max_size", "=", "self", ".", "_client_max_size", "field", "=", "await", "multipart", ".", "next", "(", ")", "while", "field", "is", "not", "None", ":", "size", "=", "0", "content_type", "=", "field", ".", "headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ")", "if", "field", ".", "filename", ":", "# store file in temp file", "tmp", "=", "tempfile", ".", "TemporaryFile", "(", ")", "chunk", "=", "await", "field", ".", "read_chunk", "(", "size", "=", "2", "**", "16", ")", "while", "chunk", ":", "chunk", "=", "field", ".", "decode", "(", "chunk", ")", "tmp", ".", "write", "(", "chunk", ")", "size", "+=", "len", "(", "chunk", ")", "if", "0", "<", "max_size", "<", "size", ":", "raise", "HTTPRequestEntityTooLarge", "(", "max_size", "=", "max_size", ",", "actual_size", "=", "size", ")", "chunk", "=", "await", "field", ".", "read_chunk", "(", "size", "=", "2", "**", "16", ")", "tmp", ".", "seek", "(", "0", ")", "ff", "=", "FileField", "(", "field", ".", "name", ",", "field", ".", "filename", ",", "cast", "(", "io", ".", "BufferedReader", ",", "tmp", ")", ",", "content_type", ",", "field", ".", 
"headers", ")", "out", ".", "add", "(", "field", ".", "name", ",", "ff", ")", "else", ":", "value", "=", "await", "field", ".", "read", "(", "decode", "=", "True", ")", "if", "content_type", "is", "None", "or", "content_type", ".", "startswith", "(", "'text/'", ")", ":", "charset", "=", "field", ".", "get_charset", "(", "default", "=", "'utf-8'", ")", "value", "=", "value", ".", "decode", "(", "charset", ")", "out", ".", "add", "(", "field", ".", "name", ",", "value", ")", "size", "+=", "len", "(", "value", ")", "if", "0", "<", "max_size", "<", "size", ":", "raise", "HTTPRequestEntityTooLarge", "(", "max_size", "=", "max_size", ",", "actual_size", "=", "size", ")", "field", "=", "await", "multipart", ".", "next", "(", ")", "else", ":", "data", "=", "await", "self", ".", "read", "(", ")", "if", "data", ":", "charset", "=", "self", ".", "charset", "or", "'utf-8'", "out", ".", "extend", "(", "parse_qsl", "(", "data", ".", "rstrip", "(", ")", ".", "decode", "(", "charset", ")", ",", "keep_blank_values", "=", "True", ",", "encoding", "=", "charset", ")", ")", "self", ".", "_post", "=", "MultiDictProxy", "(", "out", ")", "return", "self", ".", "_post" ]
Return POST parameters.
[ "Return", "POST", "parameters", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_request.py#L590-L662
train
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.shutdown
async def shutdown(self, timeout: Optional[float]=15.0) -> None: """Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.""" self._force_close = True if self._keepalive_handle is not None: self._keepalive_handle.cancel() if self._waiter: self._waiter.cancel() # wait for handlers with suppress(asyncio.CancelledError, asyncio.TimeoutError): with CeilTimeout(timeout, loop=self._loop): if (self._error_handler is not None and not self._error_handler.done()): await self._error_handler if (self._task_handler is not None and not self._task_handler.done()): await self._task_handler # force-close non-idle handler if self._task_handler is not None: self._task_handler.cancel() if self.transport is not None: self.transport.close() self.transport = None
python
async def shutdown(self, timeout: Optional[float]=15.0) -> None: """Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.""" self._force_close = True if self._keepalive_handle is not None: self._keepalive_handle.cancel() if self._waiter: self._waiter.cancel() # wait for handlers with suppress(asyncio.CancelledError, asyncio.TimeoutError): with CeilTimeout(timeout, loop=self._loop): if (self._error_handler is not None and not self._error_handler.done()): await self._error_handler if (self._task_handler is not None and not self._task_handler.done()): await self._task_handler # force-close non-idle handler if self._task_handler is not None: self._task_handler.cancel() if self.transport is not None: self.transport.close() self.transport = None
[ "async", "def", "shutdown", "(", "self", ",", "timeout", ":", "Optional", "[", "float", "]", "=", "15.0", ")", "->", "None", ":", "self", ".", "_force_close", "=", "True", "if", "self", ".", "_keepalive_handle", "is", "not", "None", ":", "self", ".", "_keepalive_handle", ".", "cancel", "(", ")", "if", "self", ".", "_waiter", ":", "self", ".", "_waiter", ".", "cancel", "(", ")", "# wait for handlers", "with", "suppress", "(", "asyncio", ".", "CancelledError", ",", "asyncio", ".", "TimeoutError", ")", ":", "with", "CeilTimeout", "(", "timeout", ",", "loop", "=", "self", ".", "_loop", ")", ":", "if", "(", "self", ".", "_error_handler", "is", "not", "None", "and", "not", "self", ".", "_error_handler", ".", "done", "(", ")", ")", ":", "await", "self", ".", "_error_handler", "if", "(", "self", ".", "_task_handler", "is", "not", "None", "and", "not", "self", ".", "_task_handler", ".", "done", "(", ")", ")", ":", "await", "self", ".", "_task_handler", "# force-close non-idle handler", "if", "self", ".", "_task_handler", "is", "not", "None", ":", "self", ".", "_task_handler", ".", "cancel", "(", ")", "if", "self", ".", "transport", "is", "not", "None", ":", "self", ".", "transport", ".", "close", "(", ")", "self", ".", "transport", "=", "None" ]
Worker process is about to exit, we need cleanup everything and stop accepting requests. It is especially important for keep-alive connections.
[ "Worker", "process", "is", "about", "to", "exit", "we", "need", "cleanup", "everything", "and", "stop", "accepting", "requests", ".", "It", "is", "especially", "important", "for", "keep", "-", "alive", "connections", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L184-L213
train
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.keep_alive
def keep_alive(self, val: bool) -> None: """Set keep-alive connection mode. :param bool val: new state. """ self._keepalive = val if self._keepalive_handle: self._keepalive_handle.cancel() self._keepalive_handle = None
python
def keep_alive(self, val: bool) -> None: """Set keep-alive connection mode. :param bool val: new state. """ self._keepalive = val if self._keepalive_handle: self._keepalive_handle.cancel() self._keepalive_handle = None
[ "def", "keep_alive", "(", "self", ",", "val", ":", "bool", ")", "->", "None", ":", "self", ".", "_keepalive", "=", "val", "if", "self", ".", "_keepalive_handle", ":", "self", ".", "_keepalive_handle", ".", "cancel", "(", ")", "self", ".", "_keepalive_handle", "=", "None" ]
Set keep-alive connection mode. :param bool val: new state.
[ "Set", "keep", "-", "alive", "connection", "mode", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L316-L324
train
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.close
def close(self) -> None: """Stop accepting new pipelinig messages and close connection when handlers done processing messages""" self._close = True if self._waiter: self._waiter.cancel()
python
def close(self) -> None: """Stop accepting new pipelinig messages and close connection when handlers done processing messages""" self._close = True if self._waiter: self._waiter.cancel()
[ "def", "close", "(", "self", ")", "->", "None", ":", "self", ".", "_close", "=", "True", "if", "self", ".", "_waiter", ":", "self", ".", "_waiter", ".", "cancel", "(", ")" ]
Stop accepting new pipelinig messages and close connection when handlers done processing messages
[ "Stop", "accepting", "new", "pipelinig", "messages", "and", "close", "connection", "when", "handlers", "done", "processing", "messages" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L326-L331
train
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.force_close
def force_close(self) -> None: """Force close connection""" self._force_close = True if self._waiter: self._waiter.cancel() if self.transport is not None: self.transport.close() self.transport = None
python
def force_close(self) -> None: """Force close connection""" self._force_close = True if self._waiter: self._waiter.cancel() if self.transport is not None: self.transport.close() self.transport = None
[ "def", "force_close", "(", "self", ")", "->", "None", ":", "self", ".", "_force_close", "=", "True", "if", "self", ".", "_waiter", ":", "self", ".", "_waiter", ".", "cancel", "(", ")", "if", "self", ".", "transport", "is", "not", "None", ":", "self", ".", "transport", ".", "close", "(", ")", "self", ".", "transport", "=", "None" ]
Force close connection
[ "Force", "close", "connection" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L333-L340
train
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.start
async def start(self) -> None: """Process incoming request. It reads request line, request headers and request payload, then calls handle_request() method. Subclass has to override handle_request(). start() handles various exceptions in request or response handling. Connection is being closed always unless keep_alive(True) specified. """ loop = self._loop handler = self._task_handler assert handler is not None manager = self._manager assert manager is not None keepalive_timeout = self._keepalive_timeout resp = None assert self._request_factory is not None assert self._request_handler is not None while not self._force_close: if not self._messages: try: # wait for next request self._waiter = loop.create_future() await self._waiter except asyncio.CancelledError: break finally: self._waiter = None message, payload = self._messages.popleft() if self.access_log: now = loop.time() manager.requests_count += 1 writer = StreamWriter(self, loop) request = self._request_factory( message, payload, self, writer, handler) try: try: # a new task is used for copy context vars (#3406) task = self._loop.create_task( self._request_handler(request)) resp = await task except HTTPException as exc: resp = Response(status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers) except asyncio.CancelledError: self.log_debug('Ignored premature client disconnection') break except asyncio.TimeoutError as exc: self.log_debug('Request handler timed out.', exc_info=exc) resp = self.handle_error(request, 504) except Exception as exc: resp = self.handle_error(request, 500, exc) try: prepare_meth = resp.prepare except AttributeError: if resp is None: raise RuntimeError("Missing return " "statement on request handler") else: raise RuntimeError("Web-handler should return " "a response instance, " "got {!r}".format(resp)) await prepare_meth(request) await resp.write_eof() # notify server about keep-alive self._keepalive = bool(resp.keep_alive) # log access if self.access_log: 
self.log_access(request, resp, loop.time() - now) # check payload if not payload.is_eof(): lingering_time = self._lingering_time if not self._force_close and lingering_time: self.log_debug( 'Start lingering close timer for %s sec.', lingering_time) now = loop.time() end_t = now + lingering_time with suppress( asyncio.TimeoutError, asyncio.CancelledError): while not payload.is_eof() and now < end_t: with CeilTimeout(end_t - now, loop=loop): # read and ignore await payload.readany() now = loop.time() # if payload still uncompleted if not payload.is_eof() and not self._force_close: self.log_debug('Uncompleted request.') self.close() payload.set_exception(PayloadAccessError()) except asyncio.CancelledError: self.log_debug('Ignored premature client disconnection ') break except RuntimeError as exc: if self.debug: self.log_exception( 'Unhandled runtime exception', exc_info=exc) self.force_close() except Exception as exc: self.log_exception('Unhandled exception', exc_info=exc) self.force_close() finally: if self.transport is None and resp is not None: self.log_debug('Ignored premature client disconnection.') elif not self._force_close: if self._keepalive and not self._close: # start keep-alive timer if keepalive_timeout is not None: now = self._loop.time() self._keepalive_time = now if self._keepalive_handle is None: self._keepalive_handle = loop.call_at( now + keepalive_timeout, self._process_keepalive) else: break # remove handler, close transport if no handlers left if not self._force_close: self._task_handler = None if self.transport is not None and self._error_handler is None: self.transport.close()
python
async def start(self) -> None: """Process incoming request. It reads request line, request headers and request payload, then calls handle_request() method. Subclass has to override handle_request(). start() handles various exceptions in request or response handling. Connection is being closed always unless keep_alive(True) specified. """ loop = self._loop handler = self._task_handler assert handler is not None manager = self._manager assert manager is not None keepalive_timeout = self._keepalive_timeout resp = None assert self._request_factory is not None assert self._request_handler is not None while not self._force_close: if not self._messages: try: # wait for next request self._waiter = loop.create_future() await self._waiter except asyncio.CancelledError: break finally: self._waiter = None message, payload = self._messages.popleft() if self.access_log: now = loop.time() manager.requests_count += 1 writer = StreamWriter(self, loop) request = self._request_factory( message, payload, self, writer, handler) try: try: # a new task is used for copy context vars (#3406) task = self._loop.create_task( self._request_handler(request)) resp = await task except HTTPException as exc: resp = Response(status=exc.status, reason=exc.reason, text=exc.text, headers=exc.headers) except asyncio.CancelledError: self.log_debug('Ignored premature client disconnection') break except asyncio.TimeoutError as exc: self.log_debug('Request handler timed out.', exc_info=exc) resp = self.handle_error(request, 504) except Exception as exc: resp = self.handle_error(request, 500, exc) try: prepare_meth = resp.prepare except AttributeError: if resp is None: raise RuntimeError("Missing return " "statement on request handler") else: raise RuntimeError("Web-handler should return " "a response instance, " "got {!r}".format(resp)) await prepare_meth(request) await resp.write_eof() # notify server about keep-alive self._keepalive = bool(resp.keep_alive) # log access if self.access_log: 
self.log_access(request, resp, loop.time() - now) # check payload if not payload.is_eof(): lingering_time = self._lingering_time if not self._force_close and lingering_time: self.log_debug( 'Start lingering close timer for %s sec.', lingering_time) now = loop.time() end_t = now + lingering_time with suppress( asyncio.TimeoutError, asyncio.CancelledError): while not payload.is_eof() and now < end_t: with CeilTimeout(end_t - now, loop=loop): # read and ignore await payload.readany() now = loop.time() # if payload still uncompleted if not payload.is_eof() and not self._force_close: self.log_debug('Uncompleted request.') self.close() payload.set_exception(PayloadAccessError()) except asyncio.CancelledError: self.log_debug('Ignored premature client disconnection ') break except RuntimeError as exc: if self.debug: self.log_exception( 'Unhandled runtime exception', exc_info=exc) self.force_close() except Exception as exc: self.log_exception('Unhandled exception', exc_info=exc) self.force_close() finally: if self.transport is None and resp is not None: self.log_debug('Ignored premature client disconnection.') elif not self._force_close: if self._keepalive and not self._close: # start keep-alive timer if keepalive_timeout is not None: now = self._loop.time() self._keepalive_time = now if self._keepalive_handle is None: self._keepalive_handle = loop.call_at( now + keepalive_timeout, self._process_keepalive) else: break # remove handler, close transport if no handlers left if not self._force_close: self._task_handler = None if self.transport is not None and self._error_handler is None: self.transport.close()
[ "async", "def", "start", "(", "self", ")", "->", "None", ":", "loop", "=", "self", ".", "_loop", "handler", "=", "self", ".", "_task_handler", "assert", "handler", "is", "not", "None", "manager", "=", "self", ".", "_manager", "assert", "manager", "is", "not", "None", "keepalive_timeout", "=", "self", ".", "_keepalive_timeout", "resp", "=", "None", "assert", "self", ".", "_request_factory", "is", "not", "None", "assert", "self", ".", "_request_handler", "is", "not", "None", "while", "not", "self", ".", "_force_close", ":", "if", "not", "self", ".", "_messages", ":", "try", ":", "# wait for next request", "self", ".", "_waiter", "=", "loop", ".", "create_future", "(", ")", "await", "self", ".", "_waiter", "except", "asyncio", ".", "CancelledError", ":", "break", "finally", ":", "self", ".", "_waiter", "=", "None", "message", ",", "payload", "=", "self", ".", "_messages", ".", "popleft", "(", ")", "if", "self", ".", "access_log", ":", "now", "=", "loop", ".", "time", "(", ")", "manager", ".", "requests_count", "+=", "1", "writer", "=", "StreamWriter", "(", "self", ",", "loop", ")", "request", "=", "self", ".", "_request_factory", "(", "message", ",", "payload", ",", "self", ",", "writer", ",", "handler", ")", "try", ":", "try", ":", "# a new task is used for copy context vars (#3406)", "task", "=", "self", ".", "_loop", ".", "create_task", "(", "self", ".", "_request_handler", "(", "request", ")", ")", "resp", "=", "await", "task", "except", "HTTPException", "as", "exc", ":", "resp", "=", "Response", "(", "status", "=", "exc", ".", "status", ",", "reason", "=", "exc", ".", "reason", ",", "text", "=", "exc", ".", "text", ",", "headers", "=", "exc", ".", "headers", ")", "except", "asyncio", ".", "CancelledError", ":", "self", ".", "log_debug", "(", "'Ignored premature client disconnection'", ")", "break", "except", "asyncio", ".", "TimeoutError", "as", "exc", ":", "self", ".", "log_debug", "(", "'Request handler timed out.'", ",", "exc_info", "=", "exc", ")", "resp", 
"=", "self", ".", "handle_error", "(", "request", ",", "504", ")", "except", "Exception", "as", "exc", ":", "resp", "=", "self", ".", "handle_error", "(", "request", ",", "500", ",", "exc", ")", "try", ":", "prepare_meth", "=", "resp", ".", "prepare", "except", "AttributeError", ":", "if", "resp", "is", "None", ":", "raise", "RuntimeError", "(", "\"Missing return \"", "\"statement on request handler\"", ")", "else", ":", "raise", "RuntimeError", "(", "\"Web-handler should return \"", "\"a response instance, \"", "\"got {!r}\"", ".", "format", "(", "resp", ")", ")", "await", "prepare_meth", "(", "request", ")", "await", "resp", ".", "write_eof", "(", ")", "# notify server about keep-alive", "self", ".", "_keepalive", "=", "bool", "(", "resp", ".", "keep_alive", ")", "# log access", "if", "self", ".", "access_log", ":", "self", ".", "log_access", "(", "request", ",", "resp", ",", "loop", ".", "time", "(", ")", "-", "now", ")", "# check payload", "if", "not", "payload", ".", "is_eof", "(", ")", ":", "lingering_time", "=", "self", ".", "_lingering_time", "if", "not", "self", ".", "_force_close", "and", "lingering_time", ":", "self", ".", "log_debug", "(", "'Start lingering close timer for %s sec.'", ",", "lingering_time", ")", "now", "=", "loop", ".", "time", "(", ")", "end_t", "=", "now", "+", "lingering_time", "with", "suppress", "(", "asyncio", ".", "TimeoutError", ",", "asyncio", ".", "CancelledError", ")", ":", "while", "not", "payload", ".", "is_eof", "(", ")", "and", "now", "<", "end_t", ":", "with", "CeilTimeout", "(", "end_t", "-", "now", ",", "loop", "=", "loop", ")", ":", "# read and ignore", "await", "payload", ".", "readany", "(", ")", "now", "=", "loop", ".", "time", "(", ")", "# if payload still uncompleted", "if", "not", "payload", ".", "is_eof", "(", ")", "and", "not", "self", ".", "_force_close", ":", "self", ".", "log_debug", "(", "'Uncompleted request.'", ")", "self", ".", "close", "(", ")", "payload", ".", "set_exception", "(", 
"PayloadAccessError", "(", ")", ")", "except", "asyncio", ".", "CancelledError", ":", "self", ".", "log_debug", "(", "'Ignored premature client disconnection '", ")", "break", "except", "RuntimeError", "as", "exc", ":", "if", "self", ".", "debug", ":", "self", ".", "log_exception", "(", "'Unhandled runtime exception'", ",", "exc_info", "=", "exc", ")", "self", ".", "force_close", "(", ")", "except", "Exception", "as", "exc", ":", "self", ".", "log_exception", "(", "'Unhandled exception'", ",", "exc_info", "=", "exc", ")", "self", ".", "force_close", "(", ")", "finally", ":", "if", "self", ".", "transport", "is", "None", "and", "resp", "is", "not", "None", ":", "self", ".", "log_debug", "(", "'Ignored premature client disconnection.'", ")", "elif", "not", "self", ".", "_force_close", ":", "if", "self", ".", "_keepalive", "and", "not", "self", ".", "_close", ":", "# start keep-alive timer", "if", "keepalive_timeout", "is", "not", "None", ":", "now", "=", "self", ".", "_loop", ".", "time", "(", ")", "self", ".", "_keepalive_time", "=", "now", "if", "self", ".", "_keepalive_handle", "is", "None", ":", "self", ".", "_keepalive_handle", "=", "loop", ".", "call_at", "(", "now", "+", "keepalive_timeout", ",", "self", ".", "_process_keepalive", ")", "else", ":", "break", "# remove handler, close transport if no handlers left", "if", "not", "self", ".", "_force_close", ":", "self", ".", "_task_handler", "=", "None", "if", "self", ".", "transport", "is", "not", "None", "and", "self", ".", "_error_handler", "is", "None", ":", "self", ".", "transport", ".", "close", "(", ")" ]
Process incoming request. It reads request line, request headers and request payload, then calls handle_request() method. Subclass has to override handle_request(). start() handles various exceptions in request or response handling. Connection is being closed always unless keep_alive(True) specified.
[ "Process", "incoming", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L373-L509
train
aio-libs/aiohttp
aiohttp/web_protocol.py
RequestHandler.handle_error
def handle_error(self, request: BaseRequest, status: int=500, exc: Optional[BaseException]=None, message: Optional[str]=None) -> StreamResponse: """Handle errors. Returns HTTP response with specific status code. Logs additional information. It always closes current connection.""" self.log_exception("Error handling request", exc_info=exc) ct = 'text/plain' if status == HTTPStatus.INTERNAL_SERVER_ERROR: title = '{0.value} {0.phrase}'.format( HTTPStatus.INTERNAL_SERVER_ERROR ) msg = HTTPStatus.INTERNAL_SERVER_ERROR.description tb = None if self.debug: with suppress(Exception): tb = traceback.format_exc() if 'text/html' in request.headers.get('Accept', ''): if tb: tb = html_escape(tb) msg = '<h2>Traceback:</h2>\n<pre>{}</pre>'.format(tb) message = ( "<html><head>" "<title>{title}</title>" "</head><body>\n<h1>{title}</h1>" "\n{msg}\n</body></html>\n" ).format(title=title, msg=msg) ct = 'text/html' else: if tb: msg = tb message = title + '\n\n' + msg resp = Response(status=status, text=message, content_type=ct) resp.force_close() # some data already got sent, connection is broken if request.writer.output_size > 0 or self.transport is None: self.force_close() return resp
python
def handle_error(self, request: BaseRequest, status: int=500, exc: Optional[BaseException]=None, message: Optional[str]=None) -> StreamResponse: """Handle errors. Returns HTTP response with specific status code. Logs additional information. It always closes current connection.""" self.log_exception("Error handling request", exc_info=exc) ct = 'text/plain' if status == HTTPStatus.INTERNAL_SERVER_ERROR: title = '{0.value} {0.phrase}'.format( HTTPStatus.INTERNAL_SERVER_ERROR ) msg = HTTPStatus.INTERNAL_SERVER_ERROR.description tb = None if self.debug: with suppress(Exception): tb = traceback.format_exc() if 'text/html' in request.headers.get('Accept', ''): if tb: tb = html_escape(tb) msg = '<h2>Traceback:</h2>\n<pre>{}</pre>'.format(tb) message = ( "<html><head>" "<title>{title}</title>" "</head><body>\n<h1>{title}</h1>" "\n{msg}\n</body></html>\n" ).format(title=title, msg=msg) ct = 'text/html' else: if tb: msg = tb message = title + '\n\n' + msg resp = Response(status=status, text=message, content_type=ct) resp.force_close() # some data already got sent, connection is broken if request.writer.output_size > 0 or self.transport is None: self.force_close() return resp
[ "def", "handle_error", "(", "self", ",", "request", ":", "BaseRequest", ",", "status", ":", "int", "=", "500", ",", "exc", ":", "Optional", "[", "BaseException", "]", "=", "None", ",", "message", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "StreamResponse", ":", "self", ".", "log_exception", "(", "\"Error handling request\"", ",", "exc_info", "=", "exc", ")", "ct", "=", "'text/plain'", "if", "status", "==", "HTTPStatus", ".", "INTERNAL_SERVER_ERROR", ":", "title", "=", "'{0.value} {0.phrase}'", ".", "format", "(", "HTTPStatus", ".", "INTERNAL_SERVER_ERROR", ")", "msg", "=", "HTTPStatus", ".", "INTERNAL_SERVER_ERROR", ".", "description", "tb", "=", "None", "if", "self", ".", "debug", ":", "with", "suppress", "(", "Exception", ")", ":", "tb", "=", "traceback", ".", "format_exc", "(", ")", "if", "'text/html'", "in", "request", ".", "headers", ".", "get", "(", "'Accept'", ",", "''", ")", ":", "if", "tb", ":", "tb", "=", "html_escape", "(", "tb", ")", "msg", "=", "'<h2>Traceback:</h2>\\n<pre>{}</pre>'", ".", "format", "(", "tb", ")", "message", "=", "(", "\"<html><head>\"", "\"<title>{title}</title>\"", "\"</head><body>\\n<h1>{title}</h1>\"", "\"\\n{msg}\\n</body></html>\\n\"", ")", ".", "format", "(", "title", "=", "title", ",", "msg", "=", "msg", ")", "ct", "=", "'text/html'", "else", ":", "if", "tb", ":", "msg", "=", "tb", "message", "=", "title", "+", "'\\n\\n'", "+", "msg", "resp", "=", "Response", "(", "status", "=", "status", ",", "text", "=", "message", ",", "content_type", "=", "ct", ")", "resp", ".", "force_close", "(", ")", "# some data already got sent, connection is broken", "if", "request", ".", "writer", ".", "output_size", ">", "0", "or", "self", ".", "transport", "is", "None", ":", "self", ".", "force_close", "(", ")", "return", "resp" ]
Handle errors. Returns HTTP response with specific status code. Logs additional information. It always closes current connection.
[ "Handle", "errors", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_protocol.py#L511-L556
train
aio-libs/aiohttp
aiohttp/web.py
run_app
def run_app(app: Union[Application, Awaitable[Application]], *, host: Optional[str]=None, port: Optional[int]=None, path: Optional[str]=None, sock: Optional[socket.socket]=None, shutdown_timeout: float=60.0, ssl_context: Optional[SSLContext]=None, print: Optional[Callable[..., None]]=print, backlog: int=128, access_log_class: Type[AbstractAccessLogger]=AccessLogger, access_log_format: str=AccessLogger.LOG_FORMAT, access_log: Optional[logging.Logger]=access_logger, handle_signals: bool=True, reuse_address: Optional[bool]=None, reuse_port: Optional[bool]=None) -> None: """Run an app locally""" loop = asyncio.get_event_loop() # Configure if and only if in debugging mode and using the default logger if loop.get_debug() and access_log and access_log.name == 'aiohttp.access': if access_log.level == logging.NOTSET: access_log.setLevel(logging.DEBUG) if not access_log.hasHandlers(): access_log.addHandler(logging.StreamHandler()) try: loop.run_until_complete(_run_app(app, host=host, port=port, path=path, sock=sock, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, print=print, backlog=backlog, access_log_class=access_log_class, access_log_format=access_log_format, access_log=access_log, handle_signals=handle_signals, reuse_address=reuse_address, reuse_port=reuse_port)) except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: _cancel_all_tasks(loop) if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy loop.run_until_complete(loop.shutdown_asyncgens()) loop.close()
python
def run_app(app: Union[Application, Awaitable[Application]], *, host: Optional[str]=None, port: Optional[int]=None, path: Optional[str]=None, sock: Optional[socket.socket]=None, shutdown_timeout: float=60.0, ssl_context: Optional[SSLContext]=None, print: Optional[Callable[..., None]]=print, backlog: int=128, access_log_class: Type[AbstractAccessLogger]=AccessLogger, access_log_format: str=AccessLogger.LOG_FORMAT, access_log: Optional[logging.Logger]=access_logger, handle_signals: bool=True, reuse_address: Optional[bool]=None, reuse_port: Optional[bool]=None) -> None: """Run an app locally""" loop = asyncio.get_event_loop() # Configure if and only if in debugging mode and using the default logger if loop.get_debug() and access_log and access_log.name == 'aiohttp.access': if access_log.level == logging.NOTSET: access_log.setLevel(logging.DEBUG) if not access_log.hasHandlers(): access_log.addHandler(logging.StreamHandler()) try: loop.run_until_complete(_run_app(app, host=host, port=port, path=path, sock=sock, shutdown_timeout=shutdown_timeout, ssl_context=ssl_context, print=print, backlog=backlog, access_log_class=access_log_class, access_log_format=access_log_format, access_log=access_log, handle_signals=handle_signals, reuse_address=reuse_address, reuse_port=reuse_port)) except (GracefulExit, KeyboardInterrupt): # pragma: no cover pass finally: _cancel_all_tasks(loop) if sys.version_info >= (3, 6): # don't use PY_36 to pass mypy loop.run_until_complete(loop.shutdown_asyncgens()) loop.close()
[ "def", "run_app", "(", "app", ":", "Union", "[", "Application", ",", "Awaitable", "[", "Application", "]", "]", ",", "*", ",", "host", ":", "Optional", "[", "str", "]", "=", "None", ",", "port", ":", "Optional", "[", "int", "]", "=", "None", ",", "path", ":", "Optional", "[", "str", "]", "=", "None", ",", "sock", ":", "Optional", "[", "socket", ".", "socket", "]", "=", "None", ",", "shutdown_timeout", ":", "float", "=", "60.0", ",", "ssl_context", ":", "Optional", "[", "SSLContext", "]", "=", "None", ",", "print", ":", "Optional", "[", "Callable", "[", "...", ",", "None", "]", "]", "=", "print", ",", "backlog", ":", "int", "=", "128", ",", "access_log_class", ":", "Type", "[", "AbstractAccessLogger", "]", "=", "AccessLogger", ",", "access_log_format", ":", "str", "=", "AccessLogger", ".", "LOG_FORMAT", ",", "access_log", ":", "Optional", "[", "logging", ".", "Logger", "]", "=", "access_logger", ",", "handle_signals", ":", "bool", "=", "True", ",", "reuse_address", ":", "Optional", "[", "bool", "]", "=", "None", ",", "reuse_port", ":", "Optional", "[", "bool", "]", "=", "None", ")", "->", "None", ":", "loop", "=", "asyncio", ".", "get_event_loop", "(", ")", "# Configure if and only if in debugging mode and using the default logger", "if", "loop", ".", "get_debug", "(", ")", "and", "access_log", "and", "access_log", ".", "name", "==", "'aiohttp.access'", ":", "if", "access_log", ".", "level", "==", "logging", ".", "NOTSET", ":", "access_log", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "if", "not", "access_log", ".", "hasHandlers", "(", ")", ":", "access_log", ".", "addHandler", "(", "logging", ".", "StreamHandler", "(", ")", ")", "try", ":", "loop", ".", "run_until_complete", "(", "_run_app", "(", "app", ",", "host", "=", "host", ",", "port", "=", "port", ",", "path", "=", "path", ",", "sock", "=", "sock", ",", "shutdown_timeout", "=", "shutdown_timeout", ",", "ssl_context", "=", "ssl_context", ",", "print", "=", "print", ",", "backlog", "=", "backlog", 
",", "access_log_class", "=", "access_log_class", ",", "access_log_format", "=", "access_log_format", ",", "access_log", "=", "access_log", ",", "handle_signals", "=", "handle_signals", ",", "reuse_address", "=", "reuse_address", ",", "reuse_port", "=", "reuse_port", ")", ")", "except", "(", "GracefulExit", ",", "KeyboardInterrupt", ")", ":", "# pragma: no cover", "pass", "finally", ":", "_cancel_all_tasks", "(", "loop", ")", "if", "sys", ".", "version_info", ">=", "(", "3", ",", "6", ")", ":", "# don't use PY_36 to pass mypy", "loop", ".", "run_until_complete", "(", "loop", ".", "shutdown_asyncgens", "(", ")", ")", "loop", ".", "close", "(", ")" ]
Run an app locally
[ "Run", "an", "app", "locally" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web.py#L375-L422
train
aio-libs/aiohttp
aiohttp/streams.py
AsyncStreamReaderMixin.iter_chunked
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: """Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only """ return AsyncStreamIterator(lambda: self.read(n))
python
def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]: """Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only """ return AsyncStreamIterator(lambda: self.read(n))
[ "def", "iter_chunked", "(", "self", ",", "n", ":", "int", ")", "->", "AsyncStreamIterator", "[", "bytes", "]", ":", "return", "AsyncStreamIterator", "(", "lambda", ":", "self", ".", "read", "(", "n", ")", ")" ]
Returns an asynchronous iterator that yields chunks of size n. Python-3.5 available for Python 3.5+ only
[ "Returns", "an", "asynchronous", "iterator", "that", "yields", "chunks", "of", "size", "n", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L68-L73
train
aio-libs/aiohttp
aiohttp/streams.py
StreamReader.unread_data
def unread_data(self, data: bytes) -> None: """ rollback reading some data from stream, inserting it to buffer head. """ warnings.warn("unread_data() is deprecated " "and will be removed in future releases (#3260)", DeprecationWarning, stacklevel=2) if not data: return if self._buffer_offset: self._buffer[0] = self._buffer[0][self._buffer_offset:] self._buffer_offset = 0 self._size += len(data) self._cursor -= len(data) self._buffer.appendleft(data) self._eof_counter = 0
python
def unread_data(self, data: bytes) -> None: """ rollback reading some data from stream, inserting it to buffer head. """ warnings.warn("unread_data() is deprecated " "and will be removed in future releases (#3260)", DeprecationWarning, stacklevel=2) if not data: return if self._buffer_offset: self._buffer[0] = self._buffer[0][self._buffer_offset:] self._buffer_offset = 0 self._size += len(data) self._cursor -= len(data) self._buffer.appendleft(data) self._eof_counter = 0
[ "def", "unread_data", "(", "self", ",", "data", ":", "bytes", ")", "->", "None", ":", "warnings", ".", "warn", "(", "\"unread_data() is deprecated \"", "\"and will be removed in future releases (#3260)\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "if", "not", "data", ":", "return", "if", "self", ".", "_buffer_offset", ":", "self", ".", "_buffer", "[", "0", "]", "=", "self", ".", "_buffer", "[", "0", "]", "[", "self", ".", "_buffer_offset", ":", "]", "self", ".", "_buffer_offset", "=", "0", "self", ".", "_size", "+=", "len", "(", "data", ")", "self", ".", "_cursor", "-=", "len", "(", "data", ")", "self", ".", "_buffer", ".", "appendleft", "(", "data", ")", "self", ".", "_eof_counter", "=", "0" ]
rollback reading some data from stream, inserting it to buffer head.
[ "rollback", "reading", "some", "data", "from", "stream", "inserting", "it", "to", "buffer", "head", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L211-L227
train
aio-libs/aiohttp
aiohttp/streams.py
StreamReader.readchunk
async def readchunk(self) -> Tuple[bytes, bool]: """Returns a tuple of (data, end_of_http_chunk). When chunked transfer encoding is used, end_of_http_chunk is a boolean indicating if the end of the data corresponds to the end of a HTTP chunk , otherwise it is always False. """ while True: if self._exception is not None: raise self._exception while self._http_chunk_splits: pos = self._http_chunk_splits.pop(0) if pos == self._cursor: return (b"", True) if pos > self._cursor: return (self._read_nowait(pos-self._cursor), True) internal_logger.warning('Skipping HTTP chunk end due to data ' 'consumption beyond chunk boundary') if self._buffer: return (self._read_nowait_chunk(-1), False) # return (self._read_nowait(-1), False) if self._eof: # Special case for signifying EOF. # (b'', True) is not a final return value actually. return (b'', False) await self._wait('readchunk')
python
async def readchunk(self) -> Tuple[bytes, bool]: """Returns a tuple of (data, end_of_http_chunk). When chunked transfer encoding is used, end_of_http_chunk is a boolean indicating if the end of the data corresponds to the end of a HTTP chunk , otherwise it is always False. """ while True: if self._exception is not None: raise self._exception while self._http_chunk_splits: pos = self._http_chunk_splits.pop(0) if pos == self._cursor: return (b"", True) if pos > self._cursor: return (self._read_nowait(pos-self._cursor), True) internal_logger.warning('Skipping HTTP chunk end due to data ' 'consumption beyond chunk boundary') if self._buffer: return (self._read_nowait_chunk(-1), False) # return (self._read_nowait(-1), False) if self._eof: # Special case for signifying EOF. # (b'', True) is not a final return value actually. return (b'', False) await self._wait('readchunk')
[ "async", "def", "readchunk", "(", "self", ")", "->", "Tuple", "[", "bytes", ",", "bool", "]", ":", "while", "True", ":", "if", "self", ".", "_exception", "is", "not", "None", ":", "raise", "self", ".", "_exception", "while", "self", ".", "_http_chunk_splits", ":", "pos", "=", "self", ".", "_http_chunk_splits", ".", "pop", "(", "0", ")", "if", "pos", "==", "self", ".", "_cursor", ":", "return", "(", "b\"\"", ",", "True", ")", "if", "pos", ">", "self", ".", "_cursor", ":", "return", "(", "self", ".", "_read_nowait", "(", "pos", "-", "self", ".", "_cursor", ")", ",", "True", ")", "internal_logger", ".", "warning", "(", "'Skipping HTTP chunk end due to data '", "'consumption beyond chunk boundary'", ")", "if", "self", ".", "_buffer", ":", "return", "(", "self", ".", "_read_nowait_chunk", "(", "-", "1", ")", ",", "False", ")", "# return (self._read_nowait(-1), False)", "if", "self", ".", "_eof", ":", "# Special case for signifying EOF.", "# (b'', True) is not a final return value actually.", "return", "(", "b''", ",", "False", ")", "await", "self", ".", "_wait", "(", "'readchunk'", ")" ]
Returns a tuple of (data, end_of_http_chunk). When chunked transfer encoding is used, end_of_http_chunk is a boolean indicating if the end of the data corresponds to the end of a HTTP chunk , otherwise it is always False.
[ "Returns", "a", "tuple", "of", "(", "data", "end_of_http_chunk", ")", ".", "When", "chunked", "transfer", "encoding", "is", "used", "end_of_http_chunk", "is", "a", "boolean", "indicating", "if", "the", "end", "of", "the", "data", "corresponds", "to", "the", "end", "of", "a", "HTTP", "chunk", "otherwise", "it", "is", "always", "False", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L385-L413
train
aio-libs/aiohttp
aiohttp/streams.py
StreamReader._read_nowait
def _read_nowait(self, n: int) -> bytes: """ Read not more than n bytes, or whole buffer is n == -1 """ chunks = [] while self._buffer: chunk = self._read_nowait_chunk(n) chunks.append(chunk) if n != -1: n -= len(chunk) if n == 0: break return b''.join(chunks) if chunks else b''
python
def _read_nowait(self, n: int) -> bytes: """ Read not more than n bytes, or whole buffer is n == -1 """ chunks = [] while self._buffer: chunk = self._read_nowait_chunk(n) chunks.append(chunk) if n != -1: n -= len(chunk) if n == 0: break return b''.join(chunks) if chunks else b''
[ "def", "_read_nowait", "(", "self", ",", "n", ":", "int", ")", "->", "bytes", ":", "chunks", "=", "[", "]", "while", "self", ".", "_buffer", ":", "chunk", "=", "self", ".", "_read_nowait_chunk", "(", "n", ")", "chunks", ".", "append", "(", "chunk", ")", "if", "n", "!=", "-", "1", ":", "n", "-=", "len", "(", "chunk", ")", "if", "n", "==", "0", ":", "break", "return", "b''", ".", "join", "(", "chunks", ")", "if", "chunks", "else", "b''" ]
Read not more than n bytes, or whole buffer is n == -1
[ "Read", "not", "more", "than", "n", "bytes", "or", "whole", "buffer", "is", "n", "==", "-", "1" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/streams.py#L472-L484
train
aio-libs/aiohttp
aiohttp/signals.py
Signal.send
async def send(self, *args, **kwargs): """ Sends data to all registered receivers. """ if not self.frozen: raise RuntimeError("Cannot send non-frozen signal.") for receiver in self: await receiver(*args, **kwargs)
python
async def send(self, *args, **kwargs): """ Sends data to all registered receivers. """ if not self.frozen: raise RuntimeError("Cannot send non-frozen signal.") for receiver in self: await receiver(*args, **kwargs)
[ "async", "def", "send", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "frozen", ":", "raise", "RuntimeError", "(", "\"Cannot send non-frozen signal.\"", ")", "for", "receiver", "in", "self", ":", "await", "receiver", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
Sends data to all registered receivers.
[ "Sends", "data", "to", "all", "registered", "receivers", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/signals.py#L26-L34
train
aio-libs/aiohttp
aiohttp/web_log.py
AccessLogger.compile_format
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: """Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in appropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial) """ # list of (key, method) tuples, we don't use an OrderedDict as users # can repeat the same key more than once methods = list() for atom in self.FORMAT_RE.findall(log_format): if atom[1] == '': format_key1 = self.LOG_FORMAT_MAP[atom[0]] m = getattr(AccessLogger, '_format_%s' % atom[0]) key_method = KeyMethod(format_key1, m) else: format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) m = getattr(AccessLogger, '_format_%s' % atom[2]) key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) methods.append(key_method) log_format = self.FORMAT_RE.sub(r'%s', log_format) log_format = self.CLEANUP_RE.sub(r'%\1', log_format) return log_format, methods
python
def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: """Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in appropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial) """ # list of (key, method) tuples, we don't use an OrderedDict as users # can repeat the same key more than once methods = list() for atom in self.FORMAT_RE.findall(log_format): if atom[1] == '': format_key1 = self.LOG_FORMAT_MAP[atom[0]] m = getattr(AccessLogger, '_format_%s' % atom[0]) key_method = KeyMethod(format_key1, m) else: format_key2 = (self.LOG_FORMAT_MAP[atom[2]], atom[1]) m = getattr(AccessLogger, '_format_%s' % atom[2]) key_method = KeyMethod(format_key2, functools.partial(m, atom[1])) methods.append(key_method) log_format = self.FORMAT_RE.sub(r'%s', log_format) log_format = self.CLEANUP_RE.sub(r'%\1', log_format) return log_format, methods
[ "def", "compile_format", "(", "self", ",", "log_format", ":", "str", ")", "->", "Tuple", "[", "str", ",", "List", "[", "KeyMethod", "]", "]", ":", "# list of (key, method) tuples, we don't use an OrderedDict as users", "# can repeat the same key more than once", "methods", "=", "list", "(", ")", "for", "atom", "in", "self", ".", "FORMAT_RE", ".", "findall", "(", "log_format", ")", ":", "if", "atom", "[", "1", "]", "==", "''", ":", "format_key1", "=", "self", ".", "LOG_FORMAT_MAP", "[", "atom", "[", "0", "]", "]", "m", "=", "getattr", "(", "AccessLogger", ",", "'_format_%s'", "%", "atom", "[", "0", "]", ")", "key_method", "=", "KeyMethod", "(", "format_key1", ",", "m", ")", "else", ":", "format_key2", "=", "(", "self", ".", "LOG_FORMAT_MAP", "[", "atom", "[", "2", "]", "]", ",", "atom", "[", "1", "]", ")", "m", "=", "getattr", "(", "AccessLogger", ",", "'_format_%s'", "%", "atom", "[", "2", "]", ")", "key_method", "=", "KeyMethod", "(", "format_key2", ",", "functools", ".", "partial", "(", "m", ",", "atom", "[", "1", "]", ")", ")", "methods", ".", "append", "(", "key_method", ")", "log_format", "=", "self", ".", "FORMAT_RE", ".", "sub", "(", "r'%s'", ",", "log_format", ")", "log_format", "=", "self", ".", "CLEANUP_RE", ".", "sub", "(", "r'%\\1'", ",", "log_format", ")", "return", "log_format", ",", "methods" ]
Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s Also methods for formatting of those atoms will be added to _methods in appropriate order For example we have log_format = "%a %t" This format will be translated to "%s %s" Also contents of _methods will be [self._format_a, self._format_t] These method will be called and results will be passed to translated string format. Each _format_* method receive 'args' which is list of arguments given to self.log Exceptions are _format_e, _format_i and _format_o methods which also receive key name (by functools.partial)
[ "Translate", "log_format", "into", "form", "usable", "by", "modulo", "formatting" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_log.py#L78-L118
train
aio-libs/aiohttp
aiohttp/web_middlewares.py
normalize_path_middleware
def normalize_path_middleware( *, append_slash: bool=True, remove_slash: bool=False, merge_slashes: bool=True, redirect_class: Type[HTTPMove]=HTTPMovedPermanently) -> _Middleware: """ Middleware factory which produces a middleware that normalizes the path of a request. By normalizing it means: - Add or remove a trailing slash to the path. - Double slashes are replaced by one. The middleware returns as soon as it finds a path that resolves correctly. The order if both merge and append/remove are enabled is 1) merge slashes 2) append/remove slash 3) both merge slashes and append/remove slash. If the path resolves with at least one of those conditions, it will redirect to the new path. Only one of `append_slash` and `remove_slash` can be enabled. If both are `True` the factory will raise an assertion error If `append_slash` is `True` the middleware will append a slash when needed. If a resource is defined with trailing slash and the request comes without it, it will append it automatically. If `remove_slash` is `True`, `append_slash` must be `False`. When enabled the middleware will remove trailing slashes and redirect if the resource is defined If merge_slashes is True, merge multiple consecutive slashes in the path into one. """ correct_configuration = not (append_slash and remove_slash) assert correct_configuration, "Cannot both remove and append slash" @middleware async def impl(request: Request, handler: _Handler) -> StreamResponse: if isinstance(request.match_info.route, SystemRoute): paths_to_check = [] if '?' in request.raw_path: path, query = request.raw_path.split('?', 1) query = '?' 
+ query else: query = '' path = request.raw_path if merge_slashes: paths_to_check.append(re.sub('//+', '/', path)) if append_slash and not request.path.endswith('/'): paths_to_check.append(path + '/') if remove_slash and request.path.endswith('/'): paths_to_check.append(path[:-1]) if merge_slashes and append_slash: paths_to_check.append( re.sub('//+', '/', path + '/')) if merge_slashes and remove_slash and path.endswith('/'): merged_slashes = re.sub('//+', '/', path) paths_to_check.append(merged_slashes[:-1]) for path in paths_to_check: resolves, request = await _check_request_resolves( request, path) if resolves: raise redirect_class(request.raw_path + query) return await handler(request) return impl
python
def normalize_path_middleware( *, append_slash: bool=True, remove_slash: bool=False, merge_slashes: bool=True, redirect_class: Type[HTTPMove]=HTTPMovedPermanently) -> _Middleware: """ Middleware factory which produces a middleware that normalizes the path of a request. By normalizing it means: - Add or remove a trailing slash to the path. - Double slashes are replaced by one. The middleware returns as soon as it finds a path that resolves correctly. The order if both merge and append/remove are enabled is 1) merge slashes 2) append/remove slash 3) both merge slashes and append/remove slash. If the path resolves with at least one of those conditions, it will redirect to the new path. Only one of `append_slash` and `remove_slash` can be enabled. If both are `True` the factory will raise an assertion error If `append_slash` is `True` the middleware will append a slash when needed. If a resource is defined with trailing slash and the request comes without it, it will append it automatically. If `remove_slash` is `True`, `append_slash` must be `False`. When enabled the middleware will remove trailing slashes and redirect if the resource is defined If merge_slashes is True, merge multiple consecutive slashes in the path into one. """ correct_configuration = not (append_slash and remove_slash) assert correct_configuration, "Cannot both remove and append slash" @middleware async def impl(request: Request, handler: _Handler) -> StreamResponse: if isinstance(request.match_info.route, SystemRoute): paths_to_check = [] if '?' in request.raw_path: path, query = request.raw_path.split('?', 1) query = '?' 
+ query else: query = '' path = request.raw_path if merge_slashes: paths_to_check.append(re.sub('//+', '/', path)) if append_slash and not request.path.endswith('/'): paths_to_check.append(path + '/') if remove_slash and request.path.endswith('/'): paths_to_check.append(path[:-1]) if merge_slashes and append_slash: paths_to_check.append( re.sub('//+', '/', path + '/')) if merge_slashes and remove_slash and path.endswith('/'): merged_slashes = re.sub('//+', '/', path) paths_to_check.append(merged_slashes[:-1]) for path in paths_to_check: resolves, request = await _check_request_resolves( request, path) if resolves: raise redirect_class(request.raw_path + query) return await handler(request) return impl
[ "def", "normalize_path_middleware", "(", "*", ",", "append_slash", ":", "bool", "=", "True", ",", "remove_slash", ":", "bool", "=", "False", ",", "merge_slashes", ":", "bool", "=", "True", ",", "redirect_class", ":", "Type", "[", "HTTPMove", "]", "=", "HTTPMovedPermanently", ")", "->", "_Middleware", ":", "correct_configuration", "=", "not", "(", "append_slash", "and", "remove_slash", ")", "assert", "correct_configuration", ",", "\"Cannot both remove and append slash\"", "@", "middleware", "async", "def", "impl", "(", "request", ":", "Request", ",", "handler", ":", "_Handler", ")", "->", "StreamResponse", ":", "if", "isinstance", "(", "request", ".", "match_info", ".", "route", ",", "SystemRoute", ")", ":", "paths_to_check", "=", "[", "]", "if", "'?'", "in", "request", ".", "raw_path", ":", "path", ",", "query", "=", "request", ".", "raw_path", ".", "split", "(", "'?'", ",", "1", ")", "query", "=", "'?'", "+", "query", "else", ":", "query", "=", "''", "path", "=", "request", ".", "raw_path", "if", "merge_slashes", ":", "paths_to_check", ".", "append", "(", "re", ".", "sub", "(", "'//+'", ",", "'/'", ",", "path", ")", ")", "if", "append_slash", "and", "not", "request", ".", "path", ".", "endswith", "(", "'/'", ")", ":", "paths_to_check", ".", "append", "(", "path", "+", "'/'", ")", "if", "remove_slash", "and", "request", ".", "path", ".", "endswith", "(", "'/'", ")", ":", "paths_to_check", ".", "append", "(", "path", "[", ":", "-", "1", "]", ")", "if", "merge_slashes", "and", "append_slash", ":", "paths_to_check", ".", "append", "(", "re", ".", "sub", "(", "'//+'", ",", "'/'", ",", "path", "+", "'/'", ")", ")", "if", "merge_slashes", "and", "remove_slash", "and", "path", ".", "endswith", "(", "'/'", ")", ":", "merged_slashes", "=", "re", ".", "sub", "(", "'//+'", ",", "'/'", ",", "path", ")", "paths_to_check", ".", "append", "(", "merged_slashes", "[", ":", "-", "1", "]", ")", "for", "path", "in", "paths_to_check", ":", "resolves", ",", "request", "=", "await", 
"_check_request_resolves", "(", "request", ",", "path", ")", "if", "resolves", ":", "raise", "redirect_class", "(", "request", ".", "raw_path", "+", "query", ")", "return", "await", "handler", "(", "request", ")", "return", "impl" ]
Middleware factory which produces a middleware that normalizes the path of a request. By normalizing it means: - Add or remove a trailing slash to the path. - Double slashes are replaced by one. The middleware returns as soon as it finds a path that resolves correctly. The order if both merge and append/remove are enabled is 1) merge slashes 2) append/remove slash 3) both merge slashes and append/remove slash. If the path resolves with at least one of those conditions, it will redirect to the new path. Only one of `append_slash` and `remove_slash` can be enabled. If both are `True` the factory will raise an assertion error If `append_slash` is `True` the middleware will append a slash when needed. If a resource is defined with trailing slash and the request comes without it, it will append it automatically. If `remove_slash` is `True`, `append_slash` must be `False`. When enabled the middleware will remove trailing slashes and redirect if the resource is defined If merge_slashes is True, merge multiple consecutive slashes in the path into one.
[ "Middleware", "factory", "which", "produces", "a", "middleware", "that", "normalizes", "the", "path", "of", "a", "request", ".", "By", "normalizing", "it", "means", ":" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/web_middlewares.py#L42-L111
train
aio-libs/aiohttp
aiohttp/formdata.py
FormData._gen_form_data
def _gen_form_data(self) -> multipart.MultipartWriter: """Encode a list of fields using the multipart/form-data MIME format""" for dispparams, headers, value in self._fields: try: if hdrs.CONTENT_TYPE in headers: part = payload.get_payload( value, content_type=headers[hdrs.CONTENT_TYPE], headers=headers, encoding=self._charset) else: part = payload.get_payload( value, headers=headers, encoding=self._charset) except Exception as exc: raise TypeError( 'Can not serialize value type: %r\n ' 'headers: %r\n value: %r' % ( type(value), headers, value)) from exc if dispparams: part.set_content_disposition( 'form-data', quote_fields=self._quote_fields, **dispparams ) # FIXME cgi.FieldStorage doesn't likes body parts with # Content-Length which were sent via chunked transfer encoding assert part.headers is not None part.headers.popall(hdrs.CONTENT_LENGTH, None) self._writer.append_payload(part) return self._writer
python
def _gen_form_data(self) -> multipart.MultipartWriter: """Encode a list of fields using the multipart/form-data MIME format""" for dispparams, headers, value in self._fields: try: if hdrs.CONTENT_TYPE in headers: part = payload.get_payload( value, content_type=headers[hdrs.CONTENT_TYPE], headers=headers, encoding=self._charset) else: part = payload.get_payload( value, headers=headers, encoding=self._charset) except Exception as exc: raise TypeError( 'Can not serialize value type: %r\n ' 'headers: %r\n value: %r' % ( type(value), headers, value)) from exc if dispparams: part.set_content_disposition( 'form-data', quote_fields=self._quote_fields, **dispparams ) # FIXME cgi.FieldStorage doesn't likes body parts with # Content-Length which were sent via chunked transfer encoding assert part.headers is not None part.headers.popall(hdrs.CONTENT_LENGTH, None) self._writer.append_payload(part) return self._writer
[ "def", "_gen_form_data", "(", "self", ")", "->", "multipart", ".", "MultipartWriter", ":", "for", "dispparams", ",", "headers", ",", "value", "in", "self", ".", "_fields", ":", "try", ":", "if", "hdrs", ".", "CONTENT_TYPE", "in", "headers", ":", "part", "=", "payload", ".", "get_payload", "(", "value", ",", "content_type", "=", "headers", "[", "hdrs", ".", "CONTENT_TYPE", "]", ",", "headers", "=", "headers", ",", "encoding", "=", "self", ".", "_charset", ")", "else", ":", "part", "=", "payload", ".", "get_payload", "(", "value", ",", "headers", "=", "headers", ",", "encoding", "=", "self", ".", "_charset", ")", "except", "Exception", "as", "exc", ":", "raise", "TypeError", "(", "'Can not serialize value type: %r\\n '", "'headers: %r\\n value: %r'", "%", "(", "type", "(", "value", ")", ",", "headers", ",", "value", ")", ")", "from", "exc", "if", "dispparams", ":", "part", ".", "set_content_disposition", "(", "'form-data'", ",", "quote_fields", "=", "self", ".", "_quote_fields", ",", "*", "*", "dispparams", ")", "# FIXME cgi.FieldStorage doesn't likes body parts with", "# Content-Length which were sent via chunked transfer encoding", "assert", "part", ".", "headers", "is", "not", "None", "part", ".", "headers", ".", "popall", "(", "hdrs", ".", "CONTENT_LENGTH", ",", "None", ")", "self", ".", "_writer", ".", "append_payload", "(", "part", ")", "return", "self", ".", "_writer" ]
Encode a list of fields using the multipart/form-data MIME format
[ "Encode", "a", "list", "of", "fields", "using", "the", "multipart", "/", "form", "-", "data", "MIME", "format" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/formdata.py#L116-L144
train
aio-libs/aiohttp
aiohttp/http_writer.py
StreamWriter.write
async def write(self, chunk: bytes, *, drain: bool=True, LIMIT: int=0x10000) -> None: """Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. write() return drain future. """ if self._on_chunk_sent is not None: await self._on_chunk_sent(chunk) if self._compress is not None: chunk = self._compress.compress(chunk) if not chunk: return if self.length is not None: chunk_len = len(chunk) if self.length >= chunk_len: self.length = self.length - chunk_len else: chunk = chunk[:self.length] self.length = 0 if not chunk: return if chunk: if self.chunked: chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii') chunk = chunk_len_pre + chunk + b'\r\n' self._write(chunk) if self.buffer_size > LIMIT and drain: self.buffer_size = 0 await self.drain()
python
async def write(self, chunk: bytes, *, drain: bool=True, LIMIT: int=0x10000) -> None: """Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. write() return drain future. """ if self._on_chunk_sent is not None: await self._on_chunk_sent(chunk) if self._compress is not None: chunk = self._compress.compress(chunk) if not chunk: return if self.length is not None: chunk_len = len(chunk) if self.length >= chunk_len: self.length = self.length - chunk_len else: chunk = chunk[:self.length] self.length = 0 if not chunk: return if chunk: if self.chunked: chunk_len_pre = ('%x\r\n' % len(chunk)).encode('ascii') chunk = chunk_len_pre + chunk + b'\r\n' self._write(chunk) if self.buffer_size > LIMIT and drain: self.buffer_size = 0 await self.drain()
[ "async", "def", "write", "(", "self", ",", "chunk", ":", "bytes", ",", "*", ",", "drain", ":", "bool", "=", "True", ",", "LIMIT", ":", "int", "=", "0x10000", ")", "->", "None", ":", "if", "self", ".", "_on_chunk_sent", "is", "not", "None", ":", "await", "self", ".", "_on_chunk_sent", "(", "chunk", ")", "if", "self", ".", "_compress", "is", "not", "None", ":", "chunk", "=", "self", ".", "_compress", ".", "compress", "(", "chunk", ")", "if", "not", "chunk", ":", "return", "if", "self", ".", "length", "is", "not", "None", ":", "chunk_len", "=", "len", "(", "chunk", ")", "if", "self", ".", "length", ">=", "chunk_len", ":", "self", ".", "length", "=", "self", ".", "length", "-", "chunk_len", "else", ":", "chunk", "=", "chunk", "[", ":", "self", ".", "length", "]", "self", ".", "length", "=", "0", "if", "not", "chunk", ":", "return", "if", "chunk", ":", "if", "self", ".", "chunked", ":", "chunk_len_pre", "=", "(", "'%x\\r\\n'", "%", "len", "(", "chunk", ")", ")", ".", "encode", "(", "'ascii'", ")", "chunk", "=", "chunk_len_pre", "+", "chunk", "+", "b'\\r\\n'", "self", ".", "_write", "(", "chunk", ")", "if", "self", ".", "buffer_size", ">", "LIMIT", "and", "drain", ":", "self", ".", "buffer_size", "=", "0", "await", "self", ".", "drain", "(", ")" ]
Writes chunk of data to a stream. write_eof() indicates end of stream. writer can't be used after write_eof() method being called. write() return drain future.
[ "Writes", "chunk", "of", "data", "to", "a", "stream", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_writer.py#L70-L105
train
aio-libs/aiohttp
aiohttp/http_writer.py
StreamWriter.write_headers
async def write_headers(self, status_line: str, headers: 'CIMultiDict[str]') -> None: """Write request/response status and headers.""" # status + headers buf = _serialize_headers(status_line, headers) self._write(buf)
python
async def write_headers(self, status_line: str, headers: 'CIMultiDict[str]') -> None: """Write request/response status and headers.""" # status + headers buf = _serialize_headers(status_line, headers) self._write(buf)
[ "async", "def", "write_headers", "(", "self", ",", "status_line", ":", "str", ",", "headers", ":", "'CIMultiDict[str]'", ")", "->", "None", ":", "# status + headers", "buf", "=", "_serialize_headers", "(", "status_line", ",", "headers", ")", "self", ".", "_write", "(", "buf", ")" ]
Write request/response status and headers.
[ "Write", "request", "/", "response", "status", "and", "headers", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/http_writer.py#L107-L112
train
aio-libs/aiohttp
aiohttp/helpers.py
netrc_from_env
def netrc_from_env() -> Optional[netrc.netrc]: """Attempt to load the netrc file from the path specified by the env-var NETRC or in the default location in the user's home directory. Returns None if it couldn't be found or fails to parse. """ netrc_env = os.environ.get('NETRC') if netrc_env is not None: netrc_path = Path(netrc_env) else: try: home_dir = Path.home() except RuntimeError as e: # pragma: no cover # if pathlib can't resolve home, it may raise a RuntimeError client_logger.debug('Could not resolve home directory when ' 'trying to look for .netrc file: %s', e) return None netrc_path = home_dir / ( '_netrc' if platform.system() == 'Windows' else '.netrc') try: return netrc.netrc(str(netrc_path)) except netrc.NetrcParseError as e: client_logger.warning('Could not parse .netrc file: %s', e) except OSError as e: # we couldn't read the file (doesn't exist, permissions, etc.) if netrc_env or netrc_path.is_file(): # only warn if the environment wanted us to load it, # or it appears like the default file does actually exist client_logger.warning('Could not read .netrc file: %s', e) return None
python
def netrc_from_env() -> Optional[netrc.netrc]: """Attempt to load the netrc file from the path specified by the env-var NETRC or in the default location in the user's home directory. Returns None if it couldn't be found or fails to parse. """ netrc_env = os.environ.get('NETRC') if netrc_env is not None: netrc_path = Path(netrc_env) else: try: home_dir = Path.home() except RuntimeError as e: # pragma: no cover # if pathlib can't resolve home, it may raise a RuntimeError client_logger.debug('Could not resolve home directory when ' 'trying to look for .netrc file: %s', e) return None netrc_path = home_dir / ( '_netrc' if platform.system() == 'Windows' else '.netrc') try: return netrc.netrc(str(netrc_path)) except netrc.NetrcParseError as e: client_logger.warning('Could not parse .netrc file: %s', e) except OSError as e: # we couldn't read the file (doesn't exist, permissions, etc.) if netrc_env or netrc_path.is_file(): # only warn if the environment wanted us to load it, # or it appears like the default file does actually exist client_logger.warning('Could not read .netrc file: %s', e) return None
[ "def", "netrc_from_env", "(", ")", "->", "Optional", "[", "netrc", ".", "netrc", "]", ":", "netrc_env", "=", "os", ".", "environ", ".", "get", "(", "'NETRC'", ")", "if", "netrc_env", "is", "not", "None", ":", "netrc_path", "=", "Path", "(", "netrc_env", ")", "else", ":", "try", ":", "home_dir", "=", "Path", ".", "home", "(", ")", "except", "RuntimeError", "as", "e", ":", "# pragma: no cover", "# if pathlib can't resolve home, it may raise a RuntimeError", "client_logger", ".", "debug", "(", "'Could not resolve home directory when '", "'trying to look for .netrc file: %s'", ",", "e", ")", "return", "None", "netrc_path", "=", "home_dir", "/", "(", "'_netrc'", "if", "platform", ".", "system", "(", ")", "==", "'Windows'", "else", "'.netrc'", ")", "try", ":", "return", "netrc", ".", "netrc", "(", "str", "(", "netrc_path", ")", ")", "except", "netrc", ".", "NetrcParseError", "as", "e", ":", "client_logger", ".", "warning", "(", "'Could not parse .netrc file: %s'", ",", "e", ")", "except", "OSError", "as", "e", ":", "# we couldn't read the file (doesn't exist, permissions, etc.)", "if", "netrc_env", "or", "netrc_path", ".", "is_file", "(", ")", ":", "# only warn if the environment wanted us to load it,", "# or it appears like the default file does actually exist", "client_logger", ".", "warning", "(", "'Could not read .netrc file: %s'", ",", "e", ")", "return", "None" ]
Attempt to load the netrc file from the path specified by the env-var NETRC or in the default location in the user's home directory. Returns None if it couldn't be found or fails to parse.
[ "Attempt", "to", "load", "the", "netrc", "file", "from", "the", "path", "specified", "by", "the", "env", "-", "var", "NETRC", "or", "in", "the", "default", "location", "in", "the", "user", "s", "home", "directory", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L186-L219
train
aio-libs/aiohttp
aiohttp/helpers.py
parse_mimetype
def parse_mimetype(mimetype: str) -> MimeType: """Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'}) """ if not mimetype: return MimeType(type='', subtype='', suffix='', parameters=MultiDictProxy(MultiDict())) parts = mimetype.split(';') params = MultiDict() # type: MultiDict[str] for item in parts[1:]: if not item: continue key, value = cast(Tuple[str, str], item.split('=', 1) if '=' in item else (item, '')) params.add(key.lower().strip(), value.strip(' "')) fulltype = parts[0].strip().lower() if fulltype == '*': fulltype = '*/*' mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1)) if '/' in fulltype else (fulltype, '')) stype, suffix = (cast(Tuple[str, str], stype.split('+', 1)) if '+' in stype else (stype, '')) return MimeType(type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params))
python
def parse_mimetype(mimetype: str) -> MimeType: """Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'}) """ if not mimetype: return MimeType(type='', subtype='', suffix='', parameters=MultiDictProxy(MultiDict())) parts = mimetype.split(';') params = MultiDict() # type: MultiDict[str] for item in parts[1:]: if not item: continue key, value = cast(Tuple[str, str], item.split('=', 1) if '=' in item else (item, '')) params.add(key.lower().strip(), value.strip(' "')) fulltype = parts[0].strip().lower() if fulltype == '*': fulltype = '*/*' mtype, stype = (cast(Tuple[str, str], fulltype.split('/', 1)) if '/' in fulltype else (fulltype, '')) stype, suffix = (cast(Tuple[str, str], stype.split('+', 1)) if '+' in stype else (stype, '')) return MimeType(type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params))
[ "def", "parse_mimetype", "(", "mimetype", ":", "str", ")", "->", "MimeType", ":", "if", "not", "mimetype", ":", "return", "MimeType", "(", "type", "=", "''", ",", "subtype", "=", "''", ",", "suffix", "=", "''", ",", "parameters", "=", "MultiDictProxy", "(", "MultiDict", "(", ")", ")", ")", "parts", "=", "mimetype", ".", "split", "(", "';'", ")", "params", "=", "MultiDict", "(", ")", "# type: MultiDict[str]", "for", "item", "in", "parts", "[", "1", ":", "]", ":", "if", "not", "item", ":", "continue", "key", ",", "value", "=", "cast", "(", "Tuple", "[", "str", ",", "str", "]", ",", "item", ".", "split", "(", "'='", ",", "1", ")", "if", "'='", "in", "item", "else", "(", "item", ",", "''", ")", ")", "params", ".", "add", "(", "key", ".", "lower", "(", ")", ".", "strip", "(", ")", ",", "value", ".", "strip", "(", "' \"'", ")", ")", "fulltype", "=", "parts", "[", "0", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "if", "fulltype", "==", "'*'", ":", "fulltype", "=", "'*/*'", "mtype", ",", "stype", "=", "(", "cast", "(", "Tuple", "[", "str", ",", "str", "]", ",", "fulltype", ".", "split", "(", "'/'", ",", "1", ")", ")", "if", "'/'", "in", "fulltype", "else", "(", "fulltype", ",", "''", ")", ")", "stype", ",", "suffix", "=", "(", "cast", "(", "Tuple", "[", "str", ",", "str", "]", ",", "stype", ".", "split", "(", "'+'", ",", "1", ")", ")", "if", "'+'", "in", "stype", "else", "(", "stype", ",", "''", ")", ")", "return", "MimeType", "(", "type", "=", "mtype", ",", "subtype", "=", "stype", ",", "suffix", "=", "suffix", ",", "parameters", "=", "MultiDictProxy", "(", "params", ")", ")" ]
Parses a MIME type into its components. mimetype is a MIME type string. Returns a MimeType object. Example: >>> parse_mimetype('text/html; charset=utf-8') MimeType(type='text', subtype='html', suffix='', parameters={'charset': 'utf-8'})
[ "Parses", "a", "MIME", "type", "into", "its", "components", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L291-L328
train
aio-libs/aiohttp
aiohttp/helpers.py
content_disposition_header
def content_disposition_header(disptype: str, quote_fields: bool=True, **params: str) -> str: """Sets ``Content-Disposition`` header. disptype is a disposition type: inline, attachment, form-data. Should be valid extension token (see RFC 2183) params is a dict with disposition params. """ if not disptype or not (TOKEN > set(disptype)): raise ValueError('bad content disposition type {!r}' ''.format(disptype)) value = disptype if params: lparams = [] for key, val in params.items(): if not key or not (TOKEN > set(key)): raise ValueError('bad content disposition parameter' ' {!r}={!r}'.format(key, val)) qval = quote(val, '') if quote_fields else val lparams.append((key, '"%s"' % qval)) if key == 'filename': lparams.append(('filename*', "utf-8''" + qval)) sparams = '; '.join('='.join(pair) for pair in lparams) value = '; '.join((value, sparams)) return value
python
def content_disposition_header(disptype: str, quote_fields: bool=True, **params: str) -> str: """Sets ``Content-Disposition`` header. disptype is a disposition type: inline, attachment, form-data. Should be valid extension token (see RFC 2183) params is a dict with disposition params. """ if not disptype or not (TOKEN > set(disptype)): raise ValueError('bad content disposition type {!r}' ''.format(disptype)) value = disptype if params: lparams = [] for key, val in params.items(): if not key or not (TOKEN > set(key)): raise ValueError('bad content disposition parameter' ' {!r}={!r}'.format(key, val)) qval = quote(val, '') if quote_fields else val lparams.append((key, '"%s"' % qval)) if key == 'filename': lparams.append(('filename*', "utf-8''" + qval)) sparams = '; '.join('='.join(pair) for pair in lparams) value = '; '.join((value, sparams)) return value
[ "def", "content_disposition_header", "(", "disptype", ":", "str", ",", "quote_fields", ":", "bool", "=", "True", ",", "*", "*", "params", ":", "str", ")", "->", "str", ":", "if", "not", "disptype", "or", "not", "(", "TOKEN", ">", "set", "(", "disptype", ")", ")", ":", "raise", "ValueError", "(", "'bad content disposition type {!r}'", "''", ".", "format", "(", "disptype", ")", ")", "value", "=", "disptype", "if", "params", ":", "lparams", "=", "[", "]", "for", "key", ",", "val", "in", "params", ".", "items", "(", ")", ":", "if", "not", "key", "or", "not", "(", "TOKEN", ">", "set", "(", "key", ")", ")", ":", "raise", "ValueError", "(", "'bad content disposition parameter'", "' {!r}={!r}'", ".", "format", "(", "key", ",", "val", ")", ")", "qval", "=", "quote", "(", "val", ",", "''", ")", "if", "quote_fields", "else", "val", "lparams", ".", "append", "(", "(", "key", ",", "'\"%s\"'", "%", "qval", ")", ")", "if", "key", "==", "'filename'", ":", "lparams", ".", "append", "(", "(", "'filename*'", ",", "\"utf-8''\"", "+", "qval", ")", ")", "sparams", "=", "'; '", ".", "join", "(", "'='", ".", "join", "(", "pair", ")", "for", "pair", "in", "lparams", ")", "value", "=", "'; '", ".", "join", "(", "(", "value", ",", "sparams", ")", ")", "return", "value" ]
Sets ``Content-Disposition`` header. disptype is a disposition type: inline, attachment, form-data. Should be valid extension token (see RFC 2183) params is a dict with disposition params.
[ "Sets", "Content", "-", "Disposition", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L338-L365
train
aio-libs/aiohttp
aiohttp/helpers.py
BasicAuth.decode
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth': """Create a BasicAuth object from an Authorization HTTP header.""" try: auth_type, encoded_credentials = auth_header.split(' ', 1) except ValueError: raise ValueError('Could not parse authorization header.') if auth_type.lower() != 'basic': raise ValueError('Unknown authorization method %s' % auth_type) try: decoded = base64.b64decode( encoded_credentials.encode('ascii'), validate=True ).decode(encoding) except binascii.Error: raise ValueError('Invalid base64 encoding.') try: # RFC 2617 HTTP Authentication # https://www.ietf.org/rfc/rfc2617.txt # the colon must be present, but the username and password may be # otherwise blank. username, password = decoded.split(':', 1) except ValueError: raise ValueError('Invalid credentials.') return cls(username, password, encoding=encoding)
python
def decode(cls, auth_header: str, encoding: str='latin1') -> 'BasicAuth': """Create a BasicAuth object from an Authorization HTTP header.""" try: auth_type, encoded_credentials = auth_header.split(' ', 1) except ValueError: raise ValueError('Could not parse authorization header.') if auth_type.lower() != 'basic': raise ValueError('Unknown authorization method %s' % auth_type) try: decoded = base64.b64decode( encoded_credentials.encode('ascii'), validate=True ).decode(encoding) except binascii.Error: raise ValueError('Invalid base64 encoding.') try: # RFC 2617 HTTP Authentication # https://www.ietf.org/rfc/rfc2617.txt # the colon must be present, but the username and password may be # otherwise blank. username, password = decoded.split(':', 1) except ValueError: raise ValueError('Invalid credentials.') return cls(username, password, encoding=encoding)
[ "def", "decode", "(", "cls", ",", "auth_header", ":", "str", ",", "encoding", ":", "str", "=", "'latin1'", ")", "->", "'BasicAuth'", ":", "try", ":", "auth_type", ",", "encoded_credentials", "=", "auth_header", ".", "split", "(", "' '", ",", "1", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Could not parse authorization header.'", ")", "if", "auth_type", ".", "lower", "(", ")", "!=", "'basic'", ":", "raise", "ValueError", "(", "'Unknown authorization method %s'", "%", "auth_type", ")", "try", ":", "decoded", "=", "base64", ".", "b64decode", "(", "encoded_credentials", ".", "encode", "(", "'ascii'", ")", ",", "validate", "=", "True", ")", ".", "decode", "(", "encoding", ")", "except", "binascii", ".", "Error", ":", "raise", "ValueError", "(", "'Invalid base64 encoding.'", ")", "try", ":", "# RFC 2617 HTTP Authentication", "# https://www.ietf.org/rfc/rfc2617.txt", "# the colon must be present, but the username and password may be", "# otherwise blank.", "username", ",", "password", "=", "decoded", ".", "split", "(", "':'", ",", "1", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Invalid credentials.'", ")", "return", "cls", "(", "username", ",", "password", ",", "encoding", "=", "encoding", ")" ]
Create a BasicAuth object from an Authorization HTTP header.
[ "Create", "a", "BasicAuth", "object", "from", "an", "Authorization", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L134-L160
train
aio-libs/aiohttp
aiohttp/helpers.py
BasicAuth.from_url
def from_url(cls, url: URL, *, encoding: str='latin1') -> Optional['BasicAuth']: """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") if url.user is None: return None return cls(url.user, url.password or '', encoding=encoding)
python
def from_url(cls, url: URL, *, encoding: str='latin1') -> Optional['BasicAuth']: """Create BasicAuth from url.""" if not isinstance(url, URL): raise TypeError("url should be yarl.URL instance") if url.user is None: return None return cls(url.user, url.password or '', encoding=encoding)
[ "def", "from_url", "(", "cls", ",", "url", ":", "URL", ",", "*", ",", "encoding", ":", "str", "=", "'latin1'", ")", "->", "Optional", "[", "'BasicAuth'", "]", ":", "if", "not", "isinstance", "(", "url", ",", "URL", ")", ":", "raise", "TypeError", "(", "\"url should be yarl.URL instance\"", ")", "if", "url", ".", "user", "is", "None", ":", "return", "None", "return", "cls", "(", "url", ".", "user", ",", "url", ".", "password", "or", "''", ",", "encoding", "=", "encoding", ")" ]
Create BasicAuth from url.
[ "Create", "BasicAuth", "from", "url", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L163-L170
train
aio-libs/aiohttp
aiohttp/helpers.py
BasicAuth.encode
def encode(self) -> str: """Encode credentials.""" creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
python
def encode(self) -> str: """Encode credentials.""" creds = ('%s:%s' % (self.login, self.password)).encode(self.encoding) return 'Basic %s' % base64.b64encode(creds).decode(self.encoding)
[ "def", "encode", "(", "self", ")", "->", "str", ":", "creds", "=", "(", "'%s:%s'", "%", "(", "self", ".", "login", ",", "self", ".", "password", ")", ")", ".", "encode", "(", "self", ".", "encoding", ")", "return", "'Basic %s'", "%", "base64", ".", "b64encode", "(", "creds", ")", ".", "decode", "(", "self", ".", "encoding", ")" ]
Encode credentials.
[ "Encode", "credentials", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L172-L175
train
aio-libs/aiohttp
aiohttp/helpers.py
HeadersMixin.content_type
def content_type(self) -> str: """The value of content part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_type
python
def content_type(self) -> str: """The value of content part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_type
[ "def", "content_type", "(", "self", ")", "->", "str", ":", "raw", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ")", "# type: ignore", "if", "self", ".", "_stored_content_type", "!=", "raw", ":", "self", ".", "_parse_content_type", "(", "raw", ")", "return", "self", ".", "_content_type" ]
The value of content part for Content-Type HTTP header.
[ "The", "value", "of", "content", "part", "for", "Content", "-", "Type", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L624-L629
train
aio-libs/aiohttp
aiohttp/helpers.py
HeadersMixin.charset
def charset(self) -> Optional[str]: """The value of charset part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_dict.get('charset')
python
def charset(self) -> Optional[str]: """The value of charset part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore if self._stored_content_type != raw: self._parse_content_type(raw) return self._content_dict.get('charset')
[ "def", "charset", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "raw", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "CONTENT_TYPE", ")", "# type: ignore", "if", "self", ".", "_stored_content_type", "!=", "raw", ":", "self", ".", "_parse_content_type", "(", "raw", ")", "return", "self", ".", "_content_dict", ".", "get", "(", "'charset'", ")" ]
The value of charset part for Content-Type HTTP header.
[ "The", "value", "of", "charset", "part", "for", "Content", "-", "Type", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L632-L637
train
aio-libs/aiohttp
aiohttp/helpers.py
HeadersMixin.content_length
def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore if content_length is not None: return int(content_length) else: return None
python
def content_length(self) -> Optional[int]: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) # type: ignore if content_length is not None: return int(content_length) else: return None
[ "def", "content_length", "(", "self", ")", "->", "Optional", "[", "int", "]", ":", "content_length", "=", "self", ".", "_headers", ".", "get", "(", "hdrs", ".", "CONTENT_LENGTH", ")", "# type: ignore", "if", "content_length", "is", "not", "None", ":", "return", "int", "(", "content_length", ")", "else", ":", "return", "None" ]
The value of Content-Length HTTP header.
[ "The", "value", "of", "Content", "-", "Length", "HTTP", "header", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/helpers.py#L640-L647
train
aio-libs/aiohttp
aiohttp/client.py
request
def request( method: str, url: StrOrURL, *, params: Optional[Mapping[str, str]]=None, data: Any=None, json: Any=None, headers: LooseHeaders=None, skip_auto_headers: Optional[Iterable[str]]=None, auth: Optional[BasicAuth]=None, allow_redirects: bool=True, max_redirects: int=10, compress: Optional[str]=None, chunked: Optional[bool]=None, expect100: bool=False, raise_for_status: Optional[bool]=None, read_until_eof: bool=True, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, timeout: Union[ClientTimeout, object]=sentinel, cookies: Optional[LooseCookies]=None, version: HttpVersion=http.HttpVersion11, connector: Optional[BaseConnector]=None, loop: Optional[asyncio.AbstractEventLoop]=None ) -> _SessionRequestContextManager: """Constructs and sends a request. Returns response object. method - HTTP method url - request url params - (optional) Dictionary or bytes to be sent in the query string of the new request data - (optional) Dictionary, bytes, or file-like object to send in the body of the request json - (optional) Any json compatible python object headers - (optional) Dictionary of HTTP Headers to send with the request cookies - (optional) Dict object to send with the request auth - (optional) BasicAuth named tuple represent HTTP Basic Auth auth - aiohttp.helpers.BasicAuth allow_redirects - (optional) If set to False, do not follow redirects version - Request HTTP version. compress - Set to True if request has to be compressed with deflate encoding. chunked - Set to chunk size for chunked transfer encoding. expect100 - Expect 100-continue response from server. connector - BaseConnector sub-class instance to support connection pooling. read_until_eof - Read response until eof if response does not have Content-Length header. loop - Optional event loop. timeout - Optional ClientTimeout settings structure, 5min total timeout by default. 
Usage:: >>> import aiohttp >>> resp = await aiohttp.request('GET', 'http://python.org/') >>> resp <ClientResponse(python.org/) [200]> >>> data = await resp.read() """ connector_owner = False if connector is None: connector_owner = True connector = TCPConnector(loop=loop, force_close=True) session = ClientSession( loop=loop, cookies=cookies, version=version, timeout=timeout, connector=connector, connector_owner=connector_owner) return _SessionRequestContextManager( session._request(method, url, params=params, data=data, json=json, headers=headers, skip_auto_headers=skip_auto_headers, auth=auth, allow_redirects=allow_redirects, max_redirects=max_redirects, compress=compress, chunked=chunked, expect100=expect100, raise_for_status=raise_for_status, read_until_eof=read_until_eof, proxy=proxy, proxy_auth=proxy_auth,), session)
python
def request( method: str, url: StrOrURL, *, params: Optional[Mapping[str, str]]=None, data: Any=None, json: Any=None, headers: LooseHeaders=None, skip_auto_headers: Optional[Iterable[str]]=None, auth: Optional[BasicAuth]=None, allow_redirects: bool=True, max_redirects: int=10, compress: Optional[str]=None, chunked: Optional[bool]=None, expect100: bool=False, raise_for_status: Optional[bool]=None, read_until_eof: bool=True, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, timeout: Union[ClientTimeout, object]=sentinel, cookies: Optional[LooseCookies]=None, version: HttpVersion=http.HttpVersion11, connector: Optional[BaseConnector]=None, loop: Optional[asyncio.AbstractEventLoop]=None ) -> _SessionRequestContextManager: """Constructs and sends a request. Returns response object. method - HTTP method url - request url params - (optional) Dictionary or bytes to be sent in the query string of the new request data - (optional) Dictionary, bytes, or file-like object to send in the body of the request json - (optional) Any json compatible python object headers - (optional) Dictionary of HTTP Headers to send with the request cookies - (optional) Dict object to send with the request auth - (optional) BasicAuth named tuple represent HTTP Basic Auth auth - aiohttp.helpers.BasicAuth allow_redirects - (optional) If set to False, do not follow redirects version - Request HTTP version. compress - Set to True if request has to be compressed with deflate encoding. chunked - Set to chunk size for chunked transfer encoding. expect100 - Expect 100-continue response from server. connector - BaseConnector sub-class instance to support connection pooling. read_until_eof - Read response until eof if response does not have Content-Length header. loop - Optional event loop. timeout - Optional ClientTimeout settings structure, 5min total timeout by default. 
Usage:: >>> import aiohttp >>> resp = await aiohttp.request('GET', 'http://python.org/') >>> resp <ClientResponse(python.org/) [200]> >>> data = await resp.read() """ connector_owner = False if connector is None: connector_owner = True connector = TCPConnector(loop=loop, force_close=True) session = ClientSession( loop=loop, cookies=cookies, version=version, timeout=timeout, connector=connector, connector_owner=connector_owner) return _SessionRequestContextManager( session._request(method, url, params=params, data=data, json=json, headers=headers, skip_auto_headers=skip_auto_headers, auth=auth, allow_redirects=allow_redirects, max_redirects=max_redirects, compress=compress, chunked=chunked, expect100=expect100, raise_for_status=raise_for_status, read_until_eof=read_until_eof, proxy=proxy, proxy_auth=proxy_auth,), session)
[ "def", "request", "(", "method", ":", "str", ",", "url", ":", "StrOrURL", ",", "*", ",", "params", ":", "Optional", "[", "Mapping", "[", "str", ",", "str", "]", "]", "=", "None", ",", "data", ":", "Any", "=", "None", ",", "json", ":", "Any", "=", "None", ",", "headers", ":", "LooseHeaders", "=", "None", ",", "skip_auto_headers", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ",", "auth", ":", "Optional", "[", "BasicAuth", "]", "=", "None", ",", "allow_redirects", ":", "bool", "=", "True", ",", "max_redirects", ":", "int", "=", "10", ",", "compress", ":", "Optional", "[", "str", "]", "=", "None", ",", "chunked", ":", "Optional", "[", "bool", "]", "=", "None", ",", "expect100", ":", "bool", "=", "False", ",", "raise_for_status", ":", "Optional", "[", "bool", "]", "=", "None", ",", "read_until_eof", ":", "bool", "=", "True", ",", "proxy", ":", "Optional", "[", "StrOrURL", "]", "=", "None", ",", "proxy_auth", ":", "Optional", "[", "BasicAuth", "]", "=", "None", ",", "timeout", ":", "Union", "[", "ClientTimeout", ",", "object", "]", "=", "sentinel", ",", "cookies", ":", "Optional", "[", "LooseCookies", "]", "=", "None", ",", "version", ":", "HttpVersion", "=", "http", ".", "HttpVersion11", ",", "connector", ":", "Optional", "[", "BaseConnector", "]", "=", "None", ",", "loop", ":", "Optional", "[", "asyncio", ".", "AbstractEventLoop", "]", "=", "None", ")", "->", "_SessionRequestContextManager", ":", "connector_owner", "=", "False", "if", "connector", "is", "None", ":", "connector_owner", "=", "True", "connector", "=", "TCPConnector", "(", "loop", "=", "loop", ",", "force_close", "=", "True", ")", "session", "=", "ClientSession", "(", "loop", "=", "loop", ",", "cookies", "=", "cookies", ",", "version", "=", "version", ",", "timeout", "=", "timeout", ",", "connector", "=", "connector", ",", "connector_owner", "=", "connector_owner", ")", "return", "_SessionRequestContextManager", "(", "session", ".", "_request", "(", "method", ",", "url", ",", 
"params", "=", "params", ",", "data", "=", "data", ",", "json", "=", "json", ",", "headers", "=", "headers", ",", "skip_auto_headers", "=", "skip_auto_headers", ",", "auth", "=", "auth", ",", "allow_redirects", "=", "allow_redirects", ",", "max_redirects", "=", "max_redirects", ",", "compress", "=", "compress", ",", "chunked", "=", "chunked", ",", "expect100", "=", "expect100", ",", "raise_for_status", "=", "raise_for_status", ",", "read_until_eof", "=", "read_until_eof", ",", "proxy", "=", "proxy", ",", "proxy_auth", "=", "proxy_auth", ",", ")", ",", "session", ")" ]
Constructs and sends a request. Returns response object. method - HTTP method url - request url params - (optional) Dictionary or bytes to be sent in the query string of the new request data - (optional) Dictionary, bytes, or file-like object to send in the body of the request json - (optional) Any json compatible python object headers - (optional) Dictionary of HTTP Headers to send with the request cookies - (optional) Dict object to send with the request auth - (optional) BasicAuth named tuple represent HTTP Basic Auth auth - aiohttp.helpers.BasicAuth allow_redirects - (optional) If set to False, do not follow redirects version - Request HTTP version. compress - Set to True if request has to be compressed with deflate encoding. chunked - Set to chunk size for chunked transfer encoding. expect100 - Expect 100-continue response from server. connector - BaseConnector sub-class instance to support connection pooling. read_until_eof - Read response until eof if response does not have Content-Length header. loop - Optional event loop. timeout - Optional ClientTimeout settings structure, 5min total timeout by default. Usage:: >>> import aiohttp >>> resp = await aiohttp.request('GET', 'http://python.org/') >>> resp <ClientResponse(python.org/) [200]> >>> data = await resp.read()
[ "Constructs", "and", "sends", "a", "request", ".", "Returns", "response", "object", ".", "method", "-", "HTTP", "method", "url", "-", "request", "url", "params", "-", "(", "optional", ")", "Dictionary", "or", "bytes", "to", "be", "sent", "in", "the", "query", "string", "of", "the", "new", "request", "data", "-", "(", "optional", ")", "Dictionary", "bytes", "or", "file", "-", "like", "object", "to", "send", "in", "the", "body", "of", "the", "request", "json", "-", "(", "optional", ")", "Any", "json", "compatible", "python", "object", "headers", "-", "(", "optional", ")", "Dictionary", "of", "HTTP", "Headers", "to", "send", "with", "the", "request", "cookies", "-", "(", "optional", ")", "Dict", "object", "to", "send", "with", "the", "request", "auth", "-", "(", "optional", ")", "BasicAuth", "named", "tuple", "represent", "HTTP", "Basic", "Auth", "auth", "-", "aiohttp", ".", "helpers", ".", "BasicAuth", "allow_redirects", "-", "(", "optional", ")", "If", "set", "to", "False", "do", "not", "follow", "redirects", "version", "-", "Request", "HTTP", "version", ".", "compress", "-", "Set", "to", "True", "if", "request", "has", "to", "be", "compressed", "with", "deflate", "encoding", ".", "chunked", "-", "Set", "to", "chunk", "size", "for", "chunked", "transfer", "encoding", ".", "expect100", "-", "Expect", "100", "-", "continue", "response", "from", "server", ".", "connector", "-", "BaseConnector", "sub", "-", "class", "instance", "to", "support", "connection", "pooling", ".", "read_until_eof", "-", "Read", "response", "until", "eof", "if", "response", "does", "not", "have", "Content", "-", "Length", "header", ".", "loop", "-", "Optional", "event", "loop", ".", "timeout", "-", "Optional", "ClientTimeout", "settings", "structure", "5min", "total", "timeout", "by", "default", ".", "Usage", "::", ">>>", "import", "aiohttp", ">>>", "resp", "=", "await", "aiohttp", ".", "request", "(", "GET", "http", ":", "//", "python", ".", "org", "/", ")", ">>>", "resp", "<ClientResponse", 
"(", "python", ".", "org", "/", ")", "[", "200", "]", ">", ">>>", "data", "=", "await", "resp", ".", "read", "()" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L1035-L1119
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.request
def request(self, method: str, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP request.""" return _RequestContextManager(self._request(method, url, **kwargs))
python
def request(self, method: str, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP request.""" return _RequestContextManager(self._request(method, url, **kwargs))
[ "def", "request", "(", "self", ",", "method", ":", "str", ",", "url", ":", "StrOrURL", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP request.
[ "Perform", "HTTP", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L297-L302
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.ws_connect
def ws_connect( self, url: StrOrURL, *, method: str=hdrs.METH_GET, protocols: Iterable[str]=(), timeout: float=10.0, receive_timeout: Optional[float]=None, autoclose: bool=True, autoping: bool=True, heartbeat: Optional[float]=None, auth: Optional[BasicAuth]=None, origin: Optional[str]=None, headers: Optional[LooseHeaders]=None, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, ssl: Union[SSLContext, bool, None, Fingerprint]=None, proxy_headers: Optional[LooseHeaders]=None, compress: int=0, max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager': """Initiate websocket connection.""" return _WSRequestContextManager( self._ws_connect(url, method=method, protocols=protocols, timeout=timeout, receive_timeout=receive_timeout, autoclose=autoclose, autoping=autoping, heartbeat=heartbeat, auth=auth, origin=origin, headers=headers, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size))
python
def ws_connect( self, url: StrOrURL, *, method: str=hdrs.METH_GET, protocols: Iterable[str]=(), timeout: float=10.0, receive_timeout: Optional[float]=None, autoclose: bool=True, autoping: bool=True, heartbeat: Optional[float]=None, auth: Optional[BasicAuth]=None, origin: Optional[str]=None, headers: Optional[LooseHeaders]=None, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, ssl: Union[SSLContext, bool, None, Fingerprint]=None, proxy_headers: Optional[LooseHeaders]=None, compress: int=0, max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager': """Initiate websocket connection.""" return _WSRequestContextManager( self._ws_connect(url, method=method, protocols=protocols, timeout=timeout, receive_timeout=receive_timeout, autoclose=autoclose, autoping=autoping, heartbeat=heartbeat, auth=auth, origin=origin, headers=headers, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size))
[ "def", "ws_connect", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "method", ":", "str", "=", "hdrs", ".", "METH_GET", ",", "protocols", ":", "Iterable", "[", "str", "]", "=", "(", ")", ",", "timeout", ":", "float", "=", "10.0", ",", "receive_timeout", ":", "Optional", "[", "float", "]", "=", "None", ",", "autoclose", ":", "bool", "=", "True", ",", "autoping", ":", "bool", "=", "True", ",", "heartbeat", ":", "Optional", "[", "float", "]", "=", "None", ",", "auth", ":", "Optional", "[", "BasicAuth", "]", "=", "None", ",", "origin", ":", "Optional", "[", "str", "]", "=", "None", ",", "headers", ":", "Optional", "[", "LooseHeaders", "]", "=", "None", ",", "proxy", ":", "Optional", "[", "StrOrURL", "]", "=", "None", ",", "proxy_auth", ":", "Optional", "[", "BasicAuth", "]", "=", "None", ",", "ssl", ":", "Union", "[", "SSLContext", ",", "bool", ",", "None", ",", "Fingerprint", "]", "=", "None", ",", "proxy_headers", ":", "Optional", "[", "LooseHeaders", "]", "=", "None", ",", "compress", ":", "int", "=", "0", ",", "max_msg_size", ":", "int", "=", "4", "*", "1024", "*", "1024", ")", "->", "'_WSRequestContextManager'", ":", "return", "_WSRequestContextManager", "(", "self", ".", "_ws_connect", "(", "url", ",", "method", "=", "method", ",", "protocols", "=", "protocols", ",", "timeout", "=", "timeout", ",", "receive_timeout", "=", "receive_timeout", ",", "autoclose", "=", "autoclose", ",", "autoping", "=", "autoping", ",", "heartbeat", "=", "heartbeat", ",", "auth", "=", "auth", ",", "origin", "=", "origin", ",", "headers", "=", "headers", ",", "proxy", "=", "proxy", ",", "proxy_auth", "=", "proxy_auth", ",", "ssl", "=", "ssl", ",", "proxy_headers", "=", "proxy_headers", ",", "compress", "=", "compress", ",", "max_msg_size", "=", "max_msg_size", ")", ")" ]
Initiate websocket connection.
[ "Initiate", "websocket", "connection", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L604-L641
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession._prepare_headers
def _prepare_headers( self, headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]': """ Add default headers and transform it to CIMultiDict """ # Convert headers to MultiDict result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) added_names = set() # type: Set[str] for key, value in headers.items(): if key in added_names: result.add(key, value) else: result[key] = value added_names.add(key) return result
python
def _prepare_headers( self, headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]': """ Add default headers and transform it to CIMultiDict """ # Convert headers to MultiDict result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) added_names = set() # type: Set[str] for key, value in headers.items(): if key in added_names: result.add(key, value) else: result[key] = value added_names.add(key) return result
[ "def", "_prepare_headers", "(", "self", ",", "headers", ":", "Optional", "[", "LooseHeaders", "]", ")", "->", "'CIMultiDict[str]'", ":", "# Convert headers to MultiDict", "result", "=", "CIMultiDict", "(", "self", ".", "_default_headers", ")", "if", "headers", ":", "if", "not", "isinstance", "(", "headers", ",", "(", "MultiDictProxy", ",", "MultiDict", ")", ")", ":", "headers", "=", "CIMultiDict", "(", "headers", ")", "added_names", "=", "set", "(", ")", "# type: Set[str]", "for", "key", ",", "value", "in", "headers", ".", "items", "(", ")", ":", "if", "key", "in", "added_names", ":", "result", ".", "add", "(", "key", ",", "value", ")", "else", ":", "result", "[", "key", "]", "=", "value", "added_names", ".", "add", "(", "key", ")", "return", "result" ]
Add default headers and transform it to CIMultiDict
[ "Add", "default", "headers", "and", "transform", "it", "to", "CIMultiDict" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L800-L817
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.get
def get(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP GET request.""" return _RequestContextManager( self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs))
python
def get(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP GET request.""" return _RequestContextManager( self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs))
[ "def", "get", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "allow_redirects", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_GET", ",", "url", ",", "allow_redirects", "=", "allow_redirects", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP GET request.
[ "Perform", "HTTP", "GET", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L819-L825
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.options
def options(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP OPTIONS request.""" return _RequestContextManager( self._request(hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs))
python
def options(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP OPTIONS request.""" return _RequestContextManager( self._request(hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs))
[ "def", "options", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "allow_redirects", ":", "bool", "=", "True", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_OPTIONS", ",", "url", ",", "allow_redirects", "=", "allow_redirects", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP OPTIONS request.
[ "Perform", "HTTP", "OPTIONS", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L827-L833
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.head
def head(self, url: StrOrURL, *, allow_redirects: bool=False, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP HEAD request.""" return _RequestContextManager( self._request(hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs))
python
def head(self, url: StrOrURL, *, allow_redirects: bool=False, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP HEAD request.""" return _RequestContextManager( self._request(hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs))
[ "def", "head", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "allow_redirects", ":", "bool", "=", "False", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_HEAD", ",", "url", ",", "allow_redirects", "=", "allow_redirects", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP HEAD request.
[ "Perform", "HTTP", "HEAD", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L835-L841
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.post
def post(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP POST request.""" return _RequestContextManager( self._request(hdrs.METH_POST, url, data=data, **kwargs))
python
def post(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP POST request.""" return _RequestContextManager( self._request(hdrs.METH_POST, url, data=data, **kwargs))
[ "def", "post", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "data", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_POST", ",", "url", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP POST request.
[ "Perform", "HTTP", "POST", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L843-L849
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.put
def put(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PUT request.""" return _RequestContextManager( self._request(hdrs.METH_PUT, url, data=data, **kwargs))
python
def put(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PUT request.""" return _RequestContextManager( self._request(hdrs.METH_PUT, url, data=data, **kwargs))
[ "def", "put", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "data", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_PUT", ",", "url", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP PUT request.
[ "Perform", "HTTP", "PUT", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L851-L857
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.patch
def patch(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PATCH request.""" return _RequestContextManager( self._request(hdrs.METH_PATCH, url, data=data, **kwargs))
python
def patch(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP PATCH request.""" return _RequestContextManager( self._request(hdrs.METH_PATCH, url, data=data, **kwargs))
[ "def", "patch", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", ",", "data", ":", "Any", "=", "None", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_PATCH", ",", "url", ",", "data", "=", "data", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP PATCH request.
[ "Perform", "HTTP", "PATCH", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L859-L865
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.delete
def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP DELETE request.""" return _RequestContextManager( self._request(hdrs.METH_DELETE, url, **kwargs))
python
def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': """Perform HTTP DELETE request.""" return _RequestContextManager( self._request(hdrs.METH_DELETE, url, **kwargs))
[ "def", "delete", "(", "self", ",", "url", ":", "StrOrURL", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "'_RequestContextManager'", ":", "return", "_RequestContextManager", "(", "self", ".", "_request", "(", "hdrs", ".", "METH_DELETE", ",", "url", ",", "*", "*", "kwargs", ")", ")" ]
Perform HTTP DELETE request.
[ "Perform", "HTTP", "DELETE", "request", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L867-L871
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.close
async def close(self) -> None: """Close underlying connector. Release all acquired resources. """ if not self.closed: if self._connector is not None and self._connector_owner: await self._connector.close() self._connector = None
python
async def close(self) -> None: """Close underlying connector. Release all acquired resources. """ if not self.closed: if self._connector is not None and self._connector_owner: await self._connector.close() self._connector = None
[ "async", "def", "close", "(", "self", ")", "->", "None", ":", "if", "not", "self", ".", "closed", ":", "if", "self", ".", "_connector", "is", "not", "None", "and", "self", ".", "_connector_owner", ":", "await", "self", ".", "_connector", ".", "close", "(", ")", "self", ".", "_connector", "=", "None" ]
Close underlying connector. Release all acquired resources.
[ "Close", "underlying", "connector", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L873-L881
train
aio-libs/aiohttp
aiohttp/client.py
ClientSession.requote_redirect_url
def requote_redirect_url(self, val: bool) -> None: """Do URL requoting on redirection handling.""" warnings.warn("session.requote_redirect_url modification " "is deprecated #2778", DeprecationWarning, stacklevel=2) self._requote_redirect_url = val
python
def requote_redirect_url(self, val: bool) -> None: """Do URL requoting on redirection handling.""" warnings.warn("session.requote_redirect_url modification " "is deprecated #2778", DeprecationWarning, stacklevel=2) self._requote_redirect_url = val
[ "def", "requote_redirect_url", "(", "self", ",", "val", ":", "bool", ")", "->", "None", ":", "warnings", ".", "warn", "(", "\"session.requote_redirect_url modification \"", "\"is deprecated #2778\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ")", "self", ".", "_requote_redirect_url", "=", "val" ]
Do URL requoting on redirection handling.
[ "Do", "URL", "requoting", "on", "redirection", "handling", "." ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/client.py#L912-L918
train
aio-libs/aiohttp
aiohttp/abc.py
AbstractResolver.resolve
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: """Return IP address for given hostname"""
python
async def resolve(self, host: str, port: int, family: int) -> List[Dict[str, Any]]: """Return IP address for given hostname"""
[ "async", "def", "resolve", "(", "self", ",", "host", ":", "str", ",", "port", ":", "int", ",", "family", ":", "int", ")", "->", "List", "[", "Dict", "[", "str", ",", "Any", "]", "]", ":" ]
Return IP address for given hostname
[ "Return", "IP", "address", "for", "given", "hostname" ]
9504fe2affaaff673fa4f3754c1c44221f8ba47d
https://github.com/aio-libs/aiohttp/blob/9504fe2affaaff673fa4f3754c1c44221f8ba47d/aiohttp/abc.py#L126-L128
train